
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Define API Tables."""

from __future__ import absolute_import

import copy
import datetime
import functools
import operator
import typing
from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, Union, Sequence

import warnings

try:
    import pandas  # type: ignore
except ImportError:
    pandas = None

try:
    import pyarrow  # type: ignore
except ImportError:
    pyarrow = None

try:
    import db_dtypes  # type: ignore
except ImportError:
    db_dtypes = None

try:
    import geopandas  # type: ignore
except ImportError:
    geopandas = None
finally:
    _COORDINATE_REFERENCE_SYSTEM = "EPSG:4326"

try:
    import shapely  # type: ignore
    from shapely import wkt  # type: ignore
except ImportError:
    shapely = None
else:
    _read_wkt = wkt.loads

import google.api_core.exceptions
from google.api_core.page_iterator import HTTPIterator

import google.cloud._helpers  # type: ignore
from google.cloud.bigquery import _helpers
from google.cloud.bigquery import _pandas_helpers
from google.cloud.bigquery import _versions_helpers
from google.cloud.bigquery import exceptions as bq_exceptions
from google.cloud.bigquery._tqdm_helpers import get_progress_bar
from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration
from google.cloud.bigquery.enums import DefaultPandasDTypes
from google.cloud.bigquery.external_config import ExternalConfig
from google.cloud.bigquery import schema as _schema
from google.cloud.bigquery.schema import _build_schema_resource
from google.cloud.bigquery.schema import _parse_schema_resource
from google.cloud.bigquery.schema import _to_schema_fields
from google.cloud.bigquery import external_config

if typing.TYPE_CHECKING:  # pragma: NO COVER
    # Unconditionally import optional dependencies again to tell pytype that
    # they are not None, avoiding false "no attribute" errors.
    import pandas
    import pyarrow
    import geopandas  # type: ignore
    from google.cloud import bigquery_storage  # type: ignore
    from google.cloud.bigquery.dataset import DatasetReference


_NO_GEOPANDAS_ERROR = (
    "The geopandas library is not installed, please install "
    "geopandas to use the to_geodataframe() function."
)
_NO_PYARROW_ERROR = (
    "The pyarrow library is not installed, please install "
    "pyarrow to use the to_arrow() function."
)
_NO_SHAPELY_ERROR = (
    "The shapely library is not installed, please install "
    "shapely to use the geography_as_object option."
)

_TABLE_HAS_NO_SCHEMA = 'Table has no schema: call "client.get_table()"'

_NO_SUPPORTED_DTYPE = (
    "The dtype cannot be converted to a pandas ExtensionArray "
    "because the necessary `__from_arrow__` attribute is missing."
)

_RANGE_PYARROW_WARNING = (
    "Unable to represent RANGE schema as struct using pandas ArrowDtype. Using "
    "`object` instead. To use ArrowDtype, use pandas >= 1.5 and "
    "pyarrow >= 10.0.1."
)

# How many of the total rows need to be downloaded already for us to skip
# calling the BQ Storage API?
#
# In microbenchmarks on 2024-05-21, I (tswast@) measured that at about 2 MB of
# remaining results, it's faster to use the BQ Storage Read API to download
# the results than to use jobs.getQueryResults. Since we don't have a good way
# to know the remaining bytes, we estimate by the remaining number of rows.
#
# Except when rows themselves are larger, I observe that a single page of
# results will be around 10 MB. Therefore, the proportion of rows already
# downloaded should be 10 (first page) / 12 (all results) or less for it to be
# worth it to make a call to jobs.getQueryResults.
ALMOST_COMPLETELY_CACHED_RATIO = 0.833333
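# Illustrative arithmetic for the ratio above (a sketch, not part of the
# library's logic): with an estimated ~10 MB first page out of ~12 MB of total
# results, 10 / 12 = 0.8333..., so once more than ~83% of the rows are already
# cached locally, finishing the download via jobs.getQueryResults is assumed
# to be cheaper than opening a BQ Storage read session.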


def _reference_getter(table):
    """A :class:`~google.cloud.bigquery.table.TableReference` pointing to
    this table.

    Returns:
        google.cloud.bigquery.table.TableReference: pointer to this table.
    """
    from google.cloud.bigquery import dataset

    dataset_ref = dataset.DatasetReference(table.project, table.dataset_id)
    return TableReference(dataset_ref, table.table_id)


def _view_use_legacy_sql_getter(
    table: Union["Table", "TableListItem"]
) -> Optional[bool]:
    """bool: Specifies whether to execute the view with Legacy or Standard SQL.

    This boolean specifies whether to execute the view with Legacy SQL
    (:data:`True`) or Standard SQL (:data:`False`). The client-side default is
    :data:`False`. The server-side default is :data:`True`. If this table is
    not a view, :data:`None` is returned.

    Raises:
        ValueError: For invalid value types.
    """

    view: Optional[Dict[str, Any]] = table._properties.get("view")
    if view is not None:
        # The server-side default for useLegacySql is True.
        return view.get("useLegacySql", True)
    # In some cases, such as in a table list, no view object is present, but
    # the resource still represents a view. Use the type as a fallback.
    if table.table_type == "VIEW":
        # The server-side default for useLegacySql is True.
        return True
    return None  # explicit return statement to appease mypy
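# A hedged sketch of the fallback order above (resource dicts are hypothetical
# and abbreviated):
#
#     res = {"tableReference": {"projectId": "p", "datasetId": "d", "tableId": "t"}}
#     Table.from_api_repr({**res, "view": {}}).view_use_legacy_sql      # True
#     Table.from_api_repr({**res, "type": "VIEW"}).view_use_legacy_sql  # True
#     Table.from_api_repr(res).view_use_legacy_sql                      # None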


class _TableBase:
    """Base class for Table-related classes with common functionality."""

    _PROPERTY_TO_API_FIELD: Dict[str, Union[str, List[str]]] = {
        "dataset_id": ["tableReference", "datasetId"],
        "project": ["tableReference", "projectId"],
        "table_id": ["tableReference", "tableId"],
    }

    def __init__(self):
        self._properties = {}

    @property
    def project(self) -> str:
        """Project bound to the table."""
        return _helpers._get_sub_prop(
            self._properties, self._PROPERTY_TO_API_FIELD["project"]
        )

    @property
    def dataset_id(self) -> str:
        """ID of dataset containing the table."""
        return _helpers._get_sub_prop(
            self._properties, self._PROPERTY_TO_API_FIELD["dataset_id"]
        )

    @property
    def table_id(self) -> str:
        """The table ID."""
        return _helpers._get_sub_prop(
            self._properties, self._PROPERTY_TO_API_FIELD["table_id"]
        )

    @property
    def path(self) -> str:
        """URL path for the table's APIs."""
        return (
            f"/projects/{self.project}/datasets/{self.dataset_id}"
            f"/tables/{self.table_id}"
        )

    def __eq__(self, other):
        if isinstance(other, _TableBase):
            return (
                self.project == other.project
                and self.dataset_id == other.dataset_id
                and self.table_id == other.table_id
            )
        else:
            return NotImplemented

    def __hash__(self):
        return hash((self.project, self.dataset_id, self.table_id))
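# Because __eq__ and __hash__ above compare only the (project, dataset_id,
# table_id) identity, any two _TableBase subclasses pointing at the same table
# compare equal and can key a dict or set. A hedged sketch (IDs hypothetical):
#
#     ref = TableReference.from_string("my-project.mydataset.mytable")
#     tbl = Table(ref)
#     assert ref == tbl and hash(ref) == hash(tbl)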


class TableReference(_TableBase):
    """TableReferences are pointers to tables.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#tablereference

    Args:
        dataset_ref: A pointer to the dataset
        table_id: The ID of the table
    """

    _PROPERTY_TO_API_FIELD = {
        "dataset_id": "datasetId",
        "project": "projectId",
        "table_id": "tableId",
    }

    def __init__(self, dataset_ref: "DatasetReference", table_id: str):
        self._properties = {}

        _helpers._set_sub_prop(
            self._properties,
            self._PROPERTY_TO_API_FIELD["project"],
            dataset_ref.project,
        )
        _helpers._set_sub_prop(
            self._properties,
            self._PROPERTY_TO_API_FIELD["dataset_id"],
            dataset_ref.dataset_id,
        )
        _helpers._set_sub_prop(
            self._properties,
            self._PROPERTY_TO_API_FIELD["table_id"],
            table_id,
        )

    @classmethod
    def from_string(
        cls, table_id: str, default_project: Optional[str] = None
    ) -> "TableReference":
        """Construct a table reference from a table ID string.

        Args:
            table_id (str):
                A table ID in standard SQL format. If ``default_project``
                is not specified, this must include a project ID, dataset
                ID, and table ID, each separated by ``.``.
            default_project (Optional[str]):
                The project ID to use when ``table_id`` does not
                include a project ID.

        Returns:
            TableReference: Table reference parsed from ``table_id``.

        Examples:
            >>> TableReference.from_string('my-project.mydataset.mytable')
            TableRef...(DatasetRef...('my-project', 'mydataset'), 'mytable')

        Raises:
            ValueError:
                If ``table_id`` is not a fully-qualified table ID in
                standard SQL format.
        """
        from google.cloud.bigquery.dataset import DatasetReference

        (
            output_project_id,
            output_dataset_id,
            output_table_id,
        ) = _helpers._parse_3_part_id(
            table_id, default_project=default_project, property_name="table_id"
        )

        return cls(
            DatasetReference(output_project_id, output_dataset_id), output_table_id
        )

    @classmethod
    def from_api_repr(cls, resource: dict) -> "TableReference":
        """Factory: construct a table reference given its API representation.

        Args:
            resource (Dict[str, object]):
                Table reference representation returned from the API

        Returns:
            google.cloud.bigquery.table.TableReference:
                Table reference parsed from ``resource``.
        """
        from google.cloud.bigquery.dataset import DatasetReference

        project = resource["projectId"]
        dataset_id = resource["datasetId"]
        table_id = resource["tableId"]

        return cls(DatasetReference(project, dataset_id), table_id)

    def to_api_repr(self) -> dict:
        """Construct the API resource representation of this table reference.

        Returns:
            Dict[str, object]: Table reference represented as an API resource
        """
        return copy.deepcopy(self._properties)

    def to_bqstorage(self) -> str:
        """Construct a BigQuery Storage API representation of this table.

        Install the ``google-cloud-bigquery-storage`` package to use this
        feature.

        If the ``table_id`` contains a partition identifier (e.g.
        ``my_table$201812``) or a snapshot identifier (e.g.
        ``mytable@1234567890``), it is ignored. Use
        :class:`google.cloud.bigquery_storage.types.ReadSession.TableReadOptions`
        to filter rows by partition. Use
        :class:`google.cloud.bigquery_storage.types.ReadSession.TableModifiers`
        to select a specific snapshot to read from.

        Returns:
            str: A reference to this table in the BigQuery Storage API.
        """

        table_id, _, _ = self.table_id.partition("@")
        table_id, _, _ = table_id.partition("$")

        table_ref = (
            f"projects/{self.project}/datasets/{self.dataset_id}/tables/{table_id}"
        )
        return table_ref
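    # Sketch of the decorator stripping above (IDs are hypothetical;
    # DatasetReference comes from google.cloud.bigquery.dataset): a partition
    # or snapshot suffix is dropped before building the read-session path.
    #
    #     ref = TableReference(
    #         DatasetReference("my-project", "mydataset"), "mytable$20181231"
    #     )
    #     ref.to_bqstorage()
    #     # -> "projects/my-project/datasets/mydataset/tables/mytable"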

    def __str__(self):
        return f"{self.project}.{self.dataset_id}.{self.table_id}"

    def __repr__(self):
        from google.cloud.bigquery.dataset import DatasetReference

        dataset_ref = DatasetReference(self.project, self.dataset_id)
        return f"TableReference({dataset_ref!r}, '{self.table_id}')"


class Table(_TableBase):
    """Tables represent a set of rows whose values correspond to a schema.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource-table

    Args:
        table_ref (Union[google.cloud.bigquery.table.TableReference, str]):
            A pointer to a table. If ``table_ref`` is a string, it must
            include a project ID, dataset ID, and table ID, each separated
            by ``.``.
        schema (Optional[Sequence[Union[ \
            :class:`~google.cloud.bigquery.schema.SchemaField`, \
            Mapping[str, Any] \
        ]]]):
            The table's schema. If any item is a mapping, its content must be
            compatible with
            :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`.
    """

    _PROPERTY_TO_API_FIELD: Dict[str, Any] = {
        **_TableBase._PROPERTY_TO_API_FIELD,
        "biglake_configuration": "biglakeConfiguration",
        "clustering_fields": "clustering",
        "created": "creationTime",
        "description": "description",
        "encryption_configuration": "encryptionConfiguration",
        "etag": "etag",
        "expires": "expirationTime",
        "external_data_configuration": "externalDataConfiguration",
        "friendly_name": "friendlyName",
        "full_table_id": "id",
        "labels": "labels",
        "location": "location",
        "modified": "lastModifiedTime",
        "mview_enable_refresh": "materializedView",
        "mview_last_refresh_time": ["materializedView", "lastRefreshTime"],
        "mview_query": "materializedView",
        "mview_refresh_interval": "materializedView",
        "mview_allow_non_incremental_definition": "materializedView",
        "num_bytes": "numBytes",
        "num_rows": "numRows",
        "partition_expiration": "timePartitioning",
        "partitioning_type": "timePartitioning",
        "range_partitioning": "rangePartitioning",
        "time_partitioning": "timePartitioning",
        "schema": ["schema", "fields"],
        "snapshot_definition": "snapshotDefinition",
        "clone_definition": "cloneDefinition",
        "streaming_buffer": "streamingBuffer",
        "self_link": "selfLink",
        "type": "type",
        "view_use_legacy_sql": "view",
        "view_query": "view",
        "require_partition_filter": "requirePartitionFilter",
        "table_constraints": "tableConstraints",
        "max_staleness": "maxStaleness",
        "resource_tags": "resourceTags",
        "external_catalog_table_options": "externalCatalogTableOptions",
        "foreign_type_info": ["schema", "foreignTypeInfo"],
    }

    def __init__(self, table_ref, schema=None) -> None:
        table_ref = _table_arg_to_table_ref(table_ref)
        self._properties: Dict[str, Any] = {
            "tableReference": table_ref.to_api_repr(),
            "labels": {},
        }
        # Let the @property do validation.
        if schema is not None:
            self.schema = schema
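    # A hedged construction sketch (identifiers are hypothetical; SchemaField
    # is exported by the google.cloud.bigquery package):
    #
    #     from google.cloud.bigquery import SchemaField
    #     table = Table(
    #         "my-project.mydataset.mytable",
    #         schema=[
    #             SchemaField("full_name", "STRING", mode="REQUIRED"),
    #             SchemaField("age", "INTEGER", mode="NULLABLE"),
    #         ],
    #     )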

    reference = property(_reference_getter)

    @property
    def biglake_configuration(self):
        """google.cloud.bigquery.table.BigLakeConfiguration: Configuration
        for managed tables for Apache Iceberg.

        See https://cloud.google.com/bigquery/docs/iceberg-tables for more information.
        """
        prop = self._properties.get(
            self._PROPERTY_TO_API_FIELD["biglake_configuration"]
        )
        if prop is not None:
            prop = BigLakeConfiguration.from_api_repr(prop)
        return prop

    @biglake_configuration.setter
    def biglake_configuration(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._properties[
            self._PROPERTY_TO_API_FIELD["biglake_configuration"]
        ] = api_repr

    @property
    def require_partition_filter(self):
        """bool: If set to true, queries over the partitioned table require a
        partition filter that can be used for partition elimination to be
        specified.
        """
        return self._properties.get(
            self._PROPERTY_TO_API_FIELD["require_partition_filter"]
        )

    @require_partition_filter.setter
    def require_partition_filter(self, value):
        self._properties[
            self._PROPERTY_TO_API_FIELD["require_partition_filter"]
        ] = value

    @property
    def schema(self):
        """Sequence[Union[ \
            :class:`~google.cloud.bigquery.schema.SchemaField`, \
            Mapping[str, Any] \
        ]]:
            Table's schema.

        Raises:
            Exception:
                If ``schema`` is not a sequence, or if any item in the sequence
                is not a :class:`~google.cloud.bigquery.schema.SchemaField`
                instance or a compatible mapping representation of the field.

        .. Note::
            If you are referencing a schema for an external catalog table such
            as a Hive table, it will also be necessary to populate the
            ``foreign_type_info`` attribute. This is not necessary if defining
            the schema for a BigQuery table.

            For details, see:
            https://cloud.google.com/bigquery/docs/external-tables
            https://cloud.google.com/bigquery/docs/datasets-intro#external_datasets
        """
        prop = _helpers._get_sub_prop(
            self._properties, self._PROPERTY_TO_API_FIELD["schema"]
        )
        if not prop:
            return []
        else:
            return _parse_schema_resource(prop)

    @schema.setter
    def schema(self, value):
        api_field = self._PROPERTY_TO_API_FIELD["schema"]

        if value is None:
            _helpers._set_sub_prop(
                self._properties,
                api_field,
                None,
            )
        elif isinstance(value, Sequence):
            value = _to_schema_fields(value)
            value = _build_schema_resource(value)
            _helpers._set_sub_prop(
                self._properties,
                api_field,
                value,
            )
        else:
            raise TypeError("Schema must be a Sequence (e.g. a list) or None.")

    @property
    def labels(self):
        """Dict[str, str]: Labels for the table.

        This method always returns a dict. To change a table's labels,
        modify the dict, then call ``Client.update_table``. To delete a
        label, set its value to :data:`None` before updating.

        Raises:
            ValueError: If ``value`` type is invalid.
        """
        return self._properties.setdefault(self._PROPERTY_TO_API_FIELD["labels"], {})

    @labels.setter
    def labels(self, value):
        if not isinstance(value, dict):
            raise ValueError("Pass a dict")
        self._properties[self._PROPERTY_TO_API_FIELD["labels"]] = value

    @property
    def encryption_configuration(self):
        """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom
        encryption configuration for the table.

        Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
        if using default encryption.

        See `protecting data with Cloud KMS keys
        <https://cloud.google.com/bigquery/docs/customer-managed-encryption>`_
        in the BigQuery documentation.
        """
        prop = self._properties.get(
            self._PROPERTY_TO_API_FIELD["encryption_configuration"]
        )
        if prop is not None:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop

    @encryption_configuration.setter
    def encryption_configuration(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._properties[
            self._PROPERTY_TO_API_FIELD["encryption_configuration"]
        ] = api_repr

    @property
    def created(self):
        """Union[datetime.datetime, None]: Datetime at which the table was
        created (:data:`None` until set from the server).
        """
        creation_time = self._properties.get(self._PROPERTY_TO_API_FIELD["created"])
        if creation_time is not None:
            # creation_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000.0 * float(creation_time)
            )

    @property
    def etag(self):
        """Union[str, None]: ETag for the table resource (:data:`None` until
        set from the server).
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["etag"])

    @property
    def modified(self):
        """Union[datetime.datetime, None]: Datetime at which the table was last
        modified (:data:`None` until set from the server).
        """
        modified_time = self._properties.get(self._PROPERTY_TO_API_FIELD["modified"])
        if modified_time is not None:
            # modified_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000.0 * float(modified_time)
            )

    @property
    def num_bytes(self):
        """Union[int, None]: The size of the table in bytes (:data:`None` until
        set from the server).
        """
        return _helpers._int_or_none(
            self._properties.get(self._PROPERTY_TO_API_FIELD["num_bytes"])
        )

    @property
    def num_rows(self):
        """Union[int, None]: The number of rows in the table (:data:`None`
        until set from the server).
        """
        return _helpers._int_or_none(
            self._properties.get(self._PROPERTY_TO_API_FIELD["num_rows"])
        )

    @property
    def self_link(self):
        """Union[str, None]: URL for the table resource (:data:`None` until set
        from the server).
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["self_link"])

    @property
    def full_table_id(self):
        """Union[str, None]: ID for the table (:data:`None` until set from the
        server).

        In the format ``project-id:dataset_id.table_id``.
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["full_table_id"])

    @property
    def table_type(self):
        """Union[str, None]: The type of the table (:data:`None` until set from
        the server).

        Possible values are ``'TABLE'``, ``'VIEW'``, ``'MATERIALIZED_VIEW'`` or
        ``'EXTERNAL'``.
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["type"])

    @property
    def range_partitioning(self):
        """Optional[google.cloud.bigquery.table.RangePartitioning]:
        Configures range-based partitioning for a table.

        .. note::
            **Beta**. The integer range partitioning feature is in a
            pre-release state and might change or have limited support.

        Only specify at most one of
        :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or
        :attr:`~google.cloud.bigquery.table.Table.range_partitioning`.

        Raises:
            ValueError:
                If the value is not
                :class:`~google.cloud.bigquery.table.RangePartitioning` or
                :data:`None`.
        """
        resource = self._properties.get(
            self._PROPERTY_TO_API_FIELD["range_partitioning"]
        )
        if resource is not None:
            return RangePartitioning(_properties=resource)

    @range_partitioning.setter
    def range_partitioning(self, value):
        resource = value
        if isinstance(value, RangePartitioning):
            resource = value._properties
        elif value is not None:
            raise ValueError(
                "Expected value to be RangePartitioning or None, got {}.".format(value)
            )
        self._properties[self._PROPERTY_TO_API_FIELD["range_partitioning"]] = resource

    @property
    def time_partitioning(self):
        """Optional[google.cloud.bigquery.table.TimePartitioning]: Configures time-based
        partitioning for a table.

        Only specify at most one of
        :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or
        :attr:`~google.cloud.bigquery.table.Table.range_partitioning`.

        Raises:
            ValueError:
                If the value is not
                :class:`~google.cloud.bigquery.table.TimePartitioning` or
                :data:`None`.
        """
        prop = self._properties.get(self._PROPERTY_TO_API_FIELD["time_partitioning"])
        if prop is not None:
            return TimePartitioning.from_api_repr(prop)

    @time_partitioning.setter
    def time_partitioning(self, value):
        api_repr = value
        if isinstance(value, TimePartitioning):
            api_repr = value.to_api_repr()
        elif value is not None:
            raise ValueError(
                "value must be google.cloud.bigquery.table.TimePartitioning or None"
            )
        self._properties[self._PROPERTY_TO_API_FIELD["time_partitioning"]] = api_repr

    @property
    def partitioning_type(self):
        """Union[str, None]: Time partitioning of the table if it is
        partitioned (Defaults to :data:`None`).
        """
        warnings.warn(
            "This method will be deprecated in future versions. Please use "
            "Table.time_partitioning.type_ instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
        if self.time_partitioning is not None:
            return self.time_partitioning.type_

    @partitioning_type.setter
    def partitioning_type(self, value):
        warnings.warn(
            "This method will be deprecated in future versions. Please use "
            "Table.time_partitioning.type_ instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
        api_field = self._PROPERTY_TO_API_FIELD["partitioning_type"]
        if self.time_partitioning is None:
            self._properties[api_field] = {}
        self._properties[api_field]["type"] = value

    @property
    def partition_expiration(self):
        """Union[int, None]: Expiration time in milliseconds for a partition.

        If :attr:`partition_expiration` is set and :attr:`type_` is
        not set, :attr:`type_` will default to
        :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`.
        """
        warnings.warn(
            "This method will be deprecated in future versions. Please use "
            "Table.time_partitioning.expiration_ms instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
        if self.time_partitioning is not None:
            return self.time_partitioning.expiration_ms

    @partition_expiration.setter
    def partition_expiration(self, value):
        warnings.warn(
            "This method will be deprecated in future versions. Please use "
            "Table.time_partitioning.expiration_ms instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
        api_field = self._PROPERTY_TO_API_FIELD["partition_expiration"]

        if self.time_partitioning is None:
            self._properties[api_field] = {"type": TimePartitioningType.DAY}

        if value is None:
            self._properties[api_field]["expirationMs"] = None
        else:
            self._properties[api_field]["expirationMs"] = str(value)
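    # Worked example of the conversion above (value is hypothetical): a 7-day
    # partition expiration is stored as the string "604800000", i.e.
    # 7 * 24 * 60 * 60 * 1000 milliseconds, under timePartitioning.expirationMs.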

    @property
    def clustering_fields(self):
        """Union[List[str], None]: Fields defining clustering for the table

        (Defaults to :data:`None`).

        Clustering fields are immutable after table creation.

        .. note::

            BigQuery supports clustering for both partitioned and
            non-partitioned tables.
        """
        prop = self._properties.get(self._PROPERTY_TO_API_FIELD["clustering_fields"])
        if prop is not None:
            return list(prop.get("fields", ()))

    @clustering_fields.setter
    def clustering_fields(self, value):
        """Union[List[str], None]: Fields defining clustering for the table

        (Defaults to :data:`None`).
        """
        api_field = self._PROPERTY_TO_API_FIELD["clustering_fields"]

        if value is not None:
            prop = self._properties.setdefault(api_field, {})
            prop["fields"] = value
        else:
            # In order to allow unsetting clustering fields completely, we explicitly
            # set this property to None (as opposed to merely removing the key).
            self._properties[api_field] = None

    @property
    def description(self):
        """Union[str, None]: Description of the table (defaults to
        :data:`None`).

        Raises:
            ValueError: For invalid value types.
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["description"])

    @description.setter
    def description(self, value):
        if not isinstance(value, str) and value is not None:
            raise ValueError("Pass a string, or None")
        self._properties[self._PROPERTY_TO_API_FIELD["description"]] = value

    @property
    def expires(self):
        """Union[datetime.datetime, None]: Datetime at which the table will be
        deleted.

        Raises:
            ValueError: For invalid value types.
        """
        expiration_time = self._properties.get(self._PROPERTY_TO_API_FIELD["expires"])
        if expiration_time is not None:
            # expiration_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000.0 * float(expiration_time)
            )

    @expires.setter
    def expires(self, value):
        if not isinstance(value, datetime.datetime) and value is not None:
            raise ValueError("Pass a datetime, or None")
        value_ms = google.cloud._helpers._millis_from_datetime(value)
        self._properties[
            self._PROPERTY_TO_API_FIELD["expires"]
        ] = _helpers._str_or_none(value_ms)

    @property
    def friendly_name(self):
        """Union[str, None]: Title of the table (defaults to :data:`None`).

        Raises:
            ValueError: For invalid value types.
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["friendly_name"])

    @friendly_name.setter
    def friendly_name(self, value):
        if not isinstance(value, str) and value is not None:
            raise ValueError("Pass a string, or None")
        self._properties[self._PROPERTY_TO_API_FIELD["friendly_name"]] = value

    @property
    def location(self):
        """Union[str, None]: Location in which the table is hosted

        Defaults to :data:`None`.
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["location"])

    @property
    def view_query(self):
        """Union[str, None]: SQL query defining the table as a view (defaults
        to :data:`None`).

        By default, the query is treated as Standard SQL. To use Legacy
        SQL, set :attr:`view_use_legacy_sql` to :data:`True`.

        Raises:
            ValueError: For invalid value types.
        """
        api_field = self._PROPERTY_TO_API_FIELD["view_query"]
        return _helpers._get_sub_prop(self._properties, [api_field, "query"])

    @view_query.setter
    def view_query(self, value):
        if not isinstance(value, str):
            raise ValueError("Pass a string")

        api_field = self._PROPERTY_TO_API_FIELD["view_query"]
        _helpers._set_sub_prop(self._properties, [api_field, "query"], value)
        view = self._properties[api_field]
        # The service defaults useLegacySql to True, but this
        # client uses Standard SQL by default.
        if view.get("useLegacySql") is None:
            view["useLegacySql"] = False

    @view_query.deleter
    def view_query(self):
        """Delete the SQL query defining the table as a view."""
        self._properties.pop(self._PROPERTY_TO_API_FIELD["view_query"], None)

    view_use_legacy_sql = property(_view_use_legacy_sql_getter)

    @view_use_legacy_sql.setter  # type: ignore  # (redefinition from above)
    def view_use_legacy_sql(self, value):
        if not isinstance(value, bool):
            raise ValueError("Pass a boolean")

        api_field = self._PROPERTY_TO_API_FIELD["view_query"]
        if self._properties.get(api_field) is None:
            self._properties[api_field] = {}
        self._properties[api_field]["useLegacySql"] = value

    @property
    def mview_query(self):
        """Optional[str]: SQL query defining the table as a materialized
        view (defaults to :data:`None`).
        """
        api_field = self._PROPERTY_TO_API_FIELD["mview_query"]
        return _helpers._get_sub_prop(self._properties, [api_field, "query"])

    @mview_query.setter
    def mview_query(self, value):
        api_field = self._PROPERTY_TO_API_FIELD["mview_query"]
        _helpers._set_sub_prop(self._properties, [api_field, "query"], str(value))

    @mview_query.deleter
    def mview_query(self):
        """Delete the SQL query defining the table as a materialized view."""
        self._properties.pop(self._PROPERTY_TO_API_FIELD["mview_query"], None)

    @property
    def mview_last_refresh_time(self):
        """Optional[datetime.datetime]: Datetime at which the materialized view was last
        refreshed (:data:`None` until set from the server).
        """
        refresh_time = _helpers._get_sub_prop(
            self._properties, self._PROPERTY_TO_API_FIELD["mview_last_refresh_time"]
        )
        if refresh_time is not None:
            # refresh_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000 * int(refresh_time)
            )

    @property
    def mview_enable_refresh(self):
        """Optional[bool]: Enable automatic refresh of the materialized view
        when the base table is updated. The default value is :data:`True`.
        """
        api_field = self._PROPERTY_TO_API_FIELD["mview_enable_refresh"]
        return _helpers._get_sub_prop(self._properties, [api_field, "enableRefresh"])

    @mview_enable_refresh.setter
    def mview_enable_refresh(self, value):
        api_field = self._PROPERTY_TO_API_FIELD["mview_enable_refresh"]
        return _helpers._set_sub_prop(
            self._properties, [api_field, "enableRefresh"], value
        )

    @property
    def mview_refresh_interval(self):
        """Optional[datetime.timedelta]: The maximum frequency at which this
        materialized view will be refreshed. The default value is 1800000
        milliseconds (30 minutes).
        """
        api_field = self._PROPERTY_TO_API_FIELD["mview_refresh_interval"]
        refresh_interval = _helpers._get_sub_prop(
            self._properties, [api_field, "refreshIntervalMs"]
        )
        if refresh_interval is not None:
            return datetime.timedelta(milliseconds=int(refresh_interval))

    @mview_refresh_interval.setter
    def mview_refresh_interval(self, value):
        if value is None:
            refresh_interval_ms = None
        else:
            refresh_interval_ms = str(value // datetime.timedelta(milliseconds=1))

        api_field = self._PROPERTY_TO_API_FIELD["mview_refresh_interval"]
        _helpers._set_sub_prop(
            self._properties,
            [api_field, "refreshIntervalMs"],
            refresh_interval_ms,
        )
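    # The floor division above converts a timedelta to whole milliseconds,
    # e.g. datetime.timedelta(minutes=30) // datetime.timedelta(milliseconds=1)
    # == 1800000, which is then stored as the string "1800000".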

    @property
    def mview_allow_non_incremental_definition(self):
        """Optional[bool]: This option declares the intention to construct a
        materialized view that isn't refreshed incrementally.
        The default value is :data:`False`.
        """
        api_field = self._PROPERTY_TO_API_FIELD[
            "mview_allow_non_incremental_definition"
        ]
        return _helpers._get_sub_prop(
            self._properties, [api_field, "allowNonIncrementalDefinition"]
        )

    @mview_allow_non_incremental_definition.setter
    def mview_allow_non_incremental_definition(self, value):
        api_field = self._PROPERTY_TO_API_FIELD[
            "mview_allow_non_incremental_definition"
        ]
        _helpers._set_sub_prop(
            self._properties, [api_field, "allowNonIncrementalDefinition"], value
        )

    @property
    def streaming_buffer(self):
        """google.cloud.bigquery.StreamingBuffer: Information about a table's
        streaming buffer.
        """
        sb = self._properties.get(self._PROPERTY_TO_API_FIELD["streaming_buffer"])
        if sb is not None:
            return StreamingBuffer(sb)

    @property
    def external_data_configuration(self):
        """Union[google.cloud.bigquery.ExternalConfig, None]: Configuration for
        an external data source (defaults to :data:`None`).

        Raises:
            ValueError: For invalid value types.
        """
        prop = self._properties.get(
            self._PROPERTY_TO_API_FIELD["external_data_configuration"]
        )
        if prop is not None:
            prop = ExternalConfig.from_api_repr(prop)
        return prop

    @external_data_configuration.setter
    def external_data_configuration(self, value):
        if not (value is None or isinstance(value, ExternalConfig)):
            raise ValueError("Pass an ExternalConfig or None")
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._properties[
            self._PROPERTY_TO_API_FIELD["external_data_configuration"]
        ] = api_repr

    @property
    def snapshot_definition(self) -> Optional["SnapshotDefinition"]:
        """Information about the snapshot. This value is set via snapshot creation.

        See: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.snapshot_definition
        """
        snapshot_info = self._properties.get(
            self._PROPERTY_TO_API_FIELD["snapshot_definition"]
        )
        if snapshot_info is not None:
            snapshot_info = SnapshotDefinition(snapshot_info)
        return snapshot_info

    @property
    def clone_definition(self) -> Optional["CloneDefinition"]:
        """Information about the clone. This value is set via clone creation.

        See: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.clone_definition
        """
        clone_info = self._properties.get(
            self._PROPERTY_TO_API_FIELD["clone_definition"]
        )
        if clone_info is not None:
            clone_info = CloneDefinition(clone_info)
        return clone_info

    @property
    def table_constraints(self) -> Optional["TableConstraints"]:
        """The table's primary key and foreign key information."""
        table_constraints = self._properties.get(
            self._PROPERTY_TO_API_FIELD["table_constraints"]
        )
        if table_constraints is not None:
            table_constraints = TableConstraints.from_api_repr(table_constraints)
        return table_constraints

    @table_constraints.setter
    def table_constraints(self, value):
        """The table's primary key and foreign key information."""
        if not isinstance(value, TableConstraints) and value is not None:
            raise ValueError(
                "value must be google.cloud.bigquery.table.TableConstraints or None"
            )
        api_repr = value.to_api_repr() if value else None
        self._properties[self._PROPERTY_TO_API_FIELD["table_constraints"]] = api_repr

    @property
    def resource_tags(self):
        """Dict[str, str]: Resource tags for the table.

        See: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.resource_tags
        """
        return self._properties.setdefault(
            self._PROPERTY_TO_API_FIELD["resource_tags"], {}
        )

    @resource_tags.setter
    def resource_tags(self, value):
        if not isinstance(value, dict) and value is not None:
            raise ValueError("resource_tags must be a dict or None")
        self._properties[self._PROPERTY_TO_API_FIELD["resource_tags"]] = value

    @property
    def external_catalog_table_options(
        self,
    ) -> Optional[external_config.ExternalCatalogTableOptions]:
        """Options defining open source compatible datasets living in the
        BigQuery catalog. Contains metadata of open source database, schema
        or namespace represented by the current dataset."""

        prop = self._properties.get(
            self._PROPERTY_TO_API_FIELD["external_catalog_table_options"]
        )
        if prop is not None:
            return external_config.ExternalCatalogTableOptions.from_api_repr(prop)
        return None

    @external_catalog_table_options.setter
    def external_catalog_table_options(
        self, value: Union[external_config.ExternalCatalogTableOptions, dict, None]
    ):
        value = _helpers._isinstance_or_raise(
            value,
            (external_config.ExternalCatalogTableOptions, dict),
            none_allowed=True,
        )
        if isinstance(value, external_config.ExternalCatalogTableOptions):
            self._properties[
                self._PROPERTY_TO_API_FIELD["external_catalog_table_options"]
            ] = value.to_api_repr()
        else:
            self._properties[
                self._PROPERTY_TO_API_FIELD["external_catalog_table_options"]
            ] = value

    @property
    def foreign_type_info(self) -> Optional[_schema.ForeignTypeInfo]:
        """Optional. Specifies metadata of the foreign data type definition in
        field schema (TableFieldSchema.foreign_type_definition).

        Returns:
            Optional[schema.ForeignTypeInfo]:
                Foreign type information, or :data:`None` if not set.

        .. Note::
            foreign_type_info is only required if you are referencing an
            external catalog such as a Hive table.
            For details, see:
            https://cloud.google.com/bigquery/docs/external-tables
            https://cloud.google.com/bigquery/docs/datasets-intro#external_datasets
        """

        prop = _helpers._get_sub_prop(
            self._properties, self._PROPERTY_TO_API_FIELD["foreign_type_info"]
        )
        if prop is not None:
            return _schema.ForeignTypeInfo.from_api_repr(prop)
        return None

    @foreign_type_info.setter
    def foreign_type_info(self, value: Union[_schema.ForeignTypeInfo, dict, None]):
        value = _helpers._isinstance_or_raise(
            value,
            (_schema.ForeignTypeInfo, dict),
            none_allowed=True,
        )
        if isinstance(value, _schema.ForeignTypeInfo):
            value = value.to_api_repr()
        _helpers._set_sub_prop(
            self._properties, self._PROPERTY_TO_API_FIELD["foreign_type_info"], value
        )

    @classmethod
    def from_string(cls, full_table_id: str) -> "Table":
        """Construct a table from a fully-qualified table ID.

        Args:
            full_table_id (str):
                A fully-qualified table ID in standard SQL format. Must
                include a project ID, dataset ID, and table ID, each
                separated by ``.``.

        Returns:
            Table: Table parsed from ``full_table_id``.

        Examples:
            >>> Table.from_string('my-project.mydataset.mytable')
            Table(TableRef...(D...('my-project', 'mydataset'), 'mytable'))

        Raises:
            ValueError:
                If ``full_table_id`` is not a fully-qualified table ID in
                standard SQL format.
        """
        return cls(TableReference.from_string(full_table_id))

    @classmethod
    def from_api_repr(cls, resource: dict) -> "Table":
        """Factory: construct a table given its API representation.

        Args:
            resource (Dict[str, object]):
                Table resource representation from the API

        Returns:
            google.cloud.bigquery.table.Table: Table parsed from ``resource``.

        Raises:
            KeyError:
                If the ``resource`` lacks the key ``'tableReference'``, or if
                the ``dict`` stored within the key ``'tableReference'`` lacks
                the keys ``'tableId'``, ``'projectId'``, or ``'datasetId'``.
        """
        from google.cloud.bigquery import dataset

        if (
            "tableReference" not in resource
            or "tableId" not in resource["tableReference"]
        ):
            raise KeyError(
                "Resource lacks required identity information:"
                '["tableReference"]["tableId"]'
            )
        project_id = _helpers._get_sub_prop(
            resource, cls._PROPERTY_TO_API_FIELD["project"]
        )
        table_id = _helpers._get_sub_prop(
            resource, cls._PROPERTY_TO_API_FIELD["table_id"]
        )
        dataset_id = _helpers._get_sub_prop(
            resource, cls._PROPERTY_TO_API_FIELD["dataset_id"]
        )
        dataset_ref = dataset.DatasetReference(project_id, dataset_id)

        table = cls(dataset_ref.table(table_id))
        table._properties = resource

        return table
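    # Minimal resource accepted by from_api_repr (a sketch with hypothetical
    # IDs; real API responses carry many more fields):
    #
    #     table = Table.from_api_repr(
    #         {
    #             "tableReference": {
    #                 "projectId": "my-project",
    #                 "datasetId": "mydataset",
    #                 "tableId": "mytable",
    #             }
    #         }
    #     )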

    def to_api_repr(self) -> dict:
        """Constructs the API resource of this table.

        Returns:
            Dict[str, object]: Table represented as an API resource
        """
        return copy.deepcopy(self._properties)

    def to_bqstorage(self) -> str:
        """Construct a BigQuery Storage API representation of this table.

        Returns:
            str: A reference to this table in the BigQuery Storage API.
        """
        return self.reference.to_bqstorage()

    def _build_resource(self, filter_fields):
        """Generate a resource for ``update``."""
        return _helpers._build_resource_from_properties(self, filter_fields)

    def __repr__(self):
        return "Table({})".format(repr(self.reference))

    def __str__(self):
        return f"{self.project}.{self.dataset_id}.{self.table_id}"

    @property
    def max_staleness(self):
        """Union[str, None]: The maximum staleness of data that could be
        returned when the table is queried.

        The staleness is encoded as a string encoding of the SQL IntervalValue
        type. This property is optional and defaults to :data:`None`.

        According to the BigQuery API documentation, maxStaleness specifies the
        maximum time interval for which stale data can be returned when
        querying the table. It helps control data freshness in scenarios such
        as metadata-cached external tables.

        Returns:
            Optional[str]: A string representing the maximum staleness interval
            (e.g., '1h', '30m', '15s' for hours, minutes, and seconds,
            respectively).
        """
        return self._properties.get(self._PROPERTY_TO_API_FIELD["max_staleness"])

    @max_staleness.setter
    def max_staleness(self, value):
        """Set the maximum staleness for the table.

        Args:
            value (Optional[str]): A string representing the maximum staleness
                interval. Must be a valid time interval string.
                Examples include '1h' (1 hour), '30m' (30 minutes), and '15s'
                (15 seconds).

        Raises:
            ValueError: If the value is not None and not a string.
        """
        if value is not None and not isinstance(value, str):
            raise ValueError("max_staleness must be a string or None")

        self._properties[self._PROPERTY_TO_API_FIELD["max_staleness"]] = value
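    # A hedged usage sketch: only type-checking happens client-side here; the
    # service itself validates the interval syntax when the table is updated.
    #
    #     table.max_staleness = "30m"   # accepted locally (it's a string)
    #     table.max_staleness = 1800    # raises ValueError (not a string)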


class TableListItem(_TableBase):
    """A read-only table resource from a list operation.

    For performance reasons, the BigQuery API only includes some of the table
    properties when listing tables. Notably,
    :attr:`~google.cloud.bigquery.table.Table.schema` and
    :attr:`~google.cloud.bigquery.table.Table.num_rows` are missing.

    For a full list of the properties that the BigQuery API returns, see the
    `REST documentation for tables.list
    <https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list>`_.

    Args:
        resource (Dict[str, object]):
            A table-like resource object from a table list response. A
            ``tableReference`` property is required.

    Raises:
        ValueError:
            If ``tableReference`` or one of its required members is missing
            from ``resource``.
    """

    def __init__(self, resource):
        if "tableReference" not in resource:
            raise ValueError("resource must contain a tableReference value")
        if "projectId" not in resource["tableReference"]:
            raise ValueError(
                "resource['tableReference'] must contain a projectId value"
            )
        if "datasetId" not in resource["tableReference"]:
            raise ValueError(
                "resource['tableReference'] must contain a datasetId value"
            )
        if "tableId" not in resource["tableReference"]:
            raise ValueError("resource['tableReference'] must contain a tableId value")

        self._properties = resource

    @property
    def created(self):
        """Union[datetime.datetime, None]: Datetime at which the table was
        created (:data:`None` until set from the server).
        """
        creation_time = self._properties.get("creationTime")
        if creation_time is not None:
            # creation_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000.0 * float(creation_time)
            )

    @property
    def expires(self):
        """Union[datetime.datetime, None]: Datetime at which the table will be
        deleted.
        """
        expiration_time = self._properties.get("expirationTime")
        if expiration_time is not None:
            # expiration_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000.0 * float(expiration_time)
            )

    reference = property(_reference_getter)

    @property
    def labels(self):
        """Dict[str, str]: Labels for the table.

        This method always returns a dict. To change a table's labels,
        modify the dict, then call ``Client.update_table``. To delete a
        label, set its value to :data:`None` before updating.
        """
        return self._properties.setdefault("labels", {})

    @property
    def full_table_id(self):
        """Union[str, None]: ID for the table (:data:`None` until set from the
        server).

        In the format ``project_id:dataset_id.table_id``.
        """
        return self._properties.get("id")

    @property
    def table_type(self):
        """Union[str, None]: The type of the table (:data:`None` until set from
        the server).

        Possible values are ``'TABLE'``, ``'VIEW'``, or ``'EXTERNAL'``.
        """
        return self._properties.get("type")

    @property
    def time_partitioning(self):
        """google.cloud.bigquery.table.TimePartitioning: Configures time-based
        partitioning for a table.
        """
        prop = self._properties.get("timePartitioning")
        if prop is not None:
            return TimePartitioning.from_api_repr(prop)

    @property
    def partitioning_type(self):
        """Union[str, None]: Time partitioning of the table if it is
        partitioned (Defaults to :data:`None`).
        """
        warnings.warn(
            "This method will be deprecated in future versions. Please use "
            "TableListItem.time_partitioning.type_ instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
        if self.time_partitioning is not None:
            return self.time_partitioning.type_

    @property
    def partition_expiration(self):
        """Union[int, None]: Expiration time in milliseconds for a partition.

        If this property is set and :attr:`type_` is not set, :attr:`type_`
        will default to :attr:`TimePartitioningType.DAY`.
        """
        warnings.warn(
            "This method will be deprecated in future versions. Please use "
            "TableListItem.time_partitioning.expiration_ms instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
        if self.time_partitioning is not None:
            return self.time_partitioning.expiration_ms

    @property
    def friendly_name(self):
        """Union[str, None]: Title of the table (defaults to :data:`None`)."""
        return self._properties.get("friendlyName")

    view_use_legacy_sql = property(_view_use_legacy_sql_getter)

    @property
    def clustering_fields(self):
        """Union[List[str], None]: Fields defining clustering for the table

        (Defaults to :data:`None`).

        Clustering fields are immutable after table creation.

        .. note::

            BigQuery supports clustering for both partitioned and
            non-partitioned tables.
        """
        prop = self._properties.get("clustering")
        if prop is not None:
            return list(prop.get("fields", ()))

    @classmethod
    def from_string(cls, full_table_id: str) -> "TableListItem":
        """Construct a table from a fully-qualified table ID.

        Args:
            full_table_id (str):
                A fully-qualified table ID in standard SQL format. Must
                include a project ID, dataset ID, and table ID, each
                separated by ``.``.

        Returns:
            Table: Table parsed from ``full_table_id``.

        Examples:
            >>> Table.from_string('my-project.mydataset.mytable')
            Table(TableRef...(D...('my-project', 'mydataset'), 'mytable'))

        Raises:
            ValueError:
                If ``full_table_id`` is not a fully-qualified table ID in
                standard SQL format.
        """
        return cls(
            {"tableReference": TableReference.from_string(full_table_id).to_api_repr()}
        )

    def to_bqstorage(self) -> str:
        """Construct a BigQuery Storage API representation of this table.

        Returns:
            str: A reference to this table in the BigQuery Storage API.
        """
        return self.reference.to_bqstorage()

    def to_api_repr(self) -> dict:
        """Constructs the API resource of this table.

        Returns:
            Dict[str, object]: Table represented as an API resource
        """
        return copy.deepcopy(self._properties)


def _row_from_mapping(mapping, schema):
    """Convert a mapping to a row tuple using the schema.

    Args:
        mapping (Dict[str, object]):
            Mapping of row data: must contain keys for all required fields in
            the schema. Keys which do not correspond to a field in the schema
            are ignored.
        schema (List[google.cloud.bigquery.schema.SchemaField]):
            The schema of the table destination for the rows

    Returns:
        Tuple[object]:
            Tuple whose elements are ordered according to the schema.

    Raises:
        ValueError: If schema is empty.
    """
    if len(schema) == 0:
        raise ValueError(_TABLE_HAS_NO_SCHEMA)

    row = []
    for field in schema:
        if field.mode == "REQUIRED":
            row.append(mapping[field.name])
        elif field.mode == "REPEATED":
            row.append(mapping.get(field.name, ()))
        elif field.mode == "NULLABLE":
            row.append(mapping.get(field.name))
        else:
            raise ValueError("Unknown field mode: {}".format(field.mode))
    return tuple(row)
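# A hedged sketch of _row_from_mapping (schema and values are hypothetical):
# REQUIRED fields must be present, REPEATED fields default to (), and
# NULLABLE fields default to None.
#
#     from google.cloud.bigquery.schema import SchemaField
#     schema = [
#         SchemaField("name", "STRING", mode="REQUIRED"),
#         SchemaField("tags", "STRING", mode="REPEATED"),
#         SchemaField("age", "INTEGER", mode="NULLABLE"),
#     ]
#     _row_from_mapping({"name": "Ada"}, schema)
#     # -> ("Ada", (), None)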


class StreamingBuffer(object):
    """Information about a table's streaming buffer.

    See https://cloud.google.com/bigquery/streaming-data-into-bigquery.

    Args:
        resource (Dict[str, object]):
            streaming buffer representation returned from the API
    """

    def __init__(self, resource):
        self.estimated_bytes = None
        if "estimatedBytes" in resource:
            self.estimated_bytes = int(resource["estimatedBytes"])
        self.estimated_rows = None
        if "estimatedRows" in resource:
            self.estimated_rows = int(resource["estimatedRows"])
        self.oldest_entry_time = None
        if "oldestEntryTime" in resource:
            self.oldest_entry_time = google.cloud._helpers._datetime_from_microseconds(
                1000.0 * int(resource["oldestEntryTime"])
            )


class SnapshotDefinition:
    """Information about the base table and snapshot time of the snapshot.

    See https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#snapshotdefinition

    Args:
        resource: Snapshot definition representation returned from the API.
    """

    def __init__(self, resource: Dict[str, Any]):
        self.base_table_reference = None
        if "baseTableReference" in resource:
            self.base_table_reference = TableReference.from_api_repr(
                resource["baseTableReference"]
            )

        self.snapshot_time = None
        if "snapshotTime" in resource:
            self.snapshot_time = google.cloud._helpers._rfc3339_to_datetime(
                resource["snapshotTime"]
            )


class CloneDefinition:
    """Information about the base table and clone time of the clone.

    See https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#clonedefinition

    Args:
        resource: Clone definition representation returned from the API.
    """

    def __init__(self, resource: Dict[str, Any]):
        self.base_table_reference = None
        if "baseTableReference" in resource:
            self.base_table_reference = TableReference.from_api_repr(
                resource["baseTableReference"]
            )

        self.clone_time = None
        if "cloneTime" in resource:
            self.clone_time = google.cloud._helpers._rfc3339_to_datetime(
                resource["cloneTime"]
            )

1611 
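SnapshotDefinition and CloneDefinition parse the same resource shape; a sketch with an invented table path:

clone = CloneDefinition(
    {
        "baseTableReference": {
            "projectId": "my-project",  # hypothetical identifiers
            "datasetId": "my_dataset",
            "tableId": "base_table",
        },
        "cloneTime": "2021-06-01T12:00:00.000000Z",
    }
)
print(clone.base_table_reference.table_id)  # "base_table"
print(clone.clone_time)  # datetime parsed from the RFC 3339 string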

1612 

1613class Row(object): 

1614 """A BigQuery row. 

1615 

1616 Values can be accessed by position (index), by key like a dict, 

1617 or as properties. 

1618 

1619 Args: 

1620 values (Sequence[object]): The row values 

1621 field_to_index (Dict[str, int]): 

1622 A mapping from schema field names to indexes 

1623 """ 

1624 

1625 # Choose unusual field names to try to avoid conflict with schema fields. 

1626 __slots__ = ("_xxx_values", "_xxx_field_to_index") 

1627 

1628 def __init__(self, values, field_to_index) -> None: 

1629 self._xxx_values = values 

1630 self._xxx_field_to_index = field_to_index 

1631 

1632 def values(self): 

1633 """Return the values included in this row. 

1634 

1635 Returns: 

1636 Sequence[object]: A sequence of length ``len(row)``. 

1637 """ 

1638 return copy.deepcopy(self._xxx_values) 

1639 

1640 def keys(self) -> Iterable[str]: 

1641 """Return the keys for using a row as a dict. 

1642 

1643 Returns: 

1644 Iterable[str]: The keys corresponding to the columns of a row 

1645 

1646 Examples: 

1647 

1648 >>> list(Row(('a', 'b'), {'x': 0, 'y': 1}).keys()) 

1649 ['x', 'y'] 

1650 """ 

1651 return self._xxx_field_to_index.keys() 

1652 

1653 def items(self) -> Iterable[Tuple[str, Any]]: 

1654 """Return items as ``(key, value)`` pairs. 

1655 

1656 Returns: 

1657 Iterable[Tuple[str, object]]: 

1658 The ``(key, value)`` pairs representing this row. 

1659 

1660 Examples: 

1661 

1662 >>> list(Row(('a', 'b'), {'x': 0, 'y': 1}).items()) 

1663 [('x', 'a'), ('y', 'b')] 

1664 """ 

1665 for key, index in self._xxx_field_to_index.items(): 

1666 yield (key, copy.deepcopy(self._xxx_values[index])) 

1667 

1668 def get(self, key: str, default: Any = None) -> Any: 

1669 """Return a value for key, with a default value if it does not exist. 

1670 

1671 Args: 

1672 key (str): The key of the column to access 

1673 default (object): 

1674 The default value to use if the key does not exist. (Defaults 

1675 to :data:`None`.) 

1676 

1677 Returns: 

1678 object: 

1679 The value associated with the provided key, or a default value. 

1680 

1681 Examples: 

1682 When the key exists, the value associated with it is returned. 

1683 

1684 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('x') 

1685 'a' 

1686 

1687 The default value is :data:`None` when the key does not exist. 

1688 

1689 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z') 

1690 None 

1691 

1692 The default value can be overridden with the ``default`` parameter. 

1693 

1694 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z', '') 

1695 '' 

1696 

1697 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z', default = '') 

1698 '' 

1699 """ 

1700 index = self._xxx_field_to_index.get(key) 

1701 if index is None: 

1702 return default 

1703 return self._xxx_values[index] 

1704 

1705 def __getattr__(self, name): 

1706 value = self._xxx_field_to_index.get(name) 

1707 if value is None: 

1708 raise AttributeError("no row field {!r}".format(name)) 

1709 return self._xxx_values[value] 

1710 

1711 def __len__(self): 

1712 return len(self._xxx_values) 

1713 

1714 def __getitem__(self, key): 

1715 if isinstance(key, str): 

1716 value = self._xxx_field_to_index.get(key) 

1717 if value is None: 

1718 raise KeyError("no row field {!r}".format(key)) 

1719 key = value 

1720 return self._xxx_values[key] 

1721 

1722 def __eq__(self, other): 

1723 if not isinstance(other, Row): 

1724 return NotImplemented 

1725 return ( 

1726 self._xxx_values == other._xxx_values 

1727 and self._xxx_field_to_index == other._xxx_field_to_index 

1728 ) 

1729 

1730 def __ne__(self, other): 

1731 return not self == other 

1732 

1733 def __repr__(self): 

1734 # sort field dict by value, for determinism 

1735 items = sorted(self._xxx_field_to_index.items(), key=operator.itemgetter(1)) 

1736 f2i = "{" + ", ".join("%r: %d" % item for item in items) + "}" 

1737 return "Row({}, {})".format(self._xxx_values, f2i) 

1738 

1739 
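The three access styles the class docstring promises, shown with the same toy row as the doctests above:

row = Row(("a", "b"), {"x": 0, "y": 1})
assert row[0] == "a"    # by position
assert row["y"] == "b"  # by key, like a dict
assert row.x == "a"     # as an attribute
assert len(row) == 2
# row.z would raise AttributeError("no row field 'z'")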

1740class _NoopProgressBarQueue(object): 

1741 """A fake Queue class that does nothing. 

1742 

1743 This is used when there is no progress bar to send updates to. 

1744 """ 

1745 

1746 def put_nowait(self, item): 

1747 """Don't actually do anything with the item.""" 

1748 

1749 

1750class RowIterator(HTTPIterator): 

1751 """A class for iterating through HTTP/JSON API row list responses. 

1752 

1753 Args: 

1754 client (Optional[google.cloud.bigquery.Client]): 

1755 The API client instance. This should always be non-`None`, except for 

1756 subclasses that do not use it, namely the ``_EmptyRowIterator``. 

1757 api_request (Callable[google.cloud._http.JSONConnection.api_request]): 

1758 The function to use to make API requests. 

1759 path (str): The method path to query for the list of items. 

1760 schema (Sequence[Union[ \ 

1761 :class:`~google.cloud.bigquery.schema.SchemaField`, \ 

1762 Mapping[str, Any] \ 

1763 ]]): 

1764 The table's schema. If any item is a mapping, its content must be 

1765 compatible with 

1766 :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. 

1767 page_token (str): A token identifying a page in a result set to start 

1768 fetching results from. 

1769 max_results (Optional[int]): The maximum number of results to fetch. 

1770 page_size (Optional[int]): The maximum number of rows in each page 

1771 of results from this request. Non-positive values are ignored. 

1772 Defaults to a sensible value set by the API. 

1773 extra_params (Optional[Dict[str, object]]): 

1774 Extra query string parameters for the API call. 

1775 table (Optional[Union[ \ 

1776 google.cloud.bigquery.table.Table, \ 

1777 google.cloud.bigquery.table.TableReference, \ 

1778 ]]): 

1779 The table which these rows belong to, or a reference to it. Used to 

1780 call the BigQuery Storage API to fetch rows. 

1781 selected_fields (Optional[Sequence[google.cloud.bigquery.schema.SchemaField]]): 

1782 A subset of columns to select from this table. 

1783 total_rows (Optional[int]): 

1784 Total number of rows in the table. 

1785 first_page_response (Optional[dict]): 

1786 API response for the first page of results. These are returned when 

1787 the first page is requested. 

1788 query (Optional[str]): 

1789 The query text used. 

1790 total_bytes_processed (Optional[int]): 

1791 Total bytes processed from job statistics, if present. 

1792 """ 

1793 

1794 def __init__( 

1795 self, 

1796 client, 

1797 api_request, 

1798 path, 

1799 schema, 

1800 page_token=None, 

1801 max_results=None, 

1802 page_size=None, 

1803 extra_params=None, 

1804 table=None, 

1805 selected_fields=None, 

1806 total_rows=None, 

1807 first_page_response=None, 

1808 location: Optional[str] = None, 

1809 job_id: Optional[str] = None, 

1810 query_id: Optional[str] = None, 

1811 project: Optional[str] = None, 

1812 num_dml_affected_rows: Optional[int] = None, 

1813 query: Optional[str] = None, 

1814 total_bytes_processed: Optional[int] = None, 

1815 ): 

1816 super(RowIterator, self).__init__( 

1817 client, 

1818 api_request, 

1819 path, 

1820 item_to_value=_item_to_row, 

1821 items_key="rows", 

1822 page_token=page_token, 

1823 max_results=max_results, 

1824 extra_params=extra_params, 

1825 page_start=_rows_page_start, 

1826 next_token="pageToken", 

1827 ) 

1828 schema = _to_schema_fields(schema) 

1829 self._field_to_index = _helpers._field_to_index_mapping(schema) 

1830 self._page_size = page_size 

1831 self._preserve_order = False 

1832 self._schema = schema 

1833 self._selected_fields = selected_fields 

1834 self._table = table 

1835 self._total_rows = total_rows 

1836 self._first_page_response = first_page_response 

1837 self._location = location 

1838 self._job_id = job_id 

1839 self._query_id = query_id 

1840 self._project = project 

1841 self._num_dml_affected_rows = num_dml_affected_rows 

1842 self._query = query 

1843 self._total_bytes_processed = total_bytes_processed 

1844 

1845 @property 

1846 def _billing_project(self) -> Optional[str]: 

1847 """GCP Project ID where BQ API will bill to (if applicable).""" 

1848 client = self.client 

1849 return client.project if client is not None else None 

1850 

1851 @property 

1852 def job_id(self) -> Optional[str]: 

1853 """ID of the query job (if applicable). 

1854 

1855 To get the job metadata, call 

1856 ``job = client.get_job(rows.job_id, location=rows.location)``. 

1857 """ 

1858 return self._job_id 

1859 

1860 @property 

1861 def location(self) -> Optional[str]: 

1862 """Location where the query executed (if applicable). 

1863 

1864 See: https://cloud.google.com/bigquery/docs/locations 

1865 """ 

1866 return self._location 

1867 

1868 @property 

1869 def num_dml_affected_rows(self) -> Optional[int]: 

1870 """If this RowIterator is the result of a DML query, the number of 

1871 rows that were affected. 

1872 

1873 See: 

1874 https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.num_dml_affected_rows 

1875 """ 

1876 return self._num_dml_affected_rows 

1877 

1878 @property 

1879 def project(self) -> Optional[str]: 

1880 """GCP Project ID where these rows are read from.""" 

1881 return self._project 

1882 

1883 @property 

1884 def query_id(self) -> Optional[str]: 

1885 """[Preview] ID of a completed query. 

1886 

1887 This ID is auto-generated and not guaranteed to be populated. 

1888 """ 

1889 return self._query_id 

1890 

1891 @property 

1892 def query(self) -> Optional[str]: 

1893 """The query text used.""" 

1894 return self._query 

1895 

1896 @property 

1897 def total_bytes_processed(self) -> Optional[int]: 

1898 """total bytes processed from job statistics, if present.""" 

1899 return self._total_bytes_processed 

1900 

1901 def _is_almost_completely_cached(self): 

1902 """Check if all results are completely cached. 

1903 

1904 This is useful to know, because we can avoid alternative download 

1905 mechanisms. 

1906 """ 

1907 if ( 

1908 not hasattr(self, "_first_page_response") 

1909 or self._first_page_response is None 

1910 ): 

1911 return False 

1912 

1913 total_cached_rows = len(self._first_page_response.get(self._items_key, [])) 

1914 if self.max_results is not None and total_cached_rows >= self.max_results: 

1915 return True 

1916 

1917 if ( 

1918 self.next_page_token is None 

1919 and self._first_page_response.get(self._next_token) is None 

1920 ): 

1921 return True 

1922 

1923 if self._total_rows is not None: 

1924 almost_completely = self._total_rows * ALMOST_COMPLETELY_CACHED_RATIO 

1925 if total_cached_rows >= almost_completely: 

1926 return True 

1927 

1928 return False 

1929 
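A worked example of the ratio branch, assuming a hypothetical ALMOST_COMPLETELY_CACHED_RATIO of 0.9 (the constant is defined elsewhere in this module):

# Suppose the cached first page holds 950 of 1000 total rows:
#   almost_completely = 1000 * 0.9 = 900
#   950 >= 900, so the method returns True and callers skip alternative
#   download mechanisms such as the BigQuery Storage API.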

1930 def _should_use_bqstorage(self, bqstorage_client, create_bqstorage_client): 

1931 """Returns True if the BigQuery Storage API can be used. 

1932 

1933 Returns: 

1934 bool 

1935 True if the BigQuery Storage client can be used or created. 

1936 """ 

1937 using_bqstorage_api = bqstorage_client or create_bqstorage_client 

1938 if not using_bqstorage_api: 

1939 return False 

1940 

1941 if self._table is None: 

1942 return False 

1943 

1944 # The developer has already started paging through results if 

1945 # next_page_token is set. 

1946 if hasattr(self, "next_page_token") and self.next_page_token is not None: 

1947 return False 

1948 

1949 if self._is_almost_completely_cached(): 

1950 return False 

1951 

1952 if self.max_results is not None: 

1953 return False 

1954 

1955 try: 

1956 _versions_helpers.BQ_STORAGE_VERSIONS.try_import(raise_if_error=True) 

1957 except bq_exceptions.BigQueryStorageNotFoundError: 

1958 warnings.warn( 

1959 "BigQuery Storage module not found, fetch data with the REST " 

1960 "endpoint instead." 

1961 ) 

1962 return False 

1963 except bq_exceptions.LegacyBigQueryStorageError as exc: 

1964 warnings.warn(str(exc)) 

1965 return False 

1966 

1967 return True 

1968 

1969 def _get_next_page_response(self): 

1970 """Requests the next page from the path provided. 

1971 

1972 Returns: 

1973 Dict[str, object]: 

1974 The parsed JSON response of the next page's contents. 

1975 """ 

1976 if self._first_page_response: 

1977 rows = self._first_page_response.get(self._items_key, [])[ 

1978 : self.max_results 

1979 ] 

1980 response = { 

1981 self._items_key: rows, 

1982 } 

1983 if self._next_token in self._first_page_response: 

1984 response[self._next_token] = self._first_page_response[self._next_token] 

1985 

1986 self._first_page_response = None 

1987 return response 

1988 

1989 params = self._get_query_params() 

1990 

1991 # If the user has provided page_size and start_index, we need to pass 

1992 # start_index for the first page, but for all subsequent pages, we 

1993 # should not pass start_index. We make a shallow copy of params and do 

1994 # not alter the original, so if the user iterates the results again, 

1995 # start_index is preserved. 

1996 params_copy = copy.copy(params) 

1997 if self._page_size is not None: 

1998 if self.page_number and "startIndex" in params: 

1999 del params_copy["startIndex"] 

2000 

2001 return self.api_request( 

2002 method=self._HTTP_METHOD, path=self.path, query_params=params_copy 

2003 ) 

2004 

2005 @property 

2006 def schema(self): 

2007 """List[google.cloud.bigquery.schema.SchemaField]: The subset of 

2008 columns to be read from the table.""" 

2009 return list(self._schema) 

2010 

2011 @property 

2012 def total_rows(self): 

2013 """int: The total number of rows in the table or query results.""" 

2014 return self._total_rows 

2015 

2016 def _maybe_warn_max_results( 

2017 self, 

2018 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"], 

2019 ): 

2020 """Issue a warning if BQ Storage client is not ``None`` with ``max_results`` set. 

2021 

2022 This helper method should be used directly in the relevant top-level public 

2023 methods, so that the warning is issued for the correct line in user code. 

2024 

2025 Args: 

2026 bqstorage_client: 

2027 The BigQuery Storage client intended to use for downloading result rows. 

2028 """ 

2029 if bqstorage_client is not None and self.max_results is not None: 

2030 warnings.warn( 

2031 "Cannot use bqstorage_client if max_results is set, " 

2032 "reverting to fetching data with the REST endpoint.", 

2033 stacklevel=3, 

2034 ) 

2035 

2036 def _to_page_iterable( 

2037 self, bqstorage_download, tabledata_list_download, bqstorage_client=None 

2038 ): 

2039 if not self._should_use_bqstorage(bqstorage_client, False): 

2040 bqstorage_client = None 

2041 

2042 result_pages = ( 

2043 bqstorage_download() 

2044 if bqstorage_client is not None 

2045 else tabledata_list_download() 

2046 ) 

2047 yield from result_pages 

2048 

2049 def to_arrow_iterable( 

2050 self, 

2051 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None, 

2052 max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT, # type: ignore 

2053 max_stream_count: Optional[int] = None, 

2054 ) -> Iterator["pyarrow.RecordBatch"]: 

2055 """[Beta] Create an iterable of class:`pyarrow.RecordBatch`, to process the table as a stream. 

2056 

2057 Args: 

2058 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]): 

2059 A BigQuery Storage API client. If supplied, use the faster 

2060 BigQuery Storage API to fetch rows from BigQuery. 

2061 

2062 This method requires the ``pyarrow`` and 

2063 ``google-cloud-bigquery-storage`` libraries. 

2064 

2065 This method only exposes a subset of the capabilities of the 

2066 BigQuery Storage API. For full access to all features 

2067 (projections, filters, snapshots) use the Storage API directly. 

2068 

2069 max_queue_size (Optional[int]): 

2070 The maximum number of result pages to hold in the internal queue when 

2071 streaming query results over the BigQuery Storage API. Ignored if 

2072 Storage API is not used. 

2073 

2074 By default, the max queue size is set to the number of BQ Storage streams 

2075 created by the server. If ``max_queue_size`` is :data:`None`, the queue 

2076 size is infinite. 

2077 

2078 max_stream_count (Optional[int]): 

2079 The maximum number of parallel download streams when 

2080 using BigQuery Storage API. Ignored if 

2081 BigQuery Storage API is not used. 

2082 

2083 This setting also has no effect if the query result 

2084 is deterministically ordered with ORDER BY, 

2085 in which case the number of download streams is always 1. 

2086 

2087 If set to 0 or None (the default), the number of download 

2088 streams is determined by the BigQuery server. However, this behaviour 

2089 can require a lot of memory to store temporary download results, 

2090 especially with very large queries. In that case, 

2091 setting this parameter to a value greater than 0 can help 

2092 reduce system resource consumption. 

2093 

2094 Returns: 

2095 pyarrow.RecordBatch: 

2096 A generator of :class:`~pyarrow.RecordBatch`. 

2097 

2098 .. versionadded:: 2.31.0 

2099 """ 

2100 self._maybe_warn_max_results(bqstorage_client) 

2101 

2102 bqstorage_download = functools.partial( 

2103 _pandas_helpers.download_arrow_bqstorage, 

2104 self._billing_project, 

2105 self._table, 

2106 bqstorage_client, 

2107 preserve_order=self._preserve_order, 

2108 selected_fields=self._selected_fields, 

2109 max_queue_size=max_queue_size, 

2110 max_stream_count=max_stream_count, 

2111 ) 

2112 tabledata_list_download = functools.partial( 

2113 _pandas_helpers.download_arrow_row_iterator, iter(self.pages), self.schema 

2114 ) 

2115 return self._to_page_iterable( 

2116 bqstorage_download, 

2117 tabledata_list_download, 

2118 bqstorage_client=bqstorage_client, 

2119 ) 

2120 
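A streaming usage sketch; here `rows` stands for any RowIterator, e.g. the result of `client.list_rows(table)`:

total = 0
for batch in rows.to_arrow_iterable():  # one pyarrow.RecordBatch at a time
    total += batch.num_rows             # process incrementally, bounding memory
print("downloaded", total, "rows")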

2121 # If changing the signature of this method, make sure to apply the same 

2122 # changes to job.QueryJob.to_arrow() 

2123 def to_arrow( 

2124 self, 

2125 progress_bar_type: Optional[str] = None, 

2126 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None, 

2127 create_bqstorage_client: bool = True, 

2128 ) -> "pyarrow.Table": 

2129 """[Beta] Create a class:`pyarrow.Table` by loading all pages of a 

2130 table or query. 

2131 

2132 Args: 

2133 progress_bar_type (Optional[str]): 

2134 If set, use the `tqdm <https://tqdm.github.io/>`_ library to 

2135 display a progress bar while the data downloads. Install the 

2136 ``tqdm`` package to use this feature. 

2137 

2138 Possible values of ``progress_bar_type`` include: 

2139 

2140 ``None`` 

2141 No progress bar. 

2142 ``'tqdm'`` 

2143 Use the :func:`tqdm.tqdm` function to print a progress bar 

2144 to :data:`sys.stdout`. 

2145 ``'tqdm_notebook'`` 

2146 Use the :func:`tqdm.notebook.tqdm` function to display a 

2147 progress bar as a Jupyter notebook widget. 

2148 ``'tqdm_gui'`` 

2149 Use the :func:`tqdm.tqdm_gui` function to display a 

2150 progress bar as a graphical dialog box. 

2151 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]): 

2152 A BigQuery Storage API client. If supplied, use the faster BigQuery 

2153 Storage API to fetch rows from BigQuery. This API is a billable API. 

2154 

2155 This method requires ``google-cloud-bigquery-storage`` library. 

2156 

2157 This method only exposes a subset of the capabilities of the 

2158 BigQuery Storage API. For full access to all features 

2159 (projections, filters, snapshots) use the Storage API directly. 

2160 create_bqstorage_client (Optional[bool]): 

2161 If ``True`` (default), create a BigQuery Storage API client using 

2162 the default API settings. The BigQuery Storage API is a faster way 

2163 to fetch rows from BigQuery. See the ``bqstorage_client`` parameter 

2164 for more information. 

2165 

2166 This argument does nothing if ``bqstorage_client`` is supplied. 

2167 

2168 .. versionadded:: 1.24.0 

2169 

2170 Returns: 

2171 pyarrow.Table 

2172 A :class:`pyarrow.Table` populated with row data and column 

2173 headers from the query results. The column headers are derived 

2174 from the destination table's schema. 

2175 

2176 Raises: 

2177 ValueError: If the :mod:`pyarrow` library cannot be imported. 

2178 

2179 

2180 .. versionadded:: 1.17.0 

2181 """ 

2182 if pyarrow is None: 

2183 raise ValueError(_NO_PYARROW_ERROR) 

2184 

2185 self._maybe_warn_max_results(bqstorage_client) 

2186 

2187 if not self._should_use_bqstorage(bqstorage_client, create_bqstorage_client): 

2188 create_bqstorage_client = False 

2189 bqstorage_client = None 

2190 

2191 owns_bqstorage_client = False 

2192 if not bqstorage_client and create_bqstorage_client: 

2193 bqstorage_client = self.client._ensure_bqstorage_client() 

2194 owns_bqstorage_client = bqstorage_client is not None 

2195 

2196 try: 

2197 progress_bar = get_progress_bar( 

2198 progress_bar_type, "Downloading", self.total_rows, "rows" 

2199 ) 

2200 

2201 record_batches = [] 

2202 for record_batch in self.to_arrow_iterable( 

2203 bqstorage_client=bqstorage_client 

2204 ): 

2205 record_batches.append(record_batch) 

2206 

2207 if progress_bar is not None: 

2208 # In some cases, the number of total rows is not populated 

2209 # until the first page of rows is fetched. Update the 

2210 # progress bar's total to keep an accurate count. 

2211 progress_bar.total = progress_bar.total or self.total_rows 

2212 progress_bar.update(record_batch.num_rows) 

2213 

2214 if progress_bar is not None: 

2215 # Indicate that the download has finished. 

2216 progress_bar.close() 

2217 finally: 

2218 if owns_bqstorage_client: 

2219 bqstorage_client._transport.grpc_channel.close() # type: ignore 

2220 

2221 if record_batches and bqstorage_client is not None: 

2222 return pyarrow.Table.from_batches(record_batches) 

2223 else: 

2224 # Either there are no records (record_batches is empty), so we build the 

2225 # schema from the BigQuery schema, 

2226 # **or** we used the REST API (bqstorage_client is None), 

2227 # which doesn't add Arrow extension metadata, so we let 

2228 # `bq_to_arrow_schema` do it. 

2229 arrow_schema = _pandas_helpers.bq_to_arrow_schema(self._schema) 

2230 return pyarrow.Table.from_batches(record_batches, schema=arrow_schema) 

2231 
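And the one-shot counterpart, again assuming `rows` is a RowIterator and that tqdm is installed for the optional progress bar:

arrow_table = rows.to_arrow(progress_bar_type="tqdm")
print(arrow_table.num_rows, arrow_table.schema.names)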

2232 def to_dataframe_iterable( 

2233 self, 

2234 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None, 

2235 dtypes: Optional[Dict[str, Any]] = None, 

2236 max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT, # type: ignore 

2237 max_stream_count: Optional[int] = None, 

2238 ) -> "pandas.DataFrame": 

2239 """Create an iterable of pandas DataFrames, to process the table as a stream. 

2240 

2241 Args: 

2242 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]): 

2243 A BigQuery Storage API client. If supplied, use the faster 

2244 BigQuery Storage API to fetch rows from BigQuery. 

2245 

2246 This method requires ``google-cloud-bigquery-storage`` library. 

2247 

2248 This method only exposes a subset of the capabilities of the 

2249 BigQuery Storage API. For full access to all features 

2250 (projections, filters, snapshots) use the Storage API directly. 

2251 

2252 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]): 

2253 A dictionary of column names to pandas ``dtype``s. The provided 

2254 ``dtype`` is used when constructing the series for the column 

2255 specified. Otherwise, the default pandas behavior is used. 

2256 

2257 max_queue_size (Optional[int]): 

2258 The maximum number of result pages to hold in the internal queue when 

2259 streaming query results over the BigQuery Storage API. Ignored if 

2260 Storage API is not used. 

2261 

2262 By default, the max queue size is set to the number of BQ Storage streams 

2263 created by the server. If ``max_queue_size`` is :data:`None`, the queue 

2264 size is infinite. 

2265 

2266 .. versionadded:: 2.14.0 

2267 

2268 max_stream_count (Optional[int]): 

2269 The maximum number of parallel download streams when 

2270 using BigQuery Storage API. Ignored if 

2271 BigQuery Storage API is not used. 

2272 

2273 This setting also has no effect if the query result 

2274 is deterministically ordered with ORDER BY, 

2275 in which case the number of download streams is always 1. 

2276 

2277 If set to 0 or None (the default), the number of download 

2278 streams is determined by the BigQuery server. However, this behaviour 

2279 can require a lot of memory to store temporary download results, 

2280 especially with very large queries. In that case, 

2281 setting this parameter to a value greater than 0 can help 

2282 reduce system resource consumption. 

2283 

2284 Returns: 

2285 pandas.DataFrame: 

2286 A generator of :class:`~pandas.DataFrame`. 

2287 

2288 Raises: 

2289 ValueError: 

2290 If the :mod:`pandas` library cannot be imported. 

2291 """ 

2292 _pandas_helpers.verify_pandas_imports() 

2293 

2294 if dtypes is None: 

2295 dtypes = {} 

2296 

2297 self._maybe_warn_max_results(bqstorage_client) 

2298 

2299 column_names = [field.name for field in self._schema] 

2300 bqstorage_download = functools.partial( 

2301 _pandas_helpers.download_dataframe_bqstorage, 

2302 self._billing_project, 

2303 self._table, 

2304 bqstorage_client, 

2305 column_names, 

2306 dtypes, 

2307 preserve_order=self._preserve_order, 

2308 selected_fields=self._selected_fields, 

2309 max_queue_size=max_queue_size, 

2310 max_stream_count=max_stream_count, 

2311 ) 

2312 tabledata_list_download = functools.partial( 

2313 _pandas_helpers.download_dataframe_row_iterator, 

2314 iter(self.pages), 

2315 self.schema, 

2316 dtypes, 

2317 ) 

2318 return self._to_page_iterable( 

2319 bqstorage_download, 

2320 tabledata_list_download, 

2321 bqstorage_client=bqstorage_client, 

2322 ) 

2323 
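A chunked-processing sketch; the `amount` column is invented:

running_sum = 0.0
for frame in rows.to_dataframe_iterable():
    # Each iteration yields one page-sized pandas.DataFrame.
    running_sum += frame["amount"].sum()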

2324 # If changing the signature of this method, make sure to apply the same 

2325 # changes to job.QueryJob.to_dataframe() 

2326 def to_dataframe( 

2327 self, 

2328 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None, 

2329 dtypes: Optional[Dict[str, Any]] = None, 

2330 progress_bar_type: Optional[str] = None, 

2331 create_bqstorage_client: bool = True, 

2332 geography_as_object: bool = False, 

2333 bool_dtype: Union[Any, None] = DefaultPandasDTypes.BOOL_DTYPE, 

2334 int_dtype: Union[Any, None] = DefaultPandasDTypes.INT_DTYPE, 

2335 float_dtype: Union[Any, None] = None, 

2336 string_dtype: Union[Any, None] = None, 

2337 date_dtype: Union[Any, None] = DefaultPandasDTypes.DATE_DTYPE, 

2338 datetime_dtype: Union[Any, None] = None, 

2339 time_dtype: Union[Any, None] = DefaultPandasDTypes.TIME_DTYPE, 

2340 timestamp_dtype: Union[Any, None] = None, 

2341 range_date_dtype: Union[Any, None] = DefaultPandasDTypes.RANGE_DATE_DTYPE, 

2342 range_datetime_dtype: Union[ 

2343 Any, None 

2344 ] = DefaultPandasDTypes.RANGE_DATETIME_DTYPE, 

2345 range_timestamp_dtype: Union[ 

2346 Any, None 

2347 ] = DefaultPandasDTypes.RANGE_TIMESTAMP_DTYPE, 

2348 ) -> "pandas.DataFrame": 

2349 """Create a pandas DataFrame by loading all pages of a query. 

2350 

2351 Args: 

2352 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]): 

2353 A BigQuery Storage API client. If supplied, use the faster 

2354 BigQuery Storage API to fetch rows from BigQuery. 

2355 

2356 This method requires ``google-cloud-bigquery-storage`` library. 

2357 

2358 This method only exposes a subset of the capabilities of the 

2359 BigQuery Storage API. For full access to all features 

2360 (projections, filters, snapshots) use the Storage API directly. 

2361 

2362 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]): 

2363 A dictionary of column names to pandas ``dtype``s. The provided 

2364 ``dtype`` is used when constructing the series for the column 

2365 specified. Otherwise, the default pandas behavior is used. 

2366 progress_bar_type (Optional[str]): 

2367 If set, use the `tqdm <https://tqdm.github.io/>`_ library to 

2368 display a progress bar while the data downloads. Install the 

2369 ``tqdm`` package to use this feature. 

2370 

2371 Possible values of ``progress_bar_type`` include: 

2372 

2373 ``None`` 

2374 No progress bar. 

2375 ``'tqdm'`` 

2376 Use the :func:`tqdm.tqdm` function to print a progress bar 

2377 to :data:`sys.stdout`. 

2378 ``'tqdm_notebook'`` 

2379 Use the :func:`tqdm.notebook.tqdm` function to display a 

2380 progress bar as a Jupyter notebook widget. 

2381 ``'tqdm_gui'`` 

2382 Use the :func:`tqdm.tqdm_gui` function to display a 

2383 progress bar as a graphical dialog box. 

2384 

2385 .. versionadded:: 1.11.0 

2386 

2387 create_bqstorage_client (Optional[bool]): 

2388 If ``True`` (default), create a BigQuery Storage API client 

2389 using the default API settings. The BigQuery Storage API 

2390 is a faster way to fetch rows from BigQuery. See the 

2391 ``bqstorage_client`` parameter for more information. 

2392 

2393 This argument does nothing if ``bqstorage_client`` is supplied. 

2394 

2395 .. versionadded:: 1.24.0 

2396 

2397 geography_as_object (Optional[bool]): 

2398 If ``True``, convert GEOGRAPHY data to :mod:`shapely` 

2399 geometry objects. If ``False`` (default), don't cast 

2400 geography data to :mod:`shapely` geometry objects. 

2401 

2402 .. versionadded:: 2.24.0 

2403 

2404 bool_dtype (Optional[pandas.Series.dtype, None]): 

2405 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.BooleanDtype()``) 

2406 to convert BigQuery Boolean type, instead of relying on the default 

2407 ``pandas.BooleanDtype()``. If you explicitly set the value to ``None``, 

2408 then the data type will be ``numpy.dtype("bool")``. BigQuery Boolean 

2409 type can be found at: 

2410 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#boolean_type 

2411 

2412 .. versionadded:: 3.8.0 

2413 

2414 int_dtype (Optional[pandas.Series.dtype, None]): 

2415 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Int64Dtype()``) 

2416 to convert BigQuery Integer types, instead of relying on the default 

2417 ``pandas.Int64Dtype()``. If you explicitly set the value to ``None``, 

2418 then the data type will be ``numpy.dtype("int64")``. A list of BigQuery 

2419 Integer types can be found at: 

2420 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types 

2421 

2422 .. versionadded:: 3.8.0 

2423 

2424 float_dtype (Optional[pandas.Series.dtype, None]): 

2425 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Float32Dtype()``) 

2426 to convert BigQuery Float type, instead of relying on the default 

2427 ``numpy.dtype("float64")``. If you explicitly set the value to ``None``, 

2428 then the data type will be ``numpy.dtype("float64")``. BigQuery Float 

2429 type can be found at: 

2430 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types 

2431 

2432 .. versionadded:: 3.8.0 

2433 

2434 string_dtype (Optional[pandas.Series.dtype, None]): 

2435 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.StringDtype()``) to 

2436 convert BigQuery String type, instead of relying on the default 

2437 ``numpy.dtype("object")``. If you explicitly set the value to ``None``, 

2438 then the data type will be ``numpy.dtype("object")``. BigQuery String 

2439 type can be found at: 

2440 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#string_type 

2441 

2442 .. versionadded:: 3.8.0 

2443 

2444 date_dtype (Optional[pandas.Series.dtype, None]): 

2445 If set, indicate a pandas ExtensionDtype (e.g. 

2446 ``pandas.ArrowDtype(pyarrow.date32())``) to convert BigQuery Date 

2447 type, instead of relying on the default ``db_dtypes.DateDtype()``. 

2448 If you explicitly set the value to ``None``, then the data type will be 

2449 ``numpy.dtype("datetime64[ns]")`` or ``object`` if out of bounds. BigQuery 

2450 Date type can be found at: 

2451 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#date_type 

2452 

2453 .. versionadded:: 3.10.0 

2454 

2455 datetime_dtype (Optional[pandas.Series.dtype, None]): 

2456 If set, indicate a pandas ExtensionDtype (e.g. 

2457 ``pandas.ArrowDtype(pyarrow.timestamp("us"))``) to convert BigQuery Datetime 

2458 type, instead of relying on the default ``numpy.dtype("datetime64[ns]")``. 

2459 If you explicitly set the value to ``None``, then the data type will be 

2460 ``numpy.dtype("datetime64[ns]")`` or ``object`` if out of bounds. BigQuery 

2461 Datetime type can be found at: 

2462 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#datetime_type 

2463 

2464 .. versionadded:: 3.10.0 

2465 

2466 time_dtype (Optional[pandas.Series.dtype, None]): 

2467 If set, indicate a pandas ExtensionDtype (e.g. 

2468 ``pandas.ArrowDtype(pyarrow.time64("us"))``) to convert BigQuery Time 

2469 type, instead of relying on the default ``db_dtypes.TimeDtype()``. 

2470 If you explicitly set the value to ``None``, then the data type will be 

2471 ``numpy.dtype("object")``. BigQuery Time type can be found at: 

2472 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#time_type 

2473 

2474 .. versionadded:: 3.10.0 

2475 

2476 timestamp_dtype (Optional[pandas.Series.dtype, None]): 

2477 If set, indicate a pandas ExtensionDtype (e.g. 

2478 ``pandas.ArrowDtype(pyarrow.timestamp("us", tz="UTC"))``) to convert BigQuery Timestamp 

2479 type, instead of relying on the default ``numpy.dtype("datetime64[ns, UTC]")``. 

2480 If you explicitly set the value to ``None``, then the data type will be 

2481 ``numpy.dtype("datetime64[ns, UTC]")`` or ``object`` if out of bounds. BigQuery 

2482 Timestamp type can be found at: 

2483 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#timestamp_type 

2484 

2485 .. versionadded:: 3.10.0 

2486 

2487 range_date_dtype (Optional[pandas.Series.dtype, None]): 

2488 If set, indicate a pandas ExtensionDtype, such as: 

2489 

2490 .. code-block:: python 

2491 

2492 pandas.ArrowDtype(pyarrow.struct( 

2493 [("start", pyarrow.date32()), ("end", pyarrow.date32())] 

2494 )) 

2495 

2496 to convert BigQuery RANGE<DATE> type, instead of relying on 

2497 the default ``object``. If you explicitly set the value to 

2498 ``None``, the data type will be ``object``. BigQuery Range type 

2499 can be found at: 

2500 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type 

2501 

2502 .. versionadded:: 3.21.0 

2503 

2504 range_datetime_dtype (Optional[pandas.Series.dtype, None]): 

2505 If set, indicate a pandas ExtensionDtype, such as: 

2506 

2507 .. code-block:: python 

2508 

2509 pandas.ArrowDtype(pyarrow.struct( 

2510 [ 

2511 ("start", pyarrow.timestamp("us")), 

2512 ("end", pyarrow.timestamp("us")), 

2513 ] 

2514 )) 

2515 

2516 to convert BigQuery RANGE<DATETIME> type, instead of relying on 

2517 the default ``object``. If you explicitly set the value to 

2518 ``None``, the data type will be ``object``. BigQuery Range type 

2519 can be found at: 

2520 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type 

2521 

2522 .. versionadded:: 3.21.0 

2523 

2524 range_timestamp_dtype (Optional[pandas.Series.dtype, None]): 

2525 If set, indicate a pandas ExtensionDtype, such as: 

2526 

2527 .. code-block:: python 

2528 

2529 pandas.ArrowDtype(pyarrow.struct( 

2530 [ 

2531 ("start", pyarrow.timestamp("us", tz="UTC")), 

2532 ("end", pyarrow.timestamp("us", tz="UTC")), 

2533 ] 

2534 )) 

2535 

2536 to convert BigQuery RANGE<TIMESTAMP> type, instead of relying 

2537 on the default ``object``. If you explicitly set the value to 

2538 ``None``, the data type will be ``object``. BigQuery Range type 

2539 can be found at: 

2540 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type 

2541 

2542 .. versionadded:: 3.21.0 

2543 

2544 Returns: 

2545 pandas.DataFrame: 

2546 A :class:`~pandas.DataFrame` populated with row data and column 

2547 headers from the query results. The column headers are derived 

2548 from the destination table's schema. 

2549 

2550 Raises: 

2551 ValueError: 

2552 If the :mod:`pandas` library cannot be imported, or 

2553 the :mod:`google.cloud.bigquery_storage_v1` module is 

2554 required but cannot be imported. Also if 

2555 `geography_as_object` is `True`, but the 

2556 :mod:`shapely` library cannot be imported. Also if 

2557 `bool_dtype`, `int_dtype`, or another dtype parameter 

2558 is not a supported dtype. 

2559 

2560 """ 

2561 _pandas_helpers.verify_pandas_imports() 

2562 

2563 if geography_as_object and shapely is None: 

2564 raise ValueError(_NO_SHAPELY_ERROR) 

2565 

2566 if bool_dtype is DefaultPandasDTypes.BOOL_DTYPE: 

2567 bool_dtype = pandas.BooleanDtype() 

2568 

2569 if int_dtype is DefaultPandasDTypes.INT_DTYPE: 

2570 int_dtype = pandas.Int64Dtype() 

2571 

2572 if time_dtype is DefaultPandasDTypes.TIME_DTYPE: 

2573 time_dtype = db_dtypes.TimeDtype() 

2574 

2575 if range_date_dtype is DefaultPandasDTypes.RANGE_DATE_DTYPE: 

2576 if _versions_helpers.SUPPORTS_RANGE_PYARROW: 

2577 range_date_dtype = pandas.ArrowDtype( 

2578 pyarrow.struct( 

2579 [("start", pyarrow.date32()), ("end", pyarrow.date32())] 

2580 ) 

2581 ) 

2582 else: 

2583 warnings.warn(_RANGE_PYARROW_WARNING) 

2584 range_date_dtype = None 

2585 

2586 if range_datetime_dtype is DefaultPandasDTypes.RANGE_DATETIME_DTYPE: 

2587 if _versions_helpers.SUPPORTS_RANGE_PYARROW: 

2588 range_datetime_dtype = pandas.ArrowDtype( 

2589 pyarrow.struct( 

2590 [ 

2591 ("start", pyarrow.timestamp("us")), 

2592 ("end", pyarrow.timestamp("us")), 

2593 ] 

2594 ) 

2595 ) 

2596 else: 

2597 warnings.warn(_RANGE_PYARROW_WARNING) 

2598 range_datetime_dtype = None 

2599 

2600 if range_timestamp_dtype is DefaultPandasDTypes.RANGE_TIMESTAMP_DTYPE: 

2601 if _versions_helpers.SUPPORTS_RANGE_PYARROW: 

2602 range_timestamp_dtype = pandas.ArrowDtype( 

2603 pyarrow.struct( 

2604 [ 

2605 ("start", pyarrow.timestamp("us", tz="UTC")), 

2606 ("end", pyarrow.timestamp("us", tz="UTC")), 

2607 ] 

2608 ) 

2609 ) 

2610 else: 

2611 warnings.warn(_RANGE_PYARROW_WARNING) 

2612 range_timestamp_dtype = None 

2613 

2614 if bool_dtype is not None and not hasattr(bool_dtype, "__from_arrow__"): 

2615 raise ValueError("bool_dtype", _NO_SUPPORTED_DTYPE) 

2616 

2617 if int_dtype is not None and not hasattr(int_dtype, "__from_arrow__"): 

2618 raise ValueError("int_dtype", _NO_SUPPORTED_DTYPE) 

2619 

2620 if float_dtype is not None and not hasattr(float_dtype, "__from_arrow__"): 

2621 raise ValueError("float_dtype", _NO_SUPPORTED_DTYPE) 

2622 

2623 if string_dtype is not None and not hasattr(string_dtype, "__from_arrow__"): 

2624 raise ValueError("string_dtype", _NO_SUPPORTED_DTYPE) 

2625 

2626 if ( 

2627 date_dtype is not None 

2628 and date_dtype is not DefaultPandasDTypes.DATE_DTYPE 

2629 and not hasattr(date_dtype, "__from_arrow__") 

2630 ): 

2631 raise ValueError("date_dtype", _NO_SUPPORTED_DTYPE) 

2632 

2633 if datetime_dtype is not None and not hasattr(datetime_dtype, "__from_arrow__"): 

2634 raise ValueError("datetime_dtype", _NO_SUPPORTED_DTYPE) 

2635 

2636 if time_dtype is not None and not hasattr(time_dtype, "__from_arrow__"): 

2637 raise ValueError("time_dtype", _NO_SUPPORTED_DTYPE) 

2638 

2639 if timestamp_dtype is not None and not hasattr( 

2640 timestamp_dtype, "__from_arrow__" 

2641 ): 

2642 raise ValueError("timestamp_dtype", _NO_SUPPORTED_DTYPE) 

2643 

2644 if dtypes is None: 

2645 dtypes = {} 

2646 

2647 self._maybe_warn_max_results(bqstorage_client) 

2648 

2649 if not self._should_use_bqstorage(bqstorage_client, create_bqstorage_client): 

2650 create_bqstorage_client = False 

2651 bqstorage_client = None 

2652 

2653 record_batch = self.to_arrow( 

2654 progress_bar_type=progress_bar_type, 

2655 bqstorage_client=bqstorage_client, 

2656 create_bqstorage_client=create_bqstorage_client, 

2657 ) 

2658 

2659 # The default date dtype is `db_dtypes.DateDtype()`, which could cause an out-of-bounds 

2660 # error when pyarrow converts date values to nanosecond precision. To avoid the error, 

2661 # we set the date_as_object parameter to True, if necessary. 

2662 date_as_object = False 

2663 if date_dtype is DefaultPandasDTypes.DATE_DTYPE: 

2664 date_dtype = db_dtypes.DateDtype() 

2665 date_as_object = not all( 

2666 self.__can_cast_timestamp_ns(col) 

2667 for col in record_batch 

2668 # Type can be date32 or date64 (plus units). 

2669 # See: https://arrow.apache.org/docs/python/api/datatypes.html 

2670 if pyarrow.types.is_date(col.type) 

2671 ) 

2672 

2673 timestamp_as_object = False 

2674 if datetime_dtype is None and timestamp_dtype is None: 

2675 timestamp_as_object = not all( 

2676 self.__can_cast_timestamp_ns(col) 

2677 for col in record_batch 

2678 # Type can be datetime and timestamp (plus units and time zone). 

2679 # See: https://arrow.apache.org/docs/python/api/datatypes.html 

2680 if pyarrow.types.is_timestamp(col.type) 

2681 ) 

2682 

2683 df = record_batch.to_pandas( 

2684 date_as_object=date_as_object, 

2685 timestamp_as_object=timestamp_as_object, 

2686 integer_object_nulls=True, 

2687 types_mapper=_pandas_helpers.default_types_mapper( 

2688 date_as_object=date_as_object, 

2689 bool_dtype=bool_dtype, 

2690 int_dtype=int_dtype, 

2691 float_dtype=float_dtype, 

2692 string_dtype=string_dtype, 

2693 date_dtype=date_dtype, 

2694 datetime_dtype=datetime_dtype, 

2695 time_dtype=time_dtype, 

2696 timestamp_dtype=timestamp_dtype, 

2697 range_date_dtype=range_date_dtype, 

2698 range_datetime_dtype=range_datetime_dtype, 

2699 range_timestamp_dtype=range_timestamp_dtype, 

2700 ), 

2701 ) 

2702 

2703 for column in dtypes: 

2704 df[column] = pandas.Series(df[column], dtype=dtypes[column], copy=False) 

2705 

2706 if geography_as_object: 

2707 for field in self.schema: 

2708 if field.field_type.upper() == "GEOGRAPHY" and field.mode != "REPEATED": 

2709 df[field.name] = df[field.name].dropna().apply(_read_wkt) 

2710 

2711 return df 

2712 
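A sketch combining several of the dtype knobs documented above; `rows` is a RowIterator and the `score` column override is hypothetical:

import pandas

df = rows.to_dataframe(
    bool_dtype=pandas.BooleanDtype(),  # the default, spelled out explicitly
    int_dtype=None,                    # fall back to numpy.dtype("int64")
    dtypes={"score": "float32"},       # per-column override
    progress_bar_type="tqdm",
)
print(df.dtypes)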

2713 @staticmethod 

2714 def __can_cast_timestamp_ns(column): 

2715 try: 

2716 column.cast("timestamp[ns]") 

2717 except pyarrow.lib.ArrowInvalid: 

2718 return False 

2719 else: 

2720 return True 

2721 

2722 # If changing the signature of this method, make sure to apply the same 

2723 # changes to job.QueryJob.to_geodataframe() 

2724 def to_geodataframe( 

2725 self, 

2726 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None, 

2727 dtypes: Optional[Dict[str, Any]] = None, 

2728 progress_bar_type: Optional[str] = None, 

2729 create_bqstorage_client: bool = True, 

2730 geography_column: Optional[str] = None, 

2731 bool_dtype: Union[Any, None] = DefaultPandasDTypes.BOOL_DTYPE, 

2732 int_dtype: Union[Any, None] = DefaultPandasDTypes.INT_DTYPE, 

2733 float_dtype: Union[Any, None] = None, 

2734 string_dtype: Union[Any, None] = None, 

2735 ) -> "geopandas.GeoDataFrame": 

2736 """Create a GeoPandas GeoDataFrame by loading all pages of a query. 

2737 

2738 Args: 

2739 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]): 

2740 A BigQuery Storage API client. If supplied, use the faster 

2741 BigQuery Storage API to fetch rows from BigQuery. 

2742 

2743 This method requires the ``pyarrow`` and 

2744 ``google-cloud-bigquery-storage`` libraries. 

2745 

2746 This method only exposes a subset of the capabilities of the 

2747 BigQuery Storage API. For full access to all features 

2748 (projections, filters, snapshots) use the Storage API directly. 

2749 

2750 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]): 

2751 A dictionary of column names to pandas ``dtype``s. The provided 

2752 ``dtype`` is used when constructing the series for the column 

2753 specified. Otherwise, the default pandas behavior is used. 

2754 progress_bar_type (Optional[str]): 

2755 If set, use the `tqdm <https://tqdm.github.io/>`_ library to 

2756 display a progress bar while the data downloads. Install the 

2757 ``tqdm`` package to use this feature. 

2758 

2759 Possible values of ``progress_bar_type`` include: 

2760 

2761 ``None`` 

2762 No progress bar. 

2763 ``'tqdm'`` 

2764 Use the :func:`tqdm.tqdm` function to print a progress bar 

2765 to :data:`sys.stdout`. 

2766 ``'tqdm_notebook'`` 

2767 Use the :func:`tqdm.notebook.tqdm` function to display a 

2768 progress bar as a Jupyter notebook widget. 

2769 ``'tqdm_gui'`` 

2770 Use the :func:`tqdm.tqdm_gui` function to display a 

2771 progress bar as a graphical dialog box. 

2772 

2773 create_bqstorage_client (Optional[bool]): 

2774 If ``True`` (default), create a BigQuery Storage API client 

2775 using the default API settings. The BigQuery Storage API 

2776 is a faster way to fetch rows from BigQuery. See the 

2777 ``bqstorage_client`` parameter for more information. 

2778 

2779 This argument does nothing if ``bqstorage_client`` is supplied. 

2780 

2781 geography_column (Optional[str]): 

2782 If there is more than one GEOGRAPHY column, 

2783 identifies which one to use to construct a geopandas 

2784 GeoDataFrame. This option can be omitted if there's 

2785 only one GEOGRAPHY column. 

2786 bool_dtype (Optional[pandas.Series.dtype, None]): 

2787 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.BooleanDtype()``) 

2788 to convert BigQuery Boolean type, instead of relying on the default 

2789 ``pandas.BooleanDtype()``. If you explicitly set the value to ``None``, 

2790 then the data type will be ``numpy.dtype("bool")``. BigQuery Boolean 

2791 type can be found at: 

2792 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#boolean_type 

2793 int_dtype (Optional[pandas.Series.dtype, None]): 

2794 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Int64Dtype()``) 

2795 to convert BigQuery Integer types, instead of relying on the default 

2796 ``pandas.Int64Dtype()``. If you explicitly set the value to ``None``, 

2797 then the data type will be ``numpy.dtype("int64")``. A list of BigQuery 

2798 Integer types can be found at: 

2799 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types 

2800 float_dtype (Optional[pandas.Series.dtype, None]): 

2801 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Float32Dtype()``) 

2802 to convert BigQuery Float type, instead of relying on the default 

2803 ``numpy.dtype("float64")``. If you explicitly set the value to ``None``, 

2804 then the data type will be ``numpy.dtype("float64")``. BigQuery Float 

2805 type can be found at: 

2806 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types 

2807 string_dtype (Optional[pandas.Series.dtype, None]): 

2808 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.StringDtype()``) to 

2809 convert BigQuery String type, instead of relying on the default 

2810 ``numpy.dtype("object")``. If you explicitly set the value to ``None``, 

2811 then the data type will be ``numpy.dtype("object")``. BigQuery String 

2812 type can be found at: 

2813 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#string_type 

2814 

2815 Returns: 

2816 geopandas.GeoDataFrame: 

2817 A :class:`geopandas.GeoDataFrame` populated with row 

2818 data and column headers from the query results. The 

2819 column headers are derived from the destination 

2820 table's schema. 

2821 

2822 Raises: 

2823 ValueError: 

2824 If the :mod:`geopandas` library cannot be imported, or the 

2825 :mod:`google.cloud.bigquery_storage_v1` module is 

2826 required but cannot be imported. 

2827 

2828 .. versionadded:: 2.24.0 

2829 """ 

2830 if geopandas is None: 

2831 raise ValueError(_NO_GEOPANDAS_ERROR) 

2832 

2833 geography_columns = set( 

2834 field.name 

2835 for field in self.schema 

2836 if field.field_type.upper() == "GEOGRAPHY" 

2837 ) 

2838 if not geography_columns: 

2839 raise TypeError( 

2840 "There must be at least one GEOGRAPHY column" 

2841 " to create a GeoDataFrame" 

2842 ) 

2843 

2844 if geography_column: 

2845 if geography_column not in geography_columns: 

2846 raise ValueError( 

2847 f"The given geography column, {geography_column}, doesn't name" 

2848 f" a GEOGRAPHY column in the result." 

2849 ) 

2850 elif len(geography_columns) == 1: 

2851 [geography_column] = geography_columns 

2852 else: 

2853 raise ValueError( 

2854 "There is more than one GEOGRAPHY column in the result. " 

2855 "The geography_column argument must be used to specify which " 

2856 "one to use to create a GeoDataFrame" 

2857 ) 

2858 

2859 df = self.to_dataframe( 

2860 bqstorage_client, 

2861 dtypes, 

2862 progress_bar_type, 

2863 create_bqstorage_client, 

2864 geography_as_object=True, 

2865 bool_dtype=bool_dtype, 

2866 int_dtype=int_dtype, 

2867 float_dtype=float_dtype, 

2868 string_dtype=string_dtype, 

2869 ) 

2870 

2871 return geopandas.GeoDataFrame( 

2872 df, crs=_COORDINATE_REFERENCE_SYSTEM, geometry=geography_column 

2873 ) 

2874 
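A sketch of the geospatial path; the query, project, and column names are invented, and geopandas plus shapely must be installed:

rows = client.query(
    "SELECT name, geog FROM `my-project.my_dataset.places`"
).result()
gdf = rows.to_geodataframe(geography_column="geog")
print(gdf.crs)  # EPSG:4326, per _COORDINATE_REFERENCE_SYSTEM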

2875 

2876class _EmptyRowIterator(RowIterator): 

2877 """An empty row iterator. 

2878 

2879 This class prevents API requests when there are no rows to fetch or rows 

2880 are impossible to fetch, such as with query results for DDL CREATE VIEW 

2881 statements. 

2882 """ 

2883 

2884 schema = () 

2885 pages = () 

2886 total_rows = 0 

2887 

2888 def __init__( 

2889 self, client=None, api_request=None, path=None, schema=(), *args, **kwargs 

2890 ): 

2891 super().__init__( 

2892 client=client, 

2893 api_request=api_request, 

2894 path=path, 

2895 schema=schema, 

2896 *args, 

2897 **kwargs, 

2898 ) 

2899 

2900 def to_arrow( 

2901 self, 

2902 progress_bar_type=None, 

2903 bqstorage_client=None, 

2904 create_bqstorage_client=True, 

2905 ) -> "pyarrow.Table": 

2906 """[Beta] Create an empty class:`pyarrow.Table`. 

2907 

2908 Args: 

2909 progress_bar_type (str): Ignored. Added for compatibility with RowIterator. 

2910 bqstorage_client (Any): Ignored. Added for compatibility with RowIterator. 

2911 create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator. 

2912 

2913 Returns: 

2914 pyarrow.Table: An empty :class:`pyarrow.Table`. 

2915 """ 

2916 if pyarrow is None: 

2917 raise ValueError(_NO_PYARROW_ERROR) 

2918 return pyarrow.Table.from_arrays(()) 

2919 

2920 def to_dataframe( 

2921 self, 

2922 bqstorage_client=None, 

2923 dtypes=None, 

2924 progress_bar_type=None, 

2925 create_bqstorage_client=True, 

2926 geography_as_object=False, 

2927 bool_dtype=None, 

2928 int_dtype=None, 

2929 float_dtype=None, 

2930 string_dtype=None, 

2931 date_dtype=None, 

2932 datetime_dtype=None, 

2933 time_dtype=None, 

2934 timestamp_dtype=None, 

2935 range_date_dtype=None, 

2936 range_datetime_dtype=None, 

2937 range_timestamp_dtype=None, 

2938 ) -> "pandas.DataFrame": 

2939 """Create an empty dataframe. 

2940 

2941 Args: 

2942 bqstorage_client (Any): Ignored. Added for compatibility with RowIterator. 

2943 dtypes (Any): Ignored. Added for compatibility with RowIterator. 

2944 progress_bar_type (Any): Ignored. Added for compatibility with RowIterator. 

2945 create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator. 

2946 geography_as_object (bool): Ignored. Added for compatibility with RowIterator. 

2947 bool_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2948 int_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2949 float_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2950 string_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2951 date_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2952 datetime_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2953 time_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2954 timestamp_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2955 range_date_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2956 range_datetime_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2957 range_timestamp_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2958 

2959 Returns: 

2960 pandas.DataFrame: An empty :class:`~pandas.DataFrame`. 

2961 """ 

2962 _pandas_helpers.verify_pandas_imports() 

2963 return pandas.DataFrame() 

2964 

2965 def to_geodataframe( 

2966 self, 

2967 bqstorage_client=None, 

2968 dtypes=None, 

2969 progress_bar_type=None, 

2970 create_bqstorage_client=True, 

2971 geography_column: Optional[str] = None, 

2972 bool_dtype: Union[Any, None] = DefaultPandasDTypes.BOOL_DTYPE, 

2973 int_dtype: Union[Any, None] = DefaultPandasDTypes.INT_DTYPE, 

2974 float_dtype: Union[Any, None] = None, 

2975 string_dtype: Union[Any, None] = None, 

2976 ) -> "pandas.DataFrame": 

2977 """Create an empty dataframe. 

2978 

2979 Args: 

2980 bqstorage_client (Any): Ignored. Added for compatibility with RowIterator. 

2981 dtypes (Any): Ignored. Added for compatibility with RowIterator. 

2982 progress_bar_type (Any): Ignored. Added for compatibility with RowIterator. 

2983 create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator. 

2984 geography_column (str): Ignored. Added for compatibility with RowIterator. 

2985 bool_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2986 int_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2987 float_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2988 string_dtype (Any): Ignored. Added for compatibility with RowIterator. 

2989 

2990 Returns: 

2991 geopandas.GeoDataFrame: An empty :class:`geopandas.GeoDataFrame`. 

2992 """ 

2993 if geopandas is None: 

2994 raise ValueError(_NO_GEOPANDAS_ERROR) 

2995 

2996 # Since an empty GeoDataFrame has no geometry column, we do not set a CRS on it, 

2997 # because doing so is deprecated. 

2998 return geopandas.GeoDataFrame() 

2999 

3000 def to_dataframe_iterable( 

3001 self, 

3002 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None, 

3003 dtypes: Optional[Dict[str, Any]] = None, 

3004 max_queue_size: Optional[int] = None, 

3005 max_stream_count: Optional[int] = None, 

3006 ) -> Iterator["pandas.DataFrame"]: 

3007 """Create an iterable of pandas DataFrames, to process the table as a stream. 

3008 

3009 .. versionadded:: 2.21.0 

3010 

3011 Args: 

3012 bqstorage_client: 

3013 Ignored. Added for compatibility with RowIterator. 

3014 

3015 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]): 

3016 Ignored. Added for compatibility with RowIterator. 

3017 

3018 max_queue_size: 

3019 Ignored. Added for compatibility with RowIterator. 

3020 

3021 max_stream_count: 

3022 Ignored. Added for compatibility with RowIterator. 

3023 

3024 Returns: 

3025 An iterator yielding a single empty :class:`~pandas.DataFrame`. 

3026 

3027 Raises: 

3028 ValueError: 

3029 If the :mod:`pandas` library cannot be imported. 

3030 """ 

3031 _pandas_helpers.verify_pandas_imports() 

3032 return iter((pandas.DataFrame(),)) 

3033 

3034 def to_arrow_iterable( 

3035 self, 

3036 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None, 

3037 max_queue_size: Optional[int] = None, 

3038 max_stream_count: Optional[int] = None, 

3039 ) -> Iterator["pyarrow.RecordBatch"]: 

3040 """Create an iterable of pandas DataFrames, to process the table as a stream. 

3041 

3042 .. versionadded:: 2.31.0 

3043 

3044 Args: 

3045 bqstorage_client: 

3046 Ignored. Added for compatibility with RowIterator. 

3047 

3048 max_queue_size: 

3049 Ignored. Added for compatibility with RowIterator. 

3050 

3051 max_stream_count: 

3052 Ignored. Added for compatibility with RowIterator. 

3053 

3054 Returns: 

3055 An iterator yielding a single empty :class:`~pyarrow.RecordBatch`. 

3056 """ 

3057 return iter((pyarrow.record_batch([]),)) 

3058 

3059 def __iter__(self): 

3060 return iter(()) 

3061 

3062 

3063class PartitionRange(object): 

3064 """Definition of the ranges for range partitioning. 

3065 

3066 .. note:: 

3067 **Beta**. The integer range partitioning feature is in a pre-release 

3068 state and might change or have limited support. 

3069 

3070 Args: 

3071 start (Optional[int]): 

3072 Sets the 

3073 :attr:`~google.cloud.bigquery.table.PartitionRange.start` 

3074 property. 

3075 end (Optional[int]): 

3076 Sets the 

3077 :attr:`~google.cloud.bigquery.table.PartitionRange.end` 

3078 property. 

3079 interval (Optional[int]): 

3080 Sets the 

3081 :attr:`~google.cloud.bigquery.table.PartitionRange.interval` 

3082 property. 

3083 _properties (Optional[dict]): 

3084 Private. Used to construct object from API resource. 

3085 """ 

3086 

3087 def __init__(self, start=None, end=None, interval=None, _properties=None) -> None: 

3088 if _properties is None: 

3089 _properties = {} 

3090 self._properties = _properties 

3091 

3092 if start is not None: 

3093 self.start = start 

3094 if end is not None: 

3095 self.end = end 

3096 if interval is not None: 

3097 self.interval = interval 

3098 

3099 @property 

3100 def start(self): 

3101 """int: The start of range partitioning, inclusive.""" 

3102 return _helpers._int_or_none(self._properties.get("start")) 

3103 

3104 @start.setter 

3105 def start(self, value): 

3106 self._properties["start"] = _helpers._str_or_none(value) 

3107 

3108 @property 

3109 def end(self): 

3110 """int: The end of range partitioning, exclusive.""" 

3111 return _helpers._int_or_none(self._properties.get("end")) 

3112 

3113 @end.setter 

3114 def end(self, value): 

3115 self._properties["end"] = _helpers._str_or_none(value) 

3116 

3117 @property 

3118 def interval(self): 

3119 """int: The width of each interval.""" 

3120 return _helpers._int_or_none(self._properties.get("interval")) 

3121 

3122 @interval.setter 

3123 def interval(self, value): 

3124 self._properties["interval"] = _helpers._str_or_none(value) 

3125 

3126 def _key(self): 

3127 return tuple(sorted(self._properties.items())) 

3128 

3129 def __eq__(self, other): 

3130 if not isinstance(other, PartitionRange): 

3131 return NotImplemented 

3132 return self._key() == other._key() 

3133 

3134 def __ne__(self, other): 

3135 return not self == other 

3136 

3137 def __repr__(self): 

3138 key_vals = ["{}={}".format(key, val) for key, val in self._key()] 

3139 return "PartitionRange({})".format(", ".join(key_vals)) 

3140 

3141 
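
A minimal usage sketch for ``PartitionRange`` (the bucket values below are illustrative): the setters store values as strings for the API, while the getters read them back as integers.

from google.cloud.bigquery.table import PartitionRange

# Partition the half-open interval [0, 100) into buckets of width 10.
partition_range = PartitionRange(start=0, end=100, interval=10)
assert partition_range.start == 0  # stored internally as "0", read back as int
assert repr(partition_range) == "PartitionRange(end=100, interval=10, start=0)"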

3142class RangePartitioning(object): 

3143 """Range-based partitioning configuration for a table. 

3144 

3145 .. note:: 

3146 **Beta**. The integer range partitioning feature is in a pre-release 

3147 state and might change or have limited support. 

3148 

3149 Args: 

3150 range_ (Optional[google.cloud.bigquery.table.PartitionRange]): 

3151 Sets the 

3152 :attr:`google.cloud.bigquery.table.RangePartitioning.range_` 

3153 property. 

3154 field (Optional[str]): 

3155 Sets the 

3156 :attr:`google.cloud.bigquery.table.RangePartitioning.field` 

3157 property. 

3158 _properties (Optional[dict]): 

3159 Private. Used to construct object from API resource. 

3160 """ 

3161 

3162 def __init__(self, range_=None, field=None, _properties=None) -> None: 

3163 if _properties is None: 

3164 _properties = {} 

3165 self._properties: Dict[str, Any] = _properties 

3166 

3167 if range_ is not None: 

3168 self.range_ = range_ 

3169 if field is not None: 

3170 self.field = field 

3171 

3172 # Trailing underscore to prevent conflict with built-in range() function. 

3173 @property 

3174 def range_(self): 

3175 """google.cloud.bigquery.table.PartitionRange: Defines the 

3176 ranges for range partitioning. 

3177 

3178 Raises: 

3179 ValueError: 

3180 If the value is not a :class:`PartitionRange`. 

3181 """ 

3182 range_properties = self._properties.setdefault("range", {}) 

3183 return PartitionRange(_properties=range_properties) 

3184 

3185 @range_.setter 

3186 def range_(self, value): 

3187 if not isinstance(value, PartitionRange): 

3188 raise ValueError("Expected a PartitionRange, but got {}.".format(value)) 

3189 self._properties["range"] = value._properties 

3190 

3191 @property 

3192 def field(self): 

3193 """str: The table is partitioned by this field. 

3194 

3195 The field must be a top-level ``NULLABLE`` / ``REQUIRED`` field. The 

3196 only supported type is ``INTEGER`` / ``INT64``. 

3197 """ 

3198 return self._properties.get("field") 

3199 

3200 @field.setter 

3201 def field(self, value): 

3202 self._properties["field"] = value 

3203 

3204 def _key(self): 

3205 return (("field", self.field), ("range_", self.range_)) 

3206 

3207 def __eq__(self, other): 

3208 if not isinstance(other, RangePartitioning): 

3209 return NotImplemented 

3210 return self._key() == other._key() 

3211 

3212 def __ne__(self, other): 

3213 return not self == other 

3214 

3215 def __repr__(self): 

3216 key_vals = ["{}={}".format(key, repr(val)) for key, val in self._key()] 

3217 return "RangePartitioning({})".format(", ".join(key_vals)) 

3218 

3219 
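
A sketch of wiring a ``PartitionRange`` into ``RangePartitioning`` (the field name and bounds are hypothetical). Assigning anything other than a ``PartitionRange`` to ``range_`` raises ``ValueError``; the finished object is what you would assign to a table's ``range_partitioning`` property.

from google.cloud.bigquery.table import PartitionRange, RangePartitioning

range_partitioning = RangePartitioning(
    field="customer_id",  # hypothetical INTEGER / INT64 column
    range_=PartitionRange(start=0, end=10000, interval=100),
)
assert range_partitioning.field == "customer_id"
assert range_partitioning.range_.interval == 100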

3220class TimePartitioningType(object): 

3221 """Specifies the type of time partitioning to perform.""" 

3222 

3223 DAY = "DAY" 

3224 """str: Generates one partition per day.""" 

3225 

3226 HOUR = "HOUR" 

3227 """str: Generates one partition per hour.""" 

3228 

3229 MONTH = "MONTH" 

3230 """str: Generates one partition per month.""" 

3231 

3232 YEAR = "YEAR" 

3233 """str: Generates one partition per year.""" 

3234 

3235 

3236class TimePartitioning(object): 

3237 """Configures time-based partitioning for a table. 

3238 

3239 Args: 

3240 type_ (Optional[google.cloud.bigquery.table.TimePartitioningType]): 

3241 Specifies the type of time partitioning to perform. Defaults to 

3242 :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`. 

3243 

3244 Supported values are: 

3245 

3246 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.HOUR` 

3247 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY` 

3248 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.MONTH` 

3249 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.YEAR` 

3250 

3251 field (Optional[str]): 

3252 If set, the table is partitioned by this field. If not set, the 

3253 table is partitioned by pseudo column ``_PARTITIONTIME``. The field 

3254 must be a top-level ``TIMESTAMP``, ``DATETIME``, or ``DATE`` 

3255 field. Its mode must be ``NULLABLE`` or ``REQUIRED``. 

3256 

3257 See the `time-unit column-partitioned tables guide 

3258 <https://cloud.google.com/bigquery/docs/creating-column-partitions>`_ 

3259 in the BigQuery documentation. 

3260 expiration_ms (Optional[int]): 

3261 Number of milliseconds for which to keep the storage for a 

3262 partition. 

3263 require_partition_filter (Optional[bool]): 

3264 DEPRECATED: Use 

3265 :attr:`~google.cloud.bigquery.table.Table.require_partition_filter` 

3266 instead. 

3267 """ 

3268 

3269 def __init__( 

3270 self, type_=None, field=None, expiration_ms=None, require_partition_filter=None 

3271 ) -> None: 

3272 self._properties: Dict[str, Any] = {} 

3273 if type_ is None: 

3274 self.type_ = TimePartitioningType.DAY 

3275 else: 

3276 self.type_ = type_ 

3277 if field is not None: 

3278 self.field = field 

3279 if expiration_ms is not None: 

3280 self.expiration_ms = expiration_ms 

3281 if require_partition_filter is not None: 

3282 self.require_partition_filter = require_partition_filter 

3283 

3284 @property 

3285 def type_(self): 

3286 """google.cloud.bigquery.table.TimePartitioningType: The type of time 

3287 partitioning to use. 

3288 """ 

3289 return self._properties.get("type") 

3290 

3291 @type_.setter 

3292 def type_(self, value): 

3293 self._properties["type"] = value 

3294 

3295 @property 

3296 def field(self): 

3297 """str: Field in the table to use for partitioning""" 

3298 return self._properties.get("field") 

3299 

3300 @field.setter 

3301 def field(self, value): 

3302 self._properties["field"] = value 

3303 

3304 @property 

3305 def expiration_ms(self): 

3306 """int: Number of milliseconds to keep the storage for a partition.""" 

3307 return _helpers._int_or_none(self._properties.get("expirationMs")) 

3308 

3309 @expiration_ms.setter 

3310 def expiration_ms(self, value): 

3311 if value is not None: 

3312 # Allow explicitly setting the expiration to None. 

3313 value = str(value) 

3314 self._properties["expirationMs"] = value 

3315 

3316 @property 

3317 def require_partition_filter(self): 

3318 """bool: Specifies whether partition filters are required for queries 

3319 

3320 DEPRECATED: Use 

3321 :attr:`~google.cloud.bigquery.table.Table.require_partition_filter` 

3322 instead. 

3323 """ 

3324 warnings.warn( 

3325 ( 

3326 "TimePartitioning.require_partition_filter will be removed in " 

3327 "future versions. Please use Table.require_partition_filter " 

3328 "instead." 

3329 ), 

3330 PendingDeprecationWarning, 

3331 stacklevel=2, 

3332 ) 

3333 return self._properties.get("requirePartitionFilter") 

3334 

3335 @require_partition_filter.setter 

3336 def require_partition_filter(self, value): 

3337 warnings.warn( 

3338 ( 

3339 "TimePartitioning.require_partition_filter will be removed in " 

3340 "future versions. Please use Table.require_partition_filter " 

3341 "instead." 

3342 ), 

3343 PendingDeprecationWarning, 

3344 stacklevel=2, 

3345 ) 

3346 self._properties["requirePartitionFilter"] = value 

3347 

3348 @classmethod 

3349 def from_api_repr(cls, api_repr: dict) -> "TimePartitioning": 

3350 """Return a :class:`TimePartitioning` object deserialized from a dict. 

3351 

3352 This method creates a new ``TimePartitioning`` instance that points to 

3353 the ``api_repr`` parameter as its internal properties dict. This means 

3354 that when a ``TimePartitioning`` instance is stored as a property of 

3355 another object, any changes made at the higher level will also appear 

3356 here:: 

3357 

3358 >>> time_partitioning = TimePartitioning() 

3359 >>> table.time_partitioning = time_partitioning 

3360 >>> table.time_partitioning.field = 'timecolumn' 

3361 >>> time_partitioning.field 

3362 'timecolumn' 

3363 

3364 Args: 

3365 api_repr (Mapping[str, str]): 

3366 The serialized representation of the TimePartitioning, such as 

3367 what is output by :meth:`to_api_repr`. 

3368 

3369 Returns: 

3370 google.cloud.bigquery.table.TimePartitioning: 

3371 The ``TimePartitioning`` object. 

3372 """ 

3373 instance = cls() 

3374 instance._properties = api_repr 

3375 return instance 

3376 

3377 def to_api_repr(self) -> dict: 

3378 """Return a dictionary representing this object. 

3379 

3380 This method returns the properties dict of the ``TimePartitioning`` 

3381 instance rather than making a copy. This means that when a 

3382 ``TimePartitioning`` instance is stored as a property of another 

3383 object, any changes made at the higher level will also appear here. 

3384 

3385 Returns: 

3386 dict: 

3387 A dictionary representing the TimePartitioning object in 

3388 serialized form. 

3389 """ 

3390 return self._properties 

3391 

3392 def _key(self): 

3393 # Because we are only "renaming" top-level keys, a shallow copy is sufficient here. 

3394 properties = self._properties.copy() 

3395 # Call repr() for values that are not built-in types. 

3396 properties["type_"] = repr(properties.pop("type")) 

3397 if "field" in properties: 

3398 # Call repr() for values that are not built-in types. 

3399 properties["field"] = repr(properties["field"]) 

3400 if "requirePartitionFilter" in properties: 

3401 properties["require_partition_filter"] = properties.pop( 

3402 "requirePartitionFilter" 

3403 ) 

3404 if "expirationMs" in properties: 

3405 properties["expiration_ms"] = properties.pop("expirationMs") 

3406 return tuple(sorted(properties.items())) 

3407 

3408 def __eq__(self, other): 

3409 if not isinstance(other, TimePartitioning): 

3410 return NotImplemented 

3411 return self._key() == other._key() 

3412 

3413 def __ne__(self, other): 

3414 return not self == other 

3415 

3416 def __hash__(self): 

3417 return hash(self._key()) 

3418 

3419 def __repr__(self): 

3420 key_vals = ["{}={}".format(key, val) for key, val in self._key()] 

3421 return "TimePartitioning({})".format(",".join(key_vals)) 

3422 

3423 
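
A usage sketch for ``TimePartitioning`` (the column name and retention window are hypothetical), showing that ``expiration_ms`` is serialized as a string in the API representation but read back as an integer.

from google.cloud.bigquery.table import TimePartitioning, TimePartitioningType

partitioning = TimePartitioning(
    type_=TimePartitioningType.MONTH,
    field="transaction_date",                # hypothetical DATE/TIMESTAMP column
    expiration_ms=90 * 24 * 60 * 60 * 1000,  # drop partition storage after ~90 days
)
assert partitioning.expiration_ms == 7776000000
assert partitioning.to_api_repr() == {
    "type": "MONTH",
    "field": "transaction_date",
    "expirationMs": "7776000000",
}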

3424class PrimaryKey: 

3425 """Represents the primary key constraint on a table's columns. 

3426 

3427 Args: 

3428 columns: The columns that compose the primary key constraint. 

3429 """ 

3430 

3431 def __init__(self, columns: List[str]): 

3432 self.columns = columns 

3433 

3434 def __eq__(self, other): 

3435 if not isinstance(other, PrimaryKey): 

3436 raise TypeError("The value provided is not a BigQuery PrimaryKey.") 

3437 return self.columns == other.columns 

3438 

3439 

3440class ColumnReference: 

3441 """The pair of the foreign key column and primary key column. 

3442 

3443 Args: 

3444 referencing_column: The column that composes the foreign key. 

3445 referenced_column: The column in the primary key that is referenced by ``referencing_column``. 

3446 """ 

3447 

3448 def __init__(self, referencing_column: str, referenced_column: str): 

3449 self.referencing_column = referencing_column 

3450 self.referenced_column = referenced_column 

3451 

3452 def __eq__(self, other): 

3453 if not isinstance(other, ColumnReference): 

3454 raise TypeError("The value provided is not a BigQuery ColumnReference.") 

3455 return ( 

3456 self.referencing_column == other.referencing_column 

3457 and self.referenced_column == other.referenced_column 

3458 ) 

3459 

3460 

3461class ForeignKey: 

3462 """Represents a foreign key constraint on a table's columns. 

3463 

3464 Args: 

3465 name: Set only if the foreign key constraint is named. 

3466 referenced_table: The table that holds the primary key and is referenced by this foreign key. 

3467 column_references: The columns that compose the foreign key. 

3468 """ 

3469 

3470 def __init__( 

3471 self, 

3472 name: str, 

3473 referenced_table: TableReference, 

3474 column_references: List[ColumnReference], 

3475 ): 

3476 self.name = name 

3477 self.referenced_table = referenced_table 

3478 self.column_references = column_references 

3479 

3480 def __eq__(self, other): 

3481 if not isinstance(other, ForeignKey): 

3482 raise TypeError("The value provided is not a BigQuery ForeignKey.") 

3483 return ( 

3484 self.name == other.name 

3485 and self.referenced_table == other.referenced_table 

3486 and self.column_references == other.column_references 

3487 ) 

3488 

3489 @classmethod 

3490 def from_api_repr(cls, api_repr: Dict[str, Any]) -> "ForeignKey": 

3491 """Create an instance from API representation.""" 

3492 return cls( 

3493 name=api_repr["name"], 

3494 referenced_table=TableReference.from_api_repr(api_repr["referencedTable"]), 

3495 column_references=[ 

3496 ColumnReference( 

3497 column_reference_resource["referencingColumn"], 

3498 column_reference_resource["referencedColumn"], 

3499 ) 

3500 for column_reference_resource in api_repr["columnReferences"] 

3501 ], 

3502 ) 

3503 

3504 def to_api_repr(self) -> Dict[str, Any]: 

3505 """Return a dictionary representing this object.""" 

3506 return { 

3507 "name": self.name, 

3508 "referencedTable": self.referenced_table.to_api_repr(), 

3509 "columnReferences": [ 

3510 { 

3511 "referencingColumn": column_reference.referencing_column, 

3512 "referencedColumn": column_reference.referenced_column, 

3513 } 

3514 for column_reference in self.column_references 

3515 ], 

3516 } 

3517 

3518 

3519class TableConstraints: 

3520 """The TableConstraints defines the primary key and foreign key. 

3521 

3522 Args: 

3523 primary_key: 

3524 Represents a primary key constraint on a table's columns. Present only if the table 

3525 has a primary key. The primary key is not enforced. 

3526 foreign_keys: 

3527 Present only if the table has a foreign key. The foreign key is not enforced. 

3528 

3529 """ 

3530 

3531 def __init__( 

3532 self, 

3533 primary_key: Optional[PrimaryKey], 

3534 foreign_keys: Optional[List[ForeignKey]], 

3535 ): 

3536 self.primary_key = primary_key 

3537 self.foreign_keys = foreign_keys 

3538 

3539 def __eq__(self, other): 

3540 if other is None: 

3541 return False 

3542 if not isinstance(other, TableConstraints): 

3543 raise TypeError("The value provided is not a BigQuery TableConstraints.") 

3544 return self.primary_key == other.primary_key and self.foreign_keys == other.foreign_keys 

3545 

3546 @classmethod 

3547 def from_api_repr(cls, resource: Dict[str, Any]) -> "TableConstraints": 

3548 """Create an instance from API representation.""" 

3549 primary_key = None 

3550 if "primaryKey" in resource: 

3551 primary_key = PrimaryKey(resource["primaryKey"]["columns"]) 

3552 

3553 foreign_keys = None 

3554 if "foreignKeys" in resource: 

3555 foreign_keys = [ 

3556 ForeignKey.from_api_repr(foreign_key_resource) 

3557 for foreign_key_resource in resource["foreignKeys"] 

3558 ] 

3559 return cls(primary_key, foreign_keys) 

3560 

3561 def to_api_repr(self) -> Dict[str, Any]: 

3562 """Return a dictionary representing this object.""" 

3563 resource: Dict[str, Any] = {} 

3564 if self.primary_key: 

3565 resource["primaryKey"] = {"columns": self.primary_key.columns} 

3566 if self.foreign_keys: 

3567 resource["foreignKeys"] = [ 

3568 foreign_key.to_api_repr() for foreign_key in self.foreign_keys 

3569 ] 

3570 return resource 

3571 

3572 
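
A sketch assembling the constraint classes above and round-tripping them through the API representation (the project, dataset, table, and column names are hypothetical).

from google.cloud.bigquery.table import (
    ColumnReference,
    ForeignKey,
    PrimaryKey,
    TableConstraints,
    TableReference,
)

constraints = TableConstraints(
    primary_key=PrimaryKey(columns=["order_id"]),
    foreign_keys=[
        ForeignKey(
            name="fk_customer",
            referenced_table=TableReference.from_string(
                "my-project.my_dataset.customers"
            ),
            column_references=[ColumnReference("customer_id", "id")],
        )
    ],
)

resource = constraints.to_api_repr()
assert resource["primaryKey"] == {"columns": ["order_id"]}

# from_api_repr() reconstructs an equivalent object from the same dict.
round_tripped = TableConstraints.from_api_repr(resource)
assert round_tripped.primary_key == constraints.primary_key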

3573class BigLakeConfiguration(object): 

3574 """Configuration for managed tables for Apache Iceberg, formerly 

3575 known as BigLake. 

3576 

3577 Args: 

3578 connection_id (Optional[str]): 

3579 The connection specifying the credentials to be used to read and write to external 

3580 storage, such as Cloud Storage. The connection_id can have the form 

3581 ``{project}.{location}.{connection_id}`` or 

3582 ``projects/{project}/locations/{location}/connections/{connection_id}``. 

3583 storage_uri (Optional[str]): 

3584 The fully qualified location prefix of the external folder where table data is 

3585 stored. The '*' wildcard character is not allowed. The URI should be in the 

3586 format ``gs://bucket/path_to_table/``. 

3587 file_format (Optional[str]): 

3588 The file format the table data is stored in. See BigLakeFileFormat for available 

3589 values. 

3590 table_format (Optional[str]): 

3591 The table format the metadata-only snapshots are stored in. See BigLakeTableFormat 

3592 for available values. 

3593 _properties (Optional[dict]): 

3594 Private. Used to construct object from API resource. 

3595 """ 

3596 

3597 def __init__( 

3598 self, 

3599 connection_id: Optional[str] = None, 

3600 storage_uri: Optional[str] = None, 

3601 file_format: Optional[str] = None, 

3602 table_format: Optional[str] = None, 

3603 _properties: Optional[dict] = None, 

3604 ) -> None: 

3605 if _properties is None: 

3606 _properties = {} 

3607 self._properties = _properties 

3608 if connection_id is not None: 

3609 self.connection_id = connection_id 

3610 if storage_uri is not None: 

3611 self.storage_uri = storage_uri 

3612 if file_format is not None: 

3613 self.file_format = file_format 

3614 if table_format is not None: 

3615 self.table_format = table_format 

3616 

3617 @property 

3618 def connection_id(self) -> Optional[str]: 

3619 """str: The connection specifying the credentials to be used to read and write to external 

3620 storage, such as Cloud Storage.""" 

3621 return self._properties.get("connectionId") 

3622 

3623 @connection_id.setter 

3624 def connection_id(self, value: Optional[str]): 

3625 self._properties["connectionId"] = value 

3626 

3627 @property 

3628 def storage_uri(self) -> Optional[str]: 

3629 """str: The fully qualified location prefix of the external folder where table data is 

3630 stored.""" 

3631 return self._properties.get("storageUri") 

3632 

3633 @storage_uri.setter 

3634 def storage_uri(self, value: Optional[str]): 

3635 self._properties["storageUri"] = value 

3636 

3637 @property 

3638 def file_format(self) -> Optional[str]: 

3639 """str: The file format the table data is stored in. See BigLakeFileFormat for available 

3640 values.""" 

3641 return self._properties.get("fileFormat") 

3642 

3643 @file_format.setter 

3644 def file_format(self, value: Optional[str]): 

3645 self._properties["fileFormat"] = value 

3646 

3647 @property 

3648 def table_format(self) -> Optional[str]: 

3649 """str: The table format the metadata only snapshots are stored in. See BigLakeTableFormat 

3650 for available values.""" 

3651 return self._properties.get("tableFormat") 

3652 

3653 @table_format.setter 

3654 def table_format(self, value: Optional[str]): 

3655 self._properties["tableFormat"] = value 

3656 

3657 def _key(self): 

3658 return tuple(sorted(self._properties.items())) 

3659 

3660 def __eq__(self, other): 

3661 if not isinstance(other, BigLakeConfiguration): 

3662 return NotImplemented 

3663 return self._key() == other._key() 

3664 

3665 def __ne__(self, other): 

3666 return not self == other 

3667 

3668 def __hash__(self): 

3669 return hash(self._key()) 

3670 

3671 def __repr__(self): 

3672 key_vals = ["{}={}".format(key, val) for key, val in self._key()] 

3673 return "BigLakeConfiguration({})".format(",".join(key_vals)) 

3674 

3675 @classmethod 

3676 def from_api_repr(cls, resource: Dict[str, Any]) -> "BigLakeConfiguration": 

3677 """Factory: construct a BigLakeConfiguration given its API representation. 

3678 

3679 Args: 

3680 resource: 

3681 BigLakeConfiguration representation returned from the API 

3682 

3683 Returns: 

3684 BigLakeConfiguration parsed from ``resource``. 

3685 """ 

3686 ref = cls() 

3687 ref._properties = resource 

3688 return ref 

3689 

3690 def to_api_repr(self) -> Dict[str, Any]: 

3691 """Construct the API resource representation of this BigLakeConfiguration. 

3692 

3693 Returns: 

3694 BigLakeConfiguration represented as an API resource. 

3695 """ 

3696 return copy.deepcopy(self._properties) 

3697 

3698 
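
A construction sketch for ``BigLakeConfiguration`` (the connection, bucket, and format strings are hypothetical; see ``BigLakeFileFormat`` and ``BigLakeTableFormat`` for the supported values). Note how each option lands under a camelCase key in the API resource.

from google.cloud.bigquery.table import BigLakeConfiguration

biglake_config = BigLakeConfiguration(
    connection_id="my-project.us.my-connection",
    storage_uri="gs://my-bucket/path_to_table/",
    file_format="PARQUET",
    table_format="ICEBERG",
)
assert biglake_config.to_api_repr() == {
    "connectionId": "my-project.us.my-connection",
    "storageUri": "gs://my-bucket/path_to_table/",
    "fileFormat": "PARQUET",
    "tableFormat": "ICEBERG",
}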

3699def _item_to_row(iterator, resource): 

3700 """Convert a JSON row to the native object. 

3701 

3702 .. note:: 

3703 

3704 This assumes that the ``schema`` attribute has been 

3705 added to the iterator after being created, which 

3706 should be done by the caller. 

3707 

3708 Args: 

3709 iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. 

3710 resource (Dict): An item to be converted to a row. 

3711 

3712 Returns: 

3713 google.cloud.bigquery.table.Row: The next row in the page. 

3714 """ 

3715 return Row( 

3716 _helpers._row_tuple_from_json(resource, iterator.schema), 

3717 iterator._field_to_index, 

3718 ) 

3719 

3720 

3721def _row_iterator_page_columns(schema, response): 

3722 """Make a generator of all the columns in a page from tabledata.list. 

3723 

3724 This enables creating a :class:`pandas.DataFrame` and other 

3725 column-oriented data structures such as :class:`pyarrow.RecordBatch`. 

3726 """ 

3727 columns = [] 

3728 rows = response.get("rows", []) 

3729 

3730 def get_column_data(field_index, field): 

3731 for row in rows: 

3732 yield _helpers.DATA_FRAME_CELL_DATA_PARSER.to_py( 

3733 row["f"][field_index]["v"], field 

3734 ) 

3735 

3736 for field_index, field in enumerate(schema): 

3737 columns.append(get_column_data(field_index, field)) 

3738 

3739 return columns 

3740 

3741 
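
A standalone sketch of the transposition ``_row_iterator_page_columns`` performs, using a fabricated ``tabledata.list`` payload. The real helper additionally converts each cell with the schema-aware ``DATA_FRAME_CELL_DATA_PARSER``; this sketch keeps the raw values.

# Row-oriented payload, as returned by tabledata.list.
response = {
    "rows": [
        {"f": [{"v": "1"}, {"v": "alice"}]},
        {"f": [{"v": "2"}, {"v": "bob"}]},
    ]
}

def column(field_index, rows):
    # One lazy generator per column; nothing is materialized until consumed.
    for row in rows:
        yield row["f"][field_index]["v"]

columns = [column(i, response["rows"]) for i in range(2)]
assert [list(col) for col in columns] == [["1", "2"], ["alice", "bob"]]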

3742# pylint: disable=unused-argument 

3743def _rows_page_start(iterator, page, response): 

3744 """Grab total rows when :class:`~google.cloud.iterator.Page` starts. 

3745 

3746 Args: 

3747 iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. 

3748 page (google.api_core.page_iterator.Page): The page that was just created. 

3749 response (Dict): The JSON API response for a page of rows in a table. 

3750 """ 

3751 # Make a (lazy) copy of the page in column-oriented format for use in data 

3752 # science packages. 

3753 page._columns = _row_iterator_page_columns(iterator._schema, response) 

3754 

3755 total_rows = response.get("totalRows") 

3756 # Don't reset total_rows if it's not present in the next API response. 

3757 if total_rows is not None: 

3758 iterator._total_rows = int(total_rows) 

3759 

3760 

3761# pylint: enable=unused-argument 

3762 

3763 

3764def _table_arg_to_table_ref(value, default_project=None) -> TableReference: 

3765 """Helper to convert a string or Table to TableReference. 

3766 

3767 This function keeps TableReference and other kinds of objects unchanged. 

3768 """ 

3769 if isinstance(value, str): 

3770 value = TableReference.from_string(value, default_project=default_project) 

3771 if isinstance(value, (Table, TableListItem)): 

3772 value = value.reference 

3773 return value 

3774 

3775 

3776def _table_arg_to_table(value, default_project=None) -> Table: 

3777 """Helper to convert a string or TableReference to a Table. 

3778 

3779 This function keeps Table and other kinds of objects unchanged. 

3780 """ 

3781 if isinstance(value, str): 

3782 value = TableReference.from_string(value, default_project=default_project) 

3783 if isinstance(value, TableReference): 

3784 value = Table(value) 

3785 if isinstance(value, TableListItem): 

3786 newvalue = Table(value.reference) 

3787 newvalue._properties = value._properties 

3788 value = newvalue 

3789 

3790 return value
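
A sketch of the normalization chain these private helpers implement (the table path is hypothetical): a plain string is parsed into a ``TableReference``, and ``_table_arg_to_table`` additionally wraps the reference in a ``Table``.

from google.cloud.bigquery.table import (
    Table,
    TableReference,
    _table_arg_to_table,
    _table_arg_to_table_ref,
)

ref = _table_arg_to_table_ref("my-project.my_dataset.orders")
assert isinstance(ref, TableReference)

table = _table_arg_to_table("my-project.my_dataset.orders")
assert isinstance(table, Table)
assert table.reference == ref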