1# Copyright 2015 Google LLC
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15"""Define API Tables."""
16
17from __future__ import absolute_import
18
19import copy
20import datetime
21import functools
22import operator
23import typing
24from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, Union, Sequence
25
26import warnings
27
28try:
29 import pandas # type: ignore
30except ImportError:
31 pandas = None
32
33try:
34 import pyarrow # type: ignore
35except ImportError:
36 pyarrow = None
37
38try:
39 import db_dtypes # type: ignore
40except ImportError:
41 db_dtypes = None
42
43try:
44 import geopandas # type: ignore
45except ImportError:
46 geopandas = None
else:
    _COORDINATE_REFERENCE_SYSTEM = "EPSG:4326"
49
50try:
51 import shapely # type: ignore
52 from shapely import wkt # type: ignore
53except ImportError:
54 shapely = None
55else:
56 _read_wkt = wkt.loads
57
58import google.api_core.exceptions
59from google.api_core.page_iterator import HTTPIterator
60
61import google.cloud._helpers # type: ignore
62from google.cloud.bigquery import _helpers
63from google.cloud.bigquery import _pandas_helpers
64from google.cloud.bigquery import _versions_helpers
65from google.cloud.bigquery import exceptions as bq_exceptions
66from google.cloud.bigquery._tqdm_helpers import get_progress_bar
67from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration
68from google.cloud.bigquery.enums import DefaultPandasDTypes
69from google.cloud.bigquery.external_config import ExternalConfig
70from google.cloud.bigquery import schema as _schema
71from google.cloud.bigquery.schema import _build_schema_resource
72from google.cloud.bigquery.schema import _parse_schema_resource
73from google.cloud.bigquery.schema import _to_schema_fields
74from google.cloud.bigquery import external_config
75
76if typing.TYPE_CHECKING: # pragma: NO COVER
77 # Unconditionally import optional dependencies again to tell pytype that
78 # they are not None, avoiding false "no attribute" errors.
79 import pandas
80 import pyarrow
81 import geopandas # type: ignore
82 from google.cloud import bigquery_storage # type: ignore
83 from google.cloud.bigquery.dataset import DatasetReference
84
85
86_NO_GEOPANDAS_ERROR = (
87 "The geopandas library is not installed, please install "
88 "geopandas to use the to_geodataframe() function."
89)
90_NO_PYARROW_ERROR = (
91 "The pyarrow library is not installed, please install "
92 "pyarrow to use the to_arrow() function."
93)
94_NO_SHAPELY_ERROR = (
95 "The shapely library is not installed, please install "
96 "shapely to use the geography_as_object option."
97)
98
99_TABLE_HAS_NO_SCHEMA = 'Table has no schema: call "client.get_table()"'
100
101_NO_SUPPORTED_DTYPE = (
102 "The dtype cannot to be converted to a pandas ExtensionArray "
103 "because the necessary `__from_arrow__` attribute is missing."
104)
105
106_RANGE_PYARROW_WARNING = (
107 "Unable to represent RANGE schema as struct using pandas ArrowDtype. Using "
108 "`object` instead. To use ArrowDtype, use pandas >= 1.5 and "
109 "pyarrow >= 10.0.1."
110)
111
112# How many of the total rows need to be downloaded already for us to skip
113# calling the BQ Storage API?
114#
115# In microbenchmarks on 2024-05-21, I (tswast@) measure that at about 2 MB of
116# remaining results, it's faster to use the BQ Storage Read API to download
117# the results than use jobs.getQueryResults. Since we don't have a good way to
118# know the remaining bytes, we estimate by remaining number of rows.
119#
# Except when rows themselves are larger, I observe that a single page of
# results will be around 10 MB. Therefore, the proportion of rows already
# downloaded should be 10 (first page) / 12 (all results) or more for it to be
# worth skipping the BQ Storage Read API and finishing with jobs.getQueryResults.
124ALMOST_COMPLETELY_CACHED_RATIO = 0.833333
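
# Illustrative arithmetic (hypothetical numbers): if the first page already
# held 834,000 of 1,000,000 total rows, then 834000 / 1000000 = 0.834, which
# is at or above ALMOST_COMPLETELY_CACHED_RATIO (10 / 12 ~= 0.833333), so the
# small remainder would be fetched with jobs.getQueryResults rather than
# opening a BQ Storage read session.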
125
126
127def _reference_getter(table):
128 """A :class:`~google.cloud.bigquery.table.TableReference` pointing to
129 this table.
130
131 Returns:
132 google.cloud.bigquery.table.TableReference: pointer to this table.
133 """
134 from google.cloud.bigquery import dataset
135
136 dataset_ref = dataset.DatasetReference(table.project, table.dataset_id)
137 return TableReference(dataset_ref, table.table_id)
138
139
140def _view_use_legacy_sql_getter(
141 table: Union["Table", "TableListItem"]
142) -> Optional[bool]:
143 """bool: Specifies whether to execute the view with Legacy or Standard SQL.
144
145 This boolean specifies whether to execute the view with Legacy SQL
    (:data:`True`) or Standard SQL (:data:`False`). The client-side default is
147 :data:`False`. The server-side default is :data:`True`. If this table is
148 not a view, :data:`None` is returned.
149
150 Raises:
151 ValueError: For invalid value types.
152 """
153
154 view: Optional[Dict[str, Any]] = table._properties.get("view")
155 if view is not None:
156 # The server-side default for useLegacySql is True.
        return view.get("useLegacySql", True)
    # In some cases, such as in a table list, no view object is present, but the
159 # resource still represents a view. Use the type as a fallback.
160 if table.table_type == "VIEW":
161 # The server-side default for useLegacySql is True.
162 return True
163 return None # explicit return statement to appease mypy
164
165
166class _TableBase:
167 """Base class for Table-related classes with common functionality."""
168
169 _PROPERTY_TO_API_FIELD: Dict[str, Union[str, List[str]]] = {
170 "dataset_id": ["tableReference", "datasetId"],
171 "project": ["tableReference", "projectId"],
172 "table_id": ["tableReference", "tableId"],
173 }
174
175 def __init__(self):
176 self._properties = {}
177
178 @property
179 def project(self) -> str:
180 """Project bound to the table."""
181 return _helpers._get_sub_prop(
182 self._properties, self._PROPERTY_TO_API_FIELD["project"]
183 )
184
185 @property
186 def dataset_id(self) -> str:
187 """ID of dataset containing the table."""
188 return _helpers._get_sub_prop(
189 self._properties, self._PROPERTY_TO_API_FIELD["dataset_id"]
190 )
191
192 @property
193 def table_id(self) -> str:
194 """The table ID."""
195 return _helpers._get_sub_prop(
196 self._properties, self._PROPERTY_TO_API_FIELD["table_id"]
197 )
198
199 @property
200 def path(self) -> str:
201 """URL path for the table's APIs."""
202 return (
203 f"/projects/{self.project}/datasets/{self.dataset_id}"
204 f"/tables/{self.table_id}"
205 )
206
207 def __eq__(self, other):
208 if isinstance(other, _TableBase):
209 return (
210 self.project == other.project
211 and self.dataset_id == other.dataset_id
212 and self.table_id == other.table_id
213 )
214 else:
215 return NotImplemented
216
217 def __hash__(self):
218 return hash((self.project, self.dataset_id, self.table_id))
219
220
221class TableReference(_TableBase):
222 """TableReferences are pointers to tables.
223
224 See
225 https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#tablereference
226
227 Args:
228 dataset_ref: A pointer to the dataset
229 table_id: The ID of the table
230 """
231
232 _PROPERTY_TO_API_FIELD = {
233 "dataset_id": "datasetId",
234 "project": "projectId",
235 "table_id": "tableId",
236 }
237
238 def __init__(self, dataset_ref: "DatasetReference", table_id: str):
239 self._properties = {}
240
241 _helpers._set_sub_prop(
242 self._properties,
243 self._PROPERTY_TO_API_FIELD["project"],
244 dataset_ref.project,
245 )
246 _helpers._set_sub_prop(
247 self._properties,
248 self._PROPERTY_TO_API_FIELD["dataset_id"],
249 dataset_ref.dataset_id,
250 )
251 _helpers._set_sub_prop(
252 self._properties,
253 self._PROPERTY_TO_API_FIELD["table_id"],
254 table_id,
255 )
256
257 @classmethod
258 def from_string(
259 cls, table_id: str, default_project: Optional[str] = None
260 ) -> "TableReference":
261 """Construct a table reference from table ID string.
262
263 Args:
264 table_id (str):
265 A table ID in standard SQL format. If ``default_project``
                is not specified, this must include a project ID, dataset
267 ID, and table ID, each separated by ``.``.
268 default_project (Optional[str]):
269 The project ID to use when ``table_id`` does not
270 include a project ID.
271
272 Returns:
273 TableReference: Table reference parsed from ``table_id``.
274
275 Examples:
276 >>> TableReference.from_string('my-project.mydataset.mytable')
277 TableRef...(DatasetRef...('my-project', 'mydataset'), 'mytable')
278
279 Raises:
280 ValueError:
281 If ``table_id`` is not a fully-qualified table ID in
282 standard SQL format.
283 """
284 from google.cloud.bigquery.dataset import DatasetReference
285
286 (
287 output_project_id,
288 output_dataset_id,
289 output_table_id,
290 ) = _helpers._parse_3_part_id(
291 table_id, default_project=default_project, property_name="table_id"
292 )
293
294 return cls(
295 DatasetReference(output_project_id, output_dataset_id), output_table_id
296 )
297
298 @classmethod
299 def from_api_repr(cls, resource: dict) -> "TableReference":
300 """Factory: construct a table reference given its API representation
301
302 Args:
303 resource (Dict[str, object]):
304 Table reference representation returned from the API
305
306 Returns:
307 google.cloud.bigquery.table.TableReference:
308 Table reference parsed from ``resource``.
309 """
310 from google.cloud.bigquery.dataset import DatasetReference
311
312 project = resource["projectId"]
313 dataset_id = resource["datasetId"]
314 table_id = resource["tableId"]
315
316 return cls(DatasetReference(project, dataset_id), table_id)
317
318 def to_api_repr(self) -> dict:
319 """Construct the API resource representation of this table reference.
320
321 Returns:
322 Dict[str, object]: Table reference represented as an API resource
323 """
324 return copy.deepcopy(self._properties)
325
326 def to_bqstorage(self) -> str:
327 """Construct a BigQuery Storage API representation of this table.
328
329 Install the ``google-cloud-bigquery-storage`` package to use this
330 feature.
331
332 If the ``table_id`` contains a partition identifier (e.g.
333 ``my_table$201812``) or a snapshot identifier (e.g.
334 ``mytable@1234567890``), it is ignored. Use
335 :class:`google.cloud.bigquery_storage.types.ReadSession.TableReadOptions`
336 to filter rows by partition. Use
337 :class:`google.cloud.bigquery_storage.types.ReadSession.TableModifiers`
338 to select a specific snapshot to read from.
339
340 Returns:
341 str: A reference to this table in the BigQuery Storage API.
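
        Examples:
            Illustrative only; the project, dataset, and table names are
            placeholders:

            >>> ref = TableReference.from_string(
            ...     "my-project.my_dataset.my_table$20181225"
            ... )
            >>> ref.to_bqstorage()
            'projects/my-project/datasets/my_dataset/tables/my_table'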
342 """
343
344 table_id, _, _ = self.table_id.partition("@")
345 table_id, _, _ = table_id.partition("$")
346
347 table_ref = (
348 f"projects/{self.project}/datasets/{self.dataset_id}/tables/{table_id}"
349 )
350 return table_ref
351
352 def __str__(self):
353 return f"{self.project}.{self.dataset_id}.{self.table_id}"
354
355 def __repr__(self):
356 from google.cloud.bigquery.dataset import DatasetReference
357
358 dataset_ref = DatasetReference(self.project, self.dataset_id)
359 return f"TableReference({dataset_ref!r}, '{self.table_id}')"
360
361
362class Table(_TableBase):
363 """Tables represent a set of rows whose values correspond to a schema.
364
365 See
366 https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource-table
367
368 Args:
369 table_ref (Union[google.cloud.bigquery.table.TableReference, str]):
370 A pointer to a table. If ``table_ref`` is a string, it must
            include a project ID, dataset ID, and table ID, each separated
372 by ``.``.
373 schema (Optional[Sequence[Union[ \
374 :class:`~google.cloud.bigquery.schema.SchemaField`, \
375 Mapping[str, Any] \
376 ]]]):
377 The table's schema. If any item is a mapping, its content must be
378 compatible with
379 :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`.
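
    Examples:
        An illustrative construction sketch; the project, dataset, table, and
        field names below are placeholders:

        >>> from google.cloud import bigquery
        >>> table = bigquery.Table(
        ...     "my-project.my_dataset.my_table",
        ...     schema=[
        ...         bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
        ...         bigquery.SchemaField("age", "INTEGER"),
        ...     ],
        ... )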
380 """
381
382 _PROPERTY_TO_API_FIELD: Dict[str, Any] = {
383 **_TableBase._PROPERTY_TO_API_FIELD,
384 "biglake_configuration": "biglakeConfiguration",
385 "clustering_fields": "clustering",
386 "created": "creationTime",
387 "description": "description",
388 "encryption_configuration": "encryptionConfiguration",
389 "etag": "etag",
390 "expires": "expirationTime",
391 "external_data_configuration": "externalDataConfiguration",
392 "friendly_name": "friendlyName",
393 "full_table_id": "id",
394 "labels": "labels",
395 "location": "location",
396 "modified": "lastModifiedTime",
397 "mview_enable_refresh": "materializedView",
398 "mview_last_refresh_time": ["materializedView", "lastRefreshTime"],
399 "mview_query": "materializedView",
400 "mview_refresh_interval": "materializedView",
401 "mview_allow_non_incremental_definition": "materializedView",
402 "num_bytes": "numBytes",
403 "num_rows": "numRows",
404 "partition_expiration": "timePartitioning",
405 "partitioning_type": "timePartitioning",
406 "range_partitioning": "rangePartitioning",
407 "time_partitioning": "timePartitioning",
408 "schema": ["schema", "fields"],
409 "snapshot_definition": "snapshotDefinition",
410 "clone_definition": "cloneDefinition",
411 "streaming_buffer": "streamingBuffer",
412 "self_link": "selfLink",
413 "type": "type",
414 "view_use_legacy_sql": "view",
415 "view_query": "view",
416 "require_partition_filter": "requirePartitionFilter",
417 "table_constraints": "tableConstraints",
418 "max_staleness": "maxStaleness",
419 "resource_tags": "resourceTags",
420 "external_catalog_table_options": "externalCatalogTableOptions",
421 "foreign_type_info": ["schema", "foreignTypeInfo"],
422 }
423
424 def __init__(self, table_ref, schema=None) -> None:
425 table_ref = _table_arg_to_table_ref(table_ref)
426 self._properties: Dict[str, Any] = {
427 "tableReference": table_ref.to_api_repr(),
428 "labels": {},
429 }
430 # Let the @property do validation.
431 if schema is not None:
432 self.schema = schema
433
434 reference = property(_reference_getter)
435
436 @property
437 def biglake_configuration(self):
438 """google.cloud.bigquery.table.BigLakeConfiguration: Configuration
439 for managed tables for Apache Iceberg.
440
441 See https://cloud.google.com/bigquery/docs/iceberg-tables for more information.
442 """
443 prop = self._properties.get(
444 self._PROPERTY_TO_API_FIELD["biglake_configuration"]
445 )
446 if prop is not None:
447 prop = BigLakeConfiguration.from_api_repr(prop)
448 return prop
449
450 @biglake_configuration.setter
451 def biglake_configuration(self, value):
452 api_repr = value
453 if value is not None:
454 api_repr = value.to_api_repr()
455 self._properties[
456 self._PROPERTY_TO_API_FIELD["biglake_configuration"]
457 ] = api_repr
458
459 @property
460 def require_partition_filter(self):
461 """bool: If set to true, queries over the partitioned table require a
462 partition filter that can be used for partition elimination to be
463 specified.
464 """
465 return self._properties.get(
466 self._PROPERTY_TO_API_FIELD["require_partition_filter"]
467 )
468
469 @require_partition_filter.setter
470 def require_partition_filter(self, value):
471 self._properties[
472 self._PROPERTY_TO_API_FIELD["require_partition_filter"]
473 ] = value
474
475 @property
476 def schema(self):
477 """Sequence[Union[ \
478 :class:`~google.cloud.bigquery.schema.SchemaField`, \
479 Mapping[str, Any] \
480 ]]:
481 Table's schema.
482
483 Raises:
484 Exception:
485 If ``schema`` is not a sequence, or if any item in the sequence
486 is not a :class:`~google.cloud.bigquery.schema.SchemaField`
487 instance or a compatible mapping representation of the field.
488
489 .. Note::
490 If you are referencing a schema for an external catalog table such
491 as a Hive table, it will also be necessary to populate the foreign_type_info
492 attribute. This is not necessary if defining the schema for a BigQuery table.
493
494 For details, see:
495 https://cloud.google.com/bigquery/docs/external-tables
496 https://cloud.google.com/bigquery/docs/datasets-intro#external_datasets
497
498 """
499 prop = _helpers._get_sub_prop(
500 self._properties, self._PROPERTY_TO_API_FIELD["schema"]
501 )
502 if not prop:
503 return []
504 else:
505 return _parse_schema_resource(prop)
506
507 @schema.setter
508 def schema(self, value):
509 api_field = self._PROPERTY_TO_API_FIELD["schema"]
510
511 if value is None:
512 _helpers._set_sub_prop(
513 self._properties,
514 api_field,
515 None,
516 )
517 elif isinstance(value, Sequence):
518 value = _to_schema_fields(value)
519 value = _build_schema_resource(value)
520 _helpers._set_sub_prop(
521 self._properties,
522 api_field,
523 value,
524 )
525 else:
526 raise TypeError("Schema must be a Sequence (e.g. a list) or None.")
527
528 @property
529 def labels(self):
530 """Dict[str, str]: Labels for the table.
531
        This property always returns a dict. To change a table's labels,
533 modify the dict, then call ``Client.update_table``. To delete a
534 label, set its value to :data:`None` before updating.
535
536 Raises:
537 ValueError: If ``value`` type is invalid.
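
        Examples:
            An illustrative sketch; assumes an authenticated ``Client`` named
            ``client`` and an existing ``table``:

            >>> table.labels["color"] = "green"   # add or update a label
            >>> table.labels["weather"] = None    # mark the label for deletion
            >>> table = client.update_table(table, ["labels"])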
538 """
539 return self._properties.setdefault(self._PROPERTY_TO_API_FIELD["labels"], {})
540
541 @labels.setter
542 def labels(self, value):
543 if not isinstance(value, dict):
544 raise ValueError("Pass a dict")
545 self._properties[self._PROPERTY_TO_API_FIELD["labels"]] = value
546
547 @property
548 def encryption_configuration(self):
549 """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom
550 encryption configuration for the table.
551
552 Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
553 if using default encryption.
554
555 See `protecting data with Cloud KMS keys
556 <https://cloud.google.com/bigquery/docs/customer-managed-encryption>`_
557 in the BigQuery documentation.
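
        Examples:
            An illustrative sketch; the Cloud KMS key name is a placeholder:

            >>> table.encryption_configuration = EncryptionConfiguration(
            ...     kms_key_name="projects/P/locations/L/keyRings/R/cryptoKeys/K"
            ... )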
558 """
559 prop = self._properties.get(
560 self._PROPERTY_TO_API_FIELD["encryption_configuration"]
561 )
562 if prop is not None:
563 prop = EncryptionConfiguration.from_api_repr(prop)
564 return prop
565
566 @encryption_configuration.setter
567 def encryption_configuration(self, value):
568 api_repr = value
569 if value is not None:
570 api_repr = value.to_api_repr()
571 self._properties[
572 self._PROPERTY_TO_API_FIELD["encryption_configuration"]
573 ] = api_repr
574
575 @property
576 def created(self):
577 """Union[datetime.datetime, None]: Datetime at which the table was
578 created (:data:`None` until set from the server).
579 """
580 creation_time = self._properties.get(self._PROPERTY_TO_API_FIELD["created"])
581 if creation_time is not None:
582 # creation_time will be in milliseconds.
583 return google.cloud._helpers._datetime_from_microseconds(
584 1000.0 * float(creation_time)
585 )
586
587 @property
588 def etag(self):
589 """Union[str, None]: ETag for the table resource (:data:`None` until
590 set from the server).
591 """
592 return self._properties.get(self._PROPERTY_TO_API_FIELD["etag"])
593
594 @property
595 def modified(self):
596 """Union[datetime.datetime, None]: Datetime at which the table was last
597 modified (:data:`None` until set from the server).
598 """
599 modified_time = self._properties.get(self._PROPERTY_TO_API_FIELD["modified"])
600 if modified_time is not None:
601 # modified_time will be in milliseconds.
602 return google.cloud._helpers._datetime_from_microseconds(
603 1000.0 * float(modified_time)
604 )
605
606 @property
607 def num_bytes(self):
608 """Union[int, None]: The size of the table in bytes (:data:`None` until
609 set from the server).
610 """
611 return _helpers._int_or_none(
612 self._properties.get(self._PROPERTY_TO_API_FIELD["num_bytes"])
613 )
614
615 @property
616 def num_rows(self):
617 """Union[int, None]: The number of rows in the table (:data:`None`
618 until set from the server).
619 """
620 return _helpers._int_or_none(
621 self._properties.get(self._PROPERTY_TO_API_FIELD["num_rows"])
622 )
623
624 @property
625 def self_link(self):
626 """Union[str, None]: URL for the table resource (:data:`None` until set
627 from the server).
628 """
629 return self._properties.get(self._PROPERTY_TO_API_FIELD["self_link"])
630
631 @property
632 def full_table_id(self):
633 """Union[str, None]: ID for the table (:data:`None` until set from the
634 server).
635
636 In the format ``project-id:dataset_id.table_id``.
637 """
638 return self._properties.get(self._PROPERTY_TO_API_FIELD["full_table_id"])
639
640 @property
641 def table_type(self):
642 """Union[str, None]: The type of the table (:data:`None` until set from
643 the server).
644
645 Possible values are ``'TABLE'``, ``'VIEW'``, ``'MATERIALIZED_VIEW'`` or
646 ``'EXTERNAL'``.
647 """
648 return self._properties.get(self._PROPERTY_TO_API_FIELD["type"])
649
650 @property
651 def range_partitioning(self):
652 """Optional[google.cloud.bigquery.table.RangePartitioning]:
653 Configures range-based partitioning for a table.
654
655 .. note::
656 **Beta**. The integer range partitioning feature is in a
657 pre-release state and might change or have limited support.
658
659 Only specify at most one of
660 :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or
661 :attr:`~google.cloud.bigquery.table.Table.range_partitioning`.
662
663 Raises:
664 ValueError:
665 If the value is not
666 :class:`~google.cloud.bigquery.table.RangePartitioning` or
667 :data:`None`.
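
        Examples:
            An illustrative sketch using the ``RangePartitioning`` and
            ``PartitionRange`` classes from this module; the field name and
            bounds are placeholders:

            >>> table.range_partitioning = RangePartitioning(
            ...     field="zipcode",
            ...     range_=PartitionRange(start=0, end=100000, interval=10),
            ... )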
668 """
669 resource = self._properties.get(
670 self._PROPERTY_TO_API_FIELD["range_partitioning"]
671 )
672 if resource is not None:
673 return RangePartitioning(_properties=resource)
674
675 @range_partitioning.setter
676 def range_partitioning(self, value):
677 resource = value
678 if isinstance(value, RangePartitioning):
679 resource = value._properties
680 elif value is not None:
681 raise ValueError(
682 "Expected value to be RangePartitioning or None, got {}.".format(value)
683 )
684 self._properties[self._PROPERTY_TO_API_FIELD["range_partitioning"]] = resource
685
686 @property
687 def time_partitioning(self):
688 """Optional[google.cloud.bigquery.table.TimePartitioning]: Configures time-based
689 partitioning for a table.
690
691 Only specify at most one of
692 :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or
693 :attr:`~google.cloud.bigquery.table.Table.range_partitioning`.
694
695 Raises:
696 ValueError:
697 If the value is not
698 :class:`~google.cloud.bigquery.table.TimePartitioning` or
699 :data:`None`.
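
        Examples:
            An illustrative sketch using ``TimePartitioning`` and
            ``TimePartitioningType`` from this module; the field name is a
            placeholder:

            >>> table.time_partitioning = TimePartitioning(
            ...     type_=TimePartitioningType.DAY, field="transaction_date"
            ... )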
700 """
701 prop = self._properties.get(self._PROPERTY_TO_API_FIELD["time_partitioning"])
702 if prop is not None:
703 return TimePartitioning.from_api_repr(prop)
704
705 @time_partitioning.setter
706 def time_partitioning(self, value):
707 api_repr = value
708 if isinstance(value, TimePartitioning):
709 api_repr = value.to_api_repr()
710 elif value is not None:
711 raise ValueError(
712 "value must be google.cloud.bigquery.table.TimePartitioning " "or None"
713 )
714 self._properties[self._PROPERTY_TO_API_FIELD["time_partitioning"]] = api_repr
715
716 @property
717 def partitioning_type(self):
718 """Union[str, None]: Time partitioning of the table if it is
719 partitioned (Defaults to :data:`None`).
720
721 """
722 warnings.warn(
723 "This method will be deprecated in future versions. Please use "
724 "Table.time_partitioning.type_ instead.",
725 PendingDeprecationWarning,
726 stacklevel=2,
727 )
728 if self.time_partitioning is not None:
729 return self.time_partitioning.type_
730
731 @partitioning_type.setter
732 def partitioning_type(self, value):
733 warnings.warn(
734 "This method will be deprecated in future versions. Please use "
735 "Table.time_partitioning.type_ instead.",
736 PendingDeprecationWarning,
737 stacklevel=2,
738 )
739 api_field = self._PROPERTY_TO_API_FIELD["partitioning_type"]
740 if self.time_partitioning is None:
741 self._properties[api_field] = {}
742 self._properties[api_field]["type"] = value
743
744 @property
745 def partition_expiration(self):
746 """Union[int, None]: Expiration time in milliseconds for a partition.
747
748 If :attr:`partition_expiration` is set and :attr:`type_` is
749 not set, :attr:`type_` will default to
750 :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`.
751 """
752 warnings.warn(
753 "This method will be deprecated in future versions. Please use "
754 "Table.time_partitioning.expiration_ms instead.",
755 PendingDeprecationWarning,
756 stacklevel=2,
757 )
758 if self.time_partitioning is not None:
759 return self.time_partitioning.expiration_ms
760
761 @partition_expiration.setter
762 def partition_expiration(self, value):
763 warnings.warn(
764 "This method will be deprecated in future versions. Please use "
765 "Table.time_partitioning.expiration_ms instead.",
766 PendingDeprecationWarning,
767 stacklevel=2,
768 )
769 api_field = self._PROPERTY_TO_API_FIELD["partition_expiration"]
770
771 if self.time_partitioning is None:
772 self._properties[api_field] = {"type": TimePartitioningType.DAY}
773
774 if value is None:
775 self._properties[api_field]["expirationMs"] = None
776 else:
777 self._properties[api_field]["expirationMs"] = str(value)
778
779 @property
780 def clustering_fields(self):
781 """Union[List[str], None]: Fields defining clustering for the table
782
783 (Defaults to :data:`None`).
784
785 Clustering fields are immutable after table creation.
786
787 .. note::
788
789 BigQuery supports clustering for both partitioned and
790 non-partitioned tables.
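
        Examples:
            An illustrative sketch; the column names are placeholders:

            >>> table.clustering_fields = ["customer_id", "order_date"]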
791 """
792 prop = self._properties.get(self._PROPERTY_TO_API_FIELD["clustering_fields"])
793 if prop is not None:
794 return list(prop.get("fields", ()))
795
796 @clustering_fields.setter
797 def clustering_fields(self, value):
798 """Union[List[str], None]: Fields defining clustering for the table
799
800 (Defaults to :data:`None`).
801 """
802 api_field = self._PROPERTY_TO_API_FIELD["clustering_fields"]
803
804 if value is not None:
805 prop = self._properties.setdefault(api_field, {})
806 prop["fields"] = value
807 else:
808 # In order to allow unsetting clustering fields completely, we explicitly
            # set this property to None (as opposed to merely removing the key).
810 self._properties[api_field] = None
811
812 @property
813 def description(self):
814 """Union[str, None]: Description of the table (defaults to
815 :data:`None`).
816
817 Raises:
818 ValueError: For invalid value types.
819 """
820 return self._properties.get(self._PROPERTY_TO_API_FIELD["description"])
821
822 @description.setter
823 def description(self, value):
824 if not isinstance(value, str) and value is not None:
825 raise ValueError("Pass a string, or None")
826 self._properties[self._PROPERTY_TO_API_FIELD["description"]] = value
827
828 @property
829 def expires(self):
830 """Union[datetime.datetime, None]: Datetime at which the table will be
831 deleted.
832
833 Raises:
834 ValueError: For invalid value types.
835 """
836 expiration_time = self._properties.get(self._PROPERTY_TO_API_FIELD["expires"])
837 if expiration_time is not None:
838 # expiration_time will be in milliseconds.
839 return google.cloud._helpers._datetime_from_microseconds(
840 1000.0 * float(expiration_time)
841 )
842
843 @expires.setter
844 def expires(self, value):
845 if not isinstance(value, datetime.datetime) and value is not None:
846 raise ValueError("Pass a datetime, or None")
847 value_ms = google.cloud._helpers._millis_from_datetime(value)
848 self._properties[
849 self._PROPERTY_TO_API_FIELD["expires"]
850 ] = _helpers._str_or_none(value_ms)
851
852 @property
853 def friendly_name(self):
854 """Union[str, None]: Title of the table (defaults to :data:`None`).
855
856 Raises:
857 ValueError: For invalid value types.
858 """
859 return self._properties.get(self._PROPERTY_TO_API_FIELD["friendly_name"])
860
861 @friendly_name.setter
862 def friendly_name(self, value):
863 if not isinstance(value, str) and value is not None:
864 raise ValueError("Pass a string, or None")
865 self._properties[self._PROPERTY_TO_API_FIELD["friendly_name"]] = value
866
867 @property
868 def location(self):
869 """Union[str, None]: Location in which the table is hosted
870
871 Defaults to :data:`None`.
872 """
873 return self._properties.get(self._PROPERTY_TO_API_FIELD["location"])
874
875 @property
876 def view_query(self):
877 """Union[str, None]: SQL query defining the table as a view (defaults
878 to :data:`None`).
879
880 By default, the query is treated as Standard SQL. To use Legacy
881 SQL, set :attr:`view_use_legacy_sql` to :data:`True`.
882
883 Raises:
884 ValueError: For invalid value types.
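
        Examples:
            An illustrative sketch; the project, dataset, and table names are
            placeholders:

            >>> view = Table("my-project.my_dataset.my_view")
            >>> view.view_query = (
            ...     "SELECT name, age FROM `my-project.my_dataset.people`"
            ... )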
885 """
886 api_field = self._PROPERTY_TO_API_FIELD["view_query"]
887 return _helpers._get_sub_prop(self._properties, [api_field, "query"])
888
889 @view_query.setter
890 def view_query(self, value):
891 if not isinstance(value, str):
892 raise ValueError("Pass a string")
893
894 api_field = self._PROPERTY_TO_API_FIELD["view_query"]
895 _helpers._set_sub_prop(self._properties, [api_field, "query"], value)
896 view = self._properties[api_field]
897 # The service defaults useLegacySql to True, but this
898 # client uses Standard SQL by default.
899 if view.get("useLegacySql") is None:
900 view["useLegacySql"] = False
901
902 @view_query.deleter
903 def view_query(self):
904 """Delete SQL query defining the table as a view."""
905 self._properties.pop(self._PROPERTY_TO_API_FIELD["view_query"], None)
906
907 view_use_legacy_sql = property(_view_use_legacy_sql_getter)
908
909 @view_use_legacy_sql.setter # type: ignore # (redefinition from above)
910 def view_use_legacy_sql(self, value):
911 if not isinstance(value, bool):
912 raise ValueError("Pass a boolean")
913
914 api_field = self._PROPERTY_TO_API_FIELD["view_query"]
915 if self._properties.get(api_field) is None:
916 self._properties[api_field] = {}
917 self._properties[api_field]["useLegacySql"] = value
918
919 @property
920 def mview_query(self):
921 """Optional[str]: SQL query defining the table as a materialized
922 view (defaults to :data:`None`).
923 """
924 api_field = self._PROPERTY_TO_API_FIELD["mview_query"]
925 return _helpers._get_sub_prop(self._properties, [api_field, "query"])
926
927 @mview_query.setter
928 def mview_query(self, value):
929 api_field = self._PROPERTY_TO_API_FIELD["mview_query"]
930 _helpers._set_sub_prop(self._properties, [api_field, "query"], str(value))
931
932 @mview_query.deleter
933 def mview_query(self):
934 """Delete SQL query defining the table as a materialized view."""
935 self._properties.pop(self._PROPERTY_TO_API_FIELD["mview_query"], None)
936
937 @property
938 def mview_last_refresh_time(self):
939 """Optional[datetime.datetime]: Datetime at which the materialized view was last
940 refreshed (:data:`None` until set from the server).
941 """
942 refresh_time = _helpers._get_sub_prop(
943 self._properties, self._PROPERTY_TO_API_FIELD["mview_last_refresh_time"]
944 )
945 if refresh_time is not None:
946 # refresh_time will be in milliseconds.
947 return google.cloud._helpers._datetime_from_microseconds(
948 1000 * int(refresh_time)
949 )
950
951 @property
952 def mview_enable_refresh(self):
953 """Optional[bool]: Enable automatic refresh of the materialized view
954 when the base table is updated. The default value is :data:`True`.
955 """
956 api_field = self._PROPERTY_TO_API_FIELD["mview_enable_refresh"]
957 return _helpers._get_sub_prop(self._properties, [api_field, "enableRefresh"])
958
959 @mview_enable_refresh.setter
960 def mview_enable_refresh(self, value):
961 api_field = self._PROPERTY_TO_API_FIELD["mview_enable_refresh"]
962 return _helpers._set_sub_prop(
963 self._properties, [api_field, "enableRefresh"], value
964 )
965
966 @property
967 def mview_refresh_interval(self):
968 """Optional[datetime.timedelta]: The maximum frequency at which this
969 materialized view will be refreshed. The default value is 1800000
970 milliseconds (30 minutes).
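
        Examples:
            An illustrative sketch; assumes an existing materialized-view
            ``table``:

            >>> import datetime
            >>> table.mview_refresh_interval = datetime.timedelta(minutes=30)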
971 """
972 api_field = self._PROPERTY_TO_API_FIELD["mview_refresh_interval"]
973 refresh_interval = _helpers._get_sub_prop(
974 self._properties, [api_field, "refreshIntervalMs"]
975 )
976 if refresh_interval is not None:
977 return datetime.timedelta(milliseconds=int(refresh_interval))
978
979 @mview_refresh_interval.setter
980 def mview_refresh_interval(self, value):
981 if value is None:
982 refresh_interval_ms = None
983 else:
984 refresh_interval_ms = str(value // datetime.timedelta(milliseconds=1))
985
986 api_field = self._PROPERTY_TO_API_FIELD["mview_refresh_interval"]
987 _helpers._set_sub_prop(
988 self._properties,
989 [api_field, "refreshIntervalMs"],
990 refresh_interval_ms,
991 )
992
993 @property
994 def mview_allow_non_incremental_definition(self):
995 """Optional[bool]: This option declares the intention to construct a
996 materialized view that isn't refreshed incrementally.
997 The default value is :data:`False`.
998 """
999 api_field = self._PROPERTY_TO_API_FIELD[
1000 "mview_allow_non_incremental_definition"
1001 ]
1002 return _helpers._get_sub_prop(
1003 self._properties, [api_field, "allowNonIncrementalDefinition"]
1004 )
1005
1006 @mview_allow_non_incremental_definition.setter
1007 def mview_allow_non_incremental_definition(self, value):
1008 api_field = self._PROPERTY_TO_API_FIELD[
1009 "mview_allow_non_incremental_definition"
1010 ]
1011 _helpers._set_sub_prop(
1012 self._properties, [api_field, "allowNonIncrementalDefinition"], value
1013 )
1014
1015 @property
1016 def streaming_buffer(self):
1017 """google.cloud.bigquery.StreamingBuffer: Information about a table's
1018 streaming buffer.
1019 """
1020 sb = self._properties.get(self._PROPERTY_TO_API_FIELD["streaming_buffer"])
1021 if sb is not None:
1022 return StreamingBuffer(sb)
1023
1024 @property
1025 def external_data_configuration(self):
1026 """Union[google.cloud.bigquery.ExternalConfig, None]: Configuration for
1027 an external data source (defaults to :data:`None`).
1028
1029 Raises:
1030 ValueError: For invalid value types.
1031 """
1032 prop = self._properties.get(
1033 self._PROPERTY_TO_API_FIELD["external_data_configuration"]
1034 )
1035 if prop is not None:
1036 prop = ExternalConfig.from_api_repr(prop)
1037 return prop
1038
1039 @external_data_configuration.setter
1040 def external_data_configuration(self, value):
1041 if not (value is None or isinstance(value, ExternalConfig)):
1042 raise ValueError("Pass an ExternalConfig or None")
1043 api_repr = value
1044 if value is not None:
1045 api_repr = value.to_api_repr()
1046 self._properties[
1047 self._PROPERTY_TO_API_FIELD["external_data_configuration"]
1048 ] = api_repr
1049
1050 @property
1051 def snapshot_definition(self) -> Optional["SnapshotDefinition"]:
1052 """Information about the snapshot. This value is set via snapshot creation.
1053
1054 See: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.snapshot_definition
1055 """
1056 snapshot_info = self._properties.get(
1057 self._PROPERTY_TO_API_FIELD["snapshot_definition"]
1058 )
1059 if snapshot_info is not None:
1060 snapshot_info = SnapshotDefinition(snapshot_info)
1061 return snapshot_info
1062
1063 @property
1064 def clone_definition(self) -> Optional["CloneDefinition"]:
1065 """Information about the clone. This value is set via clone creation.
1066
1067 See: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.clone_definition
1068 """
1069 clone_info = self._properties.get(
1070 self._PROPERTY_TO_API_FIELD["clone_definition"]
1071 )
1072 if clone_info is not None:
1073 clone_info = CloneDefinition(clone_info)
1074 return clone_info
1075
1076 @property
1077 def table_constraints(self) -> Optional["TableConstraints"]:
1078 """Tables Primary Key and Foreign Key information."""
1079 table_constraints = self._properties.get(
1080 self._PROPERTY_TO_API_FIELD["table_constraints"]
1081 )
1082 if table_constraints is not None:
1083 table_constraints = TableConstraints.from_api_repr(table_constraints)
1084 return table_constraints
1085
1086 @table_constraints.setter
1087 def table_constraints(self, value):
1088 """Tables Primary Key and Foreign Key information."""
1089 api_repr = value
1090 if not isinstance(value, TableConstraints) and value is not None:
1091 raise ValueError(
1092 "value must be google.cloud.bigquery.table.TableConstraints or None"
1093 )
1094 api_repr = value.to_api_repr() if value else None
1095 self._properties[self._PROPERTY_TO_API_FIELD["table_constraints"]] = api_repr
1096
1097 @property
1098 def resource_tags(self):
1099 """Dict[str, str]: Resource tags for the table.
1100
1101 See: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.resource_tags
1102 """
1103 return self._properties.setdefault(
1104 self._PROPERTY_TO_API_FIELD["resource_tags"], {}
1105 )
1106
1107 @resource_tags.setter
1108 def resource_tags(self, value):
1109 if not isinstance(value, dict) and value is not None:
1110 raise ValueError("resource_tags must be a dict or None")
1111 self._properties[self._PROPERTY_TO_API_FIELD["resource_tags"]] = value
1112
1113 @property
1114 def external_catalog_table_options(
1115 self,
1116 ) -> Optional[external_config.ExternalCatalogTableOptions]:
1117 """Options defining open source compatible datasets living in the
1118 BigQuery catalog. Contains metadata of open source database, schema
1119 or namespace represented by the current dataset."""
1120
1121 prop = self._properties.get(
1122 self._PROPERTY_TO_API_FIELD["external_catalog_table_options"]
1123 )
1124 if prop is not None:
1125 return external_config.ExternalCatalogTableOptions.from_api_repr(prop)
1126 return None
1127
1128 @external_catalog_table_options.setter
1129 def external_catalog_table_options(
1130 self, value: Union[external_config.ExternalCatalogTableOptions, dict, None]
1131 ):
1132 value = _helpers._isinstance_or_raise(
1133 value,
1134 (external_config.ExternalCatalogTableOptions, dict),
1135 none_allowed=True,
1136 )
1137 if isinstance(value, external_config.ExternalCatalogTableOptions):
1138 self._properties[
1139 self._PROPERTY_TO_API_FIELD["external_catalog_table_options"]
1140 ] = value.to_api_repr()
1141 else:
1142 self._properties[
1143 self._PROPERTY_TO_API_FIELD["external_catalog_table_options"]
1144 ] = value
1145
1146 @property
1147 def foreign_type_info(self) -> Optional[_schema.ForeignTypeInfo]:
1148 """Optional. Specifies metadata of the foreign data type definition in
1149 field schema (TableFieldSchema.foreign_type_definition).
1150 Returns:
1151 Optional[schema.ForeignTypeInfo]:
1152 Foreign type information, or :data:`None` if not set.
1153 .. Note::
1154 foreign_type_info is only required if you are referencing an
1155 external catalog such as a Hive table.
1156 For details, see:
1157 https://cloud.google.com/bigquery/docs/external-tables
1158 https://cloud.google.com/bigquery/docs/datasets-intro#external_datasets
1159 """
1160
1161 prop = _helpers._get_sub_prop(
1162 self._properties, self._PROPERTY_TO_API_FIELD["foreign_type_info"]
1163 )
1164 if prop is not None:
1165 return _schema.ForeignTypeInfo.from_api_repr(prop)
1166 return None
1167
1168 @foreign_type_info.setter
1169 def foreign_type_info(self, value: Union[_schema.ForeignTypeInfo, dict, None]):
1170 value = _helpers._isinstance_or_raise(
1171 value,
1172 (_schema.ForeignTypeInfo, dict),
1173 none_allowed=True,
1174 )
1175 if isinstance(value, _schema.ForeignTypeInfo):
1176 value = value.to_api_repr()
1177 _helpers._set_sub_prop(
1178 self._properties, self._PROPERTY_TO_API_FIELD["foreign_type_info"], value
1179 )
1180
1181 @classmethod
1182 def from_string(cls, full_table_id: str) -> "Table":
1183 """Construct a table from fully-qualified table ID.
1184
1185 Args:
1186 full_table_id (str):
1187 A fully-qualified table ID in standard SQL format. Must
                include a project ID, dataset ID, and table ID, each
1189 separated by ``.``.
1190
1191 Returns:
1192 Table: Table parsed from ``full_table_id``.
1193
1194 Examples:
1195 >>> Table.from_string('my-project.mydataset.mytable')
1196 Table(TableRef...(D...('my-project', 'mydataset'), 'mytable'))
1197
1198 Raises:
1199 ValueError:
1200 If ``full_table_id`` is not a fully-qualified table ID in
1201 standard SQL format.
1202 """
1203 return cls(TableReference.from_string(full_table_id))
1204
1205 @classmethod
1206 def from_api_repr(cls, resource: dict) -> "Table":
1207 """Factory: construct a table given its API representation
1208
1209 Args:
1210 resource (Dict[str, object]):
1211 Table resource representation from the API
1212
1213 Returns:
1214 google.cloud.bigquery.table.Table: Table parsed from ``resource``.
1215
1216 Raises:
1217 KeyError:
1218 If the ``resource`` lacks the key ``'tableReference'``, or if
1219 the ``dict`` stored within the key ``'tableReference'`` lacks
1220 the keys ``'tableId'``, ``'projectId'``, or ``'datasetId'``.
1221 """
1222 from google.cloud.bigquery import dataset
1223
1224 if (
1225 "tableReference" not in resource
1226 or "tableId" not in resource["tableReference"]
1227 ):
1228 raise KeyError(
1229 "Resource lacks required identity information:"
1230 '["tableReference"]["tableId"]'
1231 )
1232 project_id = _helpers._get_sub_prop(
1233 resource, cls._PROPERTY_TO_API_FIELD["project"]
1234 )
1235 table_id = _helpers._get_sub_prop(
1236 resource, cls._PROPERTY_TO_API_FIELD["table_id"]
1237 )
1238 dataset_id = _helpers._get_sub_prop(
1239 resource, cls._PROPERTY_TO_API_FIELD["dataset_id"]
1240 )
1241 dataset_ref = dataset.DatasetReference(project_id, dataset_id)
1242
1243 table = cls(dataset_ref.table(table_id))
1244 table._properties = resource
1245
1246 return table
1247
1248 def to_api_repr(self) -> dict:
1249 """Constructs the API resource of this table
1250
1251 Returns:
1252 Dict[str, object]: Table represented as an API resource
1253 """
1254 return copy.deepcopy(self._properties)
1255
1256 def to_bqstorage(self) -> str:
1257 """Construct a BigQuery Storage API representation of this table.
1258
1259 Returns:
1260 str: A reference to this table in the BigQuery Storage API.
1261 """
1262 return self.reference.to_bqstorage()
1263
1264 def _build_resource(self, filter_fields):
1265 """Generate a resource for ``update``."""
1266 return _helpers._build_resource_from_properties(self, filter_fields)
1267
1268 def __repr__(self):
1269 return "Table({})".format(repr(self.reference))
1270
1271 def __str__(self):
1272 return f"{self.project}.{self.dataset_id}.{self.table_id}"
1273
1274 @property
1275 def max_staleness(self):
1276 """Union[str, None]: The maximum staleness of data that could be returned when the table is queried.
1277
        Staleness is encoded as a string representation of the SQL ``IntervalValue`` type.
1279 This property is optional and defaults to None.
1280
1281 According to the BigQuery API documentation, maxStaleness specifies the maximum time
1282 interval for which stale data can be returned when querying the table.
1283 It helps control data freshness in scenarios like metadata-cached external tables.
1284
1285 Returns:
1286 Optional[str]: A string representing the maximum staleness interval
1287 (e.g., '1h', '30m', '15s' for hours, minutes, seconds respectively).
1288 """
1289 return self._properties.get(self._PROPERTY_TO_API_FIELD["max_staleness"])
1290
1291 @max_staleness.setter
1292 def max_staleness(self, value):
1293 """Set the maximum staleness for the table.
1294
1295 Args:
1296 value (Optional[str]): A string representing the maximum staleness interval.
1297 Must be a valid time interval string.
1298 Examples include '1h' (1 hour), '30m' (30 minutes), '15s' (15 seconds).
1299
1300 Raises:
1301 ValueError: If the value is not None and not a string.
1302 """
1303 if value is not None and not isinstance(value, str):
1304 raise ValueError("max_staleness must be a string or None")
1305
1306 self._properties[self._PROPERTY_TO_API_FIELD["max_staleness"]] = value
1307
1308
1309class TableListItem(_TableBase):
1310 """A read-only table resource from a list operation.
1311
1312 For performance reasons, the BigQuery API only includes some of the table
1313 properties when listing tables. Notably,
1314 :attr:`~google.cloud.bigquery.table.Table.schema` and
1315 :attr:`~google.cloud.bigquery.table.Table.num_rows` are missing.
1316
1317 For a full list of the properties that the BigQuery API returns, see the
1318 `REST documentation for tables.list
1319 <https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list>`_.
1320
1321
1322 Args:
1323 resource (Dict[str, object]):
1324 A table-like resource object from a table list response. A
1325 ``tableReference`` property is required.
1326
1327 Raises:
1328 ValueError:
1329 If ``tableReference`` or one of its required members is missing
1330 from ``resource``.
1331 """
1332
1333 def __init__(self, resource):
1334 if "tableReference" not in resource:
1335 raise ValueError("resource must contain a tableReference value")
1336 if "projectId" not in resource["tableReference"]:
1337 raise ValueError(
1338 "resource['tableReference'] must contain a projectId value"
1339 )
1340 if "datasetId" not in resource["tableReference"]:
1341 raise ValueError(
1342 "resource['tableReference'] must contain a datasetId value"
1343 )
1344 if "tableId" not in resource["tableReference"]:
1345 raise ValueError("resource['tableReference'] must contain a tableId value")
1346
1347 self._properties = resource
1348
1349 @property
1350 def created(self):
1351 """Union[datetime.datetime, None]: Datetime at which the table was
1352 created (:data:`None` until set from the server).
1353 """
1354 creation_time = self._properties.get("creationTime")
1355 if creation_time is not None:
1356 # creation_time will be in milliseconds.
1357 return google.cloud._helpers._datetime_from_microseconds(
1358 1000.0 * float(creation_time)
1359 )
1360
1361 @property
1362 def expires(self):
1363 """Union[datetime.datetime, None]: Datetime at which the table will be
1364 deleted.
1365 """
1366 expiration_time = self._properties.get("expirationTime")
1367 if expiration_time is not None:
1368 # expiration_time will be in milliseconds.
1369 return google.cloud._helpers._datetime_from_microseconds(
1370 1000.0 * float(expiration_time)
1371 )
1372
1373 reference = property(_reference_getter)
1374
1375 @property
1376 def labels(self):
1377 """Dict[str, str]: Labels for the table.
1378
        This property always returns a dict. To change a table's labels,
1380 modify the dict, then call ``Client.update_table``. To delete a
1381 label, set its value to :data:`None` before updating.
1382 """
1383 return self._properties.setdefault("labels", {})
1384
1385 @property
1386 def full_table_id(self):
1387 """Union[str, None]: ID for the table (:data:`None` until set from the
1388 server).
1389
1390 In the format ``project_id:dataset_id.table_id``.
1391 """
1392 return self._properties.get("id")
1393
1394 @property
1395 def table_type(self):
1396 """Union[str, None]: The type of the table (:data:`None` until set from
1397 the server).
1398
1399 Possible values are ``'TABLE'``, ``'VIEW'``, or ``'EXTERNAL'``.
1400 """
1401 return self._properties.get("type")
1402
1403 @property
1404 def time_partitioning(self):
1405 """google.cloud.bigquery.table.TimePartitioning: Configures time-based
1406 partitioning for a table.
1407 """
1408 prop = self._properties.get("timePartitioning")
1409 if prop is not None:
1410 return TimePartitioning.from_api_repr(prop)
1411
1412 @property
1413 def partitioning_type(self):
1414 """Union[str, None]: Time partitioning of the table if it is
1415 partitioned (Defaults to :data:`None`).
1416 """
1417 warnings.warn(
1418 "This method will be deprecated in future versions. Please use "
1419 "TableListItem.time_partitioning.type_ instead.",
1420 PendingDeprecationWarning,
1421 stacklevel=2,
1422 )
1423 if self.time_partitioning is not None:
1424 return self.time_partitioning.type_
1425
1426 @property
1427 def partition_expiration(self):
1428 """Union[int, None]: Expiration time in milliseconds for a partition.
1429
1430 If this property is set and :attr:`type_` is not set, :attr:`type_`
1431 will default to :attr:`TimePartitioningType.DAY`.
1432 """
1433 warnings.warn(
1434 "This method will be deprecated in future versions. Please use "
1435 "TableListItem.time_partitioning.expiration_ms instead.",
1436 PendingDeprecationWarning,
1437 stacklevel=2,
1438 )
1439 if self.time_partitioning is not None:
1440 return self.time_partitioning.expiration_ms
1441
1442 @property
1443 def friendly_name(self):
1444 """Union[str, None]: Title of the table (defaults to :data:`None`)."""
1445 return self._properties.get("friendlyName")
1446
1447 view_use_legacy_sql = property(_view_use_legacy_sql_getter)
1448
1449 @property
1450 def clustering_fields(self):
1451 """Union[List[str], None]: Fields defining clustering for the table
1452
1453 (Defaults to :data:`None`).
1454
1455 Clustering fields are immutable after table creation.
1456
1457 .. note::
1458
1459 BigQuery supports clustering for both partitioned and
1460 non-partitioned tables.
1461 """
1462 prop = self._properties.get("clustering")
1463 if prop is not None:
1464 return list(prop.get("fields", ()))
1465
1466 @classmethod
1467 def from_string(cls, full_table_id: str) -> "TableListItem":
1468 """Construct a table from fully-qualified table ID.
1469
1470 Args:
1471 full_table_id (str):
1472 A fully-qualified table ID in standard SQL format. Must
                include a project ID, dataset ID, and table ID, each
1474 separated by ``.``.
1475
1476 Returns:
            TableListItem: Table list item parsed from ``full_table_id``.
1478
1479 Examples:
1480 >>> Table.from_string('my-project.mydataset.mytable')
1481 Table(TableRef...(D...('my-project', 'mydataset'), 'mytable'))
1482
1483 Raises:
1484 ValueError:
1485 If ``full_table_id`` is not a fully-qualified table ID in
1486 standard SQL format.
1487 """
1488 return cls(
1489 {"tableReference": TableReference.from_string(full_table_id).to_api_repr()}
1490 )
1491
1492 def to_bqstorage(self) -> str:
1493 """Construct a BigQuery Storage API representation of this table.
1494
1495 Returns:
1496 str: A reference to this table in the BigQuery Storage API.
1497 """
1498 return self.reference.to_bqstorage()
1499
1500 def to_api_repr(self) -> dict:
1501 """Constructs the API resource of this table
1502
1503 Returns:
1504 Dict[str, object]: Table represented as an API resource
1505 """
1506 return copy.deepcopy(self._properties)
1507
1508
1509def _row_from_mapping(mapping, schema):
1510 """Convert a mapping to a row tuple using the schema.
1511
1512 Args:
1513 mapping (Dict[str, object])
1514 Mapping of row data: must contain keys for all required fields in
1515 the schema. Keys which do not correspond to a field in the schema
1516 are ignored.
1517 schema (List[google.cloud.bigquery.schema.SchemaField]):
1518 The schema of the table destination for the rows
1519
1520 Returns:
1521 Tuple[object]:
1522 Tuple whose elements are ordered according to the schema.
1523
1524 Raises:
1525 ValueError: If schema is empty.
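
    Examples:
        An illustrative sketch; assumes ``SchemaField`` is imported from
        :mod:`google.cloud.bigquery.schema`:

        >>> schema = [
        ...     SchemaField("full_name", "STRING", mode="REQUIRED"),
        ...     SchemaField("age", "INTEGER", mode="NULLABLE"),
        ... ]
        >>> _row_from_mapping({"full_name": "Phred", "age": 32}, schema)
        ('Phred', 32)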
1526 """
1527 if len(schema) == 0:
1528 raise ValueError(_TABLE_HAS_NO_SCHEMA)
1529
1530 row = []
1531 for field in schema:
1532 if field.mode == "REQUIRED":
1533 row.append(mapping[field.name])
1534 elif field.mode == "REPEATED":
1535 row.append(mapping.get(field.name, ()))
1536 elif field.mode == "NULLABLE":
1537 row.append(mapping.get(field.name))
1538 else:
1539 raise ValueError("Unknown field mode: {}".format(field.mode))
1540 return tuple(row)
1541
1542
1543class StreamingBuffer(object):
1544 """Information about a table's streaming buffer.
1545
1546 See https://cloud.google.com/bigquery/streaming-data-into-bigquery.
1547
1548 Args:
1549 resource (Dict[str, object]):
1550 streaming buffer representation returned from the API
1551 """
1552
1553 def __init__(self, resource):
1554 self.estimated_bytes = None
1555 if "estimatedBytes" in resource:
1556 self.estimated_bytes = int(resource["estimatedBytes"])
1557 self.estimated_rows = None
1558 if "estimatedRows" in resource:
1559 self.estimated_rows = int(resource["estimatedRows"])
1560 self.oldest_entry_time = None
1561 if "oldestEntryTime" in resource:
1562 self.oldest_entry_time = google.cloud._helpers._datetime_from_microseconds(
1563 1000.0 * int(resource["oldestEntryTime"])
1564 )
1565
1566
1567class SnapshotDefinition:
1568 """Information about base table and snapshot time of the snapshot.
1569
1570 See https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#snapshotdefinition
1571
1572 Args:
1573 resource: Snapshot definition representation returned from the API.
1574 """
1575
1576 def __init__(self, resource: Dict[str, Any]):
1577 self.base_table_reference = None
1578 if "baseTableReference" in resource:
1579 self.base_table_reference = TableReference.from_api_repr(
1580 resource["baseTableReference"]
1581 )
1582
1583 self.snapshot_time = None
1584 if "snapshotTime" in resource:
1585 self.snapshot_time = google.cloud._helpers._rfc3339_to_datetime(
1586 resource["snapshotTime"]
1587 )
1588
1589
1590class CloneDefinition:
1591 """Information about base table and clone time of the clone.
1592
1593 See https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#clonedefinition
1594
1595 Args:
1596 resource: Clone definition representation returned from the API.
1597 """
1598
1599 def __init__(self, resource: Dict[str, Any]):
1600 self.base_table_reference = None
1601 if "baseTableReference" in resource:
1602 self.base_table_reference = TableReference.from_api_repr(
1603 resource["baseTableReference"]
1604 )
1605
1606 self.clone_time = None
1607 if "cloneTime" in resource:
1608 self.clone_time = google.cloud._helpers._rfc3339_to_datetime(
1609 resource["cloneTime"]
1610 )
1611
1612
1613class Row(object):
1614 """A BigQuery row.
1615
1616 Values can be accessed by position (index), by key like a dict,
1617 or as properties.
1618
1619 Args:
1620 values (Sequence[object]): The row values
1621 field_to_index (Dict[str, int]):
1622 A mapping from schema field names to indexes
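
    Examples:
        An illustrative row with placeholder values:

        >>> row = Row(('Alice', 33), {'name': 0, 'age': 1})
        >>> row[0]
        'Alice'
        >>> row['age']
        33
        >>> row.name
        'Alice'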
1623 """
1624
1625 # Choose unusual field names to try to avoid conflict with schema fields.
1626 __slots__ = ("_xxx_values", "_xxx_field_to_index")
1627
1628 def __init__(self, values, field_to_index) -> None:
1629 self._xxx_values = values
1630 self._xxx_field_to_index = field_to_index
1631
1632 def values(self):
1633 """Return the values included in this row.
1634
1635 Returns:
1636 Sequence[object]: A sequence of length ``len(row)``.
1637 """
1638 return copy.deepcopy(self._xxx_values)
1639
1640 def keys(self) -> Iterable[str]:
1641 """Return the keys for using a row as a dict.
1642
1643 Returns:
1644 Iterable[str]: The keys corresponding to the columns of a row
1645
1646 Examples:
1647
1648 >>> list(Row(('a', 'b'), {'x': 0, 'y': 1}).keys())
1649 ['x', 'y']
1650 """
1651 return self._xxx_field_to_index.keys()
1652
1653 def items(self) -> Iterable[Tuple[str, Any]]:
1654 """Return items as ``(key, value)`` pairs.
1655
1656 Returns:
1657 Iterable[Tuple[str, object]]:
1658 The ``(key, value)`` pairs representing this row.
1659
1660 Examples:
1661
1662 >>> list(Row(('a', 'b'), {'x': 0, 'y': 1}).items())
1663 [('x', 'a'), ('y', 'b')]
1664 """
1665 for key, index in self._xxx_field_to_index.items():
1666 yield (key, copy.deepcopy(self._xxx_values[index]))
1667
1668 def get(self, key: str, default: Any = None) -> Any:
1669 """Return a value for key, with a default value if it does not exist.
1670
1671 Args:
1672 key (str): The key of the column to access
1673 default (object):
1674 The default value to use if the key does not exist. (Defaults
1675 to :data:`None`.)
1676
1677 Returns:
1678 object:
1679 The value associated with the provided key, or a default value.
1680
1681 Examples:
1682 When the key exists, the value associated with it is returned.
1683
1684 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('x')
1685 'a'
1686
1687 The default value is :data:`None` when the key does not exist.
1688
1689 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z')
1690 None
1691
1692 The default value can be overridden with the ``default`` parameter.
1693
1694 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z', '')
1695 ''
1696
1697 >>> Row(('a', 'b'), {'x': 0, 'y': 1}).get('z', default = '')
1698 ''
1699 """
1700 index = self._xxx_field_to_index.get(key)
1701 if index is None:
1702 return default
1703 return self._xxx_values[index]
1704
1705 def __getattr__(self, name):
1706 value = self._xxx_field_to_index.get(name)
1707 if value is None:
1708 raise AttributeError("no row field {!r}".format(name))
1709 return self._xxx_values[value]
1710
1711 def __len__(self):
1712 return len(self._xxx_values)
1713
1714 def __getitem__(self, key):
1715 if isinstance(key, str):
1716 value = self._xxx_field_to_index.get(key)
1717 if value is None:
1718 raise KeyError("no row field {!r}".format(key))
1719 key = value
1720 return self._xxx_values[key]
1721
1722 def __eq__(self, other):
1723 if not isinstance(other, Row):
1724 return NotImplemented
1725 return (
1726 self._xxx_values == other._xxx_values
1727 and self._xxx_field_to_index == other._xxx_field_to_index
1728 )
1729
1730 def __ne__(self, other):
1731 return not self == other
1732
1733 def __repr__(self):
1734 # sort field dict by value, for determinism
1735 items = sorted(self._xxx_field_to_index.items(), key=operator.itemgetter(1))
1736 f2i = "{" + ", ".join("%r: %d" % item for item in items) + "}"
1737 return "Row({}, {})".format(self._xxx_values, f2i)
1738
1739
1740class _NoopProgressBarQueue(object):
1741 """A fake Queue class that does nothing.
1742
1743 This is used when there is no progress bar to send updates to.
1744 """
1745
1746 def put_nowait(self, item):
1747 """Don't actually do anything with the item."""
1748
1749
1750class RowIterator(HTTPIterator):
1751 """A class for iterating through HTTP/JSON API row list responses.
1752
1753 Args:
1754 client (Optional[google.cloud.bigquery.Client]):
1755 The API client instance. This should always be non-`None`, except for
1756 subclasses that do not use it, namely the ``_EmptyRowIterator``.
1757 api_request (Callable[google.cloud._http.JSONConnection.api_request]):
1758 The function to use to make API requests.
1759 path (str): The method path to query for the list of items.
1760 schema (Sequence[Union[ \
1761 :class:`~google.cloud.bigquery.schema.SchemaField`, \
1762 Mapping[str, Any] \
1763 ]]):
1764 The table's schema. If any item is a mapping, its content must be
1765 compatible with
1766 :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`.
1767 page_token (str): A token identifying a page in a result set to start
1768 fetching results from.
1769 max_results (Optional[int]): The maximum number of results to fetch.
1770 page_size (Optional[int]): The maximum number of rows in each page
1771 of results from this request. Non-positive values are ignored.
1772 Defaults to a sensible value set by the API.
1773 extra_params (Optional[Dict[str, object]]):
1774 Extra query string parameters for the API call.
1775 table (Optional[Union[ \
1776 google.cloud.bigquery.table.Table, \
1777 google.cloud.bigquery.table.TableReference, \
1778 ]]):
1779 The table which these rows belong to, or a reference to it. Used to
1780 call the BigQuery Storage API to fetch rows.
1781 selected_fields (Optional[Sequence[google.cloud.bigquery.schema.SchemaField]]):
1782 A subset of columns to select from this table.
1783 total_rows (Optional[int]):
1784 Total number of rows in the table.
1785 first_page_response (Optional[dict]):
1786 API response for the first page of results. These are returned when
1787 the first page is requested.
1788 query (Optional[str]):
1789 The query text used.
1790 total_bytes_processed (Optional[int]):
            Total bytes processed from job statistics, if present.
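
    Examples:
        A sketch of obtaining and iterating a ``RowIterator`` (assumes an
        authenticated ``client`` and an existing ``table_id``; the column
        name is illustrative):

        >>> rows = client.list_rows(table_id)
        >>> for row in rows:
        ...     name = row["name"]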
1792 """
1793
1794 def __init__(
1795 self,
1796 client,
1797 api_request,
1798 path,
1799 schema,
1800 page_token=None,
1801 max_results=None,
1802 page_size=None,
1803 extra_params=None,
1804 table=None,
1805 selected_fields=None,
1806 total_rows=None,
1807 first_page_response=None,
1808 location: Optional[str] = None,
1809 job_id: Optional[str] = None,
1810 query_id: Optional[str] = None,
1811 project: Optional[str] = None,
1812 num_dml_affected_rows: Optional[int] = None,
1813 query: Optional[str] = None,
1814 total_bytes_processed: Optional[int] = None,
1815 slot_millis: Optional[int] = None,
1816 ):
1817 super(RowIterator, self).__init__(
1818 client,
1819 api_request,
1820 path,
1821 item_to_value=_item_to_row,
1822 items_key="rows",
1823 page_token=page_token,
1824 max_results=max_results,
1825 extra_params=extra_params,
1826 page_start=_rows_page_start,
1827 next_token="pageToken",
1828 )
1829 schema = _to_schema_fields(schema)
1830 self._field_to_index = _helpers._field_to_index_mapping(schema)
1831 self._page_size = page_size
1832 self._preserve_order = False
1833 self._schema = schema
1834 self._selected_fields = selected_fields
1835 self._table = table
1836 self._total_rows = total_rows
1837 self._first_page_response = first_page_response
1838 self._location = location
1839 self._job_id = job_id
1840 self._query_id = query_id
1841 self._project = project
1842 self._num_dml_affected_rows = num_dml_affected_rows
1843 self._query = query
1844 self._total_bytes_processed = total_bytes_processed
1845 self._slot_millis = slot_millis
1846
1847 @property
1848 def _billing_project(self) -> Optional[str]:
        """GCP project ID that the BigQuery API bills to (if applicable)."""
1850 client = self.client
1851 return client.project if client is not None else None
1852
1853 @property
1854 def job_id(self) -> Optional[str]:
1855 """ID of the query job (if applicable).
1856
1857 To get the job metadata, call
1858 ``job = client.get_job(rows.job_id, location=rows.location)``.
1859 """
1860 return self._job_id
1861
1862 @property
1863 def location(self) -> Optional[str]:
1864 """Location where the query executed (if applicable).
1865
1866 See: https://cloud.google.com/bigquery/docs/locations
1867 """
1868 return self._location
1869
1870 @property
1871 def num_dml_affected_rows(self) -> Optional[int]:
1872 """If this RowIterator is the result of a DML query, the number of
1873 rows that were affected.
1874
1875 See:
1876 https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.num_dml_affected_rows
1877 """
1878 return self._num_dml_affected_rows
1879
1880 @property
1881 def project(self) -> Optional[str]:
1882 """GCP Project ID where these rows are read from."""
1883 return self._project
1884
1885 @property
1886 def query_id(self) -> Optional[str]:
1887 """[Preview] ID of a completed query.
1888
1889 This ID is auto-generated and not guaranteed to be populated.
1890 """
1891 return self._query_id
1892
1893 @property
1894 def query(self) -> Optional[str]:
1895 """The query text used."""
1896 return self._query
1897
1898 @property
1899 def total_bytes_processed(self) -> Optional[int]:
        """Total bytes processed from job statistics, if present."""
1901 return self._total_bytes_processed
1902
1903 @property
1904 def slot_millis(self) -> Optional[int]:
        """Number of slot milliseconds the user is actually billed for."""
1906 return self._slot_millis
1907
1908 def _is_almost_completely_cached(self):
        """Check if the results are almost completely cached.
1910
1911 This is useful to know, because we can avoid alternative download
1912 mechanisms.
1913 """
1914 if (
1915 not hasattr(self, "_first_page_response")
1916 or self._first_page_response is None
1917 ):
1918 return False
1919
1920 total_cached_rows = len(self._first_page_response.get(self._items_key, []))
1921 if self.max_results is not None and total_cached_rows >= self.max_results:
1922 return True
1923
1924 if (
1925 self.next_page_token is None
1926 and self._first_page_response.get(self._next_token) is None
1927 ):
1928 return True
1929
1930 if self._total_rows is not None:
1931 almost_completely = self._total_rows * ALMOST_COMPLETELY_CACHED_RATIO
1932 if total_cached_rows >= almost_completely:
1933 return True
1934
1935 return False
1936
1937 def _should_use_bqstorage(self, bqstorage_client, create_bqstorage_client):
1938 """Returns True if the BigQuery Storage API can be used.
1939
1940 Returns:
1941 bool
1942 True if the BigQuery Storage client can be used or created.
1943 """
1944 using_bqstorage_api = bqstorage_client or create_bqstorage_client
1945 if not using_bqstorage_api:
1946 return False
1947
1948 if self._table is None:
1949 return False
1950
1951 # The developer has already started paging through results if
1952 # next_page_token is set.
1953 if hasattr(self, "next_page_token") and self.next_page_token is not None:
1954 return False
1955
1956 if self._is_almost_completely_cached():
1957 return False
1958
1959 if self.max_results is not None:
1960 return False
1961
1962 try:
1963 _versions_helpers.BQ_STORAGE_VERSIONS.try_import(raise_if_error=True)
1964 except bq_exceptions.BigQueryStorageNotFoundError:
1965 warnings.warn(
                "BigQuery Storage module not found, fetching data with the "
                "REST endpoint instead."
1968 )
1969 return False
1970 except bq_exceptions.LegacyBigQueryStorageError as exc:
1971 warnings.warn(str(exc))
1972 return False
1973
1974 return True
1975
1976 def _get_next_page_response(self):
1977 """Requests the next page from the path provided.
1978
1979 Returns:
1980 Dict[str, object]:
1981 The parsed JSON response of the next page's contents.
1982 """
1983 if self._first_page_response:
1984 rows = self._first_page_response.get(self._items_key, [])[
1985 : self.max_results
1986 ]
1987 response = {
1988 self._items_key: rows,
1989 }
1990 if self._next_token in self._first_page_response:
1991 response[self._next_token] = self._first_page_response[self._next_token]
1992
1993 self._first_page_response = None
1994 return response
1995
1996 params = self._get_query_params()
1997
1998 # If the user has provided page_size and start_index, we need to pass
1999 # start_index for the first page, but for all subsequent pages, we
2000 # should not pass start_index. We make a shallow copy of params and do
2001 # not alter the original, so if the user iterates the results again,
2002 # start_index is preserved.
2003 params_copy = copy.copy(params)
2004 if self._page_size is not None:
2005 if self.page_number and "startIndex" in params:
2006 del params_copy["startIndex"]
2007
2008 return self.api_request(
2009 method=self._HTTP_METHOD, path=self.path, query_params=params_copy
2010 )
2011
2012 @property
2013 def schema(self):
2014 """List[google.cloud.bigquery.schema.SchemaField]: The subset of
2015 columns to be read from the table."""
2016 return list(self._schema)
2017
2018 @property
2019 def total_rows(self):
2020 """int: The total number of rows in the table or query results."""
2021 return self._total_rows
2022
2023 def _maybe_warn_max_results(
2024 self,
2025 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"],
2026 ):
2027 """Issue a warning if BQ Storage client is not ``None`` with ``max_results`` set.
2028
2029 This helper method should be used directly in the relevant top-level public
2030 methods, so that the warning is issued for the correct line in user code.
2031
2032 Args:
2033 bqstorage_client:
2034 The BigQuery Storage client intended to use for downloading result rows.
2035 """
2036 if bqstorage_client is not None and self.max_results is not None:
2037 warnings.warn(
2038 "Cannot use bqstorage_client if max_results is set, "
2039 "reverting to fetching data with the REST endpoint.",
2040 stacklevel=3,
2041 )
2042
2043 def _to_page_iterable(
2044 self, bqstorage_download, tabledata_list_download, bqstorage_client=None
2045 ):
2046 if not self._should_use_bqstorage(bqstorage_client, False):
2047 bqstorage_client = None
2048
2049 result_pages = (
2050 bqstorage_download()
2051 if bqstorage_client is not None
2052 else tabledata_list_download()
2053 )
2054 yield from result_pages
2055
2056 def to_arrow_iterable(
2057 self,
2058 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
2059 max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT, # type: ignore
2060 max_stream_count: Optional[int] = None,
2061 ) -> Iterator["pyarrow.RecordBatch"]:
        """[Beta] Create an iterable of :class:`pyarrow.RecordBatch`, to process the table as a stream.
2063
2064 Args:
2065 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]):
2066 A BigQuery Storage API client. If supplied, use the faster
2067 BigQuery Storage API to fetch rows from BigQuery.
2068
2069 This method requires the ``pyarrow`` and
2070 ``google-cloud-bigquery-storage`` libraries.
2071
2072 This method only exposes a subset of the capabilities of the
2073 BigQuery Storage API. For full access to all features
2074 (projections, filters, snapshots) use the Storage API directly.
2075
2076 max_queue_size (Optional[int]):
2077 The maximum number of result pages to hold in the internal queue when
2078 streaming query results over the BigQuery Storage API. Ignored if
2079 Storage API is not used.
2080
2081 By default, the max queue size is set to the number of BQ Storage streams
2082 created by the server. If ``max_queue_size`` is :data:`None`, the queue
2083 size is infinite.
2084
2085 max_stream_count (Optional[int]):
2086 The maximum number of parallel download streams when
2087 using BigQuery Storage API. Ignored if
2088 BigQuery Storage API is not used.
2089
                This setting also has no effect if the query result
                is deterministically ordered with ORDER BY,
                in which case the number of download streams is always 1.

                If set to 0 or None (the default), the number of download
                streams is determined by the BigQuery server. However, this
                behaviour can require a lot of memory to store temporary
                download results, especially with very large queries. In that
                case, setting this parameter to a value > 0 can help reduce
                system resource consumption.
2100
2101 Returns:
2102 pyarrow.RecordBatch:
2103 A generator of :class:`~pyarrow.RecordBatch`.
2104
2105 .. versionadded:: 2.31.0
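
        Examples:
            A sketch of streaming Arrow record batches (assumes an
            authenticated ``client`` and an existing ``table_id``):

            >>> rows = client.list_rows(table_id)
            >>> for record_batch in rows.to_arrow_iterable():
            ...     batch_row_count = record_batch.num_rows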
2106 """
2107 self._maybe_warn_max_results(bqstorage_client)
2108
2109 bqstorage_download = functools.partial(
2110 _pandas_helpers.download_arrow_bqstorage,
2111 self._billing_project,
2112 self._table,
2113 bqstorage_client,
2114 preserve_order=self._preserve_order,
2115 selected_fields=self._selected_fields,
2116 max_queue_size=max_queue_size,
2117 max_stream_count=max_stream_count,
2118 )
2119 tabledata_list_download = functools.partial(
2120 _pandas_helpers.download_arrow_row_iterator, iter(self.pages), self.schema
2121 )
2122 return self._to_page_iterable(
2123 bqstorage_download,
2124 tabledata_list_download,
2125 bqstorage_client=bqstorage_client,
2126 )
2127
2128 # If changing the signature of this method, make sure to apply the same
2129 # changes to job.QueryJob.to_arrow()
2130 def to_arrow(
2131 self,
2132 progress_bar_type: Optional[str] = None,
2133 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
2134 create_bqstorage_client: bool = True,
2135 ) -> "pyarrow.Table":
        """[Beta] Create a :class:`pyarrow.Table` by loading all pages of a
2137 table or query.
2138
2139 Args:
2140 progress_bar_type (Optional[str]):
2141 If set, use the `tqdm <https://tqdm.github.io/>`_ library to
2142 display a progress bar while the data downloads. Install the
2143 ``tqdm`` package to use this feature.
2144
2145 Possible values of ``progress_bar_type`` include:
2146
2147 ``None``
2148 No progress bar.
2149 ``'tqdm'``
2150 Use the :func:`tqdm.tqdm` function to print a progress bar
2151 to :data:`sys.stdout`.
2152 ``'tqdm_notebook'``
2153 Use the :func:`tqdm.notebook.tqdm` function to display a
2154 progress bar as a Jupyter notebook widget.
2155 ``'tqdm_gui'``
2156 Use the :func:`tqdm.tqdm_gui` function to display a
2157 progress bar as a graphical dialog box.
2158 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]):
2159 A BigQuery Storage API client. If supplied, use the faster BigQuery
2160 Storage API to fetch rows from BigQuery. This API is a billable API.
2161
                This method requires the ``google-cloud-bigquery-storage`` library.
2163
2164 This method only exposes a subset of the capabilities of the
2165 BigQuery Storage API. For full access to all features
2166 (projections, filters, snapshots) use the Storage API directly.
2167 create_bqstorage_client (Optional[bool]):
2168 If ``True`` (default), create a BigQuery Storage API client using
2169 the default API settings. The BigQuery Storage API is a faster way
2170 to fetch rows from BigQuery. See the ``bqstorage_client`` parameter
2171 for more information.
2172
2173 This argument does nothing if ``bqstorage_client`` is supplied.
2174
2175 .. versionadded:: 1.24.0
2176
2177 Returns:
            pyarrow.Table:
2179 A :class:`pyarrow.Table` populated with row data and column
2180 headers from the query results. The column headers are derived
2181 from the destination table's schema.
2182
2183 Raises:
2184 ValueError: If the :mod:`pyarrow` library cannot be imported.
2185
2186
2187 .. versionadded:: 1.17.0
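
        Examples:
            A sketch of downloading query results as a :class:`pyarrow.Table`
            (assumes an authenticated ``client``; the query text is an
            illustrative placeholder):

            >>> rows = client.query("SELECT name, age FROM my_dataset.people").result()
            >>> arrow_table = rows.to_arrow()
            >>> column_names = arrow_table.column_names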
2188 """
2189 if pyarrow is None:
2190 raise ValueError(_NO_PYARROW_ERROR)
2191
2192 self._maybe_warn_max_results(bqstorage_client)
2193
2194 if not self._should_use_bqstorage(bqstorage_client, create_bqstorage_client):
2195 create_bqstorage_client = False
2196 bqstorage_client = None
2197
2198 owns_bqstorage_client = False
2199 if not bqstorage_client and create_bqstorage_client:
2200 bqstorage_client = self.client._ensure_bqstorage_client()
2201 owns_bqstorage_client = bqstorage_client is not None
2202
2203 try:
2204 progress_bar = get_progress_bar(
2205 progress_bar_type, "Downloading", self.total_rows, "rows"
2206 )
2207
2208 record_batches = []
2209 for record_batch in self.to_arrow_iterable(
2210 bqstorage_client=bqstorage_client
2211 ):
2212 record_batches.append(record_batch)
2213
2214 if progress_bar is not None:
2215 # In some cases, the number of total rows is not populated
2216 # until the first page of rows is fetched. Update the
2217 # progress bar's total to keep an accurate count.
2218 progress_bar.total = progress_bar.total or self.total_rows
2219 progress_bar.update(record_batch.num_rows)
2220
2221 if progress_bar is not None:
2222 # Indicate that the download has finished.
2223 progress_bar.close()
2224 finally:
2225 if owns_bqstorage_client:
2226 bqstorage_client._transport.grpc_channel.close() # type: ignore
2227
2228 if record_batches and bqstorage_client is not None:
2229 return pyarrow.Table.from_batches(record_batches)
2230 else:
            # Either there are no records (empty record_batches), so we build the
            # schema from the BigQuery schema, **or** we used the REST API
            # (bqstorage_client is None), which doesn't add arrow extension
            # metadata, so we let `bq_to_arrow_schema` do it.
2236 arrow_schema = _pandas_helpers.bq_to_arrow_schema(self._schema)
2237 return pyarrow.Table.from_batches(record_batches, schema=arrow_schema)
2238
2239 def to_dataframe_iterable(
2240 self,
2241 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
2242 dtypes: Optional[Dict[str, Any]] = None,
2243 max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT, # type: ignore
2244 max_stream_count: Optional[int] = None,
    ) -> Iterator["pandas.DataFrame"]:
2246 """Create an iterable of pandas DataFrames, to process the table as a stream.
2247
2248 Args:
2249 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]):
2250 A BigQuery Storage API client. If supplied, use the faster
2251 BigQuery Storage API to fetch rows from BigQuery.
2252
                This method requires the ``google-cloud-bigquery-storage`` library.
2254
2255 This method only exposes a subset of the capabilities of the
2256 BigQuery Storage API. For full access to all features
2257 (projections, filters, snapshots) use the Storage API directly.
2258
2259 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]):
                A dictionary of column names and pandas ``dtype``s. The provided
2261 ``dtype`` is used when constructing the series for the column
2262 specified. Otherwise, the default pandas behavior is used.
2263
2264 max_queue_size (Optional[int]):
2265 The maximum number of result pages to hold in the internal queue when
2266 streaming query results over the BigQuery Storage API. Ignored if
2267 Storage API is not used.
2268
2269 By default, the max queue size is set to the number of BQ Storage streams
2270 created by the server. If ``max_queue_size`` is :data:`None`, the queue
2271 size is infinite.
2272
2273 .. versionadded:: 2.14.0
2274
2275 max_stream_count (Optional[int]):
2276 The maximum number of parallel download streams when
2277 using BigQuery Storage API. Ignored if
2278 BigQuery Storage API is not used.
2279
                This setting also has no effect if the query result
                is deterministically ordered with ORDER BY,
                in which case the number of download streams is always 1.

                If set to 0 or None (the default), the number of download
                streams is determined by the BigQuery server. However, this
                behaviour can require a lot of memory to store temporary
                download results, especially with very large queries. In that
                case, setting this parameter to a value > 0 can help reduce
                system resource consumption.
2290
2291 Returns:
2292 pandas.DataFrame:
2293 A generator of :class:`~pandas.DataFrame`.
2294
2295 Raises:
2296 ValueError:
2297 If the :mod:`pandas` library cannot be imported.
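
        Examples:
            A sketch of processing results one DataFrame page at a time
            (assumes an authenticated ``client`` and an existing ``table_id``):

            >>> rows = client.list_rows(table_id)
            >>> for df in rows.to_dataframe_iterable():
            ...     page_row_count = len(df)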
2298 """
2299 _pandas_helpers.verify_pandas_imports()
2300
2301 if dtypes is None:
2302 dtypes = {}
2303
2304 self._maybe_warn_max_results(bqstorage_client)
2305
2306 column_names = [field.name for field in self._schema]
2307 bqstorage_download = functools.partial(
2308 _pandas_helpers.download_dataframe_bqstorage,
2309 self._billing_project,
2310 self._table,
2311 bqstorage_client,
2312 column_names,
2313 dtypes,
2314 preserve_order=self._preserve_order,
2315 selected_fields=self._selected_fields,
2316 max_queue_size=max_queue_size,
2317 max_stream_count=max_stream_count,
2318 )
2319 tabledata_list_download = functools.partial(
2320 _pandas_helpers.download_dataframe_row_iterator,
2321 iter(self.pages),
2322 self.schema,
2323 dtypes,
2324 )
2325 return self._to_page_iterable(
2326 bqstorage_download,
2327 tabledata_list_download,
2328 bqstorage_client=bqstorage_client,
2329 )
2330
2331 # If changing the signature of this method, make sure to apply the same
2332 # changes to job.QueryJob.to_dataframe()
2333 def to_dataframe(
2334 self,
2335 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
2336 dtypes: Optional[Dict[str, Any]] = None,
2337 progress_bar_type: Optional[str] = None,
2338 create_bqstorage_client: bool = True,
2339 geography_as_object: bool = False,
2340 bool_dtype: Union[Any, None] = DefaultPandasDTypes.BOOL_DTYPE,
2341 int_dtype: Union[Any, None] = DefaultPandasDTypes.INT_DTYPE,
2342 float_dtype: Union[Any, None] = None,
2343 string_dtype: Union[Any, None] = None,
2344 date_dtype: Union[Any, None] = DefaultPandasDTypes.DATE_DTYPE,
2345 datetime_dtype: Union[Any, None] = None,
2346 time_dtype: Union[Any, None] = DefaultPandasDTypes.TIME_DTYPE,
2347 timestamp_dtype: Union[Any, None] = None,
2348 range_date_dtype: Union[Any, None] = DefaultPandasDTypes.RANGE_DATE_DTYPE,
2349 range_datetime_dtype: Union[
2350 Any, None
2351 ] = DefaultPandasDTypes.RANGE_DATETIME_DTYPE,
2352 range_timestamp_dtype: Union[
2353 Any, None
2354 ] = DefaultPandasDTypes.RANGE_TIMESTAMP_DTYPE,
2355 ) -> "pandas.DataFrame":
2356 """Create a pandas DataFrame by loading all pages of a query.
2357
2358 Args:
2359 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]):
2360 A BigQuery Storage API client. If supplied, use the faster
2361 BigQuery Storage API to fetch rows from BigQuery.
2362
                This method requires the ``google-cloud-bigquery-storage`` library.
2364
2365 This method only exposes a subset of the capabilities of the
2366 BigQuery Storage API. For full access to all features
2367 (projections, filters, snapshots) use the Storage API directly.
2368
2369 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]):
                A dictionary of column names and pandas ``dtype``s. The provided
2371 ``dtype`` is used when constructing the series for the column
2372 specified. Otherwise, the default pandas behavior is used.
2373 progress_bar_type (Optional[str]):
2374 If set, use the `tqdm <https://tqdm.github.io/>`_ library to
2375 display a progress bar while the data downloads. Install the
2376 ``tqdm`` package to use this feature.
2377
2378 Possible values of ``progress_bar_type`` include:
2379
2380 ``None``
2381 No progress bar.
2382 ``'tqdm'``
2383 Use the :func:`tqdm.tqdm` function to print a progress bar
2384 to :data:`sys.stdout`.
2385 ``'tqdm_notebook'``
2386 Use the :func:`tqdm.notebook.tqdm` function to display a
2387 progress bar as a Jupyter notebook widget.
2388 ``'tqdm_gui'``
2389 Use the :func:`tqdm.tqdm_gui` function to display a
2390 progress bar as a graphical dialog box.
2391
2392 .. versionadded:: 1.11.0
2393
2394 create_bqstorage_client (Optional[bool]):
2395 If ``True`` (default), create a BigQuery Storage API client
2396 using the default API settings. The BigQuery Storage API
2397 is a faster way to fetch rows from BigQuery. See the
2398 ``bqstorage_client`` parameter for more information.
2399
2400 This argument does nothing if ``bqstorage_client`` is supplied.
2401
2402 .. versionadded:: 1.24.0
2403
2404 geography_as_object (Optional[bool]):
2405 If ``True``, convert GEOGRAPHY data to :mod:`shapely`
2406 geometry objects. If ``False`` (default), don't cast
2407 geography data to :mod:`shapely` geometry objects.
2408
2409 .. versionadded:: 2.24.0
2410
2411 bool_dtype (Optional[pandas.Series.dtype, None]):
2412 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.BooleanDtype()``)
2413 to convert BigQuery Boolean type, instead of relying on the default
2414 ``pandas.BooleanDtype()``. If you explicitly set the value to ``None``,
2415 then the data type will be ``numpy.dtype("bool")``. BigQuery Boolean
2416 type can be found at:
2417 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#boolean_type
2418
2419 .. versionadded:: 3.8.0
2420
2421 int_dtype (Optional[pandas.Series.dtype, None]):
2422 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Int64Dtype()``)
2423 to convert BigQuery Integer types, instead of relying on the default
2424 ``pandas.Int64Dtype()``. If you explicitly set the value to ``None``,
2425 then the data type will be ``numpy.dtype("int64")``. A list of BigQuery
2426 Integer types can be found at:
2427 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types
2428
2429 .. versionadded:: 3.8.0
2430
2431 float_dtype (Optional[pandas.Series.dtype, None]):
2432 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Float32Dtype()``)
2433 to convert BigQuery Float type, instead of relying on the default
2434 ``numpy.dtype("float64")``. If you explicitly set the value to ``None``,
2435 then the data type will be ``numpy.dtype("float64")``. BigQuery Float
2436 type can be found at:
2437 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types
2438
2439 .. versionadded:: 3.8.0
2440
2441 string_dtype (Optional[pandas.Series.dtype, None]):
2442 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.StringDtype()``) to
2443 convert BigQuery String type, instead of relying on the default
2444 ``numpy.dtype("object")``. If you explicitly set the value to ``None``,
2445 then the data type will be ``numpy.dtype("object")``. BigQuery String
2446 type can be found at:
2447 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#string_type
2448
2449 .. versionadded:: 3.8.0
2450
2451 date_dtype (Optional[pandas.Series.dtype, None]):
2452 If set, indicate a pandas ExtensionDtype (e.g.
2453 ``pandas.ArrowDtype(pyarrow.date32())``) to convert BigQuery Date
2454 type, instead of relying on the default ``db_dtypes.DateDtype()``.
2455 If you explicitly set the value to ``None``, then the data type will be
                ``numpy.dtype("datetime64[ns]")`` or ``object`` if out of bounds. BigQuery
2457 Date type can be found at:
2458 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#date_type
2459
2460 .. versionadded:: 3.10.0
2461
2462 datetime_dtype (Optional[pandas.Series.dtype, None]):
2463 If set, indicate a pandas ExtensionDtype (e.g.
2464 ``pandas.ArrowDtype(pyarrow.timestamp("us"))``) to convert BigQuery Datetime
                type, instead of relying on the default ``numpy.dtype("datetime64[ns]")``.
                If you explicitly set the value to ``None``, then the data type will be
                ``numpy.dtype("datetime64[ns]")`` or ``object`` if out of bounds. BigQuery
2468 Datetime type can be found at:
2469 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#datetime_type
2470
2471 .. versionadded:: 3.10.0
2472
2473 time_dtype (Optional[pandas.Series.dtype, None]):
2474 If set, indicate a pandas ExtensionDtype (e.g.
2475 ``pandas.ArrowDtype(pyarrow.time64("us"))``) to convert BigQuery Time
2476 type, instead of relying on the default ``db_dtypes.TimeDtype()``.
2477 If you explicitly set the value to ``None``, then the data type will be
2478 ``numpy.dtype("object")``. BigQuery Time type can be found at:
2479 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#time_type
2480
2481 .. versionadded:: 3.10.0
2482
2483 timestamp_dtype (Optional[pandas.Series.dtype, None]):
2484 If set, indicate a pandas ExtensionDtype (e.g.
2485 ``pandas.ArrowDtype(pyarrow.timestamp("us", tz="UTC"))``) to convert BigQuery Timestamp
2486 type, instead of relying on the default ``numpy.dtype("datetime64[ns, UTC]")``.
2487 If you explicitly set the value to ``None``, then the data type will be
                ``numpy.dtype("datetime64[ns, UTC]")`` or ``object`` if out of bounds. BigQuery
                Timestamp type can be found at:
2490 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#timestamp_type
2491
2492 .. versionadded:: 3.10.0
2493
2494 range_date_dtype (Optional[pandas.Series.dtype, None]):
2495 If set, indicate a pandas ExtensionDtype, such as:
2496
2497 .. code-block:: python
2498
2499 pandas.ArrowDtype(pyarrow.struct(
2500 [("start", pyarrow.date32()), ("end", pyarrow.date32())]
2501 ))
2502
2503 to convert BigQuery RANGE<DATE> type, instead of relying on
2504 the default ``object``. If you explicitly set the value to
2505 ``None``, the data type will be ``object``. BigQuery Range type
2506 can be found at:
2507 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type
2508
2509 .. versionadded:: 3.21.0
2510
2511 range_datetime_dtype (Optional[pandas.Series.dtype, None]):
2512 If set, indicate a pandas ExtensionDtype, such as:
2513
2514 .. code-block:: python
2515
2516 pandas.ArrowDtype(pyarrow.struct(
2517 [
2518 ("start", pyarrow.timestamp("us")),
2519 ("end", pyarrow.timestamp("us")),
2520 ]
2521 ))
2522
2523 to convert BigQuery RANGE<DATETIME> type, instead of relying on
2524 the default ``object``. If you explicitly set the value to
2525 ``None``, the data type will be ``object``. BigQuery Range type
2526 can be found at:
2527 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type
2528
2529 .. versionadded:: 3.21.0
2530
2531 range_timestamp_dtype (Optional[pandas.Series.dtype, None]):
2532 If set, indicate a pandas ExtensionDtype, such as:
2533
2534 .. code-block:: python
2535
2536 pandas.ArrowDtype(pyarrow.struct(
2537 [
2538 ("start", pyarrow.timestamp("us", tz="UTC")),
2539 ("end", pyarrow.timestamp("us", tz="UTC")),
2540 ]
2541 ))
2542
2543 to convert BigQuery RANGE<TIMESTAMP> type, instead of relying
2544 on the default ``object``. If you explicitly set the value to
2545 ``None``, the data type will be ``object``. BigQuery Range type
2546 can be found at:
2547 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type
2548
2549 .. versionadded:: 3.21.0
2550
2551 Returns:
2552 pandas.DataFrame:
2553 A :class:`~pandas.DataFrame` populated with row data and column
2554 headers from the query results. The column headers are derived
2555 from the destination table's schema.
2556
2557 Raises:
2558 ValueError:
2559 If the :mod:`pandas` library cannot be imported, or
2560 the :mod:`google.cloud.bigquery_storage_v1` module is
                required but cannot be imported. Also raised if
                `geography_as_object` is `True` but the
                :mod:`shapely` library cannot be imported, or if
                `bool_dtype`, `int_dtype`, or another dtype parameter
                is not a supported dtype.
2566
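        Examples:
            A sketch of downloading query results into a DataFrame (assumes an
            authenticated ``client``; the query text and column name are
            illustrative placeholders):

            >>> rows = client.query("SELECT name, age FROM my_dataset.people").result()
            >>> df = rows.to_dataframe()
            >>> names = df["name"]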
2567 """
2568 _pandas_helpers.verify_pandas_imports()
2569
2570 if geography_as_object and shapely is None:
2571 raise ValueError(_NO_SHAPELY_ERROR)
2572
2573 if bool_dtype is DefaultPandasDTypes.BOOL_DTYPE:
2574 bool_dtype = pandas.BooleanDtype()
2575
2576 if int_dtype is DefaultPandasDTypes.INT_DTYPE:
2577 int_dtype = pandas.Int64Dtype()
2578
2579 if time_dtype is DefaultPandasDTypes.TIME_DTYPE:
2580 time_dtype = db_dtypes.TimeDtype()
2581
2582 if range_date_dtype is DefaultPandasDTypes.RANGE_DATE_DTYPE:
2583 if _versions_helpers.SUPPORTS_RANGE_PYARROW:
2584 range_date_dtype = pandas.ArrowDtype(
2585 pyarrow.struct(
2586 [("start", pyarrow.date32()), ("end", pyarrow.date32())]
2587 )
2588 )
2589 else:
2590 warnings.warn(_RANGE_PYARROW_WARNING)
2591 range_date_dtype = None
2592
2593 if range_datetime_dtype is DefaultPandasDTypes.RANGE_DATETIME_DTYPE:
2594 if _versions_helpers.SUPPORTS_RANGE_PYARROW:
2595 range_datetime_dtype = pandas.ArrowDtype(
2596 pyarrow.struct(
2597 [
2598 ("start", pyarrow.timestamp("us")),
2599 ("end", pyarrow.timestamp("us")),
2600 ]
2601 )
2602 )
2603 else:
2604 warnings.warn(_RANGE_PYARROW_WARNING)
2605 range_datetime_dtype = None
2606
2607 if range_timestamp_dtype is DefaultPandasDTypes.RANGE_TIMESTAMP_DTYPE:
2608 if _versions_helpers.SUPPORTS_RANGE_PYARROW:
2609 range_timestamp_dtype = pandas.ArrowDtype(
2610 pyarrow.struct(
2611 [
2612 ("start", pyarrow.timestamp("us", tz="UTC")),
2613 ("end", pyarrow.timestamp("us", tz="UTC")),
2614 ]
2615 )
2616 )
2617 else:
2618 warnings.warn(_RANGE_PYARROW_WARNING)
2619 range_timestamp_dtype = None
2620
2621 if bool_dtype is not None and not hasattr(bool_dtype, "__from_arrow__"):
2622 raise ValueError("bool_dtype", _NO_SUPPORTED_DTYPE)
2623
2624 if int_dtype is not None and not hasattr(int_dtype, "__from_arrow__"):
2625 raise ValueError("int_dtype", _NO_SUPPORTED_DTYPE)
2626
2627 if float_dtype is not None and not hasattr(float_dtype, "__from_arrow__"):
2628 raise ValueError("float_dtype", _NO_SUPPORTED_DTYPE)
2629
2630 if string_dtype is not None and not hasattr(string_dtype, "__from_arrow__"):
2631 raise ValueError("string_dtype", _NO_SUPPORTED_DTYPE)
2632
2633 if (
2634 date_dtype is not None
2635 and date_dtype is not DefaultPandasDTypes.DATE_DTYPE
2636 and not hasattr(date_dtype, "__from_arrow__")
2637 ):
2638 raise ValueError("date_dtype", _NO_SUPPORTED_DTYPE)
2639
2640 if datetime_dtype is not None and not hasattr(datetime_dtype, "__from_arrow__"):
2641 raise ValueError("datetime_dtype", _NO_SUPPORTED_DTYPE)
2642
2643 if time_dtype is not None and not hasattr(time_dtype, "__from_arrow__"):
2644 raise ValueError("time_dtype", _NO_SUPPORTED_DTYPE)
2645
2646 if timestamp_dtype is not None and not hasattr(
2647 timestamp_dtype, "__from_arrow__"
2648 ):
2649 raise ValueError("timestamp_dtype", _NO_SUPPORTED_DTYPE)
2650
2651 if dtypes is None:
2652 dtypes = {}
2653
2654 self._maybe_warn_max_results(bqstorage_client)
2655
2656 if not self._should_use_bqstorage(bqstorage_client, create_bqstorage_client):
2657 create_bqstorage_client = False
2658 bqstorage_client = None
2659
2660 record_batch = self.to_arrow(
2661 progress_bar_type=progress_bar_type,
2662 bqstorage_client=bqstorage_client,
2663 create_bqstorage_client=create_bqstorage_client,
2664 )
2665
        # The default date dtype is `db_dtypes.DateDtype()`, which could cause an
        # out-of-bounds error when pyarrow converts date values to nanosecond
        # precision. To avoid the error, we set the date_as_object parameter to
        # True, if necessary.
2669 date_as_object = False
2670 if date_dtype is DefaultPandasDTypes.DATE_DTYPE:
2671 date_dtype = db_dtypes.DateDtype()
2672 date_as_object = not all(
2673 self.__can_cast_timestamp_ns(col)
2674 for col in record_batch
2675 # Type can be date32 or date64 (plus units).
2676 # See: https://arrow.apache.org/docs/python/api/datatypes.html
2677 if pyarrow.types.is_date(col.type)
2678 )
2679
2680 timestamp_as_object = False
2681 if datetime_dtype is None and timestamp_dtype is None:
2682 timestamp_as_object = not all(
2683 self.__can_cast_timestamp_ns(col)
2684 for col in record_batch
2685 # Type can be datetime and timestamp (plus units and time zone).
2686 # See: https://arrow.apache.org/docs/python/api/datatypes.html
2687 if pyarrow.types.is_timestamp(col.type)
2688 )
2689
2690 df = record_batch.to_pandas(
2691 date_as_object=date_as_object,
2692 timestamp_as_object=timestamp_as_object,
2693 integer_object_nulls=True,
2694 types_mapper=_pandas_helpers.default_types_mapper(
2695 date_as_object=date_as_object,
2696 bool_dtype=bool_dtype,
2697 int_dtype=int_dtype,
2698 float_dtype=float_dtype,
2699 string_dtype=string_dtype,
2700 date_dtype=date_dtype,
2701 datetime_dtype=datetime_dtype,
2702 time_dtype=time_dtype,
2703 timestamp_dtype=timestamp_dtype,
2704 range_date_dtype=range_date_dtype,
2705 range_datetime_dtype=range_datetime_dtype,
2706 range_timestamp_dtype=range_timestamp_dtype,
2707 ),
2708 )
2709
2710 for column in dtypes:
2711 df[column] = pandas.Series(df[column], dtype=dtypes[column], copy=False)
2712
2713 if geography_as_object:
2714 for field in self.schema:
2715 if field.field_type.upper() == "GEOGRAPHY" and field.mode != "REPEATED":
2716 df[field.name] = df[field.name].dropna().apply(_read_wkt)
2717
2718 return df
2719
2720 @staticmethod
2721 def __can_cast_timestamp_ns(column):
2722 try:
2723 column.cast("timestamp[ns]")
2724 except pyarrow.lib.ArrowInvalid:
2725 return False
2726 else:
2727 return True
2728
2729 # If changing the signature of this method, make sure to apply the same
2730 # changes to job.QueryJob.to_geodataframe()
2731 def to_geodataframe(
2732 self,
2733 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
2734 dtypes: Optional[Dict[str, Any]] = None,
2735 progress_bar_type: Optional[str] = None,
2736 create_bqstorage_client: bool = True,
2737 geography_column: Optional[str] = None,
2738 bool_dtype: Union[Any, None] = DefaultPandasDTypes.BOOL_DTYPE,
2739 int_dtype: Union[Any, None] = DefaultPandasDTypes.INT_DTYPE,
2740 float_dtype: Union[Any, None] = None,
2741 string_dtype: Union[Any, None] = None,
2742 ) -> "geopandas.GeoDataFrame":
2743 """Create a GeoPandas GeoDataFrame by loading all pages of a query.
2744
2745 Args:
2746 bqstorage_client (Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]):
2747 A BigQuery Storage API client. If supplied, use the faster
2748 BigQuery Storage API to fetch rows from BigQuery.
2749
2750 This method requires the ``pyarrow`` and
2751 ``google-cloud-bigquery-storage`` libraries.
2752
2753 This method only exposes a subset of the capabilities of the
2754 BigQuery Storage API. For full access to all features
2755 (projections, filters, snapshots) use the Storage API directly.
2756
2757 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]):
                A dictionary of column names and pandas ``dtype``s. The provided
2759 ``dtype`` is used when constructing the series for the column
2760 specified. Otherwise, the default pandas behavior is used.
2761 progress_bar_type (Optional[str]):
2762 If set, use the `tqdm <https://tqdm.github.io/>`_ library to
2763 display a progress bar while the data downloads. Install the
2764 ``tqdm`` package to use this feature.
2765
2766 Possible values of ``progress_bar_type`` include:
2767
2768 ``None``
2769 No progress bar.
2770 ``'tqdm'``
2771 Use the :func:`tqdm.tqdm` function to print a progress bar
2772 to :data:`sys.stdout`.
2773 ``'tqdm_notebook'``
2774 Use the :func:`tqdm.notebook.tqdm` function to display a
2775 progress bar as a Jupyter notebook widget.
2776 ``'tqdm_gui'``
2777 Use the :func:`tqdm.tqdm_gui` function to display a
2778 progress bar as a graphical dialog box.
2779
2780 create_bqstorage_client (Optional[bool]):
2781 If ``True`` (default), create a BigQuery Storage API client
2782 using the default API settings. The BigQuery Storage API
2783 is a faster way to fetch rows from BigQuery. See the
2784 ``bqstorage_client`` parameter for more information.
2785
2786 This argument does nothing if ``bqstorage_client`` is supplied.
2787
2788 geography_column (Optional[str]):
                If there is more than one GEOGRAPHY column, identifies
                which one to use to construct the geopandas
                GeoDataFrame. This option can be omitted if there's
                only one GEOGRAPHY column.
2793 bool_dtype (Optional[pandas.Series.dtype, None]):
2794 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.BooleanDtype()``)
2795 to convert BigQuery Boolean type, instead of relying on the default
2796 ``pandas.BooleanDtype()``. If you explicitly set the value to ``None``,
2797 then the data type will be ``numpy.dtype("bool")``. BigQuery Boolean
2798 type can be found at:
2799 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#boolean_type
2800 int_dtype (Optional[pandas.Series.dtype, None]):
2801 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Int64Dtype()``)
2802 to convert BigQuery Integer types, instead of relying on the default
2803 ``pandas.Int64Dtype()``. If you explicitly set the value to ``None``,
2804 then the data type will be ``numpy.dtype("int64")``. A list of BigQuery
2805 Integer types can be found at:
2806 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types
2807 float_dtype (Optional[pandas.Series.dtype, None]):
2808 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.Float32Dtype()``)
2809 to convert BigQuery Float type, instead of relying on the default
2810 ``numpy.dtype("float64")``. If you explicitly set the value to ``None``,
2811 then the data type will be ``numpy.dtype("float64")``. BigQuery Float
2812 type can be found at:
2813 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types
2814 string_dtype (Optional[pandas.Series.dtype, None]):
2815 If set, indicate a pandas ExtensionDtype (e.g. ``pandas.StringDtype()``) to
2816 convert BigQuery String type, instead of relying on the default
2817 ``numpy.dtype("object")``. If you explicitly set the value to ``None``,
2818 then the data type will be ``numpy.dtype("object")``. BigQuery String
2819 type can be found at:
2820 https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#string_type
2821
2822 Returns:
2823 geopandas.GeoDataFrame:
2824 A :class:`geopandas.GeoDataFrame` populated with row
2825 data and column headers from the query results. The
2826 column headers are derived from the destination
2827 table's schema.
2828
2829 Raises:
2830 ValueError:
2831 If the :mod:`geopandas` library cannot be imported, or the
2832 :mod:`google.cloud.bigquery_storage_v1` module is
2833 required but cannot be imported.
2834
2835 .. versionadded:: 2.24.0
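
        Examples:
            A sketch of loading a table with a GEOGRAPHY column into a
            GeoDataFrame (assumes an authenticated ``client``; the query text
            and column name are illustrative placeholders):

            >>> rows = client.query(
            ...     "SELECT name, geog FROM my_dataset.my_geo_table"
            ... ).result()
            >>> gdf = rows.to_geodataframe(geography_column="geog")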
2836 """
2837 if geopandas is None:
2838 raise ValueError(_NO_GEOPANDAS_ERROR)
2839
2840 geography_columns = set(
2841 field.name
2842 for field in self.schema
2843 if field.field_type.upper() == "GEOGRAPHY"
2844 )
2845 if not geography_columns:
2846 raise TypeError(
2847 "There must be at least one GEOGRAPHY column"
2848 " to create a GeoDataFrame"
2849 )
2850
2851 if geography_column:
2852 if geography_column not in geography_columns:
2853 raise ValueError(
2854 f"The given geography column, {geography_column}, doesn't name"
2855 f" a GEOGRAPHY column in the result."
2856 )
2857 elif len(geography_columns) == 1:
2858 [geography_column] = geography_columns
2859 else:
2860 raise ValueError(
2861 "There is more than one GEOGRAPHY column in the result. "
2862 "The geography_column argument must be used to specify which "
2863 "one to use to create a GeoDataFrame"
2864 )
2865
2866 df = self.to_dataframe(
2867 bqstorage_client,
2868 dtypes,
2869 progress_bar_type,
2870 create_bqstorage_client,
2871 geography_as_object=True,
2872 bool_dtype=bool_dtype,
2873 int_dtype=int_dtype,
2874 float_dtype=float_dtype,
2875 string_dtype=string_dtype,
2876 )
2877
2878 return geopandas.GeoDataFrame(
2879 df, crs=_COORDINATE_REFERENCE_SYSTEM, geometry=geography_column
2880 )
2881
2882
2883class _EmptyRowIterator(RowIterator):
2884 """An empty row iterator.
2885
2886 This class prevents API requests when there are no rows to fetch or rows
2887 are impossible to fetch, such as with query results for DDL CREATE VIEW
2888 statements.
2889 """
2890
2891 schema = ()
2892 pages = ()
2893 total_rows = 0
2894
2895 def __init__(
2896 self, client=None, api_request=None, path=None, schema=(), *args, **kwargs
2897 ):
2898 super().__init__(
2899 client=client,
2900 api_request=api_request,
2901 path=path,
2902 schema=schema,
2903 *args,
2904 **kwargs,
2905 )
2906
2907 def to_arrow(
2908 self,
2909 progress_bar_type=None,
2910 bqstorage_client=None,
2911 create_bqstorage_client=True,
2912 ) -> "pyarrow.Table":
        """[Beta] Create an empty :class:`pyarrow.Table`.
2914
2915 Args:
2916 progress_bar_type (str): Ignored. Added for compatibility with RowIterator.
2917 bqstorage_client (Any): Ignored. Added for compatibility with RowIterator.
2918 create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator.
2919
2920 Returns:
2921 pyarrow.Table: An empty :class:`pyarrow.Table`.
2922 """
2923 if pyarrow is None:
2924 raise ValueError(_NO_PYARROW_ERROR)
2925 return pyarrow.Table.from_arrays(())
2926
2927 def to_dataframe(
2928 self,
2929 bqstorage_client=None,
2930 dtypes=None,
2931 progress_bar_type=None,
2932 create_bqstorage_client=True,
2933 geography_as_object=False,
2934 bool_dtype=None,
2935 int_dtype=None,
2936 float_dtype=None,
2937 string_dtype=None,
2938 date_dtype=None,
2939 datetime_dtype=None,
2940 time_dtype=None,
2941 timestamp_dtype=None,
2942 range_date_dtype=None,
2943 range_datetime_dtype=None,
2944 range_timestamp_dtype=None,
2945 ) -> "pandas.DataFrame":
2946 """Create an empty dataframe.
2947
2948 Args:
2949 bqstorage_client (Any): Ignored. Added for compatibility with RowIterator.
2950 dtypes (Any): Ignored. Added for compatibility with RowIterator.
2951 progress_bar_type (Any): Ignored. Added for compatibility with RowIterator.
2952 create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator.
2953 geography_as_object (bool): Ignored. Added for compatibility with RowIterator.
2954 bool_dtype (Any): Ignored. Added for compatibility with RowIterator.
2955 int_dtype (Any): Ignored. Added for compatibility with RowIterator.
2956 float_dtype (Any): Ignored. Added for compatibility with RowIterator.
2957 string_dtype (Any): Ignored. Added for compatibility with RowIterator.
2958 date_dtype (Any): Ignored. Added for compatibility with RowIterator.
2959 datetime_dtype (Any): Ignored. Added for compatibility with RowIterator.
2960 time_dtype (Any): Ignored. Added for compatibility with RowIterator.
2961 timestamp_dtype (Any): Ignored. Added for compatibility with RowIterator.
2962 range_date_dtype (Any): Ignored. Added for compatibility with RowIterator.
2963 range_datetime_dtype (Any): Ignored. Added for compatibility with RowIterator.
2964 range_timestamp_dtype (Any): Ignored. Added for compatibility with RowIterator.
2965
2966 Returns:
2967 pandas.DataFrame: An empty :class:`~pandas.DataFrame`.
2968 """
2969 _pandas_helpers.verify_pandas_imports()
2970 return pandas.DataFrame()
2971
2972 def to_geodataframe(
2973 self,
2974 bqstorage_client=None,
2975 dtypes=None,
2976 progress_bar_type=None,
2977 create_bqstorage_client=True,
2978 geography_column: Optional[str] = None,
2979 bool_dtype: Union[Any, None] = DefaultPandasDTypes.BOOL_DTYPE,
2980 int_dtype: Union[Any, None] = DefaultPandasDTypes.INT_DTYPE,
2981 float_dtype: Union[Any, None] = None,
2982 string_dtype: Union[Any, None] = None,
    ) -> "geopandas.GeoDataFrame":
        """Create an empty GeoDataFrame.
2985
2986 Args:
2987 bqstorage_client (Any): Ignored. Added for compatibility with RowIterator.
2988 dtypes (Any): Ignored. Added for compatibility with RowIterator.
2989 progress_bar_type (Any): Ignored. Added for compatibility with RowIterator.
2990 create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator.
2991 geography_column (str): Ignored. Added for compatibility with RowIterator.
2992 bool_dtype (Any): Ignored. Added for compatibility with RowIterator.
2993 int_dtype (Any): Ignored. Added for compatibility with RowIterator.
2994 float_dtype (Any): Ignored. Added for compatibility with RowIterator.
2995 string_dtype (Any): Ignored. Added for compatibility with RowIterator.
2996
2997 Returns:
            geopandas.GeoDataFrame: An empty :class:`~geopandas.GeoDataFrame`.
2999 """
3000 if geopandas is None:
3001 raise ValueError(_NO_GEOPANDAS_ERROR)
3002
        # Since an empty GeoDataFrame has no geometry column, we do not set a
        # CRS on it, because that's deprecated.
3005 return geopandas.GeoDataFrame()
3006
3007 def to_dataframe_iterable(
3008 self,
3009 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
3010 dtypes: Optional[Dict[str, Any]] = None,
3011 max_queue_size: Optional[int] = None,
3012 max_stream_count: Optional[int] = None,
3013 ) -> Iterator["pandas.DataFrame"]:
3014 """Create an iterable of pandas DataFrames, to process the table as a stream.
3015
3016 .. versionadded:: 2.21.0
3017
3018 Args:
3019 bqstorage_client:
3020 Ignored. Added for compatibility with RowIterator.
3021
3022 dtypes (Optional[Map[str, Union[str, pandas.Series.dtype]]]):
3023 Ignored. Added for compatibility with RowIterator.
3024
3025 max_queue_size:
3026 Ignored. Added for compatibility with RowIterator.
3027
3028 max_stream_count:
3029 Ignored. Added for compatibility with RowIterator.
3030
3031 Returns:
3032 An iterator yielding a single empty :class:`~pandas.DataFrame`.
3033
3034 Raises:
3035 ValueError:
3036 If the :mod:`pandas` library cannot be imported.
3037 """
3038 _pandas_helpers.verify_pandas_imports()
3039 return iter((pandas.DataFrame(),))
3040
3041 def to_arrow_iterable(
3042 self,
3043 bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
3044 max_queue_size: Optional[int] = None,
3045 max_stream_count: Optional[int] = None,
3046 ) -> Iterator["pyarrow.RecordBatch"]:
        """Create an iterable of pyarrow RecordBatches, to process the table as a stream.
3048
3049 .. versionadded:: 2.31.0
3050
3051 Args:
3052 bqstorage_client:
3053 Ignored. Added for compatibility with RowIterator.
3054
3055 max_queue_size:
3056 Ignored. Added for compatibility with RowIterator.
3057
3058 max_stream_count:
3059 Ignored. Added for compatibility with RowIterator.
3060
3061 Returns:
3062 An iterator yielding a single empty :class:`~pyarrow.RecordBatch`.
3063 """
3064 return iter((pyarrow.record_batch([]),))
3065
3066 def __iter__(self):
3067 return iter(())
3068
3069
3070class PartitionRange(object):
3071 """Definition of the ranges for range partitioning.
3072
3073 .. note::
3074 **Beta**. The integer range partitioning feature is in a pre-release
3075 state and might change or have limited support.
3076
3077 Args:
3078 start (Optional[int]):
3079 Sets the
3080 :attr:`~google.cloud.bigquery.table.PartitionRange.start`
3081 property.
3082 end (Optional[int]):
3083 Sets the
3084 :attr:`~google.cloud.bigquery.table.PartitionRange.end`
3085 property.
3086 interval (Optional[int]):
3087 Sets the
3088 :attr:`~google.cloud.bigquery.table.PartitionRange.interval`
3089 property.
3090 _properties (Optional[dict]):
3091 Private. Used to construct object from API resource.
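
    Examples:
        An illustrative range from 0 (inclusive) to 100,000 (exclusive) in
        steps of 10 (the values are assumptions for the example):

        >>> from google.cloud import bigquery
        >>> partition_range = bigquery.PartitionRange(start=0, end=100000, interval=10)
        >>> partition_range.interval
        10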
3092 """
3093
3094 def __init__(self, start=None, end=None, interval=None, _properties=None) -> None:
3095 if _properties is None:
3096 _properties = {}
3097 self._properties = _properties
3098
3099 if start is not None:
3100 self.start = start
3101 if end is not None:
3102 self.end = end
3103 if interval is not None:
3104 self.interval = interval
3105
3106 @property
3107 def start(self):
3108 """int: The start of range partitioning, inclusive."""
3109 return _helpers._int_or_none(self._properties.get("start"))
3110
3111 @start.setter
3112 def start(self, value):
3113 self._properties["start"] = _helpers._str_or_none(value)
3114
3115 @property
3116 def end(self):
3117 """int: The end of range partitioning, exclusive."""
3118 return _helpers._int_or_none(self._properties.get("end"))
3119
3120 @end.setter
3121 def end(self, value):
3122 self._properties["end"] = _helpers._str_or_none(value)
3123
3124 @property
3125 def interval(self):
3126 """int: The width of each interval."""
3127 return _helpers._int_or_none(self._properties.get("interval"))
3128
3129 @interval.setter
3130 def interval(self, value):
3131 self._properties["interval"] = _helpers._str_or_none(value)
3132
3133 def _key(self):
3134 return tuple(sorted(self._properties.items()))
3135
3136 def __eq__(self, other):
3137 if not isinstance(other, PartitionRange):
3138 return NotImplemented
3139 return self._key() == other._key()
3140
3141 def __ne__(self, other):
3142 return not self == other
3143
3144 def __repr__(self):
3145 key_vals = ["{}={}".format(key, val) for key, val in self._key()]
3146 return "PartitionRange({})".format(", ".join(key_vals))
3147
3148
3149class RangePartitioning(object):
3150 """Range-based partitioning configuration for a table.
3151
3152 .. note::
3153 **Beta**. The integer range partitioning feature is in a pre-release
3154 state and might change or have limited support.
3155
3156 Args:
3157 range_ (Optional[google.cloud.bigquery.table.PartitionRange]):
3158 Sets the
3159 :attr:`google.cloud.bigquery.table.RangePartitioning.range_`
3160 property.
3161 field (Optional[str]):
3162 Sets the
3163 :attr:`google.cloud.bigquery.table.RangePartitioning.field`
3164 property.
3165 _properties (Optional[dict]):
3166 Private. Used to construct object from API resource.
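
    Examples:
        A sketch of configuring integer-range partitioning on an assumed
        ``zipcode`` column:

        >>> from google.cloud import bigquery
        >>> range_partitioning = bigquery.RangePartitioning(
        ...     field="zipcode",
        ...     range_=bigquery.PartitionRange(start=0, end=100000, interval=10),
        ... )
        >>> range_partitioning.field
        'zipcode'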
3167 """
3168
3169 def __init__(self, range_=None, field=None, _properties=None) -> None:
3170 if _properties is None:
3171 _properties = {}
3172 self._properties: Dict[str, Any] = _properties
3173
3174 if range_ is not None:
3175 self.range_ = range_
3176 if field is not None:
3177 self.field = field
3178
3179 # Trailing underscore to prevent conflict with built-in range() function.
3180 @property
3181 def range_(self):
3182 """google.cloud.bigquery.table.PartitionRange: Defines the
3183 ranges for range partitioning.
3184
3185 Raises:
3186 ValueError:
3187 If the value is not a :class:`PartitionRange`.
3188 """
3189 range_properties = self._properties.setdefault("range", {})
3190 return PartitionRange(_properties=range_properties)
3191
3192 @range_.setter
3193 def range_(self, value):
3194 if not isinstance(value, PartitionRange):
3195 raise ValueError("Expected a PartitionRange, but got {}.".format(value))
3196 self._properties["range"] = value._properties
3197
3198 @property
3199 def field(self):
3200 """str: The table is partitioned by this field.
3201
3202 The field must be a top-level ``NULLABLE`` / ``REQUIRED`` field. The
3203 only supported type is ``INTEGER`` / ``INT64``.
3204 """
3205 return self._properties.get("field")
3206
3207 @field.setter
3208 def field(self, value):
3209 self._properties["field"] = value
3210
3211 def _key(self):
3212 return (("field", self.field), ("range_", self.range_))
3213
3214 def __eq__(self, other):
3215 if not isinstance(other, RangePartitioning):
3216 return NotImplemented
3217 return self._key() == other._key()
3218
3219 def __ne__(self, other):
3220 return not self == other
3221
3222 def __repr__(self):
3223 key_vals = ["{}={}".format(key, repr(val)) for key, val in self._key()]
3224 return "RangePartitioning({})".format(", ".join(key_vals))
3225
3226
3227class TimePartitioningType(object):
3228 """Specifies the type of time partitioning to perform."""
3229
3230 DAY = "DAY"
3231 """str: Generates one partition per day."""
3232
3233 HOUR = "HOUR"
3234 """str: Generates one partition per hour."""
3235
3236 MONTH = "MONTH"
3237 """str: Generates one partition per month."""
3238
3239 YEAR = "YEAR"
3240 """str: Generates one partition per year."""
3241
3242
3243class TimePartitioning(object):
3244 """Configures time-based partitioning for a table.
3245
3246 Args:
3247 type_ (Optional[google.cloud.bigquery.table.TimePartitioningType]):
3248 Specifies the type of time partitioning to perform. Defaults to
3249 :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`.
3250
3251 Supported values are:
3252
3253 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.HOUR`
3254 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.DAY`
3255 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.MONTH`
3256 * :attr:`~google.cloud.bigquery.table.TimePartitioningType.YEAR`
3257
3258 field (Optional[str]):
3259 If set, the table is partitioned by this field. If not set, the
3260 table is partitioned by pseudo column ``_PARTITIONTIME``. The field
3261 must be a top-level ``TIMESTAMP``, ``DATETIME``, or ``DATE``
3262 field. Its mode must be ``NULLABLE`` or ``REQUIRED``.
3263
3264 See the `time-unit column-partitioned tables guide
3265 <https://cloud.google.com/bigquery/docs/creating-column-partitions>`_
3266 in the BigQuery documentation.
        expiration_ms (Optional[int]):
3268 Number of milliseconds for which to keep the storage for a
3269 partition.
3270 require_partition_filter (Optional[bool]):
3271 DEPRECATED: Use
3272 :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`,
3273 instead.
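
    Example:

        A sketch of daily partitioning on a ``DATE`` column with a 90-day
        partition expiration (assumes a :class:`Table` instance named
        ``table`` and a column named ``transaction_date``; the names are
        illustrative)::

            from google.cloud import bigquery

            table.time_partitioning = bigquery.TimePartitioning(
                type_=bigquery.TimePartitioningType.DAY,
                field="transaction_date",
                expiration_ms=90 * 24 * 60 * 60 * 1000,
            )
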
3274 """
3275
3276 def __init__(
3277 self, type_=None, field=None, expiration_ms=None, require_partition_filter=None
3278 ) -> None:
3279 self._properties: Dict[str, Any] = {}
3280 if type_ is None:
3281 self.type_ = TimePartitioningType.DAY
3282 else:
3283 self.type_ = type_
3284 if field is not None:
3285 self.field = field
3286 if expiration_ms is not None:
3287 self.expiration_ms = expiration_ms
3288 if require_partition_filter is not None:
3289 self.require_partition_filter = require_partition_filter
3290
3291 @property
3292 def type_(self):
3293 """google.cloud.bigquery.table.TimePartitioningType: The type of time
3294 partitioning to use.
3295 """
3296 return self._properties.get("type")
3297
3298 @type_.setter
3299 def type_(self, value):
3300 self._properties["type"] = value
3301
3302 @property
3303 def field(self):
3304 """str: Field in the table to use for partitioning"""
3305 return self._properties.get("field")
3306
3307 @field.setter
3308 def field(self, value):
3309 self._properties["field"] = value
3310
3311 @property
3312 def expiration_ms(self):
3313 """int: Number of milliseconds to keep the storage for a partition."""
3314 return _helpers._int_or_none(self._properties.get("expirationMs"))
3315
3316 @expiration_ms.setter
3317 def expiration_ms(self, value):
        if value is not None:
            # The API expects the expiration as a string. ``None`` is left
            # unchanged so the expiration can be explicitly set to None.
            value = str(value)
3321 self._properties["expirationMs"] = value
3322
3323 @property
3324 def require_partition_filter(self):
3325 """bool: Specifies whether partition filters are required for queries
3326
3327 DEPRECATED: Use
3328 :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`,
3329 instead.
3330 """
3331 warnings.warn(
3332 (
3333 "TimePartitioning.require_partition_filter will be removed in "
3334 "future versions. Please use Table.require_partition_filter "
3335 "instead."
3336 ),
3337 PendingDeprecationWarning,
3338 stacklevel=2,
3339 )
3340 return self._properties.get("requirePartitionFilter")
3341
3342 @require_partition_filter.setter
3343 def require_partition_filter(self, value):
3344 warnings.warn(
3345 (
3346 "TimePartitioning.require_partition_filter will be removed in "
3347 "future versions. Please use Table.require_partition_filter "
3348 "instead."
3349 ),
3350 PendingDeprecationWarning,
3351 stacklevel=2,
3352 )
3353 self._properties["requirePartitionFilter"] = value
3354
3355 @classmethod
3356 def from_api_repr(cls, api_repr: dict) -> "TimePartitioning":
3357 """Return a :class:`TimePartitioning` object deserialized from a dict.
3358
3359 This method creates a new ``TimePartitioning`` instance that points to
3360 the ``api_repr`` parameter as its internal properties dict. This means
3361 that when a ``TimePartitioning`` instance is stored as a property of
3362 another object, any changes made at the higher level will also appear
3363 here::
3364
3365 >>> time_partitioning = TimePartitioning()
3366 >>> table.time_partitioning = time_partitioning
3367 >>> table.time_partitioning.field = 'timecolumn'
3368 >>> time_partitioning.field
3369 'timecolumn'
3370
3371 Args:
3372 api_repr (Mapping[str, str]):
3373 The serialized representation of the TimePartitioning, such as
3374 what is output by :meth:`to_api_repr`.
3375
3376 Returns:
3377 google.cloud.bigquery.table.TimePartitioning:
3378 The ``TimePartitioning`` object.
3379 """
3380 instance = cls()
3381 instance._properties = api_repr
3382 return instance
3383
3384 def to_api_repr(self) -> dict:
3385 """Return a dictionary representing this object.
3386
3387 This method returns the properties dict of the ``TimePartitioning``
3388 instance rather than making a copy. This means that when a
3389 ``TimePartitioning`` instance is stored as a property of another
3390 object, any changes made at the higher level will also appear here.
3391
3392 Returns:
3393 dict:
3394 A dictionary representing the TimePartitioning object in
3395 serialized form.
3396 """
3397 return self._properties
3398
3399 def _key(self):
        # Because we are only "renaming" top-level keys, a shallow copy is sufficient here.
        properties = self._properties.copy()
        # Use repr() for values that are not built-in types.
        properties["type_"] = repr(properties.pop("type"))
        if "field" in properties:
            # Use repr() for values that are not built-in types.
            properties["field"] = repr(properties["field"])
3407 if "requirePartitionFilter" in properties:
3408 properties["require_partition_filter"] = properties.pop(
3409 "requirePartitionFilter"
3410 )
3411 if "expirationMs" in properties:
3412 properties["expiration_ms"] = properties.pop("expirationMs")
3413 return tuple(sorted(properties.items()))
3414
3415 def __eq__(self, other):
3416 if not isinstance(other, TimePartitioning):
3417 return NotImplemented
3418 return self._key() == other._key()
3419
3420 def __ne__(self, other):
3421 return not self == other
3422
3423 def __hash__(self):
3424 return hash(self._key())
3425
3426 def __repr__(self):
3427 key_vals = ["{}={}".format(key, val) for key, val in self._key()]
3428 return "TimePartitioning({})".format(",".join(key_vals))
3429
3430
3431class PrimaryKey:
3432 """Represents the primary key constraint on a table's columns.
3433
3434 Args:
        columns: The columns that compose the primary key constraint.
3436 """
3437
3438 def __init__(self, columns: List[str]):
3439 self.columns = columns
3440
3441 def __eq__(self, other):
3442 if not isinstance(other, PrimaryKey):
3443 raise TypeError("The value provided is not a BigQuery PrimaryKey.")
3444 return self.columns == other.columns
3445
3446
3447class ColumnReference:
3448 """The pair of the foreign key column and primary key column.
3449
3450 Args:
3451 referencing_column: The column that composes the foreign key.
        referenced_column: The column in the primary key that is referenced by the referencing column.
3453 """
3454
3455 def __init__(self, referencing_column: str, referenced_column: str):
3456 self.referencing_column = referencing_column
3457 self.referenced_column = referenced_column
3458
3459 def __eq__(self, other):
3460 if not isinstance(other, ColumnReference):
3461 raise TypeError("The value provided is not a BigQuery ColumnReference.")
3462 return (
3463 self.referencing_column == other.referencing_column
3464 and self.referenced_column == other.referenced_column
3465 )
3466
3467
3468class ForeignKey:
3469 """Represents a foreign key constraint on a table's columns.
3470
3471 Args:
3472 name: Set only if the foreign key constraint is named.
3473 referenced_table: The table that holds the primary key and is referenced by this foreign key.
3474 column_references: The columns that compose the foreign key.
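
    Example:

        A sketch of constructing a foreign key in code (the table and column
        names are illustrative)::

            fk = ForeignKey(
                name="fk_order_customer",
                referenced_table=TableReference.from_string(
                    "my-project.my_dataset.customers"
                ),
                column_references=[ColumnReference("customer_id", "id")],
            )
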
3475 """
3476
3477 def __init__(
3478 self,
3479 name: str,
3480 referenced_table: TableReference,
3481 column_references: List[ColumnReference],
3482 ):
3483 self.name = name
3484 self.referenced_table = referenced_table
3485 self.column_references = column_references
3486
3487 def __eq__(self, other):
3488 if not isinstance(other, ForeignKey):
3489 raise TypeError("The value provided is not a BigQuery ForeignKey.")
3490 return (
3491 self.name == other.name
3492 and self.referenced_table == other.referenced_table
3493 and self.column_references == other.column_references
3494 )
3495
3496 @classmethod
3497 def from_api_repr(cls, api_repr: Dict[str, Any]) -> "ForeignKey":
3498 """Create an instance from API representation."""
3499 return cls(
3500 name=api_repr["name"],
3501 referenced_table=TableReference.from_api_repr(api_repr["referencedTable"]),
3502 column_references=[
3503 ColumnReference(
3504 column_reference_resource["referencingColumn"],
3505 column_reference_resource["referencedColumn"],
3506 )
3507 for column_reference_resource in api_repr["columnReferences"]
3508 ],
3509 )
3510
3511 def to_api_repr(self) -> Dict[str, Any]:
3512 """Return a dictionary representing this object."""
3513 return {
3514 "name": self.name,
3515 "referencedTable": self.referenced_table.to_api_repr(),
3516 "columnReferences": [
3517 {
3518 "referencingColumn": column_reference.referencing_column,
3519 "referencedColumn": column_reference.referenced_column,
3520 }
3521 for column_reference in self.column_references
3522 ],
3523 }
3524
3525
3526class TableConstraints:
3527 """The TableConstraints defines the primary key and foreign key.
3528
3529 Args:
3530 primary_key:
3531 Represents a primary key constraint on a table's columns. Present only if the table
3532 has a primary key. The primary key is not enforced.
3533 foreign_keys:
3534 Present only if the table has a foreign key. The foreign key is not enforced.
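
    Example:

        A sketch of the API-resource form parsed by :meth:`from_api_repr`
        (the project, dataset, table, and column names are illustrative)::

            resource = {
                "primaryKey": {"columns": ["id"]},
                "foreignKeys": [
                    {
                        "name": "fk_order_customer",
                        "referencedTable": {
                            "projectId": "my-project",
                            "datasetId": "my_dataset",
                            "tableId": "customers",
                        },
                        "columnReferences": [
                            {
                                "referencingColumn": "customer_id",
                                "referencedColumn": "id",
                            }
                        ],
                    }
                ],
            }
            constraints = TableConstraints.from_api_repr(resource)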
3535
3536 """
3537
3538 def __init__(
3539 self,
3540 primary_key: Optional[PrimaryKey],
3541 foreign_keys: Optional[List[ForeignKey]],
3542 ):
3543 self.primary_key = primary_key
3544 self.foreign_keys = foreign_keys
3545
    def __eq__(self, other):
        if other is None:
            return False
        if not isinstance(other, TableConstraints):
            raise TypeError("The value provided is not a BigQuery TableConstraints.")
        return (
            self.primary_key == other.primary_key
            and self.foreign_keys == other.foreign_keys
        )
3552
3553 @classmethod
3554 def from_api_repr(cls, resource: Dict[str, Any]) -> "TableConstraints":
3555 """Create an instance from API representation."""
3556 primary_key = None
3557 if "primaryKey" in resource:
3558 primary_key = PrimaryKey(resource["primaryKey"]["columns"])
3559
3560 foreign_keys = None
3561 if "foreignKeys" in resource:
3562 foreign_keys = [
3563 ForeignKey.from_api_repr(foreign_key_resource)
3564 for foreign_key_resource in resource["foreignKeys"]
3565 ]
3566 return cls(primary_key, foreign_keys)
3567
3568 def to_api_repr(self) -> Dict[str, Any]:
3569 """Return a dictionary representing this object."""
3570 resource: Dict[str, Any] = {}
3571 if self.primary_key:
3572 resource["primaryKey"] = {"columns": self.primary_key.columns}
3573 if self.foreign_keys:
3574 resource["foreignKeys"] = [
3575 foreign_key.to_api_repr() for foreign_key in self.foreign_keys
3576 ]
3577 return resource
3578
3579
3580class BigLakeConfiguration(object):
3581 """Configuration for managed tables for Apache Iceberg, formerly
3582 known as BigLake.
3583
3584 Args:
3585 connection_id (Optional[str]):
3586 The connection specifying the credentials to be used to read and write to external
3587 storage, such as Cloud Storage. The connection_id can have the form
3588 ``{project}.{location}.{connection_id}`` or
3589 ``projects/{project}/locations/{location}/connections/{connection_id}``.
3590 storage_uri (Optional[str]):
3591 The fully qualified location prefix of the external folder where table data is
3592 stored. The '*' wildcard character is not allowed. The URI should be in the
3593 format ``gs://bucket/path_to_table/``.
3594 file_format (Optional[str]):
3595 The file format the table data is stored in. See BigLakeFileFormat for available
3596 values.
3597 table_format (Optional[str]):
            The table format the metadata-only snapshots are stored in. See BigLakeTableFormat
3599 for available values.
3600 _properties (Optional[dict]):
3601 Private. Used to construct object from API resource.
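
    Example:

        A sketch of a typical configuration for an Iceberg table stored as
        Parquet (the connection and bucket names are illustrative)::

            from google.cloud import bigquery

            config = bigquery.BigLakeConfiguration(
                connection_id="my-project.us.my-connection",
                storage_uri="gs://my-bucket/my_table/",
                file_format="PARQUET",
                table_format="ICEBERG",
            )
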
3602 """
3603
3604 def __init__(
3605 self,
3606 connection_id: Optional[str] = None,
3607 storage_uri: Optional[str] = None,
3608 file_format: Optional[str] = None,
3609 table_format: Optional[str] = None,
3610 _properties: Optional[dict] = None,
3611 ) -> None:
3612 if _properties is None:
3613 _properties = {}
3614 self._properties = _properties
3615 if connection_id is not None:
3616 self.connection_id = connection_id
3617 if storage_uri is not None:
3618 self.storage_uri = storage_uri
3619 if file_format is not None:
3620 self.file_format = file_format
3621 if table_format is not None:
3622 self.table_format = table_format
3623
3624 @property
3625 def connection_id(self) -> Optional[str]:
3626 """str: The connection specifying the credentials to be used to read and write to external
3627 storage, such as Cloud Storage."""
3628 return self._properties.get("connectionId")
3629
3630 @connection_id.setter
3631 def connection_id(self, value: Optional[str]):
3632 self._properties["connectionId"] = value
3633
3634 @property
3635 def storage_uri(self) -> Optional[str]:
3636 """str: The fully qualified location prefix of the external folder where table data is
3637 stored."""
3638 return self._properties.get("storageUri")
3639
3640 @storage_uri.setter
3641 def storage_uri(self, value: Optional[str]):
3642 self._properties["storageUri"] = value
3643
3644 @property
3645 def file_format(self) -> Optional[str]:
3646 """str: The file format the table data is stored in. See BigLakeFileFormat for available
3647 values."""
3648 return self._properties.get("fileFormat")
3649
3650 @file_format.setter
3651 def file_format(self, value: Optional[str]):
3652 self._properties["fileFormat"] = value
3653
3654 @property
3655 def table_format(self) -> Optional[str]:
3656 """str: The table format the metadata only snapshots are stored in. See BigLakeTableFormat
3657 for available values."""
3658 return self._properties.get("tableFormat")
3659
3660 @table_format.setter
3661 def table_format(self, value: Optional[str]):
3662 self._properties["tableFormat"] = value
3663
3664 def _key(self):
3665 return tuple(sorted(self._properties.items()))
3666
3667 def __eq__(self, other):
3668 if not isinstance(other, BigLakeConfiguration):
3669 return NotImplemented
3670 return self._key() == other._key()
3671
3672 def __ne__(self, other):
3673 return not self == other
3674
3675 def __hash__(self):
3676 return hash(self._key())
3677
3678 def __repr__(self):
3679 key_vals = ["{}={}".format(key, val) for key, val in self._key()]
3680 return "BigLakeConfiguration({})".format(",".join(key_vals))
3681
3682 @classmethod
3683 def from_api_repr(cls, resource: Dict[str, Any]) -> "BigLakeConfiguration":
3684 """Factory: construct a BigLakeConfiguration given its API representation.
3685
3686 Args:
3687 resource:
3688 BigLakeConfiguration representation returned from the API
3689
3690 Returns:
3691 BigLakeConfiguration parsed from ``resource``.
3692 """
3693 ref = cls()
3694 ref._properties = resource
3695 return ref
3696
3697 def to_api_repr(self) -> Dict[str, Any]:
3698 """Construct the API resource representation of this BigLakeConfiguration.
3699
3700 Returns:
3701 BigLakeConfiguration represented as an API resource.
3702 """
3703 return copy.deepcopy(self._properties)
3704
3705
3706def _item_to_row(iterator, resource):
3707 """Convert a JSON row to the native object.
3708
3709 .. note::
3710
3711 This assumes that the ``schema`` attribute has been
3712 added to the iterator after being created, which
3713 should be done by the caller.
3714
3715 Args:
3716 iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use.
3717 resource (Dict): An item to be converted to a row.
3718
3719 Returns:
3720 google.cloud.bigquery.table.Row: The next row in the page.
3721 """
3722 return Row(
3723 _helpers._row_tuple_from_json(resource, iterator.schema),
3724 iterator._field_to_index,
3725 )
3726
3727
3728def _row_iterator_page_columns(schema, response):
3729 """Make a generator of all the columns in a page from tabledata.list.
3730
3731 This enables creating a :class:`pandas.DataFrame` and other
    column-oriented data structures such as :class:`pyarrow.RecordBatch`.
3733 """
3734 columns = []
3735 rows = response.get("rows", [])
3736
3737 def get_column_data(field_index, field):
3738 for row in rows:
3739 yield _helpers.DATA_FRAME_CELL_DATA_PARSER.to_py(
3740 row["f"][field_index]["v"], field
3741 )
3742
3743 for field_index, field in enumerate(schema):
3744 columns.append(get_column_data(field_index, field))
3745
3746 return columns
3747
3748
3749# pylint: disable=unused-argument
3750def _rows_page_start(iterator, page, response):
3751 """Grab total rows when :class:`~google.cloud.iterator.Page` starts.
3752
3753 Args:
3754 iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use.
3755 page (google.api_core.page_iterator.Page): The page that was just created.
3756 response (Dict): The JSON API response for a page of rows in a table.
3757 """
3758 # Make a (lazy) copy of the page in column-oriented format for use in data
3759 # science packages.
3760 page._columns = _row_iterator_page_columns(iterator._schema, response)
3761
3762 total_rows = response.get("totalRows")
3763 # Don't reset total_rows if it's not present in the next API response.
3764 if total_rows is not None:
3765 iterator._total_rows = int(total_rows)
3766
3767
3768# pylint: enable=unused-argument
3769
3770
3771def _table_arg_to_table_ref(value, default_project=None) -> TableReference:
3772 """Helper to convert a string or Table to TableReference.
3773
3774 This function keeps TableReference and other kinds of objects unchanged.
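
    For example (the table ID is illustrative), a fully-qualified string is
    parsed into a reference, while an existing reference passes through
    unchanged::

        ref = _table_arg_to_table_ref("my-project.my_dataset.my_table")
        assert isinstance(ref, TableReference)
        assert _table_arg_to_table_ref(ref) is ref
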
3775 """
3776 if isinstance(value, str):
3777 value = TableReference.from_string(value, default_project=default_project)
3778 if isinstance(value, (Table, TableListItem)):
3779 value = value.reference
3780 return value
3781
3782
3783def _table_arg_to_table(value, default_project=None) -> Table:
3784 """Helper to convert a string or TableReference to a Table.
3785
3786 This function keeps Table and other kinds of objects unchanged.
3787 """
3788 if isinstance(value, str):
3789 value = TableReference.from_string(value, default_project=default_project)
3790 if isinstance(value, TableReference):
3791 value = Table(value)
3792 if isinstance(value, TableListItem):
3793 newvalue = Table(value.reference)
3794 newvalue._properties = value._properties
3795 value = newvalue
3796
3797 return value