# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Define API Datasets."""

from __future__ import absolute_import

import copy

import typing

import google.cloud._helpers  # type: ignore

from google.cloud.bigquery import _helpers
from google.cloud.bigquery.model import ModelReference
from google.cloud.bigquery.routine import Routine, RoutineReference
from google.cloud.bigquery.table import Table, TableReference
from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration

from typing import Optional, List, Dict, Any, Union


def _get_table_reference(self, table_id: str) -> TableReference:
    """Constructs a TableReference.

    Args:
        table_id (str): The ID of the table.

    Returns:
        google.cloud.bigquery.table.TableReference:
            A table reference for a table in this dataset.
    """
    return TableReference(self, table_id)


def _get_model_reference(self, model_id):
    """Constructs a ModelReference.

    Args:
        model_id (str): the ID of the model.

    Returns:
        google.cloud.bigquery.model.ModelReference:
            A ModelReference for a model in this dataset.
    """
    return ModelReference.from_api_repr(
        {"projectId": self.project, "datasetId": self.dataset_id, "modelId": model_id}
    )


def _get_routine_reference(self, routine_id):
    """Constructs a RoutineReference.

    Args:
        routine_id (str): the ID of the routine.

    Returns:
        google.cloud.bigquery.routine.RoutineReference:
            A RoutineReference for a routine in this dataset.
    """
    return RoutineReference.from_api_repr(
        {
            "projectId": self.project,
            "datasetId": self.dataset_id,
            "routineId": routine_id,
        }
    )
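

# Illustrative sketch, not part of the upstream module: the three helpers above
# are reused below as the ``table``/``model``/``routine`` methods of
# DatasetReference, Dataset, and DatasetListItem, so child references can be
# built directly from a dataset object. The IDs used here are placeholders, and
# the code is kept inside a function so nothing executes at import time.
def _example_child_references():  # pragma: no cover - documentation aid only
    ref = DatasetReference("my-project", "my_dataset")
    table_ref = ref.table("my_table")        # TableReference via _get_table_reference
    model_ref = ref.model("my_model")        # ModelReference via _get_model_reference
    routine_ref = ref.routine("my_routine")  # RoutineReference via _get_routine_reference
    return table_ref, model_ref, routine_ref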


class DatasetReference(object):
    """DatasetReferences are pointers to datasets.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#datasetreference

    Args:
        project (str): The ID of the project
        dataset_id (str): The ID of the dataset

    Raises:
        ValueError: If either argument is not of type ``str``.
    """

    def __init__(self, project, dataset_id):
        if not isinstance(project, str):
            raise ValueError("Pass a string for project")
        if not isinstance(dataset_id, str):
            raise ValueError("Pass a string for dataset_id")
        self._project = project
        self._dataset_id = dataset_id

    @property
    def project(self):
        """str: Project ID of the dataset."""
        return self._project

    @property
    def dataset_id(self):
        """str: Dataset ID."""
        return self._dataset_id

    @property
    def path(self):
        """str: URL path for the dataset based on project and dataset ID."""
        return "/projects/%s/datasets/%s" % (self.project, self.dataset_id)

    table = _get_table_reference

    model = _get_model_reference

    routine = _get_routine_reference

    @classmethod
    def from_api_repr(cls, resource: dict) -> "DatasetReference":
        """Factory: construct a dataset reference given its API representation

        Args:
            resource (Dict[str, str]):
                Dataset reference resource representation returned from the API

        Returns:
            google.cloud.bigquery.dataset.DatasetReference:
                Dataset reference parsed from ``resource``.
        """
        project = resource["projectId"]
        dataset_id = resource["datasetId"]
        return cls(project, dataset_id)

    @classmethod
    def from_string(
        cls, dataset_id: str, default_project: str = None
    ) -> "DatasetReference":
        """Construct a dataset reference from dataset ID string.

        Args:
            dataset_id (str):
                A dataset ID in standard SQL format. If ``default_project``
                is not specified, this must include both the project ID and
                the dataset ID, separated by ``.``.
            default_project (Optional[str]):
                The project ID to use when ``dataset_id`` does not include a
                project ID.

        Returns:
            DatasetReference:
                Dataset reference parsed from ``dataset_id``.

        Examples:
            >>> DatasetReference.from_string('my-project-id.some_dataset')
            DatasetReference('my-project-id', 'some_dataset')

        Raises:
            ValueError:
                If ``dataset_id`` is not a fully-qualified dataset ID in
                standard SQL format.
        """
        output_dataset_id = dataset_id
        output_project_id = default_project
        parts = _helpers._split_id(dataset_id)

        if len(parts) == 1 and not default_project:
            raise ValueError(
                "When default_project is not set, dataset_id must be a "
                "fully-qualified dataset ID in standard SQL format, "
                'e.g., "project.dataset_id" got {}'.format(dataset_id)
            )
        elif len(parts) == 2:
            output_project_id, output_dataset_id = parts
        elif len(parts) > 2:
            raise ValueError(
                "Too many parts in dataset_id. Expected a fully-qualified "
                "dataset ID in standard SQL format. e.g. "
                '"project.dataset_id", got {}'.format(dataset_id)
            )

        return cls(output_project_id, output_dataset_id)

    def to_api_repr(self) -> dict:
        """Construct the API resource representation of this dataset reference

        Returns:
            Dict[str, str]: dataset reference represented as an API resource
        """
        return {"projectId": self._project, "datasetId": self._dataset_id}

    def _key(self):
        """A tuple key that uniquely describes this field.

        Used to compute this instance's hashcode and evaluate equality.

        Returns:
            Tuple[str]: The contents of this :class:`.DatasetReference`.
        """
        return (self._project, self._dataset_id)

    def __eq__(self, other):
        if not isinstance(other, DatasetReference):
            return NotImplemented
        return self._key() == other._key()

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self._key())

    def __str__(self):
        return f"{self.project}.{self._dataset_id}"

    def __repr__(self):
        return "DatasetReference{}".format(self._key())
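

# Illustrative sketch, not part of the upstream module: how a DatasetReference is
# typically built and serialized. The project and dataset IDs are placeholders,
# and the assertions only restate behavior defined by the class above.
def _example_dataset_reference_usage():  # pragma: no cover - documentation aid only
    ref = DatasetReference.from_string("my-project-id.some_dataset")
    assert ref.project == "my-project-id"
    assert ref.dataset_id == "some_dataset"
    assert ref.path == "/projects/my-project-id/datasets/some_dataset"
    # Round-trips through the API resource representation.
    assert DatasetReference.from_api_repr(ref.to_api_repr()) == ref
    # References are hashable, so they can serve as dict keys or set members.
    assert len({ref, DatasetReference("my-project-id", "some_dataset")}) == 1
    return ref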


class AccessEntry(object):
    """Represents grant of an access role to an entity.

    An entry must have exactly one of the allowed
    :class:`google.cloud.bigquery.enums.EntityTypes`. If anything but ``view``, ``routine``,
    or ``dataset`` is set, a ``role`` is also required. ``role`` is omitted for ``view``,
    ``routine``, and ``dataset``, because they are always read-only.

    See https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets.

    Args:
        role:
            Role granted to the entity. The following string values are
            supported: `'READER'`, `'WRITER'`, `'OWNER'`. It may also be
            :data:`None` if the ``entity_type`` is ``view``, ``routine``, or ``dataset``.

        entity_type:
            Type of entity being granted the role. See
            :class:`google.cloud.bigquery.enums.EntityTypes` for supported types.

        entity_id:
            If the ``entity_type`` is not 'view', 'routine', or 'dataset', the
            ``entity_id`` is the ``str`` ID of the entity being granted the role. If
            the ``entity_type`` is 'view' or 'routine', the ``entity_id`` is a ``dict``
            representing the view or routine from a different dataset to grant access
            to in the following format for views::

                {
                    'projectId': string,
                    'datasetId': string,
                    'tableId': string
                }

            For routines::

                {
                    'projectId': string,
                    'datasetId': string,
                    'routineId': string
                }

            If the ``entity_type`` is 'dataset', the ``entity_id`` is a ``dict`` that includes
            a 'dataset' field with a ``dict`` representing the dataset and a 'target_types'
            field with a ``str`` value of the dataset's resource type::

                {
                    'dataset': {
                        'projectId': string,
                        'datasetId': string,
                    },
                    'target_types': 'VIEWS'
                }

    Raises:
        ValueError:
            If a ``view``, ``routine``, or ``dataset`` has ``role`` set, or a non-``view``,
            non-``routine``, non-``dataset`` entry **does not** have a ``role`` set.

    Examples:
        >>> entry = AccessEntry('OWNER', 'userByEmail', 'user@example.com')

        >>> view = {
        ...     'projectId': 'my-project',
        ...     'datasetId': 'my_dataset',
        ...     'tableId': 'my_table'
        ... }
        >>> entry = AccessEntry(None, 'view', view)
    """

    def __init__(
        self,
        role: Optional[str] = None,
        entity_type: Optional[str] = None,
        entity_id: Optional[Union[Dict[str, Any], str]] = None,
    ):
        self._properties = {}
        if entity_type is not None:
            self._properties[entity_type] = entity_id
        self._properties["role"] = role
        self._entity_type = entity_type

    @property
    def role(self) -> Optional[str]:
        """The role of the entry."""
        return typing.cast(Optional[str], self._properties.get("role"))

    @role.setter
    def role(self, value):
        self._properties["role"] = value

    @property
    def dataset(self) -> Optional[DatasetReference]:
        """API resource representation of a dataset reference."""
        value = _helpers._get_sub_prop(self._properties, ["dataset", "dataset"])
        return DatasetReference.from_api_repr(value) if value else None

    @dataset.setter
    def dataset(self, value):
        if self.role is not None:
            raise ValueError(
                "Role must be None for a dataset. Current " "role: %r" % (self.role)
            )

        if isinstance(value, str):
            value = DatasetReference.from_string(value).to_api_repr()

        if isinstance(value, (Dataset, DatasetListItem)):
            value = value.reference.to_api_repr()

        _helpers._set_sub_prop(self._properties, ["dataset", "dataset"], value)
        _helpers._set_sub_prop(
            self._properties,
            ["dataset", "targetTypes"],
            self._properties.get("targetTypes"),
        )

    @property
    def dataset_target_types(self) -> Optional[List[str]]:
        """Which resources the dataset in this entry applies to."""
        return typing.cast(
            Optional[List[str]],
            _helpers._get_sub_prop(self._properties, ["dataset", "targetTypes"]),
        )

    @dataset_target_types.setter
    def dataset_target_types(self, value):
        self._properties.setdefault("dataset", {})
        _helpers._set_sub_prop(self._properties, ["dataset", "targetTypes"], value)

    @property
    def routine(self) -> Optional[RoutineReference]:
        """API resource representation of a routine reference."""
        value = typing.cast(Optional[Dict], self._properties.get("routine"))
        return RoutineReference.from_api_repr(value) if value else None

    @routine.setter
    def routine(self, value):
        if self.role is not None:
            raise ValueError(
                "Role must be None for a routine. Current " "role: %r" % (self.role)
            )

        if isinstance(value, str):
            value = RoutineReference.from_string(value).to_api_repr()

        if isinstance(value, RoutineReference):
            value = value.to_api_repr()

        if isinstance(value, Routine):
            value = value.reference.to_api_repr()

        self._properties["routine"] = value

    @property
    def view(self) -> Optional[TableReference]:
        """API resource representation of a view reference."""
        value = typing.cast(Optional[Dict], self._properties.get("view"))
        return TableReference.from_api_repr(value) if value else None

    @view.setter
    def view(self, value):
        if self.role is not None:
            raise ValueError(
                "Role must be None for a view. Current " "role: %r" % (self.role)
            )

        if isinstance(value, str):
            value = TableReference.from_string(value).to_api_repr()

        if isinstance(value, TableReference):
            value = value.to_api_repr()

        if isinstance(value, Table):
            value = value.reference.to_api_repr()

        self._properties["view"] = value

    @property
    def group_by_email(self) -> Optional[str]:
        """An email address of a Google Group to grant access to."""
        return typing.cast(Optional[str], self._properties.get("groupByEmail"))

    @group_by_email.setter
    def group_by_email(self, value):
        self._properties["groupByEmail"] = value

    @property
    def user_by_email(self) -> Optional[str]:
        """An email address of a user to grant access to."""
        return typing.cast(Optional[str], self._properties.get("userByEmail"))

    @user_by_email.setter
    def user_by_email(self, value):
        self._properties["userByEmail"] = value

    @property
    def domain(self) -> Optional[str]:
        """A domain to grant access to."""
        return typing.cast(Optional[str], self._properties.get("domain"))

    @domain.setter
    def domain(self, value):
        self._properties["domain"] = value

    @property
    def special_group(self) -> Optional[str]:
        """A special group to grant access to."""
        return typing.cast(Optional[str], self._properties.get("specialGroup"))

    @special_group.setter
    def special_group(self, value):
        self._properties["specialGroup"] = value

    @property
    def entity_type(self) -> Optional[str]:
        """The entity_type of the entry."""
        return self._entity_type

    @property
    def entity_id(self) -> Optional[Union[Dict[str, Any], str]]:
        """The entity_id of the entry."""
        return self._properties.get(self._entity_type) if self._entity_type else None

    def __eq__(self, other):
        if not isinstance(other, AccessEntry):
            return NotImplemented
        return self._key() == other._key()

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return f"<AccessEntry: role={self.role}, {self._entity_type}={self.entity_id}>"

    def _key(self):
        """A tuple key that uniquely describes this field.
        Used to compute this instance's hashcode and evaluate equality.
        Returns:
            Tuple: The contents of this :class:`~google.cloud.bigquery.dataset.AccessEntry`.
        """
        properties = self._properties.copy()
        prop_tup = tuple(sorted(properties.items()))
        return (self.role, self._entity_type, self.entity_id, prop_tup)

    def __hash__(self):
        return hash(self._key())

    def to_api_repr(self):
        """Construct the API resource representation of this access entry

        Returns:
            Dict[str, object]: Access entry represented as an API resource
        """
        resource = copy.deepcopy(self._properties)
        return resource

    @classmethod
    def from_api_repr(cls, resource: dict) -> "AccessEntry":
        """Factory: construct an access entry given its API representation

        Args:
            resource (Dict[str, object]):
                Access entry resource representation returned from the API

        Returns:
            google.cloud.bigquery.dataset.AccessEntry:
                Access entry parsed from ``resource``.

        Raises:
            ValueError:
                If the resource has more keys than ``role`` and one additional
                key.
        """
        entry = resource.copy()
        role = entry.pop("role", None)
        entity_type, entity_id = entry.popitem()
        if len(entry) != 0:
            raise ValueError("Entry has unexpected keys remaining.", entry)

        config = cls(role, entity_type, entity_id)
        config._properties = copy.deepcopy(resource)
        return config
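

# Illustrative sketch, not part of the upstream module: building access entries
# for a user and for a view, and round-tripping the view entry through its API
# representation. Project, dataset, and table IDs are placeholders.
def _example_access_entry_usage():  # pragma: no cover - documentation aid only
    owner = AccessEntry("OWNER", "userByEmail", "user@example.com")
    assert owner.role == "OWNER"
    assert owner.entity_id == "user@example.com"

    view = {
        "projectId": "my-project",
        "datasetId": "my_dataset",
        "tableId": "my_table",
    }
    view_entry = AccessEntry(None, "view", view)  # role must be None for views
    restored = AccessEntry.from_api_repr(view_entry.to_api_repr())
    assert restored.entity_type == "view"
    return owner, restored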


class Dataset(object):
    """Datasets are containers for tables.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource-dataset

    Args:
        dataset_ref (Union[google.cloud.bigquery.dataset.DatasetReference, str]):
            A pointer to a dataset. If ``dataset_ref`` is a string, it must
            include both the project ID and the dataset ID, separated by
            ``.``.
    """

    _PROPERTY_TO_API_FIELD = {
        "access_entries": "access",
        "created": "creationTime",
        "default_partition_expiration_ms": "defaultPartitionExpirationMs",
        "default_table_expiration_ms": "defaultTableExpirationMs",
        "friendly_name": "friendlyName",
        "default_encryption_configuration": "defaultEncryptionConfiguration",
    }

    def __init__(self, dataset_ref) -> None:
        if isinstance(dataset_ref, str):
            dataset_ref = DatasetReference.from_string(dataset_ref)
        self._properties = {"datasetReference": dataset_ref.to_api_repr(), "labels": {}}

    @property
    def project(self):
        """str: Project ID of the project bound to the dataset."""
        return self._properties["datasetReference"]["projectId"]

    @property
    def path(self):
        """str: URL path for the dataset based on project and dataset ID."""
        return "/projects/%s/datasets/%s" % (self.project, self.dataset_id)

    @property
    def access_entries(self):
        """List[google.cloud.bigquery.dataset.AccessEntry]: Dataset's access
        entries.

        ``role`` augments the entity type and must be present **unless** the
        entity type is ``view`` or ``routine``.

        Raises:
            TypeError: If 'value' is not a sequence
            ValueError:
                If any item in the sequence is not an
                :class:`~google.cloud.bigquery.dataset.AccessEntry`.
        """
        entries = self._properties.get("access", [])
        return [AccessEntry.from_api_repr(entry) for entry in entries]

    @access_entries.setter
    def access_entries(self, value):
        if not all(isinstance(field, AccessEntry) for field in value):
            raise ValueError("Values must be AccessEntry instances")
        entries = [entry.to_api_repr() for entry in value]
        self._properties["access"] = entries

    @property
    def created(self):
        """Union[datetime.datetime, None]: Datetime at which the dataset was
        created (:data:`None` until set from the server).
        """
        creation_time = self._properties.get("creationTime")
        if creation_time is not None:
            # creation_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000.0 * float(creation_time)
            )

    @property
    def dataset_id(self):
        """str: Dataset ID."""
        return self._properties["datasetReference"]["datasetId"]

    @property
    def full_dataset_id(self):
        """Union[str, None]: ID for the dataset resource (:data:`None` until
        set from the server)

        In the format ``project_id:dataset_id``.
        """
        return self._properties.get("id")

    @property
    def reference(self):
        """google.cloud.bigquery.dataset.DatasetReference: A reference to this
        dataset.
        """
        return DatasetReference(self.project, self.dataset_id)

    @property
    def etag(self):
        """Union[str, None]: ETag for the dataset resource (:data:`None` until
        set from the server).
        """
        return self._properties.get("etag")

    @property
    def modified(self):
        """Union[datetime.datetime, None]: Datetime at which the dataset was
        last modified (:data:`None` until set from the server).
        """
        modified_time = self._properties.get("lastModifiedTime")
        if modified_time is not None:
            # modified_time will be in milliseconds.
            return google.cloud._helpers._datetime_from_microseconds(
                1000.0 * float(modified_time)
            )

    @property
    def self_link(self):
        """Union[str, None]: URL for the dataset resource (:data:`None` until
        set from the server).
        """
        return self._properties.get("selfLink")

    @property
    def default_partition_expiration_ms(self):
        """Optional[int]: The default partition expiration for all
        partitioned tables in the dataset, in milliseconds.

        Once this property is set, all newly-created partitioned tables in
        the dataset will have a ``time_partitioning.expiration_ms`` property
        set to this value, and changing the value will only affect new
        tables, not existing ones. The storage in a partition will have an
        expiration time of its partition time plus this value.

        Setting this property overrides the use of
        ``default_table_expiration_ms`` for partitioned tables: only one of
        ``default_table_expiration_ms`` and
        ``default_partition_expiration_ms`` will be used for any new
        partitioned table. If you provide an explicit
        ``time_partitioning.expiration_ms`` when creating or updating a
        partitioned table, that value takes precedence over the default
        partition expiration time indicated by this property.
        """
        return _helpers._int_or_none(
            self._properties.get("defaultPartitionExpirationMs")
        )

    @default_partition_expiration_ms.setter
    def default_partition_expiration_ms(self, value):
        self._properties["defaultPartitionExpirationMs"] = _helpers._str_or_none(value)

    @property
    def default_table_expiration_ms(self):
        """Union[int, None]: Default expiration time for tables in the dataset
        (defaults to :data:`None`).

        Raises:
            ValueError: For invalid value types.
        """
        return _helpers._int_or_none(self._properties.get("defaultTableExpirationMs"))

    @default_table_expiration_ms.setter
    def default_table_expiration_ms(self, value):
        if not isinstance(value, int) and value is not None:
            raise ValueError("Pass an integer, or None")
        self._properties["defaultTableExpirationMs"] = _helpers._str_or_none(value)

    @property
    def description(self):
        """Optional[str]: Description of the dataset as set by the user
        (defaults to :data:`None`).

        Raises:
            ValueError: for invalid value types.
        """
        return self._properties.get("description")

    @description.setter
    def description(self, value):
        if not isinstance(value, str) and value is not None:
            raise ValueError("Pass a string, or None")
        self._properties["description"] = value

    @property
    def friendly_name(self):
        """Union[str, None]: Title of the dataset as set by the user
        (defaults to :data:`None`).

        Raises:
            ValueError: for invalid value types.
        """
        return self._properties.get("friendlyName")

    @friendly_name.setter
    def friendly_name(self, value):
        if not isinstance(value, str) and value is not None:
            raise ValueError("Pass a string, or None")
        self._properties["friendlyName"] = value

    @property
    def location(self):
        """Union[str, None]: Location in which the dataset is hosted as set by
        the user (defaults to :data:`None`).

        Raises:
            ValueError: for invalid value types.
        """
        return self._properties.get("location")

    @location.setter
    def location(self, value):
        if not isinstance(value, str) and value is not None:
            raise ValueError("Pass a string, or None")
        self._properties["location"] = value

    @property
    def labels(self):
        """Dict[str, str]: Labels for the dataset.

        This method always returns a dict. To change a dataset's labels,
        modify the dict, then call
        :meth:`google.cloud.bigquery.client.Client.update_dataset`. To delete
        a label, set its value to :data:`None` before updating.

        Raises:
            ValueError: for invalid value types.
        """
        return self._properties.setdefault("labels", {})

    @labels.setter
    def labels(self, value):
        if not isinstance(value, dict):
            raise ValueError("Pass a dict")
        self._properties["labels"] = value

    @property
    def default_encryption_configuration(self):
        """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom
        encryption configuration for all tables in the dataset.

        Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
        if using default encryption.

        See `protecting data with Cloud KMS keys
        <https://cloud.google.com/bigquery/docs/customer-managed-encryption>`_
        in the BigQuery documentation.
        """
        prop = self._properties.get("defaultEncryptionConfiguration")
        if prop:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop

    @default_encryption_configuration.setter
    def default_encryption_configuration(self, value):
        api_repr = value
        if value:
            api_repr = value.to_api_repr()
        self._properties["defaultEncryptionConfiguration"] = api_repr

    @classmethod
    def from_string(cls, full_dataset_id: str) -> "Dataset":
        """Construct a dataset from fully-qualified dataset ID.

        Args:
            full_dataset_id (str):
                A fully-qualified dataset ID in standard SQL format. Must
                include both the project ID and the dataset ID, separated by
                ``.``.

        Returns:
            Dataset: Dataset parsed from ``full_dataset_id``.

        Examples:
            >>> Dataset.from_string('my-project-id.some_dataset')
            Dataset(DatasetReference('my-project-id', 'some_dataset'))

        Raises:
            ValueError:
                If ``full_dataset_id`` is not a fully-qualified dataset ID in
                standard SQL format.
        """
        return cls(DatasetReference.from_string(full_dataset_id))

    @classmethod
    def from_api_repr(cls, resource: dict) -> "Dataset":
        """Factory: construct a dataset given its API representation

        Args:
            resource (Dict[str: object]):
                Dataset resource representation returned from the API

        Returns:
            google.cloud.bigquery.dataset.Dataset:
                Dataset parsed from ``resource``.
        """
        if (
            "datasetReference" not in resource
            or "datasetId" not in resource["datasetReference"]
        ):
            raise KeyError(
                "Resource lacks required identity information:"
                '["datasetReference"]["datasetId"]'
            )
        project_id = resource["datasetReference"]["projectId"]
        dataset_id = resource["datasetReference"]["datasetId"]
        dataset = cls(DatasetReference(project_id, dataset_id))
        dataset._properties = copy.deepcopy(resource)
        return dataset

    def to_api_repr(self) -> dict:
        """Construct the API resource representation of this dataset

        Returns:
            Dict[str, object]: The dataset represented as an API resource
        """
        return copy.deepcopy(self._properties)

    def _build_resource(self, filter_fields):
        """Generate a resource for ``update``."""
        return _helpers._build_resource_from_properties(self, filter_fields)

    table = _get_table_reference

    model = _get_model_reference

    routine = _get_routine_reference

    def __repr__(self):
        return "Dataset({})".format(repr(self.reference))
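

# Illustrative sketch, not part of the upstream module: configuring a Dataset
# locally before sending it to the API (e.g. with Client.create_dataset). The
# dataset ID and settings are placeholders, and no network call is made here.
def _example_dataset_usage():  # pragma: no cover - documentation aid only
    dataset = Dataset("my-project-id.some_dataset")
    dataset.location = "US"
    dataset.description = "Example dataset"
    dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000  # one day
    dataset.labels = {"env": "dev"}
    resource = dataset.to_api_repr()
    assert resource["datasetReference"]["datasetId"] == "some_dataset"
    # Expiration values are stored as string milliseconds in the API resource.
    assert resource["defaultTableExpirationMs"] == str(24 * 60 * 60 * 1000)
    return dataset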


class DatasetListItem(object):
    """A read-only dataset resource from a list operation.

    For performance reasons, the BigQuery API only includes some of the
    dataset properties when listing datasets. Notably,
    :attr:`~google.cloud.bigquery.dataset.Dataset.access_entries` is missing.

    For a full list of the properties that the BigQuery API returns, see the
    `REST documentation for datasets.list
    <https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list>`_.

    Args:
        resource (Dict[str, str]):
            A dataset-like resource object from a dataset list response. A
            ``datasetReference`` property is required.

    Raises:
        ValueError:
            If ``datasetReference`` or one of its required members is missing
            from ``resource``.
    """

    def __init__(self, resource):
        if "datasetReference" not in resource:
            raise ValueError("resource must contain a datasetReference value")
        if "projectId" not in resource["datasetReference"]:
            raise ValueError(
                "resource['datasetReference'] must contain a projectId value"
            )
        if "datasetId" not in resource["datasetReference"]:
            raise ValueError(
                "resource['datasetReference'] must contain a datasetId value"
            )
        self._properties = resource

    @property
    def project(self):
        """str: Project bound to the dataset."""
        return self._properties["datasetReference"]["projectId"]

    @property
    def dataset_id(self):
        """str: Dataset ID."""
        return self._properties["datasetReference"]["datasetId"]

    @property
    def full_dataset_id(self):
        """Union[str, None]: ID for the dataset resource (:data:`None` until
        set from the server)

        In the format ``project_id:dataset_id``.
        """
        return self._properties.get("id")

    @property
    def friendly_name(self):
        """Union[str, None]: Title of the dataset as set by the user
        (defaults to :data:`None`).
        """
        return self._properties.get("friendlyName")

    @property
    def labels(self):
        """Dict[str, str]: Labels for the dataset."""
        return self._properties.setdefault("labels", {})

    @property
    def reference(self):
        """google.cloud.bigquery.dataset.DatasetReference: A reference to this
        dataset.
        """
        return DatasetReference(self.project, self.dataset_id)

    table = _get_table_reference

    model = _get_model_reference

    routine = _get_routine_reference
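

# Illustrative sketch, not part of the upstream module: DatasetListItem wraps a
# minimal resource of the shape returned by the datasets.list API. The resource
# below is a hand-written placeholder, not an actual API response.
def _example_dataset_list_item_usage():  # pragma: no cover - documentation aid only
    resource = {
        "datasetReference": {"projectId": "my-project", "datasetId": "my_dataset"},
        "friendlyName": "My dataset",
        "labels": {"env": "dev"},
    }
    item = DatasetListItem(resource)
    assert item.project == "my-project"
    assert item.friendly_name == "My dataset"
    # A full DatasetReference can be recovered for follow-up calls such as
    # Client.get_dataset(item.reference).
    assert item.reference == DatasetReference("my-project", "my_dataset")
    return item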