Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/google/cloud/storage/bucket.py: 36%
674 statements
« prev ^ index » next coverage.py v7.3.1, created at 2023-09-25 06:17 +0000
1# Copyright 2014 Google LLC
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
15"""Create / interact with Google Cloud Storage buckets."""
17import base64
18import copy
19import datetime
20import json
21from urllib.parse import urlsplit
22import warnings
24from google.api_core import datetime_helpers
25from google.cloud._helpers import _datetime_to_rfc3339
26from google.cloud._helpers import _NOW
27from google.cloud._helpers import _rfc3339_nanos_to_datetime
28from google.cloud.exceptions import NotFound
29from google.api_core.iam import Policy
30from google.cloud.storage import _signing
31from google.cloud.storage._helpers import _add_etag_match_headers
32from google.cloud.storage._helpers import _add_generation_match_parameters
33from google.cloud.storage._helpers import _PropertyMixin
34from google.cloud.storage._helpers import _scalar_property
35from google.cloud.storage._helpers import _validate_name
36from google.cloud.storage._signing import generate_signed_url_v2
37from google.cloud.storage._signing import generate_signed_url_v4
38from google.cloud.storage._helpers import _bucket_bound_hostname_url
39from google.cloud.storage.acl import BucketACL
40from google.cloud.storage.acl import DefaultObjectACL
41from google.cloud.storage.blob import Blob
42from google.cloud.storage.constants import _DEFAULT_TIMEOUT
43from google.cloud.storage.constants import ARCHIVE_STORAGE_CLASS
44from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS
45from google.cloud.storage.constants import DUAL_REGION_LOCATION_TYPE
46from google.cloud.storage.constants import (
47 DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS,
48)
49from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS
50from google.cloud.storage.constants import MULTI_REGION_LOCATION_TYPE
51from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS
52from google.cloud.storage.constants import PUBLIC_ACCESS_PREVENTION_INHERITED
53from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS
54from google.cloud.storage.constants import REGION_LOCATION_TYPE
55from google.cloud.storage.constants import STANDARD_STORAGE_CLASS
56from google.cloud.storage.notification import BucketNotification
57from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT
58from google.cloud.storage.retry import DEFAULT_RETRY
59from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
60from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON
61from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED
# Error message raised when both the new and the deprecated "enabled"
# argument spellings are passed to IAMConfiguration at once.
_UBLA_BPO_ENABLED_MESSAGE = (
    "Pass only one of 'uniform_bucket_level_access_enabled' / "
    "'bucket_policy_only_enabled' to 'IAMConfiguration'."
)
# Deprecation warning emitted when 'bucket_policy_only_enabled' is used.
_BPO_ENABLED_MESSAGE = (
    "'IAMConfiguration.bucket_policy_only_enabled' is deprecated. "
    "Instead, use 'IAMConfiguration.uniform_bucket_level_access_enabled'."
)
# Error message raised when both the new and the deprecated "lock time"
# argument spellings are passed to IAMConfiguration at once.
_UBLA_BPO_LOCK_TIME_MESSAGE = (
    "Pass only one of 'uniform_bucket_level_access_lock_time' / "
    "'bucket_policy_only_lock_time' to 'IAMConfiguration'."
)
# Deprecation warning emitted when 'bucket_policy_only_lock_time' is used.
_BPO_LOCK_TIME_MESSAGE = (
    "'IAMConfiguration.bucket_policy_only_lock_time' is deprecated. "
    "Instead, use 'IAMConfiguration.uniform_bucket_level_access_lock_time'."
)
# Deprecation warning for assigning Bucket.location after creation.
_LOCATION_SETTER_MESSAGE = (
    "Assignment to 'Bucket.location' is deprecated, as it is only "
    "valid before the bucket is created. Instead, pass the location "
    "to `Bucket.create`."
)
# Default public Cloud Storage JSON API endpoint.
_API_ACCESS_ENDPOINT = "https://storage.googleapis.com"
88def _blobs_page_start(iterator, page, response):
89 """Grab prefixes after a :class:`~google.cloud.iterator.Page` started.
91 :type iterator: :class:`~google.api_core.page_iterator.Iterator`
92 :param iterator: The iterator that is currently in use.
94 :type page: :class:`~google.cloud.api.core.page_iterator.Page`
95 :param page: The page that was just created.
97 :type response: dict
98 :param response: The JSON API response for a page of blobs.
99 """
100 page.prefixes = tuple(response.get("prefixes", ()))
101 iterator.prefixes.update(page.prefixes)
def _item_to_blob(iterator, item):
    """Convert a JSON resource into a :class:`.Blob`.

    .. note::

       This assumes that the ``bucket`` attribute has been
       added to the iterator after being created.

    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.

    :type item: dict
    :param item: An item to be converted to a blob.

    :rtype: :class:`.Blob`
    :returns: The next blob in the page.
    """
    blob = Blob(item.get("name"), bucket=iterator.bucket)
    blob._set_properties(item)
    return blob
def _item_to_notification(iterator, item):
    """Convert a JSON resource into a :class:`.BucketNotification`.

    .. note::

       This assumes that the ``bucket`` attribute has been
       added to the iterator after being created.

    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.

    :type item: dict
    :param item: An item to be converted to a notification.

    :rtype: :class:`.BucketNotification`
    :returns: The next notification being iterated.
    """
    bucket = iterator.bucket
    return BucketNotification.from_api_repr(item, bucket=bucket)
class LifecycleRuleConditions(dict):
    """Map the conditions of a single lifecycle rule for a bucket.

    See: https://cloud.google.com/storage/docs/lifecycle

    :type age: int
    :param age: (Optional) Apply rule action to items whose age, in days,
                exceeds this value.

    :type created_before: datetime.date
    :param created_before: (Optional) Apply rule action to items created
                           before this date.

    :type is_live: bool
    :param is_live: (Optional) If true, apply rule action to non-versioned
                    items, or to items with no newer versions. If false, apply
                    rule action to versioned items with at least one newer
                    version.

    :type matches_prefix: list(str)
    :param matches_prefix: (Optional) Apply rule action to items which
                           any prefix matches the beginning of the item name.

    :type matches_storage_class: list(str), one or more of
                                 :attr:`Bucket.STORAGE_CLASSES`.
    :param matches_storage_class: (Optional) Apply rule action to items
                                  whose storage class matches this value.

    :type matches_suffix: list(str)
    :param matches_suffix: (Optional) Apply rule action to items which
                           any suffix matches the end of the item name.

    :type number_of_newer_versions: int
    :param number_of_newer_versions: (Optional) Apply rule action to versioned
                                     items having N newer versions.

    :type days_since_custom_time: int
    :param days_since_custom_time: (Optional) Apply rule action to items whose number of days
                                   elapsed since the custom timestamp. This condition is relevant
                                   only for versioned objects. The value of the field must be a non
                                   negative integer. If it's zero, the object version will become
                                   eligible for lifecycle action as soon as it becomes custom.

    :type custom_time_before: :class:`datetime.date`
    :param custom_time_before: (Optional) Date object parsed from RFC3339 valid date, apply rule action
                               to items whose custom time is before this date. This condition is relevant
                               only for versioned objects, e.g., 2019-03-16.

    :type days_since_noncurrent_time: int
    :param days_since_noncurrent_time: (Optional) Apply rule action to items whose number of days
                                       elapsed since the non current timestamp. This condition
                                       is relevant only for versioned objects. The value of the field
                                       must be a non negative integer. If it's zero, the object version
                                       will become eligible for lifecycle action as soon as it becomes
                                       non current.

    :type noncurrent_time_before: :class:`datetime.date`
    :param noncurrent_time_before: (Optional) Date object parsed from RFC3339 valid date, apply
                                   rule action to items whose non current time is before this date.
                                   This condition is relevant only for versioned objects, e.g, 2019-03-16.

    :raises ValueError: if no arguments are passed.
    """

    def __init__(
        self,
        age=None,
        created_before=None,
        is_live=None,
        matches_storage_class=None,
        number_of_newer_versions=None,
        days_since_custom_time=None,
        custom_time_before=None,
        days_since_noncurrent_time=None,
        noncurrent_time_before=None,
        matches_prefix=None,
        matches_suffix=None,
        _factory=False,
    ):
        # Dates are serialized to ISO-8601 strings; everything else is
        # stored as given. Entries whose value is None are omitted.
        def as_iso(date_value):
            return None if date_value is None else date_value.isoformat()

        candidates = (
            ("age", age),
            ("createdBefore", as_iso(created_before)),
            ("isLive", is_live),
            ("matchesStorageClass", matches_storage_class),
            ("numNewerVersions", number_of_newer_versions),
            ("daysSinceCustomTime", days_since_custom_time),
            ("customTimeBefore", as_iso(custom_time_before)),
            ("daysSinceNoncurrentTime", days_since_noncurrent_time),
            ("noncurrentTimeBefore", as_iso(noncurrent_time_before)),
            ("matchesPrefix", matches_prefix),
            ("matchesSuffix", matches_suffix),
        )
        conditions = {key: value for key, value in candidates if value is not None}

        # A rule with no condition at all is meaningless unless built by
        # the from_api_repr() factory, which fills it in afterwards.
        if not _factory and not conditions:
            raise ValueError("Supply at least one condition")

        super().__init__(conditions)

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleConditions`
        :returns: Instance created from resource.
        """
        instance = cls(_factory=True)
        instance.update(resource)
        return instance

    @property
    def age(self):
        """Condition's age value."""
        return self.get("age")

    @property
    def created_before(self):
        """Condition's created_before value, as a date, or None."""
        value = self.get("createdBefore")
        return None if value is None else datetime_helpers.from_iso8601_date(value)

    @property
    def is_live(self):
        """Condition's 'is_live' value."""
        return self.get("isLive")

    @property
    def matches_prefix(self):
        """Condition's 'matches_prefix' value."""
        return self.get("matchesPrefix")

    @property
    def matches_storage_class(self):
        """Condition's 'matches_storage_class' value."""
        return self.get("matchesStorageClass")

    @property
    def matches_suffix(self):
        """Condition's 'matches_suffix' value."""
        return self.get("matchesSuffix")

    @property
    def number_of_newer_versions(self):
        """Condition's 'number_of_newer_versions' value."""
        return self.get("numNewerVersions")

    @property
    def days_since_custom_time(self):
        """Condition's 'days_since_custom_time' value."""
        return self.get("daysSinceCustomTime")

    @property
    def custom_time_before(self):
        """Condition's 'custom_time_before' value, as a date, or None."""
        value = self.get("customTimeBefore")
        return None if value is None else datetime_helpers.from_iso8601_date(value)

    @property
    def days_since_noncurrent_time(self):
        """Condition's 'days_since_noncurrent_time' value."""
        return self.get("daysSinceNoncurrentTime")

    @property
    def noncurrent_time_before(self):
        """Condition's 'noncurrent_time_before' value, as a date, or None."""
        value = self.get("noncurrentTimeBefore")
        return None if value is None else datetime_helpers.from_iso8601_date(value)
class LifecycleRuleDelete(dict):
    """Map a lifecycle rule deleting matching items.

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """

    def __init__(self, **kw):
        # Validate / normalize the condition arguments before storing.
        condition = dict(LifecycleRuleConditions(**kw))
        super().__init__({"action": {"type": "Delete"}, "condition": condition})

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleDelete`
        :returns: Instance created from resource.
        """
        rule = cls(_factory=True)
        rule.update(resource)
        return rule
class LifecycleRuleSetStorageClass(dict):
    """Map a lifecycle rule updating storage class of matching items.

    :type storage_class: str, one of :attr:`Bucket.STORAGE_CLASSES`.
    :param storage_class: new storage class to assign to matching items.

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """

    def __init__(self, storage_class, **kw):
        # Validate / normalize the condition arguments before storing.
        condition = dict(LifecycleRuleConditions(**kw))
        action = {"type": "SetStorageClass", "storageClass": storage_class}
        super().__init__({"action": action, "condition": condition})

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleSetStorageClass`
        :returns: Instance created from resource.
        """
        # The target storage class is required by __init__, so pull it
        # out of the resource's action before constructing.
        rule = cls(resource["action"]["storageClass"], _factory=True)
        rule.update(resource)
        return rule
class LifecycleRuleAbortIncompleteMultipartUpload(dict):
    """Map a rule aborting incomplete multipart uploads of matching items.

    The "age" lifecycle condition is the only supported condition for this rule.

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """

    def __init__(self, **kw):
        # Validate / normalize the condition arguments before storing.
        condition = dict(LifecycleRuleConditions(**kw))
        action = {"type": "AbortIncompleteMultipartUpload"}
        super().__init__({"action": action, "condition": condition})

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleAbortIncompleteMultipartUpload`
        :returns: Instance created from resource.
        """
        rule = cls(_factory=True)
        rule.update(resource)
        return rule
# Sentinel distinguishing "argument not supplied" from an explicit None.
_default = object()
class IAMConfiguration(dict):
    """Map a bucket's IAM configuration.

    :type bucket: :class:`Bucket`
    :params bucket: Bucket for which this instance is the policy.

    :type public_access_prevention: str
    :params public_access_prevention:
        (Optional) Whether the public access prevention policy is 'inherited' (default) or 'enforced'
        See: https://cloud.google.com/storage/docs/public-access-prevention

    :type uniform_bucket_level_access_enabled: bool
    :params uniform_bucket_level_access_enabled:
        (Optional) Whether the IAM-only policy is enabled for the bucket.

    :type uniform_bucket_level_access_locked_time: :class:`datetime.datetime`
    :params uniform_bucket_level_access_locked_time:
        (Optional) When the bucket's IAM-only policy was enabled.
        This value should normally only be set by the back-end API.

    :type bucket_policy_only_enabled: bool
    :params bucket_policy_only_enabled:
        Deprecated alias for :data:`uniform_bucket_level_access_enabled`.

    :type bucket_policy_only_locked_time: :class:`datetime.datetime`
    :params bucket_policy_only_locked_time:
        Deprecated alias for :data:`uniform_bucket_level_access_locked_time`.
    """

    def __init__(
        self,
        bucket,
        public_access_prevention=_default,
        uniform_bucket_level_access_enabled=_default,
        uniform_bucket_level_access_locked_time=_default,
        bucket_policy_only_enabled=_default,
        bucket_policy_only_locked_time=_default,
    ):
        # Fold each deprecated 'bucket_policy_only_*' alias into its
        # 'uniform_bucket_level_access_*' equivalent: reject calls passing
        # both spellings, and warn when the deprecated one is used.
        if bucket_policy_only_enabled is not _default:
            if uniform_bucket_level_access_enabled is not _default:
                raise ValueError(_UBLA_BPO_ENABLED_MESSAGE)
            warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2)
            uniform_bucket_level_access_enabled = bucket_policy_only_enabled

        if bucket_policy_only_locked_time is not _default:
            if uniform_bucket_level_access_locked_time is not _default:
                raise ValueError(_UBLA_BPO_LOCK_TIME_MESSAGE)
            warnings.warn(_BPO_LOCK_TIME_MESSAGE, DeprecationWarning, stacklevel=2)
            uniform_bucket_level_access_locked_time = bucket_policy_only_locked_time

        # Apply defaults once the aliases have been resolved.
        if uniform_bucket_level_access_enabled is _default:
            uniform_bucket_level_access_enabled = False

        if public_access_prevention is _default:
            public_access_prevention = PUBLIC_ACCESS_PREVENTION_INHERITED

        data = {
            "uniformBucketLevelAccess": {
                "enabled": uniform_bucket_level_access_enabled
            },
            "publicAccessPrevention": public_access_prevention,
        }
        # 'lockedTime' is only included when explicitly supplied (normally
        # by the back-end); it is serialized as an RFC3339 timestamp.
        if uniform_bucket_level_access_locked_time is not _default:
            data["uniformBucketLevelAccess"]["lockedTime"] = _datetime_to_rfc3339(
                uniform_bucket_level_access_locked_time
            )
        super(IAMConfiguration, self).__init__(data)
        self._bucket = bucket

    @classmethod
    def from_api_repr(cls, resource, bucket):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :type bucket: :class:`Bucket`
        :params bucket: Bucket for which this instance is the policy.

        :rtype: :class:`IAMConfiguration`
        :returns: Instance created from resource.
        """
        instance = cls(bucket)
        instance.update(resource)
        return instance

    @property
    def bucket(self):
        """Bucket for which this instance is the policy.

        :rtype: :class:`Bucket`
        :returns: the instance's bucket.
        """
        return self._bucket

    @property
    def public_access_prevention(self):
        """Setting for public access prevention policy. Options are 'inherited' (default) or 'enforced'.

        See: https://cloud.google.com/storage/docs/public-access-prevention

        :rtype: string
        :returns: the public access prevention status, either 'enforced' or 'inherited'.
        """
        return self["publicAccessPrevention"]

    @public_access_prevention.setter
    def public_access_prevention(self, value):
        self["publicAccessPrevention"] = value
        # Mark the whole IAM configuration as changed on the owning bucket.
        self.bucket._patch_property("iamConfiguration", self)

    @property
    def uniform_bucket_level_access_enabled(self):
        """If set, access checks only use bucket-level IAM policies or above.

        :rtype: bool
        :returns: whether the bucket is configured to allow only IAM.
        """
        ubla = self.get("uniformBucketLevelAccess", {})
        return ubla.get("enabled", False)

    @uniform_bucket_level_access_enabled.setter
    def uniform_bucket_level_access_enabled(self, value):
        ubla = self.setdefault("uniformBucketLevelAccess", {})
        ubla["enabled"] = bool(value)
        # Mark the whole IAM configuration as changed on the owning bucket.
        self.bucket._patch_property("iamConfiguration", self)

    @property
    def uniform_bucket_level_access_locked_time(self):
        """Deadline for changing :attr:`uniform_bucket_level_access_enabled` from true to false.

        If the bucket's :attr:`uniform_bucket_level_access_enabled` is true, this property
        is the time after which that setting becomes immutable.

        If the bucket's :attr:`uniform_bucket_level_access_enabled` is false, this property
        is ``None``.

        :rtype: Union[:class:`datetime.datetime`, None]
        :returns: (readonly) Time after which :attr:`uniform_bucket_level_access_enabled` will
                  be frozen as true.
        """
        ubla = self.get("uniformBucketLevelAccess", {})
        stamp = ubla.get("lockedTime")
        if stamp is not None:
            stamp = _rfc3339_nanos_to_datetime(stamp)
        return stamp

    @property
    def bucket_policy_only_enabled(self):
        """Deprecated alias for :attr:`uniform_bucket_level_access_enabled`.

        :rtype: bool
        :returns: whether the bucket is configured to allow only IAM.
        """
        return self.uniform_bucket_level_access_enabled

    @bucket_policy_only_enabled.setter
    def bucket_policy_only_enabled(self, value):
        warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2)
        self.uniform_bucket_level_access_enabled = value

    @property
    def bucket_policy_only_locked_time(self):
        """Deprecated alias for :attr:`uniform_bucket_level_access_locked_time`.

        :rtype: Union[:class:`datetime.datetime`, None]
        :returns:
            (readonly) Time after which :attr:`bucket_policy_only_enabled` will
            be frozen as true.
        """
        return self.uniform_bucket_level_access_locked_time
615class Bucket(_PropertyMixin):
616 """A class representing a Bucket on Cloud Storage.
618 :type client: :class:`google.cloud.storage.client.Client`
619 :param client: A client which holds credentials and project configuration
620 for the bucket (which requires a project).
622 :type name: str
623 :param name: The name of the bucket. Bucket names must start and end with a
624 number or letter.
626 :type user_project: str
627 :param user_project: (Optional) the project ID to be billed for API
628 requests made via this instance.
629 """
    # Cap used by Bucket.delete() / Bucket.make_public() when they must
    # enumerate existing objects before acting.
    _MAX_OBJECTS_FOR_ITERATION = 256
    """Maximum number of existing objects allowed in iteration.

    This is used in Bucket.delete() and Bucket.make_public().
    """

    STORAGE_CLASSES = (
        STANDARD_STORAGE_CLASS,
        NEARLINE_STORAGE_CLASS,
        COLDLINE_STORAGE_CLASS,
        ARCHIVE_STORAGE_CLASS,
        MULTI_REGIONAL_LEGACY_STORAGE_CLASS,  # legacy
        REGIONAL_LEGACY_STORAGE_CLASS,  # legacy
        DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS,  # legacy
    )
    """Allowed values for :attr:`storage_class`.

    Default value is :attr:`STANDARD_STORAGE_CLASS`.

    See
    https://cloud.google.com/storage/docs/json_api/v1/buckets#storageClass
    https://cloud.google.com/storage/docs/storage-classes
    """

    _LOCATION_TYPES = (
        MULTI_REGION_LOCATION_TYPE,
        REGION_LOCATION_TYPE,
        DUAL_REGION_LOCATION_TYPE,
    )
    """Allowed values for :attr:`location_type`."""
    def __init__(self, client, name=None, user_project=None):
        """Instantiate the bucket wrapper and its ACL helpers.

        No API request is made; the bucket may not yet exist server-side.

        :type client: :class:`google.cloud.storage.client.Client`
        :param client: A client which holds credentials and project
                       configuration for the bucket.

        :type name: str
        :param name: The name of the bucket; validated before use.

        :type user_project: str
        :param user_project: (Optional) the project ID to be billed for API
                             requests made via this instance.
        """
        name = _validate_name(name)
        super(Bucket, self).__init__(name=name)
        self._client = client
        self._acl = BucketACL(self)
        self._default_object_acl = DefaultObjectACL(self)
        # Labels queued for removal on the next property patch.
        self._label_removals = set()
        self._user_project = user_project
    def __repr__(self):
        """Return a debug representation, e.g. ``<Bucket: my-bucket>``."""
        return f"<Bucket: {self.name}>"
    @property
    def client(self):
        """The client bound to this bucket.

        :rtype: :class:`~google.cloud.storage.client.Client`
        :returns: the client the bucket was instantiated with.
        """
        return self._client
683 def _set_properties(self, value):
684 """Set the properties for the current object.
686 :type value: dict or :class:`google.cloud.storage.batch._FutureDict`
687 :param value: The properties to be set.
688 """
689 self._label_removals.clear()
690 return super(Bucket, self)._set_properties(value)
    @property
    def rpo(self):
        """Get the RPO (Recovery Point Objective) of this bucket.

        See: https://cloud.google.com/storage/docs/managing-turbo-replication

        "ASYNC_TURBO" or "DEFAULT"
        :rtype: str
        """
        return self._properties.get("rpo")

    @rpo.setter
    def rpo(self, value):
        """
        Set the RPO (Recovery Point Objective) of this bucket.

        See: https://cloud.google.com/storage/docs/managing-turbo-replication

        :type value: str
        :param value: "ASYNC_TURBO" or "DEFAULT"
        """
        # Queues the change locally; it is sent on the next patch/update.
        self._patch_property("rpo", value)
    @property
    def user_project(self):
        """Project ID to be billed for API requests made via this bucket.

        If unset, API requests are billed to the bucket owner.

        A user project is required for all operations on Requester Pays buckets.

        See https://cloud.google.com/storage/docs/requester-pays#requirements for details.

        :rtype: str
        """
        return self._user_project
729 @classmethod
730 def from_string(cls, uri, client=None):
731 """Get a constructor for bucket object by URI.
733 .. code-block:: python
735 from google.cloud import storage
736 from google.cloud.storage.bucket import Bucket
737 client = storage.Client()
738 bucket = Bucket.from_string("gs://bucket", client=client)
740 :type uri: str
741 :param uri: The bucket uri pass to get bucket object.
743 :type client: :class:`~google.cloud.storage.client.Client` or
744 ``NoneType``
745 :param client: (Optional) The client to use. Application code should
746 *always* pass ``client``.
748 :rtype: :class:`google.cloud.storage.bucket.Bucket`
749 :returns: The bucket object created.
750 """
751 scheme, netloc, path, query, frag = urlsplit(uri)
753 if scheme != "gs":
754 raise ValueError("URI scheme must be gs")
756 return cls(client, name=netloc)
758 def blob(
759 self,
760 blob_name,
761 chunk_size=None,
762 encryption_key=None,
763 kms_key_name=None,
764 generation=None,
765 ):
766 """Factory constructor for blob object.
768 .. note::
769 This will not make an HTTP request; it simply instantiates
770 a blob object owned by this bucket.
772 :type blob_name: str
773 :param blob_name: The name of the blob to be instantiated.
775 :type chunk_size: int
776 :param chunk_size: The size of a chunk of data whenever iterating
777 (in bytes). This must be a multiple of 256 KB per
778 the API specification.
780 :type encryption_key: bytes
781 :param encryption_key:
782 (Optional) 32 byte encryption key for customer-supplied encryption.
784 :type kms_key_name: str
785 :param kms_key_name:
786 (Optional) Resource name of KMS key used to encrypt blob's content.
788 :type generation: long
789 :param generation: (Optional) If present, selects a specific revision of
790 this object.
792 :rtype: :class:`google.cloud.storage.blob.Blob`
793 :returns: The blob object created.
794 """
795 return Blob(
796 name=blob_name,
797 bucket=self,
798 chunk_size=chunk_size,
799 encryption_key=encryption_key,
800 kms_key_name=kms_key_name,
801 generation=generation,
802 )
804 def notification(
805 self,
806 topic_name=None,
807 topic_project=None,
808 custom_attributes=None,
809 event_types=None,
810 blob_name_prefix=None,
811 payload_format=NONE_PAYLOAD_FORMAT,
812 notification_id=None,
813 ):
814 """Factory: create a notification resource for the bucket.
816 See: :class:`.BucketNotification` for parameters.
818 :rtype: :class:`.BucketNotification`
819 """
820 return BucketNotification(
821 self,
822 topic_name=topic_name,
823 topic_project=topic_project,
824 custom_attributes=custom_attributes,
825 event_types=event_types,
826 blob_name_prefix=blob_name_prefix,
827 payload_format=payload_format,
828 notification_id=notification_id,
829 )
831 def exists(
832 self,
833 client=None,
834 timeout=_DEFAULT_TIMEOUT,
835 if_etag_match=None,
836 if_etag_not_match=None,
837 if_metageneration_match=None,
838 if_metageneration_not_match=None,
839 retry=DEFAULT_RETRY,
840 ):
841 """Determines whether or not this bucket exists.
843 If :attr:`user_project` is set, bills the API request to that project.
845 :type client: :class:`~google.cloud.storage.client.Client` or
846 ``NoneType``
847 :param client: (Optional) The client to use. If not passed, falls back
848 to the ``client`` stored on the current bucket.
850 :type timeout: float or tuple
851 :param timeout:
852 (Optional) The amount of time, in seconds, to wait
853 for the server response. See: :ref:`configuring_timeouts`
855 :type if_etag_match: Union[str, Set[str]]
856 :param if_etag_match: (Optional) Make the operation conditional on whether the
857 bucket's current ETag matches the given value.
859 :type if_etag_not_match: Union[str, Set[str]])
860 :param if_etag_not_match: (Optional) Make the operation conditional on whether the
861 bucket's current ETag does not match the given value.
863 :type if_metageneration_match: long
864 :param if_metageneration_match: (Optional) Make the operation conditional on whether the
865 bucket's current metageneration matches the given value.
867 :type if_metageneration_not_match: long
868 :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
869 bucket's current metageneration does not match the given value.
871 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
872 :param retry:
873 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
875 :rtype: bool
876 :returns: True if the bucket exists in Cloud Storage.
877 """
878 client = self._require_client(client)
879 # We only need the status code (200 or not) so we seek to
880 # minimize the returned payload.
881 query_params = {"fields": "name"}
883 if self.user_project is not None:
884 query_params["userProject"] = self.user_project
886 _add_generation_match_parameters(
887 query_params,
888 if_metageneration_match=if_metageneration_match,
889 if_metageneration_not_match=if_metageneration_not_match,
890 )
892 headers = {}
893 _add_etag_match_headers(
894 headers, if_etag_match=if_etag_match, if_etag_not_match=if_etag_not_match
895 )
897 try:
898 # We intentionally pass `_target_object=None` since fields=name
899 # would limit the local properties.
900 client._get_resource(
901 self.path,
902 query_params=query_params,
903 headers=headers,
904 timeout=timeout,
905 retry=retry,
906 _target_object=None,
907 )
908 except NotFound:
909 # NOTE: This will not fail immediately in a batch. However, when
910 # Batch.finish() is called, the resulting `NotFound` will be
911 # raised.
912 return False
913 return True
def create(
    self,
    client=None,
    project=None,
    location=None,
    predefined_acl=None,
    predefined_default_object_acl=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
):
    """Create the current bucket via "storage.buckets.insert".

    Raises :class:`google.cloud.exceptions.Conflict` when the bucket
    already exists.  If :attr:`user_project` is set, the API request is
    billed to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type project: str
    :param project: (Optional) The project under which the bucket is to
                    be created. If not passed, uses the project set on
                    the client.
    :raises ValueError: if ``project`` is None and client's
                        :attr:`project` is also None.

    :type location: str
    :param location: (Optional) The location of the bucket. If not passed,
                     the default location, US, will be used. See
                     https://cloud.google.com/storage/docs/bucket-locations

    :type predefined_acl: str
    :param predefined_acl:
        (Optional) Name of predefined ACL to apply to bucket. See:
        https://cloud.google.com/storage/docs/access-control/lists#predefined-acl

    :type predefined_default_object_acl: str
    :param predefined_default_object_acl:
        (Optional) Name of predefined ACL to apply to bucket's objects. See:
        https://cloud.google.com/storage/docs/access-control/lists#predefined-acl

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`
    """
    # All of the real work happens on the client; this method only
    # resolves which client to use and forwards the arguments.
    self._require_client(client).create_bucket(
        bucket_or_name=self,
        project=project,
        user_project=self.user_project,
        location=location,
        predefined_acl=predefined_acl,
        predefined_default_object_acl=predefined_default_object_acl,
        timeout=timeout,
        retry=retry,
    )
def update(
    self,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
):
    """Send every bucket property to the backend in a single PUT request.

    On success, ``_properties`` is refreshed from the server response.
    If :attr:`user_project` is set, the API request is billed to that
    project.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: the client to use. If not passed, falls back to the
                   ``client`` stored on the current object.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional on whether the
                                    bucket's current metageneration matches the given value.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
                                        bucket's current metageneration does not match the given value.

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`
    """
    # The generic PUT machinery lives on _PropertyMixin; nothing
    # bucket-specific is needed here.
    super().update(
        client=client,
        timeout=timeout,
        if_metageneration_match=if_metageneration_match,
        if_metageneration_not_match=if_metageneration_not_match,
        retry=retry,
    )
def reload(
    self,
    client=None,
    projection="noAcl",
    timeout=_DEFAULT_TIMEOUT,
    if_etag_match=None,
    if_etag_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY,
):
    """Refresh this bucket's properties from Cloud Storage.

    If :attr:`user_project` is set, the API request is billed to that
    project.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: the client to use. If not passed, falls back to the
                   ``client`` stored on the current object.

    :type projection: str
    :param projection: (Optional) If used, must be 'full' or 'noAcl'.
                       Defaults to ``'noAcl'``. Specifies the set of
                       properties to return.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type if_etag_match: Union[str, Set[str]]
    :param if_etag_match: (Optional) Make the operation conditional on whether the
                          bucket's current ETag matches the given value.

    :type if_etag_not_match: Union[str, Set[str]]
    :param if_etag_not_match: (Optional) Make the operation conditional on whether the
                              bucket's current ETag does not match the given value.

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional on whether the
                                    bucket's current metageneration matches the given value.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
                                        bucket's current metageneration does not match the given value.

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`
    """
    # Delegates straight to the shared GET/reload implementation.
    super().reload(
        client=client,
        projection=projection,
        timeout=timeout,
        if_etag_match=if_etag_match,
        if_etag_not_match=if_etag_not_match,
        if_metageneration_match=if_metageneration_match,
        if_metageneration_not_match=if_metageneration_not_match,
        retry=retry,
    )
def patch(
    self,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
):
    """Send only the changed properties in a PATCH request.

    On success, ``_properties`` is refreshed from the server response.
    If :attr:`user_project` is set, the API request is billed to that
    project.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: the client to use. If not passed, falls back to the
                   ``client`` stored on the current object.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional on whether the
                                    bucket's current metageneration matches the given value.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
                                        bucket's current metageneration does not match the given value.

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`
    """
    # Buckets are special-cased: deleting a label is expressed to the
    # API by PATCHing that label's value to None, so pending removals
    # must be folded into the outgoing "labels" payload first.
    if self._label_removals:
        self._changes.add("labels")
        labels = self._properties.setdefault("labels", {})
        for removed_label in self._label_removals:
            labels[removed_label] = None

    # Hand the (possibly augmented) change set to the generic PATCH.
    super().patch(
        client=client,
        if_metageneration_match=if_metageneration_match,
        if_metageneration_not_match=if_metageneration_not_match,
        timeout=timeout,
        retry=retry,
    )
@property
def acl(self):
    """The ACL helper object bound to this bucket (stored as ``_acl``)."""
    bucket_acl = self._acl
    return bucket_acl
@property
def default_object_acl(self):
    """The default-object ACL helper bound to this bucket (``_default_object_acl``)."""
    object_acl = self._default_object_acl
    return object_acl
@staticmethod
def path_helper(bucket_name):
    """Build the relative URL path for a bucket.

    :type bucket_name: str
    :param bucket_name: The bucket name in the path.

    :rtype: str
    :returns: The relative URL path for ``bucket_name``, i.e. ``/b/<name>``.
    """
    return "/b/{}".format(bucket_name)
@property
def path(self):
    """The relative URL path to this bucket.

    :raises ValueError: if the bucket has no name set.
    """
    bucket_name = self.name
    if bucket_name:
        return self.path_helper(bucket_name)
    raise ValueError("Cannot determine path without bucket name.")
def get_blob(
    self,
    blob_name,
    client=None,
    encryption_key=None,
    generation=None,
    if_etag_match=None,
    if_etag_not_match=None,
    if_generation_match=None,
    if_generation_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
    **kwargs,
):
    """Fetch a blob's metadata by name, returning ``None`` when absent.

    See a [code sample](https://cloud.google.com/storage/docs/samples/storage-get-metadata#storage_get_metadata-python)
    on how to retrieve metadata of an object.

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob_name: str
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type encryption_key: bytes
    :param encryption_key:
        (Optional) 32 byte encryption key for customer-supplied encryption.
        See https://cloud.google.com/storage/docs/encryption#customer-supplied.

    :type generation: long
    :param generation:
        (Optional) If present, selects a specific revision of this object.

    :type if_etag_match: Union[str, Set[str]]
    :param if_etag_match: (Optional) See :ref:`using-if-etag-match`

    :type if_etag_not_match: Union[str, Set[str]]
    :param if_etag_not_match: (Optional) See :ref:`using-if-etag-not-match`

    :type if_generation_match: long
    :param if_generation_match: (Optional) See :ref:`using-if-generation-match`

    :type if_generation_not_match: long
    :param if_generation_not_match: (Optional) See :ref:`using-if-generation-not-match`

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) See :ref:`using-if-metageneration-match`

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) See :ref:`using-if-metageneration-not-match`

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :param kwargs: Keyword arguments to pass to the
                   :class:`~google.cloud.storage.blob.Blob` constructor.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    candidate = Blob(
        bucket=self,
        name=blob_name,
        encryption_key=encryption_key,
        generation=generation,
        **kwargs,
    )
    try:
        # NOTE: This will not fail immediately in a batch. However, when
        # Batch.finish() is called, the resulting `NotFound` will be
        # raised.
        candidate.reload(
            client=client,
            timeout=timeout,
            if_etag_match=if_etag_match,
            if_etag_not_match=if_etag_not_match,
            if_generation_match=if_generation_match,
            if_generation_not_match=if_generation_not_match,
            if_metageneration_match=if_metageneration_match,
            if_metageneration_not_match=if_metageneration_not_match,
            retry=retry,
        )
    except NotFound:
        return None
    return candidate
def list_blobs(
    self,
    max_results=None,
    page_token=None,
    prefix=None,
    delimiter=None,
    start_offset=None,
    end_offset=None,
    include_trailing_delimiter=None,
    versions=None,
    projection="noAcl",
    fields=None,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
    match_glob=None,
):
    """Return an iterator over the blobs in this bucket.

    If :attr:`user_project` is set, bills the API request to that project.

    :type max_results: int
    :param max_results: (Optional) The maximum number of blobs to return.

    :type page_token: str
    :param page_token:
        (Optional) If present, return the next batch of blobs, using the
        value, which must correspond to the ``nextPageToken`` value
        returned in the previous response. Deprecated: use the ``pages``
        property of the returned iterator instead of manually passing the
        token.

    :type prefix: str
    :param prefix: (Optional) Prefix used to filter blobs.

    :type delimiter: str
    :param delimiter: (Optional) Delimiter, used with ``prefix`` to
                      emulate hierarchy.

    :type start_offset: str
    :param start_offset:
        (Optional) Filter results to objects whose names are
        lexicographically equal to or after ``startOffset``. If
        ``endOffset`` is also set, the objects listed will have names
        between ``startOffset`` (inclusive) and ``endOffset`` (exclusive).

    :type end_offset: str
    :param end_offset:
        (Optional) Filter results to objects whose names are
        lexicographically before ``endOffset``. If ``startOffset`` is also
        set, the objects listed will have names between ``startOffset``
        (inclusive) and ``endOffset`` (exclusive).

    :type include_trailing_delimiter: boolean
    :param include_trailing_delimiter:
        (Optional) If true, objects that end in exactly one instance of
        ``delimiter`` will have their metadata included in ``items`` in
        addition to ``prefixes``.

    :type versions: bool
    :param versions: (Optional) Whether object versions should be returned
                     as separate blobs.

    :type projection: str
    :param projection: (Optional) If used, must be 'full' or 'noAcl'.
                       Defaults to ``'noAcl'``. Specifies the set of
                       properties to return.

    :type fields: str
    :param fields:
        (Optional) Selector specifying which fields to include
        in a partial response. Must be a list of fields. For
        example to get a partial response with just the next
        page token and the name and language of each blob returned:
        ``'items(name,contentLanguage),nextPageToken'``.
        See: https://cloud.google.com/storage/docs/json_api/v1/parameters#fields

    :type client: :class:`~google.cloud.storage.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :type match_glob: str
    :param match_glob:
        (Optional) A glob pattern used to filter results (for example, foo*bar).
        The string value must be UTF-8 encoded. See:
        https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob

    :rtype: :class:`~google.api_core.page_iterator.Iterator`
    :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob`
              in this bucket matching the arguments.
    """
    # The listing implementation lives on the client; this method is a
    # convenience wrapper bound to a particular bucket.
    return self._require_client(client).list_blobs(
        self,
        max_results=max_results,
        page_token=page_token,
        prefix=prefix,
        delimiter=delimiter,
        start_offset=start_offset,
        end_offset=end_offset,
        include_trailing_delimiter=include_trailing_delimiter,
        versions=versions,
        projection=projection,
        fields=fields,
        timeout=timeout,
        retry=retry,
        match_glob=match_glob,
    )
def list_notifications(
    self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
):
    """List the Pub/Sub notification configurations defined on this bucket.

    See:
    https://cloud.google.com/storage/docs/json_api/v1/notifications/list

    If :attr:`user_project` is set, bills the API request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :rtype: list of :class:`.BucketNotification`
    :returns: notification instances
    """
    client = self._require_client(client)
    notifications = client._list_resource(
        "{}/notificationConfigs".format(self.path),
        _item_to_notification,
        timeout=timeout,
        retry=retry,
    )
    # Each yielded BucketNotification needs a back-reference to us.
    notifications.bucket = self
    return notifications
def get_notification(
    self,
    notification_id,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
):
    """Fetch one Pub/Sub notification configuration by its id.

    See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/notifications/get)
    and a [code sample](https://cloud.google.com/storage/docs/samples/storage-print-pubsub-bucket-notification#storage_print_pubsub_bucket_notification-python).

    If :attr:`user_project` is set, bills the API request to that project.

    :type notification_id: str
    :param notification_id: The notification id to retrieve the notification configuration.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :rtype: :class:`.BucketNotification`
    :returns: notification instance.
    """
    # Build a stub config, then populate it with a GET round-trip.
    config = self.notification(notification_id=notification_id)
    config.reload(client=client, timeout=timeout, retry=retry)
    return config
def delete(
    self,
    force=False,
    client=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
):
    """Delete this bucket.

    The bucket **must** be empty in order to submit a delete request. If
    ``force=True`` is passed, this will first attempt to delete all the
    objects / blobs in the bucket (i.e. try to empty the bucket).

    If the bucket doesn't exist, this will raise
    :class:`google.cloud.exceptions.NotFound`. If the bucket is not empty
    (and ``force=False``), will raise :class:`google.cloud.exceptions.Conflict`.

    If ``force=True`` and the bucket contains more than 256 objects / blobs
    this will cowardly refuse to delete the objects (or the bucket). This
    is to prevent accidental bucket deletion and to prevent extremely long
    runtime of this method. Also note that ``force=True`` is not supported
    in a ``Batch`` context.

    If :attr:`user_project` is set, bills the API request to that project.

    :type force: bool
    :param force: If True, empties the bucket's objects then deletes it.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional on whether the
                                    bucket's current metageneration matches the given value.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
                                        bucket's current metageneration does not match the given value.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
             contains more than 256 objects / blobs.
    """
    client = self._require_client(client)

    query_params = {}
    if self.user_project is not None:
        query_params["userProject"] = self.user_project
    _add_generation_match_parameters(
        query_params,
        if_metageneration_match=if_metageneration_match,
        if_metageneration_not_match=if_metageneration_not_match,
    )

    if force:
        limit = self._MAX_OBJECTS_FOR_ITERATION
        # Fetch one more than the cap so we can tell "at the cap" from
        # "over the cap".
        contained_blobs = list(
            self.list_blobs(
                max_results=limit + 1,
                client=client,
                timeout=timeout,
                retry=retry,
            )
        )
        if len(contained_blobs) > limit:
            raise ValueError(
                (
                    "Refusing to delete bucket with more than "
                    "%d objects. If you actually want to delete "
                    "this bucket, please delete the objects "
                    "yourself before calling Bucket.delete()."
                )
                % (limit,)
            )

        # Ignore 404 errors on delete.
        self.delete_blobs(
            contained_blobs,
            on_error=lambda blob: None,
            client=client,
            timeout=timeout,
            retry=retry,
        )

    # We intentionally pass `_target_object=None` since a DELETE
    # request has no response value (whether in a standard request or
    # in a batch request).
    client._delete_resource(
        self.path,
        query_params=query_params,
        timeout=timeout,
        retry=retry,
        _target_object=None,
    )
def delete_blob(
    self,
    blob_name,
    client=None,
    generation=None,
    if_generation_match=None,
    if_generation_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
):
    """Delete a single blob from this bucket by name.

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob_name: str
    :param blob_name: A blob name to delete.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type generation: long
    :param generation: (Optional) If present, permanently deletes a specific
                       revision of this object.

    :type if_generation_match: long
    :param if_generation_match: (Optional) See :ref:`using-if-generation-match`

    :type if_generation_not_match: long
    :param if_generation_not_match: (Optional) See :ref:`using-if-generation-not-match`

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) See :ref:`using-if-metageneration-match`

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) See :ref:`using-if-metageneration-not-match`

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :raises: :class:`google.cloud.exceptions.NotFound` if the blob isn't
             found. To suppress the exception, use :meth:`delete_blobs`
             by passing a no-op ``on_error`` callback.
    """
    client = self._require_client(client)
    # A throwaway Blob gives us the correct URL path and base query
    # parameters (generation / userProject) for free.
    target = Blob(blob_name, bucket=self, generation=generation)

    query_params = copy.deepcopy(target._query_params)
    _add_generation_match_parameters(
        query_params,
        if_generation_match=if_generation_match,
        if_generation_not_match=if_generation_not_match,
        if_metageneration_match=if_metageneration_match,
        if_metageneration_not_match=if_metageneration_not_match,
    )
    # We intentionally pass `_target_object=None` since a DELETE
    # request has no response value (whether in a standard request or
    # in a batch request).
    client._delete_resource(
        target.path,
        query_params=query_params,
        timeout=timeout,
        retry=retry,
        _target_object=None,
    )
def delete_blobs(
    self,
    blobs,
    on_error=None,
    client=None,
    preserve_generation=False,
    timeout=_DEFAULT_TIMEOUT,
    if_generation_match=None,
    if_generation_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
):
    """Delete several blobs from this bucket, one :meth:`delete_blob` each.

    By default, any generation information in the list of blobs is ignored,
    and the live versions of all blobs are deleted. Set
    ``preserve_generation`` to True if blob generation should instead be
    propagated from the list of blobs.

    If :attr:`user_project` is set, bills the API request to that project.

    :type blobs: list
    :param blobs: A list of :class:`~google.cloud.storage.blob.Blob`-s or
                  blob names to delete.

    :type on_error: callable
    :param on_error: (Optional) Takes single argument: ``blob``.
                     Called once for each blob raising
                     :class:`~google.cloud.exceptions.NotFound`;
                     otherwise, the exception is propagated.
                     Note that ``on_error`` is not supported in a ``Batch`` context.

    :type client: :class:`~google.cloud.storage.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type preserve_generation: bool
    :param preserve_generation: (Optional) Deletes only the generation specified on the blob object,
                                instead of the live version, if set to True. Only
                                :class:`~google.cloud.storage.blob.Blob` objects can have their
                                generation set in this way. Default: False.

    :type if_generation_match: list of long
    :param if_generation_match:
        (Optional) See :ref:`using-if-generation-match`
        The list must match ``blobs`` item-to-item.

    :type if_generation_not_match: list of long
    :param if_generation_not_match:
        (Optional) See :ref:`using-if-generation-not-match`
        The list must match ``blobs`` item-to-item.

    :type if_metageneration_match: list of long
    :param if_metageneration_match:
        (Optional) See :ref:`using-if-metageneration-match`
        The list must match ``blobs`` item-to-item.

    :type if_metageneration_not_match: list of long
    :param if_metageneration_not_match:
        (Optional) See :ref:`using-if-metageneration-not-match`
        The list must match ``blobs`` item-to-item.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :raises: :class:`~google.cloud.exceptions.NotFound` (if
             `on_error` is not passed).
    """
    # Every precondition list, when given, must be exactly as long as
    # ``blobs``; fail fast before any deletes happen.
    _raise_if_len_differs(
        len(blobs),
        if_generation_match=if_generation_match,
        if_generation_not_match=if_generation_not_match,
        if_metageneration_match=if_metageneration_match,
        if_metageneration_not_match=if_metageneration_not_match,
    )
    # Consume the precondition lists in lockstep with ``blobs``;
    # absent lists yield None for every item.
    gen_match = iter(if_generation_match or [])
    gen_not_match = iter(if_generation_not_match or [])
    metagen_match = iter(if_metageneration_match or [])
    metagen_not_match = iter(if_metageneration_not_match or [])

    for blob in blobs:
        if isinstance(blob, str):
            blob_name, generation = blob, None
        else:
            blob_name = blob.name
            generation = blob.generation if preserve_generation else None

        try:
            self.delete_blob(
                blob_name,
                client=client,
                generation=generation,
                if_generation_match=next(gen_match, None),
                if_generation_not_match=next(gen_not_match, None),
                if_metageneration_match=next(metagen_match, None),
                if_metageneration_not_match=next(metagen_not_match, None),
                timeout=timeout,
                retry=retry,
            )
        except NotFound:
            if on_error is None:
                raise
            on_error(blob)
def copy_blob(
    self,
    blob,
    destination_bucket,
    new_name=None,
    client=None,
    preserve_acl=True,
    source_generation=None,
    if_generation_match=None,
    if_generation_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    if_source_generation_match=None,
    if_source_generation_not_match=None,
    if_source_metageneration_match=None,
    if_source_metageneration_not_match=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
):
    """Copy ``blob`` into ``destination_bucket``, optionally renaming it.

    If :attr:`user_project` is set, bills the API request to that project.

    See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/objects/copy)
    and a [code sample](https://cloud.google.com/storage/docs/samples/storage-copy-file#storage_copy_file-python).

    :type blob: :class:`google.cloud.storage.blob.Blob`
    :param blob: The blob to be copied.

    :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket`
    :param destination_bucket: The bucket into which the blob should be
                               copied.

    :type new_name: str
    :param new_name: (Optional) The new name for the copied file.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type preserve_acl: bool
    :param preserve_acl: DEPRECATED. This argument is not functional!
                         (Optional) Copies ACL from old blob to new blob.
                         Default: True.
                         Note that ``preserve_acl`` is not supported in a
                         ``Batch`` context.

    :type source_generation: long
    :param source_generation: (Optional) The generation of the blob to be
                              copied.

    :type if_generation_match: long
    :param if_generation_match:
        (Optional) See :ref:`using-if-generation-match`
        Note that the generation to be matched is that of the
        ``destination`` blob.

    :type if_generation_not_match: long
    :param if_generation_not_match:
        (Optional) See :ref:`using-if-generation-not-match`
        Note that the generation to be matched is that of the
        ``destination`` blob.

    :type if_metageneration_match: long
    :param if_metageneration_match:
        (Optional) See :ref:`using-if-metageneration-match`
        Note that the metageneration to be matched is that of the
        ``destination`` blob.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match:
        (Optional) See :ref:`using-if-metageneration-not-match`
        Note that the metageneration to be matched is that of the
        ``destination`` blob.

    :type if_source_generation_match: long
    :param if_source_generation_match:
        (Optional) Makes the operation conditional on whether the source
        object's generation matches the given value.

    :type if_source_generation_not_match: long
    :param if_source_generation_not_match:
        (Optional) Makes the operation conditional on whether the source
        object's generation does not match the given value.

    :type if_source_metageneration_match: long
    :param if_source_metageneration_match:
        (Optional) Makes the operation conditional on whether the source
        object's current metageneration matches the given value.

    :type if_source_metageneration_not_match: long
    :param if_source_metageneration_not_match:
        (Optional) Makes the operation conditional on whether the source
        object's current metageneration does not match the given value.

    :type timeout: float or tuple
    :param timeout:
        (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry:
        (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :rtype: :class:`google.cloud.storage.blob.Blob`
    :returns: The new Blob.
    """
    client = self._require_client(client)

    query_params = {}
    if self.user_project is not None:
        query_params["userProject"] = self.user_project
    if source_generation is not None:
        query_params["sourceGeneration"] = source_generation
    _add_generation_match_parameters(
        query_params,
        if_generation_match=if_generation_match,
        if_generation_not_match=if_generation_not_match,
        if_metageneration_match=if_metageneration_match,
        if_metageneration_not_match=if_metageneration_not_match,
        if_source_generation_match=if_source_generation_match,
        if_source_generation_not_match=if_source_generation_not_match,
        if_source_metageneration_match=if_source_metageneration_match,
        if_source_metageneration_not_match=if_source_metageneration_not_match,
    )

    # Default the destination name to the source name.
    dest_blob = Blob(
        bucket=destination_bucket,
        name=blob.name if new_name is None else new_name,
    )
    copy_result = client._post_resource(
        blob.path + "/copyTo" + dest_blob.path,
        None,
        query_params=query_params,
        timeout=timeout,
        retry=retry,
        _target_object=dest_blob,
    )

    if not preserve_acl:
        dest_blob.acl.save(acl={}, client=client, timeout=timeout)

    dest_blob._set_properties(copy_result)
    return dest_blob
1918 def rename_blob(
1919 self,
1920 blob,
1921 new_name,
1922 client=None,
1923 if_generation_match=None,
1924 if_generation_not_match=None,
1925 if_metageneration_match=None,
1926 if_metageneration_not_match=None,
1927 if_source_generation_match=None,
1928 if_source_generation_not_match=None,
1929 if_source_metageneration_match=None,
1930 if_source_metageneration_not_match=None,
1931 timeout=_DEFAULT_TIMEOUT,
1932 retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
1933 ):
1934 """Rename the given blob using copy and delete operations.
1936 If :attr:`user_project` is set, bills the API request to that project.
1938 Effectively, copies blob to the same bucket with a new name, then
1939 deletes the blob.
1941 .. warning::
1943 This method will first duplicate the data and then delete the
1944 old blob. This means that with very large objects renaming
1945 could be a very (temporarily) costly or a very slow operation.
1946 If you need more control over the copy and deletion, instead
1947 use ``google.cloud.storage.blob.Blob.copy_to`` and
1948 ``google.cloud.storage.blob.Blob.delete`` directly.
1950 Also note that this method is not fully supported in a
1951 ``Batch`` context.
1953 :type blob: :class:`google.cloud.storage.blob.Blob`
1954 :param blob: The blob to be renamed.
1956 :type new_name: str
1957 :param new_name: The new name for this blob.
1959 :type client: :class:`~google.cloud.storage.client.Client` or
1960 ``NoneType``
1961 :param client: (Optional) The client to use. If not passed, falls back
1962 to the ``client`` stored on the current bucket.
1964 :type if_generation_match: long
1965 :param if_generation_match:
1966 (Optional) See :ref:`using-if-generation-match`
1967 Note that the generation to be matched is that of the
1968 ``destination`` blob.
1970 :type if_generation_not_match: long
1971 :param if_generation_not_match:
1972 (Optional) See :ref:`using-if-generation-not-match`
1973 Note that the generation to be matched is that of the
1974 ``destination`` blob.
1976 :type if_metageneration_match: long
1977 :param if_metageneration_match:
1978 (Optional) See :ref:`using-if-metageneration-match`
1979 Note that the metageneration to be matched is that of the
1980 ``destination`` blob.
1982 :type if_metageneration_not_match: long
1983 :param if_metageneration_not_match:
1984 (Optional) See :ref:`using-if-metageneration-not-match`
1985 Note that the metageneration to be matched is that of the
1986 ``destination`` blob.
1988 :type if_source_generation_match: long
1989 :param if_source_generation_match:
1990 (Optional) Makes the operation conditional on whether the source
1991 object's generation matches the given value. Also used in the
1992 (implied) delete request.
1994 :type if_source_generation_not_match: long
1995 :param if_source_generation_not_match:
1996 (Optional) Makes the operation conditional on whether the source
1997 object's generation does not match the given value. Also used in
1998 the (implied) delete request.
2000 :type if_source_metageneration_match: long
2001 :param if_source_metageneration_match:
2002 (Optional) Makes the operation conditional on whether the source
2003 object's current metageneration matches the given value. Also used
2004 in the (implied) delete request.
2006 :type if_source_metageneration_not_match: long
2007 :param if_source_metageneration_not_match:
2008 (Optional) Makes the operation conditional on whether the source
2009 object's current metageneration does not match the given value.
2010 Also used in the (implied) delete request.
2012 :type timeout: float or tuple
2013 :param timeout:
2014 (Optional) The amount of time, in seconds, to wait
2015 for the server response. See: :ref:`configuring_timeouts`
2017 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
2018 :param retry:
2019 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
2021 :rtype: :class:`Blob`
2022 :returns: The newly-renamed blob.
2023 """
2024 same_name = blob.name == new_name
2026 new_blob = self.copy_blob(
2027 blob,
2028 self,
2029 new_name,
2030 client=client,
2031 timeout=timeout,
2032 if_generation_match=if_generation_match,
2033 if_generation_not_match=if_generation_not_match,
2034 if_metageneration_match=if_metageneration_match,
2035 if_metageneration_not_match=if_metageneration_not_match,
2036 if_source_generation_match=if_source_generation_match,
2037 if_source_generation_not_match=if_source_generation_not_match,
2038 if_source_metageneration_match=if_source_metageneration_match,
2039 if_source_metageneration_not_match=if_source_metageneration_not_match,
2040 retry=retry,
2041 )
2043 if not same_name:
2044 blob.delete(
2045 client=client,
2046 timeout=timeout,
2047 if_generation_match=if_source_generation_match,
2048 if_generation_not_match=if_source_generation_not_match,
2049 if_metageneration_match=if_source_metageneration_match,
2050 if_metageneration_not_match=if_source_metageneration_not_match,
2051 retry=retry,
2052 )
2053 return new_blob
2055 @property
2056 def cors(self):
2057 """Retrieve or set CORS policies configured for this bucket.
2059 See http://www.w3.org/TR/cors/ and
2060 https://cloud.google.com/storage/docs/json_api/v1/buckets
2062 .. note::
2064 The getter for this property returns a list which contains
2065 *copies* of the bucket's CORS policy mappings. Mutating the list
2066 or one of its dicts has no effect unless you then re-assign the
2067 dict via the setter. E.g.:
2069 >>> policies = bucket.cors
2070 >>> policies.append({'origin': '/foo', ...})
2071 >>> policies[1]['maxAgeSeconds'] = 3600
2072 >>> del policies[0]
2073 >>> bucket.cors = policies
2074 >>> bucket.update()
2076 :setter: Set CORS policies for this bucket.
2077 :getter: Gets the CORS policies for this bucket.
2079 :rtype: list of dictionaries
2080 :returns: A sequence of mappings describing each CORS policy.
2081 """
2082 return [copy.deepcopy(policy) for policy in self._properties.get("cors", ())]
2084 @cors.setter
2085 def cors(self, entries):
2086 """Set CORS policies configured for this bucket.
2088 See http://www.w3.org/TR/cors/ and
2089 https://cloud.google.com/storage/docs/json_api/v1/buckets
2091 :type entries: list of dictionaries
2092 :param entries: A sequence of mappings describing each CORS policy.
2093 """
2094 self._patch_property("cors", entries)
    default_event_based_hold = _scalar_property("defaultEventBasedHold")
    """Are uploaded objects automatically placed under an event-based hold?

    If True, uploaded objects will be placed under an event-based hold to
    be released at a future time. When released an object will then begin
    the retention period determined by the policy retention period for the
    object bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    If the property is not set locally, returns ``None``.

    :rtype: bool or ``NoneType``
    """
2111 @property
2112 def default_kms_key_name(self):
2113 """Retrieve / set default KMS encryption key for objects in the bucket.
2115 See https://cloud.google.com/storage/docs/json_api/v1/buckets
2117 :setter: Set default KMS encryption key for items in this bucket.
2118 :getter: Get default KMS encryption key for items in this bucket.
2120 :rtype: str
2121 :returns: Default KMS encryption key, or ``None`` if not set.
2122 """
2123 encryption_config = self._properties.get("encryption", {})
2124 return encryption_config.get("defaultKmsKeyName")
2126 @default_kms_key_name.setter
2127 def default_kms_key_name(self, value):
2128 """Set default KMS encryption key for objects in the bucket.
2130 :type value: str or None
2131 :param value: new KMS key name (None to clear any existing key).
2132 """
2133 encryption_config = self._properties.get("encryption", {})
2134 encryption_config["defaultKmsKeyName"] = value
2135 self._patch_property("encryption", encryption_config)
2137 @property
2138 def labels(self):
2139 """Retrieve or set labels assigned to this bucket.
2141 See
2142 https://cloud.google.com/storage/docs/json_api/v1/buckets#labels
2144 .. note::
2146 The getter for this property returns a dict which is a *copy*
2147 of the bucket's labels. Mutating that dict has no effect unless
2148 you then re-assign the dict via the setter. E.g.:
2150 >>> labels = bucket.labels
2151 >>> labels['new_key'] = 'some-label'
2152 >>> del labels['old_key']
2153 >>> bucket.labels = labels
2154 >>> bucket.update()
2156 :setter: Set labels for this bucket.
2157 :getter: Gets the labels for this bucket.
2159 :rtype: :class:`dict`
2160 :returns: Name-value pairs (string->string) labelling the bucket.
2161 """
2162 labels = self._properties.get("labels")
2163 if labels is None:
2164 return {}
2165 return copy.deepcopy(labels)
2167 @labels.setter
2168 def labels(self, mapping):
2169 """Set labels assigned to this bucket.
2171 See
2172 https://cloud.google.com/storage/docs/json_api/v1/buckets#labels
2174 :type mapping: :class:`dict`
2175 :param mapping: Name-value pairs (string->string) labelling the bucket.
2176 """
2177 # If any labels have been expressly removed, we need to track this
2178 # so that a future .patch() call can do the correct thing.
2179 existing = set([k for k in self.labels.keys()])
2180 incoming = set([k for k in mapping.keys()])
2181 self._label_removals = self._label_removals.union(existing.difference(incoming))
2182 mapping = {k: str(v) for k, v in mapping.items()}
2184 # Actually update the labels on the object.
2185 self._patch_property("labels", copy.deepcopy(mapping))
2187 @property
2188 def etag(self):
2189 """Retrieve the ETag for the bucket.
2191 See https://tools.ietf.org/html/rfc2616#section-3.11 and
2192 https://cloud.google.com/storage/docs/json_api/v1/buckets
2194 :rtype: str or ``NoneType``
2195 :returns: The bucket etag or ``None`` if the bucket's
2196 resource has not been loaded from the server.
2197 """
2198 return self._properties.get("etag")
2200 @property
2201 def id(self):
2202 """Retrieve the ID for the bucket.
2204 See https://cloud.google.com/storage/docs/json_api/v1/buckets
2206 :rtype: str or ``NoneType``
2207 :returns: The ID of the bucket or ``None`` if the bucket's
2208 resource has not been loaded from the server.
2209 """
2210 return self._properties.get("id")
2212 @property
2213 def iam_configuration(self):
2214 """Retrieve IAM configuration for this bucket.
2216 :rtype: :class:`IAMConfiguration`
2217 :returns: an instance for managing the bucket's IAM configuration.
2218 """
2219 info = self._properties.get("iamConfiguration", {})
2220 return IAMConfiguration.from_api_repr(info, self)
2222 @property
2223 def lifecycle_rules(self):
2224 """Retrieve or set lifecycle rules configured for this bucket.
2226 See https://cloud.google.com/storage/docs/lifecycle and
2227 https://cloud.google.com/storage/docs/json_api/v1/buckets
2229 .. note::
2231 The getter for this property returns a generator which yields
2232 *copies* of the bucket's lifecycle rules mappings. Mutating the
2233 output dicts has no effect unless you then re-assign the dict via
2234 the setter. E.g.:
2236 >>> rules = list(bucket.lifecycle_rules)
2237 >>> rules.append({'origin': '/foo', ...})
2238 >>> rules[1]['rule']['action']['type'] = 'Delete'
2239 >>> del rules[0]
2240 >>> bucket.lifecycle_rules = rules
2241 >>> bucket.update()
2243 :setter: Set lifecycle rules for this bucket.
2244 :getter: Gets the lifecycle rules for this bucket.
2246 :rtype: generator(dict)
2247 :returns: A sequence of mappings describing each lifecycle rule.
2248 """
2249 info = self._properties.get("lifecycle", {})
2250 for rule in info.get("rule", ()):
2251 action_type = rule["action"]["type"]
2252 if action_type == "Delete":
2253 yield LifecycleRuleDelete.from_api_repr(rule)
2254 elif action_type == "SetStorageClass":
2255 yield LifecycleRuleSetStorageClass.from_api_repr(rule)
2256 elif action_type == "AbortIncompleteMultipartUpload":
2257 yield LifecycleRuleAbortIncompleteMultipartUpload.from_api_repr(rule)
2258 else:
2259 warnings.warn(
2260 "Unknown lifecycle rule type received: {}. Please upgrade to the latest version of google-cloud-storage.".format(
2261 rule
2262 ),
2263 UserWarning,
2264 stacklevel=1,
2265 )
2267 @lifecycle_rules.setter
2268 def lifecycle_rules(self, rules):
2269 """Set lifecycle rules configured for this bucket.
2271 See https://cloud.google.com/storage/docs/lifecycle and
2272 https://cloud.google.com/storage/docs/json_api/v1/buckets
2274 :type rules: list of dictionaries
2275 :param rules: A sequence of mappings describing each lifecycle rule.
2276 """
2277 rules = [dict(rule) for rule in rules] # Convert helpers if needed
2278 self._patch_property("lifecycle", {"rule": rules})
2280 def clear_lifecyle_rules(self):
2281 """Clear lifecycle rules configured for this bucket.
2283 See https://cloud.google.com/storage/docs/lifecycle and
2284 https://cloud.google.com/storage/docs/json_api/v1/buckets
2285 """
2286 self.lifecycle_rules = []
2288 def add_lifecycle_delete_rule(self, **kw):
2289 """Add a "delete" rule to lifecycle rules configured for this bucket.
2291 This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle),
2292 which is set on the bucket. For the general format of a lifecycle configuration, see the
2293 [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets).
2294 See also a [code sample](https://cloud.google.com/storage/docs/samples/storage-enable-bucket-lifecycle-management#storage_enable_bucket_lifecycle_management-python).
2296 :type kw: dict
2297 :params kw: arguments passed to :class:`LifecycleRuleConditions`.
2298 """
2299 rules = list(self.lifecycle_rules)
2300 rules.append(LifecycleRuleDelete(**kw))
2301 self.lifecycle_rules = rules
2303 def add_lifecycle_set_storage_class_rule(self, storage_class, **kw):
2304 """Add a "set storage class" rule to lifecycle rules.
2306 This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle),
2307 which is set on the bucket. For the general format of a lifecycle configuration, see the
2308 [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets).
2310 :type storage_class: str, one of :attr:`STORAGE_CLASSES`.
2311 :param storage_class: new storage class to assign to matching items.
2313 :type kw: dict
2314 :params kw: arguments passed to :class:`LifecycleRuleConditions`.
2315 """
2316 rules = list(self.lifecycle_rules)
2317 rules.append(LifecycleRuleSetStorageClass(storage_class, **kw))
2318 self.lifecycle_rules = rules
2320 def add_lifecycle_abort_incomplete_multipart_upload_rule(self, **kw):
2321 """Add a "abort incomplete multipart upload" rule to lifecycle rules.
2323 .. note::
2324 The "age" lifecycle condition is the only supported condition
2325 for this rule.
2327 This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle),
2328 which is set on the bucket. For the general format of a lifecycle configuration, see the
2329 [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets).
2331 :type kw: dict
2332 :params kw: arguments passed to :class:`LifecycleRuleConditions`.
2333 """
2334 rules = list(self.lifecycle_rules)
2335 rules.append(LifecycleRuleAbortIncompleteMultipartUpload(**kw))
2336 self.lifecycle_rules = rules
    # Raw accessor for the "location" resource field; the public ``location``
    # property wraps it so assignment can emit a deprecation warning.
    _location = _scalar_property("location")

    @property
    def location(self):
        """Retrieve location configured for this bucket.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets and
        https://cloud.google.com/storage/docs/locations

        Returns ``None`` if the property has not been set before creation,
        or if the bucket's resource has not been loaded from the server.

        :rtype: str or ``NoneType``
        """
        return self._location

    @location.setter
    def location(self, value):
        """(Deprecated) Set `Bucket.location`

        This can only be set at bucket **creation** time.

        See https://cloud.google.com/storage/docs/json_api/v1/buckets and
        https://cloud.google.com/storage/docs/bucket-locations

        .. warning::

            Assignment to 'Bucket.location' is deprecated, as it is only
            valid before the bucket is created. Instead, pass the location
            to `Bucket.create`.
        """
        # Warn on every assignment; the value is still recorded so a
        # subsequent ``create()`` call can use it.
        warnings.warn(_LOCATION_SETTER_MESSAGE, DeprecationWarning, stacklevel=2)
        self._location = value
2371 @property
2372 def data_locations(self):
2373 """Retrieve the list of regional locations for custom dual-region buckets.
2375 See https://cloud.google.com/storage/docs/json_api/v1/buckets and
2376 https://cloud.google.com/storage/docs/locations
2378 Returns ``None`` if the property has not been set before creation,
2379 if the bucket's resource has not been loaded from the server,
2380 or if the bucket is not a dual-regions bucket.
2381 :rtype: list of str or ``NoneType``
2382 """
2383 custom_placement_config = self._properties.get("customPlacementConfig", {})
2384 return custom_placement_config.get("dataLocations")
2386 @property
2387 def location_type(self):
2388 """Retrieve the location type for the bucket.
2390 See https://cloud.google.com/storage/docs/storage-classes
2392 :getter: Gets the the location type for this bucket.
2394 :rtype: str or ``NoneType``
2395 :returns:
2396 If set, one of
2397 :attr:`~google.cloud.storage.constants.MULTI_REGION_LOCATION_TYPE`,
2398 :attr:`~google.cloud.storage.constants.REGION_LOCATION_TYPE`, or
2399 :attr:`~google.cloud.storage.constants.DUAL_REGION_LOCATION_TYPE`,
2400 else ``None``.
2401 """
2402 return self._properties.get("locationType")
2404 def get_logging(self):
2405 """Return info about access logging for this bucket.
2407 See https://cloud.google.com/storage/docs/access-logs#status
2409 :rtype: dict or None
2410 :returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix``
2411 (if logging is enabled), or None (if not).
2412 """
2413 info = self._properties.get("logging")
2414 return copy.deepcopy(info)
2416 def enable_logging(self, bucket_name, object_prefix=""):
2417 """Enable access logging for this bucket.
2419 See https://cloud.google.com/storage/docs/access-logs
2421 :type bucket_name: str
2422 :param bucket_name: name of bucket in which to store access logs
2424 :type object_prefix: str
2425 :param object_prefix: prefix for access log filenames
2426 """
2427 info = {"logBucket": bucket_name, "logObjectPrefix": object_prefix}
2428 self._patch_property("logging", info)
2430 def disable_logging(self):
2431 """Disable access logging for this bucket.
2433 See https://cloud.google.com/storage/docs/access-logs#disabling
2434 """
2435 self._patch_property("logging", None)
2437 @property
2438 def metageneration(self):
2439 """Retrieve the metageneration for the bucket.
2441 See https://cloud.google.com/storage/docs/json_api/v1/buckets
2443 :rtype: int or ``NoneType``
2444 :returns: The metageneration of the bucket or ``None`` if the bucket's
2445 resource has not been loaded from the server.
2446 """
2447 metageneration = self._properties.get("metageneration")
2448 if metageneration is not None:
2449 return int(metageneration)
2451 @property
2452 def owner(self):
2453 """Retrieve info about the owner of the bucket.
2455 See https://cloud.google.com/storage/docs/json_api/v1/buckets
2457 :rtype: dict or ``NoneType``
2458 :returns: Mapping of owner's role/ID. Returns ``None`` if the bucket's
2459 resource has not been loaded from the server.
2460 """
2461 return copy.deepcopy(self._properties.get("owner"))
2463 @property
2464 def project_number(self):
2465 """Retrieve the number of the project to which the bucket is assigned.
2467 See https://cloud.google.com/storage/docs/json_api/v1/buckets
2469 :rtype: int or ``NoneType``
2470 :returns: The project number that owns the bucket or ``None`` if
2471 the bucket's resource has not been loaded from the server.
2472 """
2473 project_number = self._properties.get("projectNumber")
2474 if project_number is not None:
2475 return int(project_number)
2477 @property
2478 def retention_policy_effective_time(self):
2479 """Retrieve the effective time of the bucket's retention policy.
2481 :rtype: datetime.datetime or ``NoneType``
2482 :returns: point-in time at which the bucket's retention policy is
2483 effective, or ``None`` if the property is not
2484 set locally.
2485 """
2486 policy = self._properties.get("retentionPolicy")
2487 if policy is not None:
2488 timestamp = policy.get("effectiveTime")
2489 if timestamp is not None:
2490 return _rfc3339_nanos_to_datetime(timestamp)
2492 @property
2493 def retention_policy_locked(self):
2494 """Retrieve whthere the bucket's retention policy is locked.
2496 :rtype: bool
2497 :returns: True if the bucket's policy is locked, or else False
2498 if the policy is not locked, or the property is not
2499 set locally.
2500 """
2501 policy = self._properties.get("retentionPolicy")
2502 if policy is not None:
2503 return policy.get("isLocked")
2505 @property
2506 def retention_period(self):
2507 """Retrieve or set the retention period for items in the bucket.
2509 :rtype: int or ``NoneType``
2510 :returns: number of seconds to retain items after upload or release
2511 from event-based lock, or ``None`` if the property is not
2512 set locally.
2513 """
2514 policy = self._properties.get("retentionPolicy")
2515 if policy is not None:
2516 period = policy.get("retentionPeriod")
2517 if period is not None:
2518 return int(period)
    @retention_period.setter
    def retention_period(self, value):
        """Set the retention period for items in the bucket.

        :type value: int
        :param value:
            number of seconds to retain items after upload or release from
            event-based lock.  Pass ``None`` to clear the retention policy.

        :raises ValueError: if the bucket's retention policy is locked.
        """
        # NOTE(review): ``setdefault`` inserts an empty policy dict into
        # ``_properties`` as a side effect before ``_patch_property`` runs;
        # presumably intentional so the stored dict is updated in place —
        # confirm before restructuring.
        policy = self._properties.setdefault("retentionPolicy", {})
        if value is not None:
            # The API represents the period as a string of seconds.
            policy["retentionPeriod"] = str(value)
        else:
            # Clearing the period removes the whole policy resource.
            policy = None
        self._patch_property("retentionPolicy", policy)
2538 @property
2539 def self_link(self):
2540 """Retrieve the URI for the bucket.
2542 See https://cloud.google.com/storage/docs/json_api/v1/buckets
2544 :rtype: str or ``NoneType``
2545 :returns: The self link for the bucket or ``None`` if
2546 the bucket's resource has not been loaded from the server.
2547 """
2548 return self._properties.get("selfLink")
2550 @property
2551 def storage_class(self):
2552 """Retrieve or set the storage class for the bucket.
2554 See https://cloud.google.com/storage/docs/storage-classes
2556 :setter: Set the storage class for this bucket.
2557 :getter: Gets the the storage class for this bucket.
2559 :rtype: str or ``NoneType``
2560 :returns:
2561 If set, one of
2562 :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`,
2563 :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`,
2564 :attr:`~google.cloud.storage.constants.ARCHIVE_STORAGE_CLASS`,
2565 :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`,
2566 :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
2567 :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`,
2568 or
2569 :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`,
2570 else ``None``.
2571 """
2572 return self._properties.get("storageClass")
2574 @storage_class.setter
2575 def storage_class(self, value):
2576 """Set the storage class for the bucket.
2578 See https://cloud.google.com/storage/docs/storage-classes
2580 :type value: str
2581 :param value:
2582 One of
2583 :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`,
2584 :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`,
2585 :attr:`~google.cloud.storage.constants.ARCHIVE_STORAGE_CLASS`,
2586 :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`,
2587 :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
2588 :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`,
2589 or
2590 :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`,
2591 """
2592 self._patch_property("storageClass", value)
2594 @property
2595 def time_created(self):
2596 """Retrieve the timestamp at which the bucket was created.
2598 See https://cloud.google.com/storage/docs/json_api/v1/buckets
2600 :rtype: :class:`datetime.datetime` or ``NoneType``
2601 :returns: Datetime object parsed from RFC3339 valid timestamp, or
2602 ``None`` if the bucket's resource has not been loaded
2603 from the server.
2604 """
2605 value = self._properties.get("timeCreated")
2606 if value is not None:
2607 return _rfc3339_nanos_to_datetime(value)
2609 @property
2610 def versioning_enabled(self):
2611 """Is versioning enabled for this bucket?
2613 See https://cloud.google.com/storage/docs/object-versioning for
2614 details.
2616 :setter: Update whether versioning is enabled for this bucket.
2617 :getter: Query whether versioning is enabled for this bucket.
2619 :rtype: bool
2620 :returns: True if enabled, else False.
2621 """
2622 versioning = self._properties.get("versioning", {})
2623 return versioning.get("enabled", False)
2625 @versioning_enabled.setter
2626 def versioning_enabled(self, value):
2627 """Enable versioning for this bucket.
2629 See https://cloud.google.com/storage/docs/object-versioning for
2630 details.
2632 :type value: convertible to boolean
2633 :param value: should versioning be enabled for the bucket?
2634 """
2635 self._patch_property("versioning", {"enabled": bool(value)})
2637 @property
2638 def requester_pays(self):
2639 """Does the requester pay for API requests for this bucket?
2641 See https://cloud.google.com/storage/docs/requester-pays for
2642 details.
2644 :setter: Update whether requester pays for this bucket.
2645 :getter: Query whether requester pays for this bucket.
2647 :rtype: bool
2648 :returns: True if requester pays for API requests for the bucket,
2649 else False.
2650 """
2651 versioning = self._properties.get("billing", {})
2652 return versioning.get("requesterPays", False)
2654 @requester_pays.setter
2655 def requester_pays(self, value):
2656 """Update whether requester pays for API requests for this bucket.
2658 See https://cloud.google.com/storage/docs/using-requester-pays for
2659 details.
2661 :type value: convertible to boolean
2662 :param value: should requester pay for API requests for the bucket?
2663 """
2664 self._patch_property("billing", {"requesterPays": bool(value)})
2666 @property
2667 def autoclass_enabled(self):
2668 """Whether Autoclass is enabled for this bucket.
2670 See https://cloud.google.com/storage/docs/using-autoclass for details.
2672 :setter: Update whether autoclass is enabled for this bucket.
2673 :getter: Query whether autoclass is enabled for this bucket.
2675 :rtype: bool
2676 :returns: True if enabled, else False.
2677 """
2678 autoclass = self._properties.get("autoclass", {})
2679 return autoclass.get("enabled", False)
2681 @autoclass_enabled.setter
2682 def autoclass_enabled(self, value):
2683 """Enable or disable Autoclass at the bucket-level.
2685 See https://cloud.google.com/storage/docs/using-autoclass for details.
2687 :type value: convertible to boolean
2688 :param value: If true, enable Autoclass for this bucket.
2689 If false, disable Autoclass for this bucket.
2691 .. note::
2692 To enable autoclass, you must set it at bucket creation time.
2693 Currently, only patch requests that disable autoclass are supported.
2695 """
2696 self._patch_property("autoclass", {"enabled": bool(value)})
2698 @property
2699 def autoclass_toggle_time(self):
2700 """Retrieve the toggle time when Autoclaass was last enabled or disabled for the bucket.
2701 :rtype: datetime.datetime or ``NoneType``
2702 :returns: point-in time at which the bucket's autoclass is toggled, or ``None`` if the property is not set locally.
2703 """
2704 autoclass = self._properties.get("autoclass")
2705 if autoclass is not None:
2706 timestamp = autoclass.get("toggleTime")
2707 if timestamp is not None:
2708 return _rfc3339_nanos_to_datetime(timestamp)
2710 def configure_website(self, main_page_suffix=None, not_found_page=None):
2711 """Configure website-related properties.
2713 See https://cloud.google.com/storage/docs/static-website
2715 .. note::
2716 This configures the bucket's website-related properties,controlling how
2717 the service behaves when accessing bucket contents as a web site.
2718 See [tutorials](https://cloud.google.com/storage/docs/hosting-static-website) and
2719 [code samples](https://cloud.google.com/storage/docs/samples/storage-define-bucket-website-configuration#storage_define_bucket_website_configuration-python)
2720 for more information.
2722 :type main_page_suffix: str
2723 :param main_page_suffix: The page to use as the main page
2724 of a directory.
2725 Typically something like index.html.
2727 :type not_found_page: str
2728 :param not_found_page: The file to use when a page isn't found.
2729 """
2730 data = {"mainPageSuffix": main_page_suffix, "notFoundPage": not_found_page}
2731 self._patch_property("website", data)
2733 def disable_website(self):
2734 """Disable the website configuration for this bucket.
2736 This is really just a shortcut for setting the website-related
2737 attributes to ``None``.
2738 """
2739 return self.configure_website(None, None)
2741 def get_iam_policy(
2742 self,
2743 client=None,
2744 requested_policy_version=None,
2745 timeout=_DEFAULT_TIMEOUT,
2746 retry=DEFAULT_RETRY,
2747 ):
2748 """Retrieve the IAM policy for the bucket.
2750 See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy)
2751 and a [code sample](https://cloud.google.com/storage/docs/samples/storage-view-bucket-iam-members#storage_view_bucket_iam_members-python).
2753 If :attr:`user_project` is set, bills the API request to that project.
2755 :type client: :class:`~google.cloud.storage.client.Client` or
2756 ``NoneType``
2757 :param client: (Optional) The client to use. If not passed, falls back
2758 to the ``client`` stored on the current bucket.
2760 :type requested_policy_version: int or ``NoneType``
2761 :param requested_policy_version: (Optional) The version of IAM policies to request.
2762 If a policy with a condition is requested without
2763 setting this, the server will return an error.
2764 This must be set to a value of 3 to retrieve IAM
2765 policies containing conditions. This is to prevent
2766 client code that isn't aware of IAM conditions from
2767 interpreting and modifying policies incorrectly.
2768 The service might return a policy with version lower
2769 than the one that was requested, based on the
2770 feature syntax in the policy fetched.
2772 :type timeout: float or tuple
2773 :param timeout:
2774 (Optional) The amount of time, in seconds, to wait
2775 for the server response. See: :ref:`configuring_timeouts`
2777 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
2778 :param retry:
2779 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
2781 :rtype: :class:`google.api_core.iam.Policy`
2782 :returns: the policy instance, based on the resource returned from
2783 the ``getIamPolicy`` API request.
2784 """
2785 client = self._require_client(client)
2786 query_params = {}
2788 if self.user_project is not None:
2789 query_params["userProject"] = self.user_project
2791 if requested_policy_version is not None:
2792 query_params["optionsRequestedPolicyVersion"] = requested_policy_version
2794 info = client._get_resource(
2795 f"{self.path}/iam",
2796 query_params=query_params,
2797 timeout=timeout,
2798 retry=retry,
2799 _target_object=None,
2800 )
2801 return Policy.from_api_repr(info)
2803 def set_iam_policy(
2804 self,
2805 policy,
2806 client=None,
2807 timeout=_DEFAULT_TIMEOUT,
2808 retry=DEFAULT_RETRY_IF_ETAG_IN_JSON,
2809 ):
2810 """Update the IAM policy for the bucket.
2812 See
2813 https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy
2815 If :attr:`user_project` is set, bills the API request to that project.
2817 :type policy: :class:`google.api_core.iam.Policy`
2818 :param policy: policy instance used to update bucket's IAM policy.
2820 :type client: :class:`~google.cloud.storage.client.Client` or
2821 ``NoneType``
2822 :param client: (Optional) The client to use. If not passed, falls back
2823 to the ``client`` stored on the current bucket.
2825 :type timeout: float or tuple
2826 :param timeout:
2827 (Optional) The amount of time, in seconds, to wait
2828 for the server response. See: :ref:`configuring_timeouts`
2830 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
2831 :param retry:
2832 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
2834 :rtype: :class:`google.api_core.iam.Policy`
2835 :returns: the policy instance, based on the resource returned from
2836 the ``setIamPolicy`` API request.
2837 """
2838 client = self._require_client(client)
2839 query_params = {}
2841 if self.user_project is not None:
2842 query_params["userProject"] = self.user_project
2844 path = f"{self.path}/iam"
2845 resource = policy.to_api_repr()
2846 resource["resourceId"] = self.path
2848 info = client._put_resource(
2849 path,
2850 resource,
2851 query_params=query_params,
2852 timeout=timeout,
2853 retry=retry,
2854 _target_object=None,
2855 )
2857 return Policy.from_api_repr(info)
2859 def test_iam_permissions(
2860 self, permissions, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
2861 ):
2862 """API call: test permissions
2864 See
2865 https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions
2867 If :attr:`user_project` is set, bills the API request to that project.
2869 :type permissions: list of string
2870 :param permissions: the permissions to check
2872 :type client: :class:`~google.cloud.storage.client.Client` or
2873 ``NoneType``
2874 :param client: (Optional) The client to use. If not passed, falls back
2875 to the ``client`` stored on the current bucket.
2877 :type timeout: float or tuple
2878 :param timeout:
2879 (Optional) The amount of time, in seconds, to wait
2880 for the server response. See: :ref:`configuring_timeouts`
2882 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
2883 :param retry:
2884 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
2886 :rtype: list of string
2887 :returns: the permissions returned by the ``testIamPermissions`` API
2888 request.
2889 """
2890 client = self._require_client(client)
2891 query_params = {"permissions": permissions}
2893 if self.user_project is not None:
2894 query_params["userProject"] = self.user_project
2896 path = f"{self.path}/iam/testPermissions"
2897 resp = client._get_resource(
2898 path,
2899 query_params=query_params,
2900 timeout=timeout,
2901 retry=retry,
2902 _target_object=None,
2903 )
2904 return resp.get("permissions", [])
2906 def make_public(
2907 self,
2908 recursive=False,
2909 future=False,
2910 client=None,
2911 timeout=_DEFAULT_TIMEOUT,
2912 if_metageneration_match=None,
2913 if_metageneration_not_match=None,
2914 retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
2915 ):
2916 """Update bucket's ACL, granting read access to anonymous users.
2918 :type recursive: bool
2919 :param recursive: If True, this will make all blobs inside the bucket
2920 public as well.
2922 :type future: bool
2923 :param future: If True, this will make all objects created in the
2924 future public as well.
2926 :type client: :class:`~google.cloud.storage.client.Client` or
2927 ``NoneType``
2928 :param client: (Optional) The client to use. If not passed, falls back
2929 to the ``client`` stored on the current bucket.
2930 :type timeout: float or tuple
2931 :param timeout:
2932 (Optional) The amount of time, in seconds, to wait
2933 for the server response. See: :ref:`configuring_timeouts`
2935 :type if_metageneration_match: long
2936 :param if_metageneration_match: (Optional) Make the operation conditional on whether the
2937 blob's current metageneration matches the given value.
2939 :type if_metageneration_not_match: long
2940 :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
2941 blob's current metageneration does not match the given value.
2943 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
2944 :param retry:
2945 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
2947 :raises ValueError:
2948 If ``recursive`` is True, and the bucket contains more than 256
2949 blobs. This is to prevent extremely long runtime of this
2950 method. For such buckets, iterate over the blobs returned by
2951 :meth:`list_blobs` and call
2952 :meth:`~google.cloud.storage.blob.Blob.make_public`
2953 for each blob.
2954 """
2955 self.acl.all().grant_read()
2956 self.acl.save(
2957 client=client,
2958 timeout=timeout,
2959 if_metageneration_match=if_metageneration_match,
2960 if_metageneration_not_match=if_metageneration_not_match,
2961 retry=retry,
2962 )
2964 if future:
2965 doa = self.default_object_acl
2966 if not doa.loaded:
2967 doa.reload(client=client, timeout=timeout)
2968 doa.all().grant_read()
2969 doa.save(
2970 client=client,
2971 timeout=timeout,
2972 if_metageneration_match=if_metageneration_match,
2973 if_metageneration_not_match=if_metageneration_not_match,
2974 retry=retry,
2975 )
2977 if recursive:
2978 blobs = list(
2979 self.list_blobs(
2980 projection="full",
2981 max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
2982 client=client,
2983 timeout=timeout,
2984 )
2985 )
2986 if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
2987 message = (
2988 "Refusing to make public recursively with more than "
2989 "%d objects. If you actually want to make every object "
2990 "in this bucket public, iterate through the blobs "
2991 "returned by 'Bucket.list_blobs()' and call "
2992 "'make_public' on each one."
2993 ) % (self._MAX_OBJECTS_FOR_ITERATION,)
2994 raise ValueError(message)
2996 for blob in blobs:
2997 blob.acl.all().grant_read()
2998 blob.acl.save(
2999 client=client,
3000 timeout=timeout,
3001 )
3003 def make_private(
3004 self,
3005 recursive=False,
3006 future=False,
3007 client=None,
3008 timeout=_DEFAULT_TIMEOUT,
3009 if_metageneration_match=None,
3010 if_metageneration_not_match=None,
3011 retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
3012 ):
3013 """Update bucket's ACL, revoking read access for anonymous users.
3015 :type recursive: bool
3016 :param recursive: If True, this will make all blobs inside the bucket
3017 private as well.
3019 :type future: bool
3020 :param future: If True, this will make all objects created in the
3021 future private as well.
3023 :type client: :class:`~google.cloud.storage.client.Client` or
3024 ``NoneType``
3025 :param client: (Optional) The client to use. If not passed, falls back
3026 to the ``client`` stored on the current bucket.
3028 :type timeout: float or tuple
3029 :param timeout:
3030 (Optional) The amount of time, in seconds, to wait
3031 for the server response. See: :ref:`configuring_timeouts`
3033 :type if_metageneration_match: long
3034 :param if_metageneration_match: (Optional) Make the operation conditional on whether the
3035 blob's current metageneration matches the given value.
3036 :type if_metageneration_not_match: long
3037 :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
3038 blob's current metageneration does not match the given value.
3039 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
3040 :param retry:
3041 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
3043 :raises ValueError:
3044 If ``recursive`` is True, and the bucket contains more than 256
3045 blobs. This is to prevent extremely long runtime of this
3046 method. For such buckets, iterate over the blobs returned by
3047 :meth:`list_blobs` and call
3048 :meth:`~google.cloud.storage.blob.Blob.make_private`
3049 for each blob.
3050 """
3051 self.acl.all().revoke_read()
3052 self.acl.save(
3053 client=client,
3054 timeout=timeout,
3055 if_metageneration_match=if_metageneration_match,
3056 if_metageneration_not_match=if_metageneration_not_match,
3057 retry=retry,
3058 )
3060 if future:
3061 doa = self.default_object_acl
3062 if not doa.loaded:
3063 doa.reload(client=client, timeout=timeout)
3064 doa.all().revoke_read()
3065 doa.save(
3066 client=client,
3067 timeout=timeout,
3068 if_metageneration_match=if_metageneration_match,
3069 if_metageneration_not_match=if_metageneration_not_match,
3070 retry=retry,
3071 )
3073 if recursive:
3074 blobs = list(
3075 self.list_blobs(
3076 projection="full",
3077 max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
3078 client=client,
3079 timeout=timeout,
3080 )
3081 )
3082 if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
3083 message = (
3084 "Refusing to make private recursively with more than "
3085 "%d objects. If you actually want to make every object "
3086 "in this bucket private, iterate through the blobs "
3087 "returned by 'Bucket.list_blobs()' and call "
3088 "'make_private' on each one."
3089 ) % (self._MAX_OBJECTS_FOR_ITERATION,)
3090 raise ValueError(message)
3092 for blob in blobs:
3093 blob.acl.all().revoke_read()
3094 blob.acl.save(client=client, timeout=timeout)
3096 def generate_upload_policy(self, conditions, expiration=None, client=None):
3097 """Create a signed upload policy for uploading objects.
3099 This method generates and signs a policy document. You can use
3100 [`policy documents`](https://cloud.google.com/storage/docs/xml-api/post-object-forms)
3101 to allow visitors to a website to upload files to
3102 Google Cloud Storage without giving them direct write access.
3103 See a [code sample](https://cloud.google.com/storage/docs/xml-api/post-object-forms#python).
3105 :type expiration: datetime
3106 :param expiration: (Optional) Expiration in UTC. If not specified, the
3107 policy will expire in 1 hour.
3109 :type conditions: list
3110 :param conditions: A list of conditions as described in the
3111 `policy documents` documentation.
3113 :type client: :class:`~google.cloud.storage.client.Client`
3114 :param client: (Optional) The client to use. If not passed, falls back
3115 to the ``client`` stored on the current bucket.
3117 :rtype: dict
3118 :returns: A dictionary of (form field name, form field value) of form
3119 fields that should be added to your HTML upload form in order
3120 to attach the signature.
3121 """
3122 client = self._require_client(client)
3123 credentials = client._credentials
3124 _signing.ensure_signed_credentials(credentials)
3126 if expiration is None:
3127 expiration = _NOW() + datetime.timedelta(hours=1)
3129 conditions = conditions + [{"bucket": self.name}]
3131 policy_document = {
3132 "expiration": _datetime_to_rfc3339(expiration),
3133 "conditions": conditions,
3134 }
3136 encoded_policy_document = base64.b64encode(
3137 json.dumps(policy_document).encode("utf-8")
3138 )
3139 signature = base64.b64encode(credentials.sign_bytes(encoded_policy_document))
3141 fields = {
3142 "bucket": self.name,
3143 "GoogleAccessId": credentials.signer_email,
3144 "policy": encoded_policy_document.decode("utf-8"),
3145 "signature": signature.decode("utf-8"),
3146 }
3148 return fields
3150 def lock_retention_policy(
3151 self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
3152 ):
3153 """Lock the bucket's retention policy.
3155 :type client: :class:`~google.cloud.storage.client.Client` or
3156 ``NoneType``
3157 :param client: (Optional) The client to use. If not passed, falls back
3158 to the ``client`` stored on the blob's bucket.
3160 :type timeout: float or tuple
3161 :param timeout:
3162 (Optional) The amount of time, in seconds, to wait
3163 for the server response. See: :ref:`configuring_timeouts`
3165 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
3166 :param retry:
3167 (Optional) How to retry the RPC. See: :ref:`configuring_retries`
3169 :raises ValueError:
3170 if the bucket has no metageneration (i.e., new or never reloaded);
3171 if the bucket has no retention policy assigned;
3172 if the bucket's retention policy is already locked.
3173 """
3174 if "metageneration" not in self._properties:
3175 raise ValueError("Bucket has no retention policy assigned: try 'reload'?")
3177 policy = self._properties.get("retentionPolicy")
3179 if policy is None:
3180 raise ValueError("Bucket has no retention policy assigned: try 'reload'?")
3182 if policy.get("isLocked"):
3183 raise ValueError("Bucket's retention policy is already locked.")
3185 client = self._require_client(client)
3187 query_params = {"ifMetagenerationMatch": self.metageneration}
3189 if self.user_project is not None:
3190 query_params["userProject"] = self.user_project
3192 path = f"/b/{self.name}/lockRetentionPolicy"
3193 api_response = client._post_resource(
3194 path,
3195 None,
3196 query_params=query_params,
3197 timeout=timeout,
3198 retry=retry,
3199 _target_object=self,
3200 )
3201 self._set_properties(api_response)
3203 def generate_signed_url(
3204 self,
3205 expiration=None,
3206 api_access_endpoint=_API_ACCESS_ENDPOINT,
3207 method="GET",
3208 headers=None,
3209 query_parameters=None,
3210 client=None,
3211 credentials=None,
3212 version=None,
3213 virtual_hosted_style=False,
3214 bucket_bound_hostname=None,
3215 scheme="http",
3216 ):
3217 """Generates a signed URL for this bucket.
3219 .. note::
3221 If you are on Google Compute Engine, you can't generate a signed
3222 URL using GCE service account. If you'd like to be able to generate
3223 a signed URL from GCE, you can use a standard service account from a
3224 JSON file rather than a GCE service account.
3226 If you have a bucket that you want to allow access to for a set
3227 amount of time, you can use this method to generate a URL that
3228 is only valid within a certain time period.
3230 If ``bucket_bound_hostname`` is set as an argument of :attr:`api_access_endpoint`,
3231 ``https`` works only if using a ``CDN``.
3233 :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
3234 :param expiration: Point in time when the signed URL should expire. If
3235 a ``datetime`` instance is passed without an explicit
3236 ``tzinfo`` set, it will be assumed to be ``UTC``.
3238 :type api_access_endpoint: str
3239 :param api_access_endpoint: (Optional) URI base.
3241 :type method: str
3242 :param method: The HTTP verb that will be used when requesting the URL.
3244 :type headers: dict
3245 :param headers:
3246 (Optional) Additional HTTP headers to be included as part of the
3247 signed URLs. See:
3248 https://cloud.google.com/storage/docs/xml-api/reference-headers
3249 Requests using the signed URL *must* pass the specified header
3250 (name and value) with each request for the URL.
3252 :type query_parameters: dict
3253 :param query_parameters:
3254 (Optional) Additional query parameters to be included as part of the
3255 signed URLs. See:
3256 https://cloud.google.com/storage/docs/xml-api/reference-headers#query
3258 :type client: :class:`~google.cloud.storage.client.Client` or
3259 ``NoneType``
3260 :param client: (Optional) The client to use. If not passed, falls back
3261 to the ``client`` stored on the blob's bucket.
3264 :type credentials: :class:`google.auth.credentials.Credentials` or
3265 :class:`NoneType`
3266 :param credentials: The authorization credentials to attach to requests.
3267 These credentials identify this application to the service.
3268 If none are specified, the client will attempt to ascertain
3269 the credentials from the environment.
3271 :type version: str
3272 :param version: (Optional) The version of signed credential to create.
3273 Must be one of 'v2' | 'v4'.
3275 :type virtual_hosted_style: bool
3276 :param virtual_hosted_style:
3277 (Optional) If true, then construct the URL relative the bucket's
3278 virtual hostname, e.g., '<bucket-name>.storage.googleapis.com'.
3280 :type bucket_bound_hostname: str
3281 :param bucket_bound_hostname:
3282 (Optional) If pass, then construct the URL relative to the bucket-bound hostname.
3283 Value cane be a bare or with scheme, e.g., 'example.com' or 'http://example.com'.
3284 See: https://cloud.google.com/storage/docs/request-endpoints#cname
3286 :type scheme: str
3287 :param scheme:
3288 (Optional) If ``bucket_bound_hostname`` is passed as a bare hostname, use
3289 this value as the scheme. ``https`` will work only when using a CDN.
3290 Defaults to ``"http"``.
3292 :raises: :exc:`ValueError` when version is invalid.
3293 :raises: :exc:`TypeError` when expiration is not a valid type.
3294 :raises: :exc:`AttributeError` if credentials is not an instance
3295 of :class:`google.auth.credentials.Signing`.
3297 :rtype: str
3298 :returns: A signed URL you can use to access the resource
3299 until expiration.
3300 """
3301 if version is None:
3302 version = "v2"
3303 elif version not in ("v2", "v4"):
3304 raise ValueError("'version' must be either 'v2' or 'v4'")
3306 # If you are on Google Compute Engine, you can't generate a signed URL
3307 # using GCE service account.
3308 # See https://github.com/googleapis/google-auth-library-python/issues/50
3309 if virtual_hosted_style:
3310 api_access_endpoint = f"https://{self.name}.storage.googleapis.com"
3311 elif bucket_bound_hostname:
3312 api_access_endpoint = _bucket_bound_hostname_url(
3313 bucket_bound_hostname, scheme
3314 )
3315 else:
3316 resource = f"/{self.name}"
3318 if virtual_hosted_style or bucket_bound_hostname:
3319 resource = "/"
3321 if credentials is None:
3322 client = self._require_client(client)
3323 credentials = client._credentials
3325 if version == "v2":
3326 helper = generate_signed_url_v2
3327 else:
3328 helper = generate_signed_url_v4
3330 return helper(
3331 credentials,
3332 resource=resource,
3333 expiration=expiration,
3334 api_access_endpoint=api_access_endpoint,
3335 method=method.upper(),
3336 headers=headers,
3337 query_parameters=query_parameters,
3338 )
3341def _raise_if_len_differs(expected_len, **generation_match_args):
3342 """
3343 Raise an error if any generation match argument
3344 is set and its len differs from the given value.
3346 :type expected_len: int
3347 :param expected_len: Expected argument length in case it's set.
3349 :type generation_match_args: dict
3350 :param generation_match_args: Lists, which length must be checked.
3352 :raises: :exc:`ValueError` if any argument set, but has an unexpected length.
3353 """
3354 for name, value in generation_match_args.items():
3355 if value is not None and len(value) != expected_len:
3356 raise ValueError(f"'{name}' length must be the same as 'blobs' length")