Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/google/cloud/storage/bucket.py: 35%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

853 statements  

1# Copyright 2014 Google LLC 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"); 

4# you may not use this file except in compliance with the License. 

5# You may obtain a copy of the License at 

6# 

7# http://www.apache.org/licenses/LICENSE-2.0 

8# 

9# Unless required by applicable law or agreed to in writing, software 

10# distributed under the License is distributed on an "AS IS" BASIS, 

11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

12# See the License for the specific language governing permissions and 

13# limitations under the License. 

14 

15"""Create / interact with Google Cloud Storage buckets.""" 

16 

17import base64 

18import copy 

19import datetime 

20import json 

21from urllib.parse import urlsplit 

22import warnings 

23 

24from google.api_core import datetime_helpers 

25from google.cloud._helpers import _datetime_to_rfc3339 

26from google.cloud._helpers import _rfc3339_nanos_to_datetime 

27from google.cloud.exceptions import NotFound 

28from google.api_core.iam import Policy 

29from google.cloud.storage import _signing 

30from google.cloud.storage._helpers import _add_etag_match_headers 

31from google.cloud.storage._helpers import _add_generation_match_parameters 

32from google.cloud.storage._helpers import _NOW 

33from google.cloud.storage._helpers import _PropertyMixin 

34from google.cloud.storage._helpers import _UTC 

35from google.cloud.storage._helpers import _scalar_property 

36from google.cloud.storage._helpers import _validate_name 

37from google.cloud.storage._signing import generate_signed_url_v2 

38from google.cloud.storage._signing import generate_signed_url_v4 

39from google.cloud.storage._helpers import _bucket_bound_hostname_url 

40from google.cloud.storage._helpers import _virtual_hosted_style_base_url 

41from google.cloud.storage._opentelemetry_tracing import create_trace_span 

42from google.cloud.storage.acl import BucketACL 

43from google.cloud.storage.acl import DefaultObjectACL 

44from google.cloud.storage.blob import _quote 

45from google.cloud.storage.blob import Blob 

46from google.cloud.storage.constants import _DEFAULT_TIMEOUT 

47from google.cloud.storage.constants import ARCHIVE_STORAGE_CLASS 

48from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS 

49from google.cloud.storage.constants import DUAL_REGION_LOCATION_TYPE 

50from google.cloud.storage.constants import ( 

51 DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS, 

52) 

53from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS 

54from google.cloud.storage.constants import MULTI_REGION_LOCATION_TYPE 

55from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS 

56from google.cloud.storage.constants import PUBLIC_ACCESS_PREVENTION_INHERITED 

57from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS 

58from google.cloud.storage.constants import REGION_LOCATION_TYPE 

59from google.cloud.storage.constants import STANDARD_STORAGE_CLASS 

60from google.cloud.storage.ip_filter import IPFilter 

61from google.cloud.storage.notification import BucketNotification 

62from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT 

63from google.cloud.storage.retry import DEFAULT_RETRY 

64from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED 

65from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON 

66from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED 

67 

68 

# Deprecation / validation messages used by IAMConfiguration and Bucket below.

# Raised when both the new UBLA flag and its deprecated BPO alias are passed.
_UBLA_BPO_ENABLED_MESSAGE = (
    "Pass only one of 'uniform_bucket_level_access_enabled' / "
    "'bucket_policy_only_enabled' to 'IAMConfiguration'."
)
# Warned when the deprecated 'bucket_policy_only_enabled' alias is used.
_BPO_ENABLED_MESSAGE = (
    "'IAMConfiguration.bucket_policy_only_enabled' is deprecated. "
    "Instead, use 'IAMConfiguration.uniform_bucket_level_access_enabled'."
)
# Raised when both the new UBLA lock time and its deprecated alias are passed.
_UBLA_BPO_LOCK_TIME_MESSAGE = (
    "Pass only one of 'uniform_bucket_level_access_lock_time' / "
    "'bucket_policy_only_lock_time' to 'IAMConfiguration'."
)
# Warned when the deprecated 'bucket_policy_only_lock_time' alias is used.
_BPO_LOCK_TIME_MESSAGE = (
    "'IAMConfiguration.bucket_policy_only_lock_time' is deprecated. "
    "Instead, use 'IAMConfiguration.uniform_bucket_level_access_lock_time'."
)
# Warned when assigning to Bucket.location after construction.
_LOCATION_SETTER_MESSAGE = (
    "Assignment to 'Bucket.location' is deprecated, as it is only "
    "valid before the bucket is created. Instead, pass the location "
    "to `Bucket.create`."
)
# Warned by the deprecated Bucket.from_string() alias.
_FROM_STRING_MESSAGE = (
    "Bucket.from_string() is deprecated. " "Use Bucket.from_uri() instead."
)
# JSON resource key for the bucket's IP filter configuration.
_IP_FILTER_PROPERTY = "ipFilter"

94 

95 

96def _blobs_page_start(iterator, page, response): 

97 """Grab prefixes after a :class:`~google.cloud.iterator.Page` started. 

98 

99 :type iterator: :class:`~google.api_core.page_iterator.Iterator` 

100 :param iterator: The iterator that is currently in use. 

101 

102 :type page: :class:`~google.cloud.api.core.page_iterator.Page` 

103 :param page: The page that was just created. 

104 

105 :type response: dict 

106 :param response: The JSON API response for a page of blobs. 

107 """ 

108 page.prefixes = tuple(response.get("prefixes", ())) 

109 iterator.prefixes.update(page.prefixes) 

110 

111 

def _item_to_blob(iterator, item):
    """Convert a JSON blob resource into a native :class:`.Blob`.

    .. note::

       This assumes that the ``bucket`` attribute has been
       added to the iterator after being created.

    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.

    :type item: dict
    :param item: An item to be converted to a blob.

    :rtype: :class:`.Blob`
    :returns: The next blob in the page.
    """
    blob = Blob(item.get("name"), bucket=iterator.bucket)
    # Hydrate the blob from the already-fetched resource instead of
    # issuing another API call.
    blob._set_properties(item)
    return blob

133 

134 

def _item_to_notification(iterator, item):
    """Convert a JSON notification resource to the native object.

    .. note::

       This assumes that the ``bucket`` attribute has been
       added to the iterator after being created.

    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.

    :type item: dict
    :param item: An item to be converted to a notification.

    :rtype: :class:`.BucketNotification`
    :returns: The next notification being iterated.
    """
    return BucketNotification.from_api_repr(item, bucket=iterator.bucket)

153 

154 

class LifecycleRuleConditions(dict):
    """Map a single lifecycle rule's conditions for a bucket.

    See: https://cloud.google.com/storage/docs/lifecycle

    :type age: int
    :param age: (Optional) Apply rule action to items whose age, in days,
                exceeds this value.

    :type created_before: datetime.date
    :param created_before: (Optional) Apply rule action to items created
                           before this date.

    :type is_live: bool
    :param is_live: (Optional) If true, apply rule action to non-versioned
                    items, or to items with no newer versions. If false, apply
                    rule action to versioned items with at least one newer
                    version.

    :type matches_prefix: list(str)
    :param matches_prefix: (Optional) Apply rule action to items which
                           any prefix matches the beginning of the item name.

    :type matches_storage_class: list(str), one or more of
                                 :attr:`Bucket.STORAGE_CLASSES`.
    :param matches_storage_class: (Optional) Apply rule action to items
                                  whose storage class matches this value.

    :type matches_suffix: list(str)
    :param matches_suffix: (Optional) Apply rule action to items which
                           any suffix matches the end of the item name.

    :type number_of_newer_versions: int
    :param number_of_newer_versions: (Optional) Apply rule action to versioned
                                     items having N newer versions.

    :type days_since_custom_time: int
    :param days_since_custom_time: (Optional) Apply rule action to items whose number of days
                                   elapsed since the custom timestamp. This condition is relevant
                                   only for versioned objects. The value of the field must be a non
                                   negative integer. If it's zero, the object version will become
                                   eligible for lifecycle action as soon as it becomes custom.

    :type custom_time_before: :class:`datetime.date`
    :param custom_time_before: (Optional) Date object parsed from RFC3339 valid date, apply rule action
                               to items whose custom time is before this date. This condition is relevant
                               only for versioned objects, e.g., 2019-03-16.

    :type days_since_noncurrent_time: int
    :param days_since_noncurrent_time: (Optional) Apply rule action to items whose number of days
                                       elapsed since the non current timestamp. This condition
                                       is relevant only for versioned objects. The value of the field
                                       must be a non negative integer. If it's zero, the object version
                                       will become eligible for lifecycle action as soon as it becomes
                                       non current.

    :type noncurrent_time_before: :class:`datetime.date`
    :param noncurrent_time_before: (Optional) Date object parsed from RFC3339 valid date, apply
                                   rule action to items whose non current time is before this date.
                                   This condition is relevant only for versioned objects, e.g, 2019-03-16.

    :raises ValueError: if no arguments are passed.
    """

    def __init__(
        self,
        age=None,
        created_before=None,
        is_live=None,
        matches_storage_class=None,
        number_of_newer_versions=None,
        days_since_custom_time=None,
        custom_time_before=None,
        days_since_noncurrent_time=None,
        noncurrent_time_before=None,
        matches_prefix=None,
        matches_suffix=None,
        _factory=False,
    ):
        def _iso(value):
            # Date-typed arguments are serialized to "YYYY-MM-DD".
            return None if value is None else value.isoformat()

        # Candidate API fields, keyed by their JSON resource names; ``None``
        # entries (arguments that were not supplied) are filtered out below.
        candidates = {
            "age": age,
            "createdBefore": _iso(created_before),
            "isLive": is_live,
            "matchesStorageClass": matches_storage_class,
            "numNewerVersions": number_of_newer_versions,
            "daysSinceCustomTime": days_since_custom_time,
            "customTimeBefore": _iso(custom_time_before),
            "daysSinceNoncurrentTime": days_since_noncurrent_time,
            "noncurrentTimeBefore": _iso(noncurrent_time_before),
            "matchesPrefix": matches_prefix,
            "matchesSuffix": matches_suffix,
        }
        conditions = {
            key: value for key, value in candidates.items() if value is not None
        }

        if not _factory and not conditions:
            raise ValueError("Supply at least one condition")

        super(LifecycleRuleConditions, self).__init__(conditions)

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleConditions`
        :returns: Instance created from resource.
        """
        instance = cls(_factory=True)
        instance.update(resource)
        return instance

    def _date_field(self, key):
        """Return the named field parsed as a date, or ``None`` if unset."""
        raw = self.get(key)
        if raw is None:
            return None
        return datetime_helpers.from_iso8601_date(raw)

    @property
    def age(self):
        """Condition's age value."""
        return self.get("age")

    @property
    def created_before(self):
        """Condition's created_before value."""
        return self._date_field("createdBefore")

    @property
    def is_live(self):
        """Condition's 'is_live' value."""
        return self.get("isLive")

    @property
    def matches_prefix(self):
        """Condition's 'matches_prefix' value."""
        return self.get("matchesPrefix")

    @property
    def matches_storage_class(self):
        """Condition's 'matches_storage_class' value."""
        return self.get("matchesStorageClass")

    @property
    def matches_suffix(self):
        """Condition's 'matches_suffix' value."""
        return self.get("matchesSuffix")

    @property
    def number_of_newer_versions(self):
        """Condition's 'number_of_newer_versions' value."""
        return self.get("numNewerVersions")

    @property
    def days_since_custom_time(self):
        """Condition's 'days_since_custom_time' value."""
        return self.get("daysSinceCustomTime")

    @property
    def custom_time_before(self):
        """Condition's 'custom_time_before' value."""
        return self._date_field("customTimeBefore")

    @property
    def days_since_noncurrent_time(self):
        """Condition's 'days_since_noncurrent_time' value."""
        return self.get("daysSinceNoncurrentTime")

    @property
    def noncurrent_time_before(self):
        """Condition's 'noncurrent_time_before' value."""
        return self._date_field("noncurrentTimeBefore")

348 

349 

class LifecycleRuleDelete(dict):
    """Lifecycle rule that deletes items matching the given conditions.

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """

    def __init__(self, **kw):
        # Validate conditions eagerly via LifecycleRuleConditions, then store
        # the plain-dict form the API expects.
        super().__init__(
            {
                "action": {"type": "Delete"},
                "condition": dict(LifecycleRuleConditions(**kw)),
            }
        )

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleDelete`
        :returns: Instance created from resource.
        """
        instance = cls(_factory=True)
        instance.update(resource)
        return instance

375 

376 

class LifecycleRuleSetStorageClass(dict):
    """Lifecycle rule that updates the storage class of matching items.

    :type storage_class: str, one of :attr:`Bucket.STORAGE_CLASSES`.
    :param storage_class: new storage class to assign to matching items.

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """

    def __init__(self, storage_class, **kw):
        action = {"type": "SetStorageClass", "storageClass": storage_class}
        super().__init__(
            {
                "action": action,
                "condition": dict(LifecycleRuleConditions(**kw)),
            }
        )

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleSetStorageClass`
        :returns: Instance created from resource.
        """
        # The target storage class is nested inside the action mapping.
        instance = cls(resource["action"]["storageClass"], _factory=True)
        instance.update(resource)
        return instance

412 

413 

class LifecycleRuleAbortIncompleteMultipartUpload(dict):
    """Lifecycle rule that aborts incomplete multipart uploads of matching items.

    The "age" lifecycle condition is the only supported condition for this rule.

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """

    def __init__(self, **kw):
        super().__init__(
            {
                "action": {"type": "AbortIncompleteMultipartUpload"},
                "condition": dict(LifecycleRuleConditions(**kw)),
            }
        )

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`LifecycleRuleAbortIncompleteMultipartUpload`
        :returns: Instance created from resource.
        """
        instance = cls(_factory=True)
        instance.update(resource)
        return instance

444 

445 

# Sentinel distinguishing "argument not passed" from an explicit ``None``.
_default = object()

447 

448 

class IAMConfiguration(dict):
    """Map a bucket's IAM configuration.

    :type bucket: :class:`Bucket`
    :params bucket: Bucket for which this instance is the policy.

    :type public_access_prevention: str
    :params public_access_prevention:
        (Optional) Whether the public access prevention policy is 'inherited' (default) or 'enforced'
        See: https://cloud.google.com/storage/docs/public-access-prevention

    :type uniform_bucket_level_access_enabled: bool
    :params uniform_bucket_level_access_enabled:
        (Optional) Whether the IAM-only policy is enabled for the bucket.

    :type uniform_bucket_level_access_locked_time: :class:`datetime.datetime`
    :params uniform_bucket_level_access_locked_time:
        (Optional) When the bucket's IAM-only policy was enabled.
        This value should normally only be set by the back-end API.

    :type bucket_policy_only_enabled: bool
    :params bucket_policy_only_enabled:
        Deprecated alias for :data:`uniform_bucket_level_access_enabled`.

    :type bucket_policy_only_locked_time: :class:`datetime.datetime`
    :params bucket_policy_only_locked_time:
        Deprecated alias for :data:`uniform_bucket_level_access_locked_time`.
    """

    def __init__(
        self,
        bucket,
        public_access_prevention=_default,
        uniform_bucket_level_access_enabled=_default,
        uniform_bucket_level_access_locked_time=_default,
        bucket_policy_only_enabled=_default,
        bucket_policy_only_locked_time=_default,
    ):
        # Fold the deprecated 'bucket_policy_only_*' aliases into the new
        # UBLA arguments, rejecting calls that pass both spellings.
        if bucket_policy_only_enabled is not _default:
            if uniform_bucket_level_access_enabled is not _default:
                raise ValueError(_UBLA_BPO_ENABLED_MESSAGE)
            warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2)
            uniform_bucket_level_access_enabled = bucket_policy_only_enabled

        if bucket_policy_only_locked_time is not _default:
            if uniform_bucket_level_access_locked_time is not _default:
                raise ValueError(_UBLA_BPO_LOCK_TIME_MESSAGE)
            warnings.warn(_BPO_LOCK_TIME_MESSAGE, DeprecationWarning, stacklevel=2)
            uniform_bucket_level_access_locked_time = bucket_policy_only_locked_time

        # Apply defaults for anything still unset.
        if uniform_bucket_level_access_enabled is _default:
            uniform_bucket_level_access_enabled = False
        if public_access_prevention is _default:
            public_access_prevention = PUBLIC_ACCESS_PREVENTION_INHERITED

        ubla = {"enabled": uniform_bucket_level_access_enabled}
        if uniform_bucket_level_access_locked_time is not _default:
            ubla["lockedTime"] = _datetime_to_rfc3339(
                uniform_bucket_level_access_locked_time
            )

        super(IAMConfiguration, self).__init__(
            {
                "uniformBucketLevelAccess": ubla,
                "publicAccessPrevention": public_access_prevention,
            }
        )
        self._bucket = bucket

    @classmethod
    def from_api_repr(cls, resource, bucket):
        """Factory: construct instance from resource.

        :type bucket: :class:`Bucket`
        :params bucket: Bucket for which this instance is the policy.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :rtype: :class:`IAMConfiguration`
        :returns: Instance created from resource.
        """
        instance = cls(bucket)
        instance.update(resource)
        return instance

    @property
    def bucket(self):
        """Bucket for which this instance is the policy.

        :rtype: :class:`Bucket`
        :returns: the instance's bucket.
        """
        return self._bucket

    @property
    def public_access_prevention(self):
        """Setting for public access prevention policy. Options are 'inherited' (default) or 'enforced'.

        See: https://cloud.google.com/storage/docs/public-access-prevention

        :rtype: string
        :returns: the public access prevention status, either 'enforced' or 'inherited'.
        """
        return self["publicAccessPrevention"]

    @public_access_prevention.setter
    def public_access_prevention(self, value):
        self["publicAccessPrevention"] = value
        # Mark the whole IAM configuration as dirty on the owning bucket.
        self.bucket._patch_property("iamConfiguration", self)

    @property
    def uniform_bucket_level_access_enabled(self):
        """If set, access checks only use bucket-level IAM policies or above.

        :rtype: bool
        :returns: whether the bucket is configured to allow only IAM.
        """
        return self.get("uniformBucketLevelAccess", {}).get("enabled", False)

    @uniform_bucket_level_access_enabled.setter
    def uniform_bucket_level_access_enabled(self, value):
        self.setdefault("uniformBucketLevelAccess", {})["enabled"] = bool(value)
        self.bucket._patch_property("iamConfiguration", self)

    @property
    def uniform_bucket_level_access_locked_time(self):
        """Deadline for changing :attr:`uniform_bucket_level_access_enabled` from true to false.

        If the bucket's :attr:`uniform_bucket_level_access_enabled` is true, this property
        is the time after which that setting becomes immutable.

        If the bucket's :attr:`uniform_bucket_level_access_enabled` is false, this property
        is ``None``.

        :rtype: Union[:class:`datetime.datetime`, None]
        :returns: (readonly) Time after which :attr:`uniform_bucket_level_access_enabled` will
                  be frozen as true.
        """
        stamp = self.get("uniformBucketLevelAccess", {}).get("lockedTime")
        if stamp is None:
            return None
        return _rfc3339_nanos_to_datetime(stamp)

    @property
    def bucket_policy_only_enabled(self):
        """Deprecated alias for :attr:`uniform_bucket_level_access_enabled`.

        :rtype: bool
        :returns: whether the bucket is configured to allow only IAM.
        """
        return self.uniform_bucket_level_access_enabled

    @bucket_policy_only_enabled.setter
    def bucket_policy_only_enabled(self, value):
        warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2)
        self.uniform_bucket_level_access_enabled = value

    @property
    def bucket_policy_only_locked_time(self):
        """Deprecated alias for :attr:`uniform_bucket_level_access_locked_time`.

        :rtype: Union[:class:`datetime.datetime`, None]
        :returns:
            (readonly) Time after which :attr:`bucket_policy_only_enabled` will
            be frozen as true.
        """
        return self.uniform_bucket_level_access_locked_time

622 

623 

624class Bucket(_PropertyMixin): 

625 """A class representing a Bucket on Cloud Storage. 

626 

627 :type client: :class:`google.cloud.storage.client.Client` 

628 :param client: A client which holds credentials and project configuration 

629 for the bucket (which requires a project). 

630 

631 :type name: str 

632 :param name: The name of the bucket. Bucket names must start and end with a 

633 number or letter. 

634 

635 :type user_project: str 

636 :param user_project: (Optional) the project ID to be billed for API 

637 requests made via this instance. 

638 

639 :type generation: int 

640 :param generation: (Optional) If present, selects a specific revision of 

641 this bucket. 

642 """ 

643 

    _MAX_OBJECTS_FOR_ITERATION = 256
    """Maximum number of existing objects allowed in iteration.

    This is used in Bucket.delete() and Bucket.make_public().
    """

    # NOTE(review): the entries marked "legacy" appear to be retained for
    # backward compatibility with older storage classes — confirm against
    # the storage-classes documentation linked below.
    STORAGE_CLASSES = (
        STANDARD_STORAGE_CLASS,
        NEARLINE_STORAGE_CLASS,
        COLDLINE_STORAGE_CLASS,
        ARCHIVE_STORAGE_CLASS,
        MULTI_REGIONAL_LEGACY_STORAGE_CLASS,  # legacy
        REGIONAL_LEGACY_STORAGE_CLASS,  # legacy
        DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS,  # legacy
    )
    """Allowed values for :attr:`storage_class`.

    Default value is :attr:`STANDARD_STORAGE_CLASS`.

    See
    https://cloud.google.com/storage/docs/json_api/v1/buckets#storageClass
    https://cloud.google.com/storage/docs/storage-classes
    """

    _LOCATION_TYPES = (
        MULTI_REGION_LOCATION_TYPE,
        REGION_LOCATION_TYPE,
        DUAL_REGION_LOCATION_TYPE,
    )
    """Allowed values for :attr:`location_type`."""

674 

675 def __init__(self, client, name=None, user_project=None, generation=None): 

676 """ 

677 property :attr:`name` 

678 Get the bucket's name. 

679 """ 

680 name = _validate_name(name) 

681 super(Bucket, self).__init__(name=name) 

682 self._client = client 

683 self._acl = BucketACL(self) 

684 self._default_object_acl = DefaultObjectACL(self) 

685 self._label_removals = set() 

686 self._user_project = user_project 

687 

688 if generation is not None: 

689 self._properties["generation"] = generation 

690 

691 def __repr__(self): 

692 return f"<Bucket: {self.name}>" 

693 

    @property
    def client(self):
        """The client bound to this bucket.

        :rtype: :class:`google.cloud.storage.client.Client`
        :returns: the client the bucket was instantiated with.
        """
        return self._client

698 

    def _set_properties(self, value):
        """Set the properties for the current object.

        Pending label removals are cleared first, since the incoming value
        replaces the locally tracked state.

        :type value: dict or :class:`google.cloud.storage.batch._FutureDict`
        :param value: The properties to be set.
        """
        self._label_removals.clear()
        return super(Bucket, self)._set_properties(value)

707 

    @property
    def rpo(self):
        """Get the RPO (Recovery Point Objective) of this bucket.

        See: https://cloud.google.com/storage/docs/managing-turbo-replication

        :rtype: str
        :returns: "ASYNC_TURBO" or "DEFAULT", or ``None`` if the property
                  is not present in the loaded resource.
        """
        return self._properties.get("rpo")

    @rpo.setter
    def rpo(self, value):
        """
        Set the RPO (Recovery Point Objective) of this bucket.

        See: https://cloud.google.com/storage/docs/managing-turbo-replication

        :type value: str
        :param value: "ASYNC_TURBO" or "DEFAULT"
        """
        self._patch_property("rpo", value)

730 

    @property
    def user_project(self):
        """Project ID to be billed for API requests made via this bucket.

        If unset, API requests are billed to the bucket owner.

        A user project is required for all operations on Requester Pays buckets.

        See https://cloud.google.com/storage/docs/requester-pays#requirements for details.

        :rtype: str or ``NoneType``
        :returns: the ``user_project`` value passed at construction, if any.
        """
        return self._user_project

744 

745 @property 

746 def generation(self): 

747 """Retrieve the generation for the bucket. 

748 

749 :rtype: int or ``NoneType`` 

750 :returns: The generation of the bucket or ``None`` if the bucket's 

751 resource has not been loaded from the server. 

752 """ 

753 generation = self._properties.get("generation") 

754 if generation is not None: 

755 return int(generation) 

756 

757 @property 

758 def soft_delete_time(self): 

759 """If this bucket has been soft-deleted, returns the time at which it became soft-deleted. 

760 

761 :rtype: :class:`datetime.datetime` or ``NoneType`` 

762 :returns: 

763 (readonly) The time that the bucket became soft-deleted. 

764 Note this property is only set for soft-deleted buckets. 

765 """ 

766 soft_delete_time = self._properties.get("softDeleteTime") 

767 if soft_delete_time is not None: 

768 return _rfc3339_nanos_to_datetime(soft_delete_time) 

769 

770 @property 

771 def hard_delete_time(self): 

772 """If this bucket has been soft-deleted, returns the time at which it will be permanently deleted. 

773 

774 :rtype: :class:`datetime.datetime` or ``NoneType`` 

775 :returns: 

776 (readonly) The time that the bucket will be permanently deleted. 

777 Note this property is only set for soft-deleted buckets. 

778 """ 

779 hard_delete_time = self._properties.get("hardDeleteTime") 

780 if hard_delete_time is not None: 

781 return _rfc3339_nanos_to_datetime(hard_delete_time) 

782 

    @property
    def _query_params(self):
        """Default query parameters.

        Currently delegates to the base class implementation unchanged.

        :rtype: dict
        """
        params = super()._query_params
        return params

788 

789 @classmethod 

790 def from_uri(cls, uri, client=None): 

791 """Get a constructor for bucket object by URI. 

792 

793 .. code-block:: python 

794 

795 from google.cloud import storage 

796 from google.cloud.storage.bucket import Bucket 

797 client = storage.Client() 

798 bucket = Bucket.from_uri("gs://bucket", client=client) 

799 

800 :type uri: str 

801 :param uri: The bucket uri pass to get bucket object. 

802 

803 :type client: :class:`~google.cloud.storage.client.Client` or 

804 ``NoneType`` 

805 :param client: (Optional) The client to use. Application code should 

806 *always* pass ``client``. 

807 

808 :rtype: :class:`google.cloud.storage.bucket.Bucket` 

809 :returns: The bucket object created. 

810 """ 

811 scheme, netloc, path, query, frag = urlsplit(uri) 

812 

813 if scheme != "gs": 

814 raise ValueError("URI scheme must be gs") 

815 

816 return cls(client, name=netloc) 

817 

    @classmethod
    def from_string(cls, uri, client=None):
        """Get a constructor for bucket object by URI.

        .. note::
            Deprecated alias for :meth:`from_uri`.  Emits a
            :class:`PendingDeprecationWarning` and delegates unchanged.

        .. code-block:: python

            from google.cloud import storage
            from google.cloud.storage.bucket import Bucket
            client = storage.Client()
            bucket = Bucket.from_string("gs://bucket", client=client)

        :type uri: str
        :param uri: The bucket uri pass to get bucket object.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: (Optional) The client to use. Application code should
                       *always* pass ``client``.

        :rtype: :class:`google.cloud.storage.bucket.Bucket`
        :returns: The bucket object created.
        """
        warnings.warn(_FROM_STRING_MESSAGE, PendingDeprecationWarning, stacklevel=2)
        return Bucket.from_uri(uri=uri, client=client)

845 

    def blob(
        self,
        blob_name,
        chunk_size=None,
        encryption_key=None,
        kms_key_name=None,
        generation=None,
    ):
        """Factory constructor for blob object.

        .. note::
            This will not make an HTTP request; it simply instantiates
            a blob object owned by this bucket.

        :type blob_name: str
        :param blob_name: The name of the blob to be instantiated.

        :type chunk_size: int
        :param chunk_size: The size of a chunk of data whenever iterating
                           (in bytes). This must be a multiple of 256 KB per
                           the API specification.

        :type encryption_key: bytes
        :param encryption_key:
            (Optional) 32 byte encryption key for customer-supplied encryption.

        :type kms_key_name: str
        :param kms_key_name:
            (Optional) Resource name of KMS key used to encrypt blob's content.

        :type generation: long
        :param generation: (Optional) If present, selects a specific revision of
                           this object.

        :rtype: :class:`google.cloud.storage.blob.Blob`
        :returns: The blob object created.
        """
        return Blob(
            name=blob_name,
            bucket=self,
            chunk_size=chunk_size,
            encryption_key=encryption_key,
            kms_key_name=kms_key_name,
            generation=generation,
        )

899 

def notification(
    self,
    topic_name=None,
    topic_project=None,
    custom_attributes=None,
    event_types=None,
    blob_name_prefix=None,
    payload_format=NONE_PAYLOAD_FORMAT,
    notification_id=None,
):
    """Factory: create a notification resource for the bucket.

    No HTTP request is made; this only builds the local resource object.
    See: :class:`.BucketNotification` for the meaning of each parameter.

    :rtype: :class:`.BucketNotification`
    """
    # Gather the keyword arguments once, then hand them off wholesale.
    config = {
        "topic_name": topic_name,
        "topic_project": topic_project,
        "custom_attributes": custom_attributes,
        "event_types": event_types,
        "blob_name_prefix": blob_name_prefix,
        "payload_format": payload_format,
        "notification_id": notification_id,
    }
    return BucketNotification(self, **config)

926 

def exists(
    self,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    if_etag_match=None,
    if_etag_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY,
):
    """Determine whether this bucket exists in Cloud Storage.

    If :attr:`user_project` is set, bills the API request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type if_etag_match: Union[str, Set[str]]
    :param if_etag_match: (Optional) Make the operation conditional on the
                          bucket's current ETag matching the given value.

    :type if_etag_not_match: Union[str, Set[str]]
    :param if_etag_not_match: (Optional) Make the operation conditional on the
                              bucket's current ETag not matching the given value.

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional on
                                    the current metageneration matching the value.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional
                                        on the current metageneration not matching.

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :rtype: bool
    :returns: True if the bucket exists in Cloud Storage.
    """
    with create_trace_span(name="Storage.Bucket.exists"):
        client = self._require_client(client)

        # Ask only for the name: we care about the status code, so keep
        # the returned payload as small as possible.
        query_params = {"fields": "name"}
        if self.user_project is not None:
            query_params["userProject"] = self.user_project

        _add_generation_match_parameters(
            query_params,
            if_metageneration_match=if_metageneration_match,
            if_metageneration_not_match=if_metageneration_not_match,
        )

        headers = {}
        _add_etag_match_headers(
            headers,
            if_etag_match=if_etag_match,
            if_etag_not_match=if_etag_not_match,
        )

        try:
            # `_target_object=None` on purpose: with fields=name the
            # response would truncate the locally cached properties.
            client._get_resource(
                self.path,
                query_params=query_params,
                headers=headers,
                timeout=timeout,
                retry=retry,
                _target_object=None,
            )
        except NotFound:
            # NOTE: Inside a batch this does not fail immediately; the
            # buffered `NotFound` surfaces when Batch.finish() is called.
            return False
        else:
            return True

1013 

def create(
    self,
    client=None,
    project=None,
    location=None,
    predefined_acl=None,
    predefined_default_object_acl=None,
    enable_object_retention=False,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
):
    """Create the current bucket via "storage.buckets.insert".

    Raises :class:`google.cloud.exceptions.Conflict` if the bucket
    already exists. If :attr:`user_project` is set, bills the API
    request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type project: str
    :param project: (Optional) The project under which the bucket is to be
                    created. If not passed, uses the project set on the client.
    :raises ValueError: if ``project`` is None and client's
                        :attr:`project` is also None.

    :type location: str
    :param location: (Optional) The location of the bucket. If not passed,
                     the default location, US, will be used. See
                     https://cloud.google.com/storage/docs/bucket-locations

    :type predefined_acl: str
    :param predefined_acl: (Optional) Name of predefined ACL to apply to bucket. See:
        https://cloud.google.com/storage/docs/access-control/lists#predefined-acl

    :type predefined_default_object_acl: str
    :param predefined_default_object_acl:
        (Optional) Name of predefined ACL to apply to bucket's objects. See:
        https://cloud.google.com/storage/docs/access-control/lists#predefined-acl

    :type enable_object_retention: bool
    :param enable_object_retention:
        (Optional) Whether object retention should be enabled on this bucket. See:
        https://cloud.google.com/storage/docs/object-lock

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`
    """
    with create_trace_span(name="Storage.Bucket.create"):
        # Delegate entirely to the client-level helper, which owns the
        # project-resolution and request-building logic.
        storage_client = self._require_client(client)
        storage_client.create_bucket(
            bucket_or_name=self,
            project=project,
            user_project=self.user_project,
            location=location,
            predefined_acl=predefined_acl,
            predefined_default_object_acl=predefined_default_object_acl,
            enable_object_retention=enable_object_retention,
            timeout=timeout,
            retry=retry,
        )

1088 

def update(
    self,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
):
    """Send all properties in a PUT request.

    Updates the ``_properties`` with the response from the backend.
    If :attr:`user_project` is set, bills the API request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: the client to use. If not passed, falls back to the
                   ``client`` stored on the current object.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional
                                    on the current metageneration matching.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional
                                        on the current metageneration not matching.

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`
    """
    with create_trace_span(name="Storage.Bucket.update"):
        # The actual PUT is implemented by the property-mixin superclass;
        # this override only wraps it in a trace span.
        forwarded = dict(
            client=client,
            timeout=timeout,
            if_metageneration_match=if_metageneration_match,
            if_metageneration_not_match=if_metageneration_not_match,
            retry=retry,
        )
        super(Bucket, self).update(**forwarded)

1133 

def reload(
    self,
    client=None,
    projection="noAcl",
    timeout=_DEFAULT_TIMEOUT,
    if_etag_match=None,
    if_etag_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY,
    soft_deleted=None,
):
    """Reload properties from Cloud Storage.

    If :attr:`user_project` is set, bills the API request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: the client to use. If not passed, falls back to the
                   ``client`` stored on the current object.

    :type projection: str
    :param projection: (Optional) If used, must be 'full' or 'noAcl'.
                       Defaults to ``'noAcl'``. Specifies the set of
                       properties to return.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type if_etag_match: Union[str, Set[str]]
    :param if_etag_match: (Optional) Make the operation conditional on the
                          bucket's current ETag matching the given value.

    :type if_etag_not_match: Union[str, Set[str]]
    :param if_etag_not_match: (Optional) Make the operation conditional on the
                              bucket's current ETag not matching the given value.

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional
                                    on the current metageneration matching.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional
                                        on the current metageneration not matching.

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :type soft_deleted: bool
    :param soft_deleted: (Optional) If True, looks for a soft-deleted bucket;
        metadata is returned only if the bucket exists in a soft-deleted state.
        The bucket ``generation`` must be set when this is True.
        See: https://cloud.google.com/storage/docs/soft-delete
    """
    with create_trace_span(name="Storage.Bucket.reload"):
        # The GET itself lives in the property-mixin superclass; this
        # override exists to add tracing around it.
        forwarded = dict(
            client=client,
            projection=projection,
            timeout=timeout,
            if_etag_match=if_etag_match,
            if_etag_not_match=if_etag_not_match,
            if_metageneration_match=if_metageneration_match,
            if_metageneration_not_match=if_metageneration_not_match,
            retry=retry,
            soft_deleted=soft_deleted,
        )
        super(Bucket, self).reload(**forwarded)

1204 

def patch(
    self,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
):
    """Send all changed properties in a PATCH request.

    Updates the ``_properties`` with the response from the backend.
    If :attr:`user_project` is set, bills the API request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: the client to use. If not passed, falls back to the
                   ``client`` stored on the current object.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional
                                    on the current metageneration matching.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional
                                        on the current metageneration not matching.

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`
    """
    with create_trace_span(name="Storage.Bucket.patch"):
        # Bucket-specific wrinkle: removing a label is expressed in the
        # PATCH payload as that label key mapped to ``None``.
        if self._label_removals:
            self._changes.add("labels")
            labels = self._properties.setdefault("labels", {})
            # dict.fromkeys defaults every value to None, which is
            # exactly the "delete this label" marker the API expects.
            labels.update(dict.fromkeys(self._label_removals))

        # Hand off to the superclass implementation of PATCH.
        super(Bucket, self).patch(
            client=client,
            if_metageneration_match=if_metageneration_match,
            if_metageneration_not_match=if_metageneration_not_match,
            timeout=timeout,
            retry=retry,
        )

1258 

@property
def acl(self):
    """The ACL object bound to this bucket (stored on ``self._acl``)."""
    return self._acl

1263 

@property
def default_object_acl(self):
    """The default-object ACL bound to this bucket (``self._default_object_acl``)."""
    return self._default_object_acl

1268 

1269 @staticmethod 

1270 def path_helper(bucket_name): 

1271 """Relative URL path for a bucket. 

1272 

1273 :type bucket_name: str 

1274 :param bucket_name: The bucket name in the path. 

1275 

1276 :rtype: str 

1277 :returns: The relative URL path for ``bucket_name``. 

1278 """ 

1279 return "/b/" + bucket_name 

1280 

@property
def path(self):
    """The URL path to this bucket.

    :raises ValueError: if the bucket has no name set.
    """
    name = self.name
    if not name:
        raise ValueError("Cannot determine path without bucket name.")
    return self.path_helper(name)

1288 

def get_blob(
    self,
    blob_name,
    client=None,
    encryption_key=None,
    generation=None,
    if_etag_match=None,
    if_etag_not_match=None,
    if_generation_match=None,
    if_generation_not_match=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
    soft_deleted=None,
    **kwargs,
):
    """Fetch a blob's metadata by name, returning ``None`` if absent.

    If :attr:`user_project` is set, bills the API request to that project.

    :type blob_name: str
    :param blob_name: The name of the blob to retrieve.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type encryption_key: bytes
    :param encryption_key: (Optional) 32 byte encryption key for
        customer-supplied encryption. See
        https://cloud.google.com/storage/docs/encryption#customer-supplied.

    :type generation: long
    :param generation: (Optional) If present, selects a specific revision
                       of this object.

    :type if_etag_match: Union[str, Set[str]]
    :param if_etag_match: (Optional) See :ref:`using-if-etag-match`

    :type if_etag_not_match: Union[str, Set[str]]
    :param if_etag_not_match: (Optional) See :ref:`using-if-etag-not-match`

    :type if_generation_match: long
    :param if_generation_match: (Optional) See :ref:`using-if-generation-match`

    :type if_generation_not_match: long
    :param if_generation_not_match: (Optional) See :ref:`using-if-generation-not-match`

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) See :ref:`using-if-metageneration-match`

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) See :ref:`using-if-metageneration-not-match`

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :type soft_deleted: bool
    :param soft_deleted: (Optional) If True, looks for a soft-deleted object;
        metadata is returned only if the object is in a soft-deleted state.
        Object ``generation`` is required when this is True.
        See: https://cloud.google.com/storage/docs/soft-delete

    :param kwargs: Keyword arguments to pass to the
                   :class:`~google.cloud.storage.blob.Blob` constructor.

    :rtype: :class:`google.cloud.storage.blob.Blob` or None
    :returns: The blob object if it exists, otherwise None.
    """
    with create_trace_span(name="Storage.Bucket.getBlob"):
        candidate = Blob(
            bucket=self,
            name=blob_name,
            encryption_key=encryption_key,
            generation=generation,
            **kwargs,
        )
        try:
            # NOTE: Inside a batch this does not fail immediately; the
            # buffered `NotFound` surfaces when Batch.finish() is called.
            candidate.reload(
                client=client,
                timeout=timeout,
                if_etag_match=if_etag_match,
                if_etag_not_match=if_etag_not_match,
                if_generation_match=if_generation_match,
                if_generation_not_match=if_generation_not_match,
                if_metageneration_match=if_metageneration_match,
                if_metageneration_not_match=if_metageneration_not_match,
                retry=retry,
                soft_deleted=soft_deleted,
            )
        except NotFound:
            return None
        return candidate

1405 

def list_blobs(
    self,
    max_results=None,
    page_token=None,
    prefix=None,
    delimiter=None,
    start_offset=None,
    end_offset=None,
    include_trailing_delimiter=None,
    versions=None,
    projection="noAcl",
    fields=None,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
    match_glob=None,
    include_folders_as_prefixes=None,
    soft_deleted=None,
    page_size=None,
):
    """Return an iterator used to find blobs in the bucket.

    If :attr:`user_project` is set, bills the API request to that project.

    :type max_results: int
    :param max_results: (Optional) The maximum number of blobs to return.

    :type page_token: str
    :param page_token: (Optional) Opaque ``nextPageToken`` from a previous
        response. Deprecated: use the ``pages`` property of the returned
        iterator instead of manually passing the token.

    :type prefix: str
    :param prefix: (Optional) Prefix used to filter blobs.

    :type delimiter: str
    :param delimiter: (Optional) Delimiter, used with ``prefix`` to
                      emulate hierarchy.

    :type start_offset: str
    :param start_offset: (Optional) Filter results to objects whose names
        are lexicographically equal to or after ``startOffset``.

    :type end_offset: str
    :param end_offset: (Optional) Filter results to objects whose names
        are lexicographically before ``endOffset``.

    :type include_trailing_delimiter: boolean
    :param include_trailing_delimiter: (Optional) If true, objects that end
        in exactly one instance of ``delimiter`` have their metadata
        included in ``items`` in addition to ``prefixes``.

    :type versions: bool
    :param versions: (Optional) Whether object versions should be returned
                     as separate blobs.

    :type projection: str
    :param projection: (Optional) If used, must be 'full' or 'noAcl'.
                       Defaults to ``'noAcl'``.

    :type fields: str
    :param fields: (Optional) Selector specifying which fields to include
        in a partial response, e.g.
        ``'items(name,contentLanguage),nextPageToken'``. See:
        https://cloud.google.com/storage/docs/json_api/v1/parameters#fields

    :type client: :class:`~google.cloud.storage.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :type match_glob: str
    :param match_glob: (Optional) A glob pattern used to filter results
        (for example, foo*bar). The string value must be UTF-8 encoded. See:
        https://cloud.google.com/storage/docs/json_api/v1/objects/list#list-object-glob

    :type include_folders_as_prefixes: bool
    :param include_folders_as_prefixes: (Optional) If true, includes Folders
        and Managed Folders in the set of ``prefixes`` returned by the query.
        Only applicable if ``delimiter`` is set to /. See:
        https://cloud.google.com/storage/docs/managed-folders

    :type soft_deleted: bool
    :param soft_deleted: (Optional) If true, only soft-deleted objects are
        listed, in order of increasing generation number. Requires a soft
        delete policy on the bucket; cannot be combined with ``versions``.
        See: https://cloud.google.com/storage/docs/soft-delete

    :type page_size: int
    :param page_size: (Optional) Maximum number of blobs to return in each
        page. Defaults to a value set by the API.

    :rtype: :class:`~google.api_core.page_iterator.Iterator`
    :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob`
              in this bucket matching the arguments.
    """
    with create_trace_span(name="Storage.Bucket.listBlobs"):
        # All of the listing machinery lives on the client; this method
        # just resolves the client and forwards every option.
        storage_client = self._require_client(client)
        return storage_client.list_blobs(
            self,
            max_results=max_results,
            page_token=page_token,
            prefix=prefix,
            delimiter=delimiter,
            start_offset=start_offset,
            end_offset=end_offset,
            include_trailing_delimiter=include_trailing_delimiter,
            versions=versions,
            projection=projection,
            fields=fields,
            page_size=page_size,
            timeout=timeout,
            retry=retry,
            match_glob=match_glob,
            include_folders_as_prefixes=include_folders_as_prefixes,
            soft_deleted=soft_deleted,
        )

1548 

def list_notifications(
    self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY
):
    """List Pub / Sub notifications for this bucket.

    See:
    https://cloud.google.com/storage/docs/json_api/v1/notifications/list

    If :attr:`user_project` is set, bills the API request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :rtype: list of :class:`.BucketNotification`
    :returns: notification instances
    """
    with create_trace_span(name="Storage.Bucket.listNotifications"):
        storage_client = self._require_client(client)
        configs_path = f"{self.path}/notificationConfigs"
        result = storage_client._list_resource(
            configs_path,
            _item_to_notification,
            timeout=timeout,
            retry=retry,
        )
        # The item converter needs the owning bucket; attach it here.
        result.bucket = self
        return result

1586 

def get_notification(
    self,
    notification_id,
    client=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
):
    """Fetch one Pub / Sub notification configuration for this bucket.

    See the
    [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/notifications/get).

    If :attr:`user_project` is set, bills the API request to that project.

    :type notification_id: str
    :param notification_id: The notification id to retrieve the
                            notification configuration.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :rtype: :class:`.BucketNotification`
    :returns: notification instance.
    """
    with create_trace_span(name="Storage.Bucket.getNotification"):
        # Build the local resource, then populate it from the server.
        config = self.notification(notification_id=notification_id)
        config.reload(client=client, timeout=timeout, retry=retry)
        return config

1624 

def delete(
    self,
    force=False,
    client=None,
    if_metageneration_match=None,
    if_metageneration_not_match=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
):
    """Delete this bucket.

    The bucket **must** be empty in order to submit a delete request. If
    ``force=True`` is passed, this will first attempt to delete all the
    objects / blobs in the bucket (i.e. try to empty the bucket).

    If the bucket doesn't exist, this will raise
    :class:`google.cloud.exceptions.NotFound`. If the bucket is not empty
    (and ``force=False``), will raise
    :class:`google.cloud.exceptions.Conflict`.

    If ``force=True`` and the bucket contains more than 256 objects / blobs
    this will cowardly refuse to delete the objects (or the bucket). This
    is to prevent accidental bucket deletion and to prevent extremely long
    runtime of this method. Also note that ``force=True`` is not supported
    in a ``Batch`` context.

    If :attr:`user_project` is set, bills the API request to that project.

    :type force: bool
    :param force: If True, empties the bucket's objects then deletes it.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type if_metageneration_match: long
    :param if_metageneration_match: (Optional) Make the operation conditional
                                    on the current metageneration matching.

    :type if_metageneration_not_match: long
    :param if_metageneration_not_match: (Optional) Make the operation conditional
                                        on the current metageneration not matching.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
                    for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
             contains more than 256 objects / blobs.
    """
    with create_trace_span(name="Storage.Bucket.delete"):
        client = self._require_client(client)

        query_params = {}
        if self.user_project is not None:
            query_params["userProject"] = self.user_project
        _add_generation_match_parameters(
            query_params,
            if_metageneration_match=if_metageneration_match,
            if_metageneration_not_match=if_metageneration_not_match,
        )

        if force:
            max_objects = self._MAX_OBJECTS_FOR_ITERATION
            # List one more than the cap so we can tell "too many" apart
            # from "exactly at the cap". All generations are included so
            # versioned buckets are truly emptied.
            found = list(
                self.list_blobs(
                    max_results=max_objects + 1,
                    client=client,
                    timeout=timeout,
                    retry=retry,
                    versions=True,
                )
            )
            if len(found) > max_objects:
                raise ValueError(
                    "Refusing to delete bucket with more than "
                    "%d objects. If you actually want to delete "
                    "this bucket, please delete the objects "
                    "yourself before calling Bucket.delete()." % (max_objects,)
                )

            # Best-effort emptying: 404s on individual blobs are ignored.
            self.delete_blobs(
                found,
                on_error=lambda blob: None,
                client=client,
                timeout=timeout,
                retry=retry,
                preserve_generation=True,
            )

        # `_target_object=None` on purpose: a DELETE has no response body
        # (whether in a standard request or in a batch request).
        client._delete_resource(
            self.path,
            query_params=query_params,
            timeout=timeout,
            retry=retry,
            _target_object=None,
        )

1731 

1732 def delete_blob( 

1733 self, 

1734 blob_name, 

1735 client=None, 

1736 generation=None, 

1737 if_generation_match=None, 

1738 if_generation_not_match=None, 

1739 if_metageneration_match=None, 

1740 if_metageneration_not_match=None, 

1741 timeout=_DEFAULT_TIMEOUT, 

1742 retry=DEFAULT_RETRY, 

1743 ): 

1744 """Deletes a blob from the current bucket. 

1745 

1746 If :attr:`user_project` is set, bills the API request to that project. 

1747 

1748 :type blob_name: str 

1749 :param blob_name: A blob name to delete. 

1750 

1751 :type client: :class:`~google.cloud.storage.client.Client` or 

1752 ``NoneType`` 

1753 :param client: (Optional) The client to use. If not passed, falls back 

1754 to the ``client`` stored on the current bucket. 

1755 

1756 :type generation: long 

1757 :param generation: (Optional) If present, permanently deletes a specific 

1758 revision of this object. 

1759 

1760 :type if_generation_match: long 

1761 :param if_generation_match: 

1762 (Optional) See :ref:`using-if-generation-match` 

1763 

1764 :type if_generation_not_match: long 

1765 :param if_generation_not_match: 

1766 (Optional) See :ref:`using-if-generation-not-match` 

1767 

1768 :type if_metageneration_match: long 

1769 :param if_metageneration_match: 

1770 (Optional) See :ref:`using-if-metageneration-match` 

1771 

1772 :type if_metageneration_not_match: long 

1773 :param if_metageneration_not_match: 

1774 (Optional) See :ref:`using-if-metageneration-not-match` 

1775 

1776 :type timeout: float or tuple 

1777 :param timeout: 

1778 (Optional) The amount of time, in seconds, to wait 

1779 for the server response. See: :ref:`configuring_timeouts` 

1780 

1781 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

1782 :param retry: (Optional) How to retry the RPC. A None value will disable 

1783 retries. A google.api_core.retry.Retry value will enable retries, 

1784 and the object will define retriable response codes and errors and 

1785 configure backoff and timeout options. 

1786 

1787 A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a 

1788 Retry object and activates it only if certain conditions are met. 

1789 This class exists to provide safe defaults for RPC calls that are 

1790 not technically safe to retry normally (due to potential data 

1791 duplication or other side-effects) but become safe to retry if a 

1792 condition such as if_generation_match is set. 

1793 

1794 See the retry.py source code and docstrings in this package 

1795 (google.cloud.storage.retry) for information on retry types and how 

1796 to configure them. 

1797 

1798 :raises: :class:`google.cloud.exceptions.NotFound` Raises a NotFound 

1799 if the blob isn't found. To suppress 

1800 the exception, use :meth:`delete_blobs` by passing a no-op 

1801 ``on_error`` callback. 

1802 """ 

1803 with create_trace_span(name="Storage.Bucket.deleteBlob"): 

1804 client = self._require_client(client) 

1805 blob = Blob(blob_name, bucket=self, generation=generation) 

1806 

1807 query_params = copy.deepcopy(blob._query_params) 

1808 _add_generation_match_parameters( 

1809 query_params, 

1810 if_generation_match=if_generation_match, 

1811 if_generation_not_match=if_generation_not_match, 

1812 if_metageneration_match=if_metageneration_match, 

1813 if_metageneration_not_match=if_metageneration_not_match, 

1814 ) 

1815 # We intentionally pass `_target_object=None` since a DELETE 

1816 # request has no response value (whether in a standard request or 

1817 # in a batch request). 

1818 client._delete_resource( 

1819 blob.path, 

1820 query_params=query_params, 

1821 timeout=timeout, 

1822 retry=retry, 

1823 _target_object=None, 

1824 ) 

1825 

1826 def delete_blobs( 

1827 self, 

1828 blobs, 

1829 on_error=None, 

1830 client=None, 

1831 preserve_generation=False, 

1832 timeout=_DEFAULT_TIMEOUT, 

1833 if_generation_match=None, 

1834 if_generation_not_match=None, 

1835 if_metageneration_match=None, 

1836 if_metageneration_not_match=None, 

1837 retry=DEFAULT_RETRY, 

1838 ): 

1839 """Deletes a list of blobs from the current bucket. 

1840 

1841 Uses :meth:`delete_blob` to delete each individual blob. 

1842 

1843 By default, any generation information in the list of blobs is ignored, and the 

1844 live versions of all blobs are deleted. Set `preserve_generation` to True 

1845 if blob generation should instead be propagated from the list of blobs. 

1846 

1847 If :attr:`user_project` is set, bills the API request to that project. 

1848 

1849 :type blobs: list 

1850 :param blobs: A list of :class:`~google.cloud.storage.blob.Blob`-s or 

1851 blob names to delete. 

1852 

1853 :type on_error: callable 

1854 :param on_error: (Optional) Takes single argument: ``blob``. 

1855 Called once for each blob raising 

1856 :class:`~google.cloud.exceptions.NotFound`; 

1857 otherwise, the exception is propagated. 

1858 Note that ``on_error`` is not supported in a ``Batch`` context. 

1859 

1860 :type client: :class:`~google.cloud.storage.client.Client` 

1861 :param client: (Optional) The client to use. If not passed, falls back 

1862 to the ``client`` stored on the current bucket. 

1863 

1864 :type preserve_generation: bool 

1865 :param preserve_generation: (Optional) Deletes only the generation specified on the blob object, 

1866 instead of the live version, if set to True. Only :class:~google.cloud.storage.blob.Blob 

1867 objects can have their generation set in this way. 

1868 Default: False. 

1869 

1870 :type if_generation_match: list of long 

1871 :param if_generation_match: 

1872 (Optional) See :ref:`using-if-generation-match` 

1873 Note that the length of the list must match the length of 

1874 The list must match ``blobs`` item-to-item. 

1875 

1876 :type if_generation_not_match: list of long 

1877 :param if_generation_not_match: 

1878 (Optional) See :ref:`using-if-generation-not-match` 

1879 The list must match ``blobs`` item-to-item. 

1880 

1881 :type if_metageneration_match: list of long 

1882 :param if_metageneration_match: 

1883 (Optional) See :ref:`using-if-metageneration-match` 

1884 The list must match ``blobs`` item-to-item. 

1885 

1886 :type if_metageneration_not_match: list of long 

1887 :param if_metageneration_not_match: 

1888 (Optional) See :ref:`using-if-metageneration-not-match` 

1889 The list must match ``blobs`` item-to-item. 

1890 

1891 :type timeout: float or tuple 

1892 :param timeout: 

1893 (Optional) The amount of time, in seconds, to wait 

1894 for the server response. See: :ref:`configuring_timeouts` 

1895 

1896 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

1897 :param retry: (Optional) How to retry the RPC. A None value will disable 

1898 retries. A google.api_core.retry.Retry value will enable retries, 

1899 and the object will define retriable response codes and errors and 

1900 configure backoff and timeout options. 

1901 

1902 A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a 

1903 Retry object and activates it only if certain conditions are met. 

1904 This class exists to provide safe defaults for RPC calls that are 

1905 not technically safe to retry normally (due to potential data 

1906 duplication or other side-effects) but become safe to retry if a 

1907 condition such as if_generation_match is set. 

1908 

1909 See the retry.py source code and docstrings in this package 

1910 (google.cloud.storage.retry) for information on retry types and how 

1911 to configure them. 

1912 

1913 :raises: :class:`~google.cloud.exceptions.NotFound` (if 

1914 `on_error` is not passed). 

1915 """ 

1916 with create_trace_span(name="Storage.Bucket.deleteBlobs"): 

1917 _raise_if_len_differs( 

1918 len(blobs), 

1919 if_generation_match=if_generation_match, 

1920 if_generation_not_match=if_generation_not_match, 

1921 if_metageneration_match=if_metageneration_match, 

1922 if_metageneration_not_match=if_metageneration_not_match, 

1923 ) 

1924 if_generation_match = iter(if_generation_match or []) 

1925 if_generation_not_match = iter(if_generation_not_match or []) 

1926 if_metageneration_match = iter(if_metageneration_match or []) 

1927 if_metageneration_not_match = iter(if_metageneration_not_match or []) 

1928 

1929 for blob in blobs: 

1930 try: 

1931 blob_name = blob 

1932 generation = None 

1933 if not isinstance(blob_name, str): 

1934 blob_name = blob.name 

1935 generation = blob.generation if preserve_generation else None 

1936 

1937 self.delete_blob( 

1938 blob_name, 

1939 client=client, 

1940 generation=generation, 

1941 if_generation_match=next(if_generation_match, None), 

1942 if_generation_not_match=next(if_generation_not_match, None), 

1943 if_metageneration_match=next(if_metageneration_match, None), 

1944 if_metageneration_not_match=next( 

1945 if_metageneration_not_match, None 

1946 ), 

1947 timeout=timeout, 

1948 retry=retry, 

1949 ) 

1950 except NotFound: 

1951 if on_error is not None: 

1952 on_error(blob) 

1953 else: 

1954 raise 

1955 

1956 def copy_blob( 

1957 self, 

1958 blob, 

1959 destination_bucket, 

1960 new_name=None, 

1961 client=None, 

1962 preserve_acl=True, 

1963 source_generation=None, 

1964 if_generation_match=None, 

1965 if_generation_not_match=None, 

1966 if_metageneration_match=None, 

1967 if_metageneration_not_match=None, 

1968 if_source_generation_match=None, 

1969 if_source_generation_not_match=None, 

1970 if_source_metageneration_match=None, 

1971 if_source_metageneration_not_match=None, 

1972 timeout=_DEFAULT_TIMEOUT, 

1973 retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, 

1974 ): 

1975 """Copy the given blob to the given bucket, optionally with a new name. 

1976 

1977 If :attr:`user_project` is set, bills the API request to that project. 

1978 

1979 See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/objects/copy) 

1980 and a [code sample](https://cloud.google.com/storage/docs/samples/storage-copy-file#storage_copy_file-python). 

1981 

1982 :type blob: :class:`google.cloud.storage.blob.Blob` 

1983 :param blob: The blob to be copied. 

1984 

1985 :type destination_bucket: :class:`google.cloud.storage.bucket.Bucket` 

1986 :param destination_bucket: The bucket into which the blob should be 

1987 copied. 

1988 

1989 :type new_name: str 

1990 :param new_name: (Optional) The new name for the copied file. 

1991 

1992 :type client: :class:`~google.cloud.storage.client.Client` or 

1993 ``NoneType`` 

1994 :param client: (Optional) The client to use. If not passed, falls back 

1995 to the ``client`` stored on the current bucket. 

1996 

1997 :type preserve_acl: bool 

1998 :param preserve_acl: DEPRECATED. This argument is not functional! 

1999 (Optional) Copies ACL from old blob to new blob. 

2000 Default: True. 

2001 Note that ``preserve_acl`` is not supported in a 

2002 ``Batch`` context. 

2003 

2004 :type source_generation: long 

2005 :param source_generation: (Optional) The generation of the blob to be 

2006 copied. 

2007 

2008 :type if_generation_match: long 

2009 :param if_generation_match: 

2010 (Optional) See :ref:`using-if-generation-match` 

2011 Note that the generation to be matched is that of the 

2012 ``destination`` blob. 

2013 

2014 :type if_generation_not_match: long 

2015 :param if_generation_not_match: 

2016 (Optional) See :ref:`using-if-generation-not-match` 

2017 Note that the generation to be matched is that of the 

2018 ``destination`` blob. 

2019 

2020 :type if_metageneration_match: long 

2021 :param if_metageneration_match: 

2022 (Optional) See :ref:`using-if-metageneration-match` 

2023 Note that the metageneration to be matched is that of the 

2024 ``destination`` blob. 

2025 

2026 :type if_metageneration_not_match: long 

2027 :param if_metageneration_not_match: 

2028 (Optional) See :ref:`using-if-metageneration-not-match` 

2029 Note that the metageneration to be matched is that of the 

2030 ``destination`` blob. 

2031 

2032 :type if_source_generation_match: long 

2033 :param if_source_generation_match: 

2034 (Optional) Makes the operation conditional on whether the source 

2035 object's generation matches the given value. 

2036 

2037 :type if_source_generation_not_match: long 

2038 :param if_source_generation_not_match: 

2039 (Optional) Makes the operation conditional on whether the source 

2040 object's generation does not match the given value. 

2041 

2042 :type if_source_metageneration_match: long 

2043 :param if_source_metageneration_match: 

2044 (Optional) Makes the operation conditional on whether the source 

2045 object's current metageneration matches the given value. 

2046 

2047 :type if_source_metageneration_not_match: long 

2048 :param if_source_metageneration_not_match: 

2049 (Optional) Makes the operation conditional on whether the source 

2050 object's current metageneration does not match the given value. 

2051 

2052 :type timeout: float or tuple 

2053 :param timeout: 

2054 (Optional) The amount of time, in seconds, to wait 

2055 for the server response. See: :ref:`configuring_timeouts` 

2056 

2057 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

2058 :param retry: 

2059 (Optional) How to retry the RPC. 

2060 The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry 

2061 policy which will only enable retries if ``if_generation_match`` or ``generation`` 

2062 is set, in order to ensure requests are idempotent before retrying them. 

2063 Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object 

2064 to enable retries regardless of generation precondition setting. 

2065 See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout). 

2066 

2067 :rtype: :class:`google.cloud.storage.blob.Blob` 

2068 :returns: The new Blob. 

2069 """ 

2070 with create_trace_span(name="Storage.Bucket.copyBlob"): 

2071 client = self._require_client(client) 

2072 query_params = {} 

2073 

2074 if self.user_project is not None: 

2075 query_params["userProject"] = self.user_project 

2076 

2077 if source_generation is not None: 

2078 query_params["sourceGeneration"] = source_generation 

2079 

2080 _add_generation_match_parameters( 

2081 query_params, 

2082 if_generation_match=if_generation_match, 

2083 if_generation_not_match=if_generation_not_match, 

2084 if_metageneration_match=if_metageneration_match, 

2085 if_metageneration_not_match=if_metageneration_not_match, 

2086 if_source_generation_match=if_source_generation_match, 

2087 if_source_generation_not_match=if_source_generation_not_match, 

2088 if_source_metageneration_match=if_source_metageneration_match, 

2089 if_source_metageneration_not_match=if_source_metageneration_not_match, 

2090 ) 

2091 

2092 if new_name is None: 

2093 new_name = blob.name 

2094 

2095 new_blob = Blob(bucket=destination_bucket, name=new_name) 

2096 api_path = blob.path + "/copyTo" + new_blob.path 

2097 copy_result = client._post_resource( 

2098 api_path, 

2099 None, 

2100 query_params=query_params, 

2101 timeout=timeout, 

2102 retry=retry, 

2103 _target_object=new_blob, 

2104 ) 

2105 

2106 if not preserve_acl: 

2107 new_blob.acl.save(acl={}, client=client, timeout=timeout) 

2108 

2109 new_blob._set_properties(copy_result) 

2110 return new_blob 

2111 

2112 def rename_blob( 

2113 self, 

2114 blob, 

2115 new_name, 

2116 client=None, 

2117 if_generation_match=None, 

2118 if_generation_not_match=None, 

2119 if_metageneration_match=None, 

2120 if_metageneration_not_match=None, 

2121 if_source_generation_match=None, 

2122 if_source_generation_not_match=None, 

2123 if_source_metageneration_match=None, 

2124 if_source_metageneration_not_match=None, 

2125 timeout=_DEFAULT_TIMEOUT, 

2126 retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, 

2127 ): 

2128 """Rename the given blob using copy and delete operations. 

2129 

2130 If :attr:`user_project` is set, bills the API request to that project. 

2131 

2132 Effectively, copies blob to the same bucket with a new name, then 

2133 deletes the blob. 

2134 

2135 .. warning:: 

2136 

2137 This method will first duplicate the data and then delete the 

2138 old blob. This means that with very large objects renaming 

2139 could be a very (temporarily) costly or a very slow operation. 

2140 If you need more control over the copy and deletion, instead 

2141 use ``google.cloud.storage.blob.Blob.copy_to`` and 

2142 ``google.cloud.storage.blob.Blob.delete`` directly. 

2143 

2144 Also note that this method is not fully supported in a 

2145 ``Batch`` context. 

2146 

2147 :type blob: :class:`google.cloud.storage.blob.Blob` 

2148 :param blob: The blob to be renamed. 

2149 

2150 :type new_name: str 

2151 :param new_name: The new name for this blob. 

2152 

2153 :type client: :class:`~google.cloud.storage.client.Client` or 

2154 ``NoneType`` 

2155 :param client: (Optional) The client to use. If not passed, falls back 

2156 to the ``client`` stored on the current bucket. 

2157 

2158 :type if_generation_match: long 

2159 :param if_generation_match: 

2160 (Optional) See :ref:`using-if-generation-match` 

2161 Note that the generation to be matched is that of the 

2162 ``destination`` blob. 

2163 

2164 :type if_generation_not_match: long 

2165 :param if_generation_not_match: 

2166 (Optional) See :ref:`using-if-generation-not-match` 

2167 Note that the generation to be matched is that of the 

2168 ``destination`` blob. 

2169 

2170 :type if_metageneration_match: long 

2171 :param if_metageneration_match: 

2172 (Optional) See :ref:`using-if-metageneration-match` 

2173 Note that the metageneration to be matched is that of the 

2174 ``destination`` blob. 

2175 

2176 :type if_metageneration_not_match: long 

2177 :param if_metageneration_not_match: 

2178 (Optional) See :ref:`using-if-metageneration-not-match` 

2179 Note that the metageneration to be matched is that of the 

2180 ``destination`` blob. 

2181 

2182 :type if_source_generation_match: long 

2183 :param if_source_generation_match: 

2184 (Optional) Makes the operation conditional on whether the source 

2185 object's generation matches the given value. Also used in the 

2186 (implied) delete request. 

2187 

2188 :type if_source_generation_not_match: long 

2189 :param if_source_generation_not_match: 

2190 (Optional) Makes the operation conditional on whether the source 

2191 object's generation does not match the given value. Also used in 

2192 the (implied) delete request. 

2193 

2194 :type if_source_metageneration_match: long 

2195 :param if_source_metageneration_match: 

2196 (Optional) Makes the operation conditional on whether the source 

2197 object's current metageneration matches the given value. Also used 

2198 in the (implied) delete request. 

2199 

2200 :type if_source_metageneration_not_match: long 

2201 :param if_source_metageneration_not_match: 

2202 (Optional) Makes the operation conditional on whether the source 

2203 object's current metageneration does not match the given value. 

2204 Also used in the (implied) delete request. 

2205 

2206 :type timeout: float or tuple 

2207 :param timeout: 

2208 (Optional) The amount of time, in seconds, to wait 

2209 for the server response. See: :ref:`configuring_timeouts` 

2210 

2211 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

2212 :param retry: 

2213 (Optional) How to retry the RPC. 

2214 The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry 

2215 policy which will only enable retries if ``if_generation_match`` or ``generation`` 

2216 is set, in order to ensure requests are idempotent before retrying them. 

2217 Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object 

2218 to enable retries regardless of generation precondition setting. 

2219 See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout). 

2220 

2221 :rtype: :class:`Blob` 

2222 :returns: The newly-renamed blob. 

2223 """ 

2224 with create_trace_span(name="Storage.Bucket.renameBlob"): 

2225 same_name = blob.name == new_name 

2226 

2227 new_blob = self.copy_blob( 

2228 blob, 

2229 self, 

2230 new_name, 

2231 client=client, 

2232 timeout=timeout, 

2233 if_generation_match=if_generation_match, 

2234 if_generation_not_match=if_generation_not_match, 

2235 if_metageneration_match=if_metageneration_match, 

2236 if_metageneration_not_match=if_metageneration_not_match, 

2237 if_source_generation_match=if_source_generation_match, 

2238 if_source_generation_not_match=if_source_generation_not_match, 

2239 if_source_metageneration_match=if_source_metageneration_match, 

2240 if_source_metageneration_not_match=if_source_metageneration_not_match, 

2241 retry=retry, 

2242 ) 

2243 

2244 if not same_name: 

2245 blob.delete( 

2246 client=client, 

2247 timeout=timeout, 

2248 if_generation_match=if_source_generation_match, 

2249 if_generation_not_match=if_source_generation_not_match, 

2250 if_metageneration_match=if_source_metageneration_match, 

2251 if_metageneration_not_match=if_source_metageneration_not_match, 

2252 retry=retry, 

2253 ) 

2254 return new_blob 

2255 

2256 def move_blob( 

2257 self, 

2258 blob, 

2259 new_name, 

2260 client=None, 

2261 if_generation_match=None, 

2262 if_generation_not_match=None, 

2263 if_metageneration_match=None, 

2264 if_metageneration_not_match=None, 

2265 if_source_generation_match=None, 

2266 if_source_generation_not_match=None, 

2267 if_source_metageneration_match=None, 

2268 if_source_metageneration_not_match=None, 

2269 timeout=_DEFAULT_TIMEOUT, 

2270 retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, 

2271 ): 

2272 """Move a blob to a new name atomically. 

2273 

2274 If :attr:`user_project` is set on the bucket, bills the API request to that project. 

2275 

2276 :type blob: :class:`google.cloud.storage.blob.Blob` 

2277 :param blob: The blob to be renamed. 

2278 

2279 :type new_name: str 

2280 :param new_name: The new name for this blob. 

2281 

2282 :type client: :class:`~google.cloud.storage.client.Client` or 

2283 ``NoneType`` 

2284 :param client: (Optional) The client to use. If not passed, falls back 

2285 to the ``client`` stored on the current bucket. 

2286 

2287 :type if_generation_match: int 

2288 :param if_generation_match: 

2289 (Optional) See :ref:`using-if-generation-match` 

2290 Note that the generation to be matched is that of the 

2291 ``destination`` blob. 

2292 

2293 :type if_generation_not_match: int 

2294 :param if_generation_not_match: 

2295 (Optional) See :ref:`using-if-generation-not-match` 

2296 Note that the generation to be matched is that of the 

2297 ``destination`` blob. 

2298 

2299 :type if_metageneration_match: int 

2300 :param if_metageneration_match: 

2301 (Optional) See :ref:`using-if-metageneration-match` 

2302 Note that the metageneration to be matched is that of the 

2303 ``destination`` blob. 

2304 

2305 :type if_metageneration_not_match: int 

2306 :param if_metageneration_not_match: 

2307 (Optional) See :ref:`using-if-metageneration-not-match` 

2308 Note that the metageneration to be matched is that of the 

2309 ``destination`` blob. 

2310 

2311 :type if_source_generation_match: int 

2312 :param if_source_generation_match: 

2313 (Optional) Makes the operation conditional on whether the source 

2314 object's generation matches the given value. 

2315 

2316 :type if_source_generation_not_match: int 

2317 :param if_source_generation_not_match: 

2318 (Optional) Makes the operation conditional on whether the source 

2319 object's generation does not match the given value. 

2320 

2321 :type if_source_metageneration_match: int 

2322 :param if_source_metageneration_match: 

2323 (Optional) Makes the operation conditional on whether the source 

2324 object's current metageneration matches the given value. 

2325 

2326 :type if_source_metageneration_not_match: int 

2327 :param if_source_metageneration_not_match: 

2328 (Optional) Makes the operation conditional on whether the source 

2329 object's current metageneration does not match the given value. 

2330 

2331 :type timeout: float or tuple 

2332 :param timeout: 

2333 (Optional) The amount of time, in seconds, to wait 

2334 for the server response. See: :ref:`configuring_timeouts` 

2335 

2336 :type retry: google.api_core.retry.Retry 

2337 :param retry: 

2338 (Optional) How to retry the RPC. 

2339 See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout). 

2340 

2341 :rtype: :class:`Blob` 

2342 :returns: The newly-moved blob. 

2343 """ 

2344 with create_trace_span(name="Storage.Bucket.moveBlob"): 

2345 client = self._require_client(client) 

2346 query_params = {} 

2347 

2348 if self.user_project is not None: 

2349 query_params["userProject"] = self.user_project 

2350 

2351 _add_generation_match_parameters( 

2352 query_params, 

2353 if_generation_match=if_generation_match, 

2354 if_generation_not_match=if_generation_not_match, 

2355 if_metageneration_match=if_metageneration_match, 

2356 if_metageneration_not_match=if_metageneration_not_match, 

2357 if_source_generation_match=if_source_generation_match, 

2358 if_source_generation_not_match=if_source_generation_not_match, 

2359 if_source_metageneration_match=if_source_metageneration_match, 

2360 if_source_metageneration_not_match=if_source_metageneration_not_match, 

2361 ) 

2362 

2363 new_blob = Blob(bucket=self, name=new_name) 

2364 api_path = "{blob_path}/moveTo/o/{new_name}".format( 

2365 blob_path=blob.path, new_name=_quote(new_blob.name) 

2366 ) 

2367 

2368 move_result = client._post_resource( 

2369 api_path, 

2370 None, 

2371 query_params=query_params, 

2372 timeout=timeout, 

2373 retry=retry, 

2374 _target_object=new_blob, 

2375 ) 

2376 

2377 new_blob._set_properties(move_result) 

2378 return new_blob 

2379 

2380 def restore_blob( 

2381 self, 

2382 blob_name, 

2383 client=None, 

2384 generation=None, 

2385 copy_source_acl=None, 

2386 projection=None, 

2387 if_generation_match=None, 

2388 if_generation_not_match=None, 

2389 if_metageneration_match=None, 

2390 if_metageneration_not_match=None, 

2391 timeout=_DEFAULT_TIMEOUT, 

2392 retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, 

2393 ): 

2394 """Restores a soft-deleted object. 

2395 

2396 If :attr:`user_project` is set on the bucket, bills the API request to that project. 

2397 

2398 See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/objects/restore) 

2399 

2400 :type blob_name: str 

2401 :param blob_name: The name of the blob to be restored. 

2402 

2403 :type client: :class:`~google.cloud.storage.client.Client` 

2404 :param client: (Optional) The client to use. If not passed, falls back 

2405 to the ``client`` stored on the current bucket. 

2406 

2407 :type generation: int 

2408 :param generation: Selects the specific revision of the object. 

2409 

2410 :type copy_source_acl: bool 

2411 :param copy_source_acl: (Optional) If true, copy the soft-deleted object's access controls. 

2412 

2413 :type projection: str 

2414 :param projection: (Optional) Specifies the set of properties to return. 

2415 If used, must be 'full' or 'noAcl'. 

2416 

2417 :type if_generation_match: long 

2418 :param if_generation_match: 

2419 (Optional) See :ref:`using-if-generation-match` 

2420 

2421 :type if_generation_not_match: long 

2422 :param if_generation_not_match: 

2423 (Optional) See :ref:`using-if-generation-not-match` 

2424 

2425 :type if_metageneration_match: long 

2426 :param if_metageneration_match: 

2427 (Optional) See :ref:`using-if-metageneration-match` 

2428 

2429 :type if_metageneration_not_match: long 

2430 :param if_metageneration_not_match: 

2431 (Optional) See :ref:`using-if-metageneration-not-match` 

2432 

2433 :type timeout: float or tuple 

2434 :param timeout: 

2435 (Optional) The amount of time, in seconds, to wait 

2436 for the server response. See: :ref:`configuring_timeouts` 

2437 

2438 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

2439 :param retry: 

2440 (Optional) How to retry the RPC. 

2441 The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, which 

2442 only restore operations with ``if_generation_match`` or ``generation`` set 

2443 will be retried. 

2444 

2445 Users can configure non-default retry behavior. A ``None`` value will 

2446 disable retries. A ``DEFAULT_RETRY`` value will enable retries 

2447 even if restore operations are not guaranteed to be idempotent. 

2448 See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout). 

2449 

2450 :rtype: :class:`google.cloud.storage.blob.Blob` 

2451 :returns: The restored Blob. 

2452 """ 

2453 with create_trace_span(name="Storage.Bucket.restore_blob"): 

2454 client = self._require_client(client) 

2455 query_params = {} 

2456 

2457 if self.user_project is not None: 

2458 query_params["userProject"] = self.user_project 

2459 if generation is not None: 

2460 query_params["generation"] = generation 

2461 if copy_source_acl is not None: 

2462 query_params["copySourceAcl"] = copy_source_acl 

2463 if projection is not None: 

2464 query_params["projection"] = projection 

2465 

2466 _add_generation_match_parameters( 

2467 query_params, 

2468 if_generation_match=if_generation_match, 

2469 if_generation_not_match=if_generation_not_match, 

2470 if_metageneration_match=if_metageneration_match, 

2471 if_metageneration_not_match=if_metageneration_not_match, 

2472 ) 

2473 

2474 blob = Blob(bucket=self, name=blob_name) 

2475 api_response = client._post_resource( 

2476 f"{blob.path}/restore", 

2477 None, 

2478 query_params=query_params, 

2479 timeout=timeout, 

2480 retry=retry, 

2481 ) 

2482 blob._set_properties(api_response) 

2483 return blob 

2484 

2485 @property 

2486 def cors(self): 

2487 """Retrieve or set CORS policies configured for this bucket. 

2488 

2489 See http://www.w3.org/TR/cors/ and 

2490 https://cloud.google.com/storage/docs/json_api/v1/buckets 

2491 

2492 .. note:: 

2493 

2494 The getter for this property returns a list which contains 

2495 *copies* of the bucket's CORS policy mappings. Mutating the list 

2496 or one of its dicts has no effect unless you then re-assign the 

2497 dict via the setter. E.g.: 

2498 

2499 >>> policies = bucket.cors 

2500 >>> policies.append({'origin': '/foo', ...}) 

2501 >>> policies[1]['maxAgeSeconds'] = 3600 

2502 >>> del policies[0] 

2503 >>> bucket.cors = policies 

2504 >>> bucket.update() 

2505 

2506 :setter: Set CORS policies for this bucket. 

2507 :getter: Gets the CORS policies for this bucket. 

2508 

2509 :rtype: list of dictionaries 

2510 :returns: A sequence of mappings describing each CORS policy. 

2511 """ 

2512 return [copy.deepcopy(policy) for policy in self._properties.get("cors", ())] 

2513 

2514 @cors.setter 

2515 def cors(self, entries): 

2516 """Set CORS policies configured for this bucket. 

2517 

2518 See http://www.w3.org/TR/cors/ and 

2519 https://cloud.google.com/storage/docs/json_api/v1/buckets 

2520 

2521 :type entries: list of dictionaries 

2522 :param entries: A sequence of mappings describing each CORS policy. 

2523 """ 

2524 self._patch_property("cors", entries) 

2525 

    default_event_based_hold = _scalar_property("defaultEventBasedHold")
    """Are uploaded objects automatically placed under an event-based hold?

    If True, uploaded objects will be placed under an event-based hold to
    be released at a future time. When released an object will then begin
    the retention period determined by the policy retention period for the
    object bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    If the property is not set locally, returns ``None``.

    :rtype: bool or ``NoneType``
    """

2540 

@property
def default_kms_key_name(self):
    """Retrieve / set default KMS encryption key for objects in the bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :setter: Set default KMS encryption key for items in this bucket.
    :getter: Get default KMS encryption key for items in this bucket.

    :rtype: str
    :returns: Default KMS encryption key, or ``None`` if not set.
    """
    return self._properties.get("encryption", {}).get("defaultKmsKeyName")

@default_kms_key_name.setter
def default_kms_key_name(self, value):
    """Set the default KMS encryption key for objects in the bucket.

    :type value: str or None
    :param value: new KMS key name (None to clear any existing key).
    """
    config = self._properties.get("encryption", {})
    config["defaultKmsKeyName"] = value
    self._patch_property("encryption", config)

2566 

@property
def labels(self):
    """Retrieve or set labels assigned to this bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets#labels

    .. note::

        The getter for this property returns a dict which is a *copy*
        of the bucket's labels. Mutating that dict has no effect unless
        you then re-assign the dict via the setter. E.g.:

        >>> labels = bucket.labels
        >>> labels['new_key'] = 'some-label'
        >>> del labels['old_key']
        >>> bucket.labels = labels
        >>> bucket.update()

    :setter: Set labels for this bucket.
    :getter: Gets the labels for this bucket.

    :rtype: :class:`dict`
    :returns: Name-value pairs (string->string) labelling the bucket.
    """
    labels = self._properties.get("labels")
    if labels is None:
        return {}
    return copy.deepcopy(labels)

@labels.setter
def labels(self, mapping):
    """Set labels assigned to this bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets#labels

    :type mapping: :class:`dict`
    :param mapping: Name-value pairs (string->string) labelling the bucket.
    """
    # If any labels have been expressly removed, we need to track this
    # so that a future .patch() call can do the correct thing.
    # ``set(d)`` iterates the keys directly; no intermediate list needed.
    existing = set(self.labels)
    incoming = set(mapping)
    self._label_removals = self._label_removals.union(existing - incoming)
    # The API stores label values as strings; coerce eagerly.
    mapping = {key: str(value) for key, value in mapping.items()}

    # Actually update the labels on the object.
    self._patch_property("labels", copy.deepcopy(mapping))

2616 

@property
def etag(self):
    """The ETag of the bucket resource.

    See https://tools.ietf.org/html/rfc2616#section-3.11 and
    https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: str or ``NoneType``
    :returns: The bucket etag, or ``None`` if the bucket's resource has
              not been loaded from the server.
    """
    return self._properties.get("etag")

2629 

@property
def id(self):
    """The server-assigned ID of the bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: str or ``NoneType``
    :returns: The bucket ID, or ``None`` if the bucket's resource has
              not been loaded from the server.
    """
    return self._properties.get("id")

2641 

@property
def iam_configuration(self):
    """Retrieve the IAM configuration for this bucket.

    :rtype: :class:`IAMConfiguration`
    :returns: an instance for managing the bucket's IAM configuration.
    """
    resource = self._properties.get("iamConfiguration", {})
    return IAMConfiguration.from_api_repr(resource, self)

2651 

@property
def soft_delete_policy(self):
    """Retrieve the soft delete policy for this bucket.

    See https://cloud.google.com/storage/docs/soft-delete

    :rtype: :class:`SoftDeletePolicy`
    :returns: an instance for managing the bucket's soft delete policy.
    """
    resource = self._properties.get("softDeletePolicy", {})
    return SoftDeletePolicy.from_api_repr(resource, self)

2663 

@property
def lifecycle_rules(self):
    """Retrieve or set lifecycle rules configured for this bucket.

    See https://cloud.google.com/storage/docs/lifecycle and
    https://cloud.google.com/storage/docs/json_api/v1/buckets

    .. note::
        The getter yields *copies* of the stored rule mappings; edits to
        the yielded dicts take effect only after re-assigning the full
        list through the setter and updating the bucket, e.g.:

        >>> rules = list(bucket.lifecycle_rules)
        >>> rules.append({'origin': '/foo', ...})
        >>> bucket.lifecycle_rules = rules
        >>> bucket.update()

    :setter: Set lifecycle rules for this bucket.
    :getter: Gets the lifecycle rules for this bucket.

    :rtype: generator(dict)
    :returns: A sequence of mappings describing each lifecycle rule.
    """
    config = self._properties.get("lifecycle", {})
    for resource in config.get("rule", ()):
        action_type = resource["action"]["type"]
        if action_type == "Delete":
            yield LifecycleRuleDelete.from_api_repr(resource)
        elif action_type == "SetStorageClass":
            yield LifecycleRuleSetStorageClass.from_api_repr(resource)
        elif action_type == "AbortIncompleteMultipartUpload":
            yield LifecycleRuleAbortIncompleteMultipartUpload.from_api_repr(resource)
        else:
            # An unrecognized action type means the client library is
            # older than the server's schema; surface it as a warning
            # rather than failing the whole iteration.
            warnings.warn(
                "Unknown lifecycle rule type received: {}. Please upgrade to the latest version of google-cloud-storage.".format(
                    resource
                ),
                UserWarning,
                stacklevel=1,
            )

@lifecycle_rules.setter
def lifecycle_rules(self, rules):
    """Set lifecycle rules configured for this bucket.

    See https://cloud.google.com/storage/docs/lifecycle and
    https://cloud.google.com/storage/docs/json_api/v1/buckets

    :type rules: list of dictionaries
    :param rules: A sequence of mappings describing each lifecycle rule.
    """
    # Coerce LifecycleRule helper objects (dict subclasses) to plain dicts.
    self._patch_property("lifecycle", {"rule": [dict(rule) for rule in rules]})

2721 

def clear_lifecycle_rules(self):
    """Remove all lifecycle rules configured for this bucket.

    See https://cloud.google.com/storage/docs/lifecycle and
    https://cloud.google.com/storage/docs/json_api/v1/buckets
    """
    # Assigning an empty list through the property setter patches the
    # bucket's "lifecycle" resource with no rules.
    self.lifecycle_rules = []

2729 

def clear_lifecyle_rules(self):
    """Deprecated alias for :meth:`clear_lifecycle_rules`.

    .. deprecated::
        This misspelled alias is retained only for backward
        compatibility; use :meth:`clear_lifecycle_rules` instead.
    """
    # Emit a warning so callers can migrate before the alias is removed,
    # matching the deprecation style used by the ``location`` setter.
    warnings.warn(
        "clear_lifecyle_rules is deprecated; use clear_lifecycle_rules instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return self.clear_lifecycle_rules()

2733 

def add_lifecycle_delete_rule(self, **kw):
    """Append a "delete" rule to this bucket's lifecycle configuration.

    This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle),
    which is set on the bucket. For the general format of a lifecycle
    configuration, see the
    [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets).

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """
    current = list(self.lifecycle_rules)
    current.append(LifecycleRuleDelete(**kw))
    self.lifecycle_rules = current

2748 

def add_lifecycle_set_storage_class_rule(self, storage_class, **kw):
    """Append a "set storage class" rule to this bucket's lifecycle rules.

    This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle),
    which is set on the bucket. For the general format of a lifecycle
    configuration, see the
    [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets).

    :type storage_class: str, one of :attr:`STORAGE_CLASSES`.
    :param storage_class: new storage class to assign to matching items.

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """
    current = list(self.lifecycle_rules)
    current.append(LifecycleRuleSetStorageClass(storage_class, **kw))
    self.lifecycle_rules = current

2765 

def add_lifecycle_abort_incomplete_multipart_upload_rule(self, **kw):
    """Append an "abort incomplete multipart upload" lifecycle rule.

    .. note::
        The "age" lifecycle condition is the only supported condition
        for this rule.

    This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle),
    which is set on the bucket. For the general format of a lifecycle
    configuration, see the
    [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets).

    :type kw: dict
    :params kw: arguments passed to :class:`LifecycleRuleConditions`.
    """
    current = list(self.lifecycle_rules)
    current.append(LifecycleRuleAbortIncompleteMultipartUpload(**kw))
    self.lifecycle_rules = current

2783 

# Internal scalar accessor for the raw "location" resource field; the
# public ``location`` property below wraps it so that direct assignment
# can be deprecated.
_location = _scalar_property("location")

2785 

@property
def location(self):
    """Retrieve the location configured for this bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets and
    https://cloud.google.com/storage/docs/locations

    Returns ``None`` if the property has not been set before creation,
    or if the bucket's resource has not been loaded from the server.

    :rtype: str or ``NoneType``
    """
    return self._location

@location.setter
def location(self, value):
    """(Deprecated) Set `Bucket.location`

    This can only be set at bucket **creation** time.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets and
    https://cloud.google.com/storage/docs/bucket-locations

    .. warning::
        Assignment to 'Bucket.location' is deprecated, as it is only
        valid before the bucket is created. Instead, pass the location
        to `Bucket.create`.
    """
    # Warn, then fall through to the legacy behavior for compatibility.
    warnings.warn(_LOCATION_SETTER_MESSAGE, DeprecationWarning, stacklevel=2)
    self._location = value

2816 

@property
def data_locations(self):
    """Retrieve the list of regional locations for custom dual-region buckets.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets and
    https://cloud.google.com/storage/docs/locations

    Returns ``None`` if the property has not been set before creation,
    if the bucket's resource has not been loaded from the server,
    or if the bucket is not a dual-regions bucket.

    :rtype: list of str or ``NoneType``
    """
    placement = self._properties.get("customPlacementConfig", {})
    return placement.get("dataLocations")

2831 

@property
def location_type(self):
    """Retrieve the location type for the bucket.

    See https://cloud.google.com/storage/docs/storage-classes

    :getter: Gets the location type for this bucket.

    :rtype: str or ``NoneType``
    :returns:
        If set, one of
        :attr:`~google.cloud.storage.constants.MULTI_REGION_LOCATION_TYPE`,
        :attr:`~google.cloud.storage.constants.REGION_LOCATION_TYPE`, or
        :attr:`~google.cloud.storage.constants.DUAL_REGION_LOCATION_TYPE`,
        else ``None``.
    """
    return self._properties.get("locationType")

2849 

def get_logging(self):
    """Return info about access logging for this bucket.

    See https://cloud.google.com/storage/docs/access-logs#status

    :rtype: dict or None
    :returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix``
              (if logging is enabled), or None (if not).
    """
    # Deep-copy so callers cannot mutate the cached resource.
    return copy.deepcopy(self._properties.get("logging"))

2861 

def enable_logging(self, bucket_name, object_prefix=""):
    """Enable access logging for this bucket.

    See https://cloud.google.com/storage/docs/access-logs

    :type bucket_name: str
    :param bucket_name: name of bucket in which to store access logs

    :type object_prefix: str
    :param object_prefix: prefix for access log filenames
    """
    self._patch_property(
        "logging",
        {"logBucket": bucket_name, "logObjectPrefix": object_prefix},
    )

2875 

def disable_logging(self):
    """Disable access logging for this bucket.

    See https://cloud.google.com/storage/docs/access-logs#disabling
    """
    # Patching ``None`` clears the logging configuration on the server.
    self._patch_property("logging", None)

2882 

@property
def metageneration(self):
    """Retrieve the metageneration for the bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: int or ``NoneType``
    :returns: The metageneration of the bucket or ``None`` if the bucket's
              resource has not been loaded from the server.
    """
    # The API returns this as a string; coerce to int for callers.
    value = self._properties.get("metageneration")
    return None if value is None else int(value)

2896 

@property
def owner(self):
    """Retrieve info about the owner of the bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: dict or ``NoneType``
    :returns: Mapping of owner's role/ID. Returns ``None`` if the bucket's
              resource has not been loaded from the server.
    """
    # Deep-copy so callers cannot mutate the cached resource.
    return copy.deepcopy(self._properties.get("owner"))

2908 

@property
def project_number(self):
    """Retrieve the number of the project to which the bucket is assigned.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: int or ``NoneType``
    :returns: The project number that owns the bucket or ``None`` if
              the bucket's resource has not been loaded from the server.
    """
    # The API returns this as a string; coerce to int for callers.
    value = self._properties.get("projectNumber")
    return None if value is None else int(value)

2922 

@property
def retention_policy_effective_time(self):
    """Retrieve the effective time of the bucket's retention policy.

    :rtype: datetime.datetime or ``NoneType``
    :returns: point-in time at which the bucket's retention policy is
              effective, or ``None`` if the property is not set locally.
    """
    policy = self._properties.get("retentionPolicy")
    if policy is None:
        return None
    stamp = policy.get("effectiveTime")
    if stamp is None:
        return None
    return _rfc3339_nanos_to_datetime(stamp)

2937 

@property
def retention_policy_locked(self):
    """Retrieve whether the bucket's retention policy is locked.

    :rtype: bool
    :returns: True if the bucket's policy is locked, or else False
              if the policy is not locked, or the property is not
              set locally.
    """
    policy = self._properties.get("retentionPolicy")
    return None if policy is None else policy.get("isLocked")

2950 

@property
def retention_period(self):
    """Retrieve or set the retention period for items in the bucket.

    :rtype: int or ``NoneType``
    :returns: number of seconds to retain items after upload or release
              from event-based lock, or ``None`` if the property is not
              set locally.
    """
    policy = self._properties.get("retentionPolicy") or {}
    period = policy.get("retentionPeriod")
    return None if period is None else int(period)

@retention_period.setter
def retention_period(self, value):
    """Set the retention period for items in the bucket.

    :type value: int
    :param value:
        number of seconds to retain items after upload or release from
        event-based lock; ``None`` clears the policy.

    NOTE(review): the original docstring claimed a ``ValueError`` is
    raised when the policy is locked, but no such check exists here —
    presumably the server rejects the change; verify before relying on it.
    """
    policy = self._properties.setdefault("retentionPolicy", {})
    if value is None:
        # Clearing the period clears the entire policy.
        policy = None
    else:
        # The API stores the period as a string.
        policy["retentionPeriod"] = str(value)
    self._patch_property("retentionPolicy", policy)

2983 

@property
def self_link(self):
    """Retrieve the URI for the bucket.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: str or ``NoneType``
    :returns: The self link for the bucket or ``None`` if
              the bucket's resource has not been loaded from the server.
    """
    return self._properties.get("selfLink")

2995 

@property
def storage_class(self):
    """Retrieve or set the storage class for the bucket.

    See https://cloud.google.com/storage/docs/storage-classes

    :setter: Set the storage class for this bucket.
    :getter: Gets the storage class for this bucket.

    :rtype: str or ``NoneType``
    :returns:
        If set, one of
        :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`,
        :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`,
        :attr:`~google.cloud.storage.constants.ARCHIVE_STORAGE_CLASS`,
        :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`,
        :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
        :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`,
        or
        :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`,
        else ``None``.
    """
    return self._properties.get("storageClass")

@storage_class.setter
def storage_class(self, value):
    """Set the storage class for the bucket.

    See https://cloud.google.com/storage/docs/storage-classes

    :type value: str
    :param value: one of the storage-class constants listed on the getter.
    """
    self._patch_property("storageClass", value)

3039 

@property
def time_created(self):
    """Retrieve the timestamp at which the bucket was created.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: :class:`datetime.datetime` or ``NoneType``
    :returns: Datetime object parsed from RFC3339 valid timestamp, or
              ``None`` if the bucket's resource has not been loaded
              from the server.
    """
    stamp = self._properties.get("timeCreated")
    if stamp is None:
        return None
    return _rfc3339_nanos_to_datetime(stamp)

3054 

@property
def updated(self):
    """Retrieve the timestamp at which the bucket was last updated.

    See https://cloud.google.com/storage/docs/json_api/v1/buckets

    :rtype: :class:`datetime.datetime` or ``NoneType``
    :returns: Datetime object parsed from RFC3339 valid timestamp, or
              ``None`` if the bucket's resource has not been loaded
              from the server.
    """
    stamp = self._properties.get("updated")
    if stamp is None:
        return None
    return _rfc3339_nanos_to_datetime(stamp)

3069 

@property
def versioning_enabled(self):
    """Is versioning enabled for this bucket?

    See https://cloud.google.com/storage/docs/object-versioning for
    details.

    :setter: Update whether versioning is enabled for this bucket.
    :getter: Query whether versioning is enabled for this bucket.

    :rtype: bool
    :returns: True if enabled, else False.
    """
    return self._properties.get("versioning", {}).get("enabled", False)

@versioning_enabled.setter
def versioning_enabled(self, value):
    """Enable versioning for this bucket.

    See https://cloud.google.com/storage/docs/object-versioning for
    details.

    :type value: convertible to boolean
    :param value: should versioning be enabled for the bucket?
    """
    self._patch_property("versioning", {"enabled": bool(value)})

3097 

@property
def requester_pays(self):
    """Does the requester pay for API requests for this bucket?

    See https://cloud.google.com/storage/docs/requester-pays for
    details.

    :setter: Update whether requester pays for this bucket.
    :getter: Query whether requester pays for this bucket.

    :rtype: bool
    :returns: True if requester pays for API requests for the bucket,
              else False.
    """
    # Named after the "billing" sub-resource it actually reads.
    billing = self._properties.get("billing", {})
    return billing.get("requesterPays", False)

@requester_pays.setter
def requester_pays(self, value):
    """Update whether requester pays for API requests for this bucket.

    See https://cloud.google.com/storage/docs/using-requester-pays for
    details.

    :type value: convertible to boolean
    :param value: should requester pay for API requests for the bucket?
    """
    self._patch_property("billing", {"requesterPays": bool(value)})

3126 

@property
def autoclass_enabled(self):
    """Whether Autoclass is enabled for this bucket.

    See https://cloud.google.com/storage/docs/using-autoclass for details.

    :setter: Update whether autoclass is enabled for this bucket.
    :getter: Query whether autoclass is enabled for this bucket.

    :rtype: bool
    :returns: True if enabled, else False.
    """
    return self._properties.get("autoclass", {}).get("enabled", False)

@autoclass_enabled.setter
def autoclass_enabled(self, value):
    """Enable or disable Autoclass at the bucket-level.

    See https://cloud.google.com/storage/docs/using-autoclass for details.

    :type value: convertible to boolean
    :param value: If true, enable Autoclass for this bucket.
                  If false, disable Autoclass for this bucket.
    """
    # Preserve any other autoclass sub-fields already present.
    config = self._properties.get("autoclass", {})
    config["enabled"] = bool(value)
    self._patch_property("autoclass", config)

3155 

@property
def autoclass_toggle_time(self):
    """Retrieve the time Autoclass was last enabled or disabled for the bucket.

    :rtype: datetime.datetime or ``NoneType``
    :returns: point-in time at which the bucket's autoclass was toggled,
              or ``None`` if the property is not set locally.
    """
    config = self._properties.get("autoclass")
    if config is None:
        return None
    stamp = config.get("toggleTime")
    if stamp is None:
        return None
    return _rfc3339_nanos_to_datetime(stamp)

3167 

@property
def autoclass_terminal_storage_class(self):
    """The storage class that objects in an Autoclass bucket eventually
    transition to if they are not read for a certain length of time.
    Valid values are NEARLINE and ARCHIVE.

    See https://cloud.google.com/storage/docs/using-autoclass for details.

    :setter: Set the terminal storage class for Autoclass configuration.
    :getter: Get the terminal storage class for Autoclass configuration.

    :rtype: str
    :returns: The terminal storage class if Autoclass is enabled, else ``None``.
    """
    return self._properties.get("autoclass", {}).get("terminalStorageClass", None)

@autoclass_terminal_storage_class.setter
def autoclass_terminal_storage_class(self, value):
    """Set the terminal storage class for the Autoclass configuration.

    See https://cloud.google.com/storage/docs/using-autoclass for details.

    :type value: str
    :param value: The only valid values are `"NEARLINE"` and `"ARCHIVE"`.
    """
    # Preserve any other autoclass sub-fields already present.
    config = self._properties.get("autoclass", {})
    config["terminalStorageClass"] = value
    self._patch_property("autoclass", config)

3197 

@property
def autoclass_terminal_storage_class_update_time(self):
    """The time at which the Autoclass terminal_storage_class field was
    last updated for this bucket.

    :rtype: datetime.datetime or ``NoneType``
    :returns: point-in time at which the bucket's terminal_storage_class
              was last updated, or ``None`` if the property is not set
              locally.
    """
    config = self._properties.get("autoclass")
    if config is None:
        return None
    stamp = config.get("terminalStorageClassUpdateTime")
    if stamp is None:
        return None
    return _rfc3339_nanos_to_datetime(stamp)

3209 

@property
def object_retention_mode(self):
    """Retrieve the object retention mode set on the bucket.

    :rtype: str
    :returns: When set to Enabled, retention configurations can be
              set on objects in the bucket.
    """
    retention = self._properties.get("objectRetention")
    return None if retention is None else retention.get("mode")

3221 

@property
def hierarchical_namespace_enabled(self):
    """Whether hierarchical namespace is enabled for this bucket.

    :setter: Update whether hierarchical namespace is enabled for this bucket.
    :getter: Query whether hierarchical namespace is enabled for this bucket.

    :rtype: bool or ``NoneType``
    :returns: True if enabled; ``None`` when the server has not reported
              the ``hierarchicalNamespace`` sub-resource.
    """
    return self._properties.get("hierarchicalNamespace", {}).get("enabled")

@hierarchical_namespace_enabled.setter
def hierarchical_namespace_enabled(self, value):
    """Enable or disable hierarchical namespace at the bucket-level.

    :type value: convertible to boolean
    :param value: If true, enable hierarchical namespace for this bucket.
                  If false, disable hierarchical namespace for this bucket.

    .. note::
        To enable hierarchical namespace, you must set it at bucket
        creation time. Currently, hierarchical namespace configuration
        cannot be changed after bucket creation.
    """
    config = self._properties.get("hierarchicalNamespace", {})
    config["enabled"] = bool(value)
    self._patch_property("hierarchicalNamespace", config)

3250 

def configure_website(self, main_page_suffix=None, not_found_page=None):
    """Configure website-related properties.

    See https://cloud.google.com/storage/docs/static-website

    .. note::
        This configures the bucket's website-related properties,
        controlling how the service behaves when accessing bucket
        contents as a web site. See
        [tutorials](https://cloud.google.com/storage/docs/hosting-static-website)
        for more information.

    :type main_page_suffix: str
    :param main_page_suffix: The page to use as the main page of a
                             directory. Typically something like index.html.

    :type not_found_page: str
    :param not_found_page: The file to use when a page isn't found.
    """
    self._patch_property(
        "website",
        {"mainPageSuffix": main_page_suffix, "notFoundPage": not_found_page},
    )

3276 

def disable_website(self):
    """Disable the website configuration for this bucket.

    This is really just a shortcut for setting the website-related
    attributes to ``None``.
    """
    return self.configure_website(None, None)

3284 

def get_iam_policy(
    self,
    client=None,
    requested_policy_version=None,
    timeout=_DEFAULT_TIMEOUT,
    retry=DEFAULT_RETRY,
):
    """Retrieve the IAM policy for the bucket.

    See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy).

    If :attr:`user_project` is set, bills the API request to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type requested_policy_version: int or ``NoneType``
    :param requested_policy_version: (Optional) The version of IAM policies
        to request. Must be set to 3 to retrieve policies containing
        conditions; the server returns an error otherwise. The service may
        return a lower version than requested, based on the feature syntax
        in the fetched policy.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
        for the server response. See: :ref:`configuring_timeouts`

    :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
    :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

    :rtype: :class:`google.api_core.iam.Policy`
    :returns: the policy instance, based on the resource returned from
              the ``getIamPolicy`` API request.
    """
    with create_trace_span(name="Storage.Bucket.getIamPolicy"):
        client = self._require_client(client)

        # Optional request parameters.
        query_params = {}
        if self.user_project is not None:
            query_params["userProject"] = self.user_project
        if requested_policy_version is not None:
            query_params["optionsRequestedPolicyVersion"] = requested_policy_version

        resource = client._get_resource(
            f"{self.path}/iam",
            query_params=query_params,
            timeout=timeout,
            retry=retry,
            _target_object=None,
        )
        return Policy.from_api_repr(resource)

3347 

3348 def set_iam_policy( 

3349 self, 

3350 policy, 

3351 client=None, 

3352 timeout=_DEFAULT_TIMEOUT, 

3353 retry=DEFAULT_RETRY_IF_ETAG_IN_JSON, 

3354 ): 

3355 """Update the IAM policy for the bucket. 

3356 

3357 See 

3358 https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy 

3359 

3360 If :attr:`user_project` is set, bills the API request to that project. 

3361 

3362 :type policy: :class:`google.api_core.iam.Policy` 

3363 :param policy: policy instance used to update bucket's IAM policy. 

3364 

3365 :type client: :class:`~google.cloud.storage.client.Client` or 

3366 ``NoneType`` 

3367 :param client: (Optional) The client to use. If not passed, falls back 

3368 to the ``client`` stored on the current bucket. 

3369 

3370 :type timeout: float or tuple 

3371 :param timeout: 

3372 (Optional) The amount of time, in seconds, to wait 

3373 for the server response. See: :ref:`configuring_timeouts` 

3374 

3375 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

3376 :param retry: 

3377 (Optional) How to retry the RPC. See: :ref:`configuring_retries` 

3378 

3379 :rtype: :class:`google.api_core.iam.Policy` 

3380 :returns: the policy instance, based on the resource returned from 

3381 the ``setIamPolicy`` API request. 

3382 """ 

3383 with create_trace_span(name="Storage.Bucket.setIamPolicy"): 

3384 client = self._require_client(client) 

3385 query_params = {} 

3386 

3387 if self.user_project is not None: 

3388 query_params["userProject"] = self.user_project 

3389 

3390 path = f"{self.path}/iam" 

3391 resource = policy.to_api_repr() 

3392 resource["resourceId"] = self.path 

3393 

3394 info = client._put_resource( 

3395 path, 

3396 resource, 

3397 query_params=query_params, 

3398 timeout=timeout, 

3399 retry=retry, 

3400 _target_object=None, 

3401 ) 

3402 

3403 return Policy.from_api_repr(info) 

3404 

3405 def test_iam_permissions( 

3406 self, 

3407 permissions, 

3408 client=None, 

3409 timeout=_DEFAULT_TIMEOUT, 

3410 retry=DEFAULT_RETRY, 

3411 ): 

3412 """API call: test permissions 

3413 

3414 See 

3415 https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions 

3416 

3417 If :attr:`user_project` is set, bills the API request to that project. 

3418 

3419 :type permissions: list of string 

3420 :param permissions: the permissions to check 

3421 

3422 :type client: :class:`~google.cloud.storage.client.Client` or 

3423 ``NoneType`` 

3424 :param client: (Optional) The client to use. If not passed, falls back 

3425 to the ``client`` stored on the current bucket. 

3426 

3427 :type timeout: float or tuple 

3428 :param timeout: 

3429 (Optional) The amount of time, in seconds, to wait 

3430 for the server response. See: :ref:`configuring_timeouts` 

3431 

3432 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

3433 :param retry: 

3434 (Optional) How to retry the RPC. See: :ref:`configuring_retries` 

3435 

3436 :rtype: list of string 

3437 :returns: the permissions returned by the ``testIamPermissions`` API 

3438 request. 

3439 """ 

3440 with create_trace_span(name="Storage.Bucket.testIamPermissions"): 

3441 client = self._require_client(client) 

3442 query_params = {"permissions": permissions} 

3443 

3444 if self.user_project is not None: 

3445 query_params["userProject"] = self.user_project 

3446 

3447 path = f"{self.path}/iam/testPermissions" 

3448 resp = client._get_resource( 

3449 path, 

3450 query_params=query_params, 

3451 timeout=timeout, 

3452 retry=retry, 

3453 _target_object=None, 

3454 ) 

3455 return resp.get("permissions", []) 

3456 

    def make_public(
        self,
        recursive=False,
        future=False,
        client=None,
        timeout=_DEFAULT_TIMEOUT,
        if_metageneration_match=None,
        if_metageneration_not_match=None,
        retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
    ):
        """Update bucket's ACL, granting read access to anonymous users.

        :type recursive: bool
        :param recursive: If True, this will make all blobs inside the bucket
                          public as well.

        :type future: bool
        :param future: If True, this will make all objects created in the
                       future public as well.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: (Optional) The client to use. If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :type timeout: float or tuple
        :param timeout:
            (Optional) The amount of time, in seconds, to wait
            for the server response. See: :ref:`configuring_timeouts`

        :type if_metageneration_match: long
        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
                                        blob's current metageneration matches the given value.

        :type if_metageneration_not_match: long
        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
                                            blob's current metageneration does not match the given value.

        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
        :param retry:
            (Optional) How to retry the RPC. See: :ref:`configuring_retries`

        :raises ValueError:
            If ``recursive`` is True, and the bucket contains more than 256
            blobs.  This is to prevent extremely long runtime of this
            method.  For such buckets, iterate over the blobs returned by
            :meth:`list_blobs` and call
            :meth:`~google.cloud.storage.blob.Blob.make_public`
            for each blob.
        """
        with create_trace_span(name="Storage.Bucket.makePublic"):
            # Grant READ to all (anonymous) users on the bucket's own ACL and
            # persist it first, before touching any object-level ACLs.
            self.acl.all().grant_read()
            self.acl.save(
                client=client,
                timeout=timeout,
                if_metageneration_match=if_metageneration_match,
                if_metageneration_not_match=if_metageneration_not_match,
                retry=retry,
            )

            if future:
                # Update the default object ACL so objects created later are
                # public too; reload first if it was never fetched.
                doa = self.default_object_acl
                if not doa.loaded:
                    doa.reload(client=client, timeout=timeout)
                doa.all().grant_read()
                doa.save(
                    client=client,
                    timeout=timeout,
                    if_metageneration_match=if_metageneration_match,
                    if_metageneration_not_match=if_metageneration_not_match,
                    retry=retry,
                )

            if recursive:
                # Fetch one more than the cap so we can detect overflow.
                blobs = list(
                    self.list_blobs(
                        projection="full",
                        max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
                        client=client,
                        timeout=timeout,
                    )
                )
                if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                    # Refuse rather than issue an unbounded number of ACL
                    # save requests; the caller should iterate explicitly.
                    message = (
                        "Refusing to make public recursively with more than "
                        "%d objects. If you actually want to make every object "
                        "in this bucket public, iterate through the blobs "
                        "returned by 'Bucket.list_blobs()' and call "
                        "'make_public' on each one."
                    ) % (self._MAX_OBJECTS_FOR_ITERATION,)
                    raise ValueError(message)

                # One ACL save per blob; metageneration preconditions apply
                # only to the bucket-level saves above, not per-blob.
                for blob in blobs:
                    blob.acl.all().grant_read()
                    blob.acl.save(
                        client=client,
                        timeout=timeout,
                    )

3554 

    def make_private(
        self,
        recursive=False,
        future=False,
        client=None,
        timeout=_DEFAULT_TIMEOUT,
        if_metageneration_match=None,
        if_metageneration_not_match=None,
        retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
    ):
        """Update bucket's ACL, revoking read access for anonymous users.

        :type recursive: bool
        :param recursive: If True, this will make all blobs inside the bucket
                          private as well.

        :type future: bool
        :param future: If True, this will make all objects created in the
                       future private as well.

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: (Optional) The client to use. If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :type timeout: float or tuple
        :param timeout:
            (Optional) The amount of time, in seconds, to wait
            for the server response. See: :ref:`configuring_timeouts`

        :type if_metageneration_match: long
        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
                                        blob's current metageneration matches the given value.

        :type if_metageneration_not_match: long
        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
                                            blob's current metageneration does not match the given value.

        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
        :param retry:
            (Optional) How to retry the RPC. See: :ref:`configuring_retries`

        :raises ValueError:
            If ``recursive`` is True, and the bucket contains more than 256
            blobs.  This is to prevent extremely long runtime of this
            method.  For such buckets, iterate over the blobs returned by
            :meth:`list_blobs` and call
            :meth:`~google.cloud.storage.blob.Blob.make_private`
            for each blob.
        """
        with create_trace_span(name="Storage.Bucket.makePrivate"):
            # Revoke READ for all (anonymous) users on the bucket's own ACL
            # and persist it first, before touching any object-level ACLs.
            self.acl.all().revoke_read()
            self.acl.save(
                client=client,
                timeout=timeout,
                if_metageneration_match=if_metageneration_match,
                if_metageneration_not_match=if_metageneration_not_match,
                retry=retry,
            )

            if future:
                # Update the default object ACL so objects created later are
                # private too; reload first if it was never fetched.
                doa = self.default_object_acl
                if not doa.loaded:
                    doa.reload(client=client, timeout=timeout)
                doa.all().revoke_read()
                doa.save(
                    client=client,
                    timeout=timeout,
                    if_metageneration_match=if_metageneration_match,
                    if_metageneration_not_match=if_metageneration_not_match,
                    retry=retry,
                )

            if recursive:
                # Fetch one more than the cap so we can detect overflow.
                blobs = list(
                    self.list_blobs(
                        projection="full",
                        max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
                        client=client,
                        timeout=timeout,
                    )
                )
                if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                    # Refuse rather than issue an unbounded number of ACL
                    # save requests; the caller should iterate explicitly.
                    message = (
                        "Refusing to make private recursively with more than "
                        "%d objects. If you actually want to make every object "
                        "in this bucket private, iterate through the blobs "
                        "returned by 'Bucket.list_blobs()' and call "
                        "'make_private' on each one."
                    ) % (self._MAX_OBJECTS_FOR_ITERATION,)
                    raise ValueError(message)

                # One ACL save per blob; metageneration preconditions apply
                # only to the bucket-level saves above, not per-blob.
                for blob in blobs:
                    blob.acl.all().revoke_read()
                    blob.acl.save(client=client, timeout=timeout)

3648 

3649 def generate_upload_policy(self, conditions, expiration=None, client=None): 

3650 """Create a signed upload policy for uploading objects. 

3651 

3652 This method generates and signs a policy document. You can use 

3653 [`policy documents`](https://cloud.google.com/storage/docs/xml-api/post-object-forms) 

3654 to allow visitors to a website to upload files to 

3655 Google Cloud Storage without giving them direct write access. 

3656 See a [code sample](https://cloud.google.com/storage/docs/xml-api/post-object-forms#python). 

3657 

3658 :type expiration: datetime 

3659 :param expiration: (Optional) Expiration in UTC. If not specified, the 

3660 policy will expire in 1 hour. 

3661 

3662 :type conditions: list 

3663 :param conditions: A list of conditions as described in the 

3664 `policy documents` documentation. 

3665 

3666 :type client: :class:`~google.cloud.storage.client.Client` 

3667 :param client: (Optional) The client to use. If not passed, falls back 

3668 to the ``client`` stored on the current bucket. 

3669 

3670 :rtype: dict 

3671 :returns: A dictionary of (form field name, form field value) of form 

3672 fields that should be added to your HTML upload form in order 

3673 to attach the signature. 

3674 """ 

3675 client = self._require_client(client) 

3676 credentials = client._credentials 

3677 _signing.ensure_signed_credentials(credentials) 

3678 

3679 if expiration is None: 

3680 expiration = _NOW(_UTC).replace(tzinfo=None) + datetime.timedelta(hours=1) 

3681 

3682 conditions = conditions + [{"bucket": self.name}] 

3683 

3684 policy_document = { 

3685 "expiration": _datetime_to_rfc3339(expiration), 

3686 "conditions": conditions, 

3687 } 

3688 

3689 encoded_policy_document = base64.b64encode( 

3690 json.dumps(policy_document).encode("utf-8") 

3691 ) 

3692 signature = base64.b64encode(credentials.sign_bytes(encoded_policy_document)) 

3693 

3694 fields = { 

3695 "bucket": self.name, 

3696 "GoogleAccessId": credentials.signer_email, 

3697 "policy": encoded_policy_document.decode("utf-8"), 

3698 "signature": signature.decode("utf-8"), 

3699 } 

3700 

3701 return fields 

3702 

3703 def lock_retention_policy( 

3704 self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY 

3705 ): 

3706 """Lock the bucket's retention policy. 

3707 

3708 :type client: :class:`~google.cloud.storage.client.Client` or 

3709 ``NoneType`` 

3710 :param client: (Optional) The client to use. If not passed, falls back 

3711 to the ``client`` stored on the blob's bucket. 

3712 

3713 :type timeout: float or tuple 

3714 :param timeout: 

3715 (Optional) The amount of time, in seconds, to wait 

3716 for the server response. See: :ref:`configuring_timeouts` 

3717 

3718 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy 

3719 :param retry: 

3720 (Optional) How to retry the RPC. See: :ref:`configuring_retries` 

3721 

3722 :raises ValueError: 

3723 if the bucket has no metageneration (i.e., new or never reloaded); 

3724 if the bucket has no retention policy assigned; 

3725 if the bucket's retention policy is already locked. 

3726 """ 

3727 with create_trace_span(name="Storage.Bucket.lockRetentionPolicy"): 

3728 if "metageneration" not in self._properties: 

3729 raise ValueError( 

3730 "Bucket has no retention policy assigned: try 'reload'?" 

3731 ) 

3732 

3733 policy = self._properties.get("retentionPolicy") 

3734 

3735 if policy is None: 

3736 raise ValueError( 

3737 "Bucket has no retention policy assigned: try 'reload'?" 

3738 ) 

3739 

3740 if policy.get("isLocked"): 

3741 raise ValueError("Bucket's retention policy is already locked.") 

3742 

3743 client = self._require_client(client) 

3744 

3745 query_params = {"ifMetagenerationMatch": self.metageneration} 

3746 

3747 if self.user_project is not None: 

3748 query_params["userProject"] = self.user_project 

3749 

3750 path = f"/b/{self.name}/lockRetentionPolicy" 

3751 api_response = client._post_resource( 

3752 path, 

3753 None, 

3754 query_params=query_params, 

3755 timeout=timeout, 

3756 retry=retry, 

3757 _target_object=self, 

3758 ) 

3759 self._set_properties(api_response) 

3760 

    def generate_signed_url(
        self,
        expiration=None,
        api_access_endpoint=None,
        method="GET",
        headers=None,
        query_parameters=None,
        client=None,
        credentials=None,
        version=None,
        virtual_hosted_style=False,
        bucket_bound_hostname=None,
        scheme="http",
    ):
        """Generates a signed URL for this bucket.

        .. note::

            If you are on Google Compute Engine, you can't generate a signed
            URL using GCE service account. If you'd like to be able to generate
            a signed URL from GCE, you can use a standard service account from a
            JSON file rather than a GCE service account.

        If you have a bucket that you want to allow access to for a set
        amount of time, you can use this method to generate a URL that
        is only valid within a certain time period.

        If ``bucket_bound_hostname`` is set as an argument of :attr:`api_access_endpoint`,
        ``https`` works only if using a ``CDN``.

        :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
        :param expiration: Point in time when the signed URL should expire. If
                           a ``datetime`` instance is passed without an explicit
                           ``tzinfo`` set, it will be assumed to be ``UTC``.

        :type api_access_endpoint: str
        :param api_access_endpoint: (Optional) URI base, for instance
            "https://storage.googleapis.com". If not specified, the client's
            api_endpoint will be used. Incompatible with bucket_bound_hostname.

        :type method: str
        :param method: The HTTP verb that will be used when requesting the URL.

        :type headers: dict
        :param headers:
            (Optional) Additional HTTP headers to be included as part of the
            signed URLs. See:
            https://cloud.google.com/storage/docs/xml-api/reference-headers
            Requests using the signed URL *must* pass the specified header
            (name and value) with each request for the URL.

        :type query_parameters: dict
        :param query_parameters:
            (Optional) Additional query parameters to be included as part of the
            signed URLs. See:
            https://cloud.google.com/storage/docs/xml-api/reference-headers#query

        :type client: :class:`~google.cloud.storage.client.Client` or
                      ``NoneType``
        :param client: (Optional) The client to use. If not passed, falls back
                       to the ``client`` stored on the blob's bucket.

        :type credentials: :class:`google.auth.credentials.Credentials` or
                           :class:`NoneType`
        :param credentials: The authorization credentials to attach to requests.
                            These credentials identify this application to the service.
                            If none are specified, the client will attempt to ascertain
                            the credentials from the environment.

        :type version: str
        :param version: (Optional) The version of signed credential to create.
                        Must be one of 'v2' | 'v4'.

        :type virtual_hosted_style: bool
        :param virtual_hosted_style:
            (Optional) If true, then construct the URL relative the bucket's
            virtual hostname, e.g., '<bucket-name>.storage.googleapis.com'.
            Incompatible with bucket_bound_hostname.

        :type bucket_bound_hostname: str
        :param bucket_bound_hostname:
            (Optional) If passed, then construct the URL relative to the bucket-bound hostname.
            Value can be a bare or with scheme, e.g., 'example.com' or 'http://example.com'.
            Incompatible with api_access_endpoint and virtual_hosted_style.
            See: https://cloud.google.com/storage/docs/request-endpoints#cname

        :type scheme: str
        :param scheme:
            (Optional) If ``bucket_bound_hostname`` is passed as a bare hostname, use
            this value as the scheme. ``https`` will work only when using a CDN.
            Defaults to ``"http"``.

        :raises: :exc:`ValueError` when version is invalid or mutually exclusive arguments are used.
        :raises: :exc:`TypeError` when expiration is not a valid type.
        :raises: :exc:`AttributeError` if credentials is not an instance
                of :class:`google.auth.credentials.Signing`.

        :rtype: str
        :returns: A signed URL you can use to access the resource
                  until expiration.
        """
        # Default to the legacy v2 signing scheme for backward compatibility.
        if version is None:
            version = "v2"
        elif version not in ("v2", "v4"):
            raise ValueError("'version' must be either 'v2' or 'v4'")

        # bucket_bound_hostname is mutually exclusive with both the explicit
        # endpoint and virtual-hosted-style addressing.
        if (
            api_access_endpoint is not None or virtual_hosted_style
        ) and bucket_bound_hostname:
            raise ValueError(
                "The bucket_bound_hostname argument is not compatible with "
                "either api_access_endpoint or virtual_hosted_style."
            )

        # A client is only required here when we need its endpoint (or, later,
        # its credentials).
        if api_access_endpoint is None:
            client = self._require_client(client)
            api_access_endpoint = client.api_endpoint

        # If you are on Google Compute Engine, you can't generate a signed URL
        # using GCE service account.
        # See https://github.com/googleapis/google-auth-library-python/issues/50
        if virtual_hosted_style:
            # Bucket name moves into the hostname; the signed resource is "/".
            api_access_endpoint = _virtual_hosted_style_base_url(
                api_access_endpoint, self.name
            )
            resource = "/"
        elif bucket_bound_hostname:
            api_access_endpoint = _bucket_bound_hostname_url(
                bucket_bound_hostname, scheme
            )
            resource = "/"
        else:
            # Path-style addressing: bucket name is part of the signed path.
            resource = f"/{self.name}"

        if credentials is None:
            client = self._require_client(client)  # May be redundant, but that's ok.
            credentials = client._credentials

        if version == "v2":
            helper = generate_signed_url_v2
        else:
            helper = generate_signed_url_v4

        return helper(
            credentials,
            resource=resource,
            expiration=expiration,
            api_access_endpoint=api_access_endpoint,
            method=method.upper(),
            headers=headers,
            query_parameters=query_parameters,
        )

3913 

3914 @property 

3915 def ip_filter(self): 

3916 """Retrieve or set the IP Filter configuration for this bucket. 

3917 

3918 See https://cloud.google.com/storage/docs/ip-filtering-overview and 

3919 https://cloud.google.com/storage/docs/json_api/v1/buckets#ipFilter 

3920 

3921 .. note:: 

3922 The getter for this property returns an 

3923 :class:`~google.cloud.storage.ip_filter.IPFilter` object, which is a 

3924 structured representation of the bucket's IP filter configuration. 

3925 Modifying the returned object has no effect. To update the bucket's 

3926 IP filter, create and assign a new ``IPFilter`` object to this 

3927 property and then call 

3928 :meth:`~google.cloud.storage.bucket.Bucket.patch`. 

3929 

3930 .. code-block:: python 

3931 

3932 from google.cloud.storage.ip_filter import ( 

3933 IPFilter, 

3934 PublicNetworkSource, 

3935 ) 

3936 

3937 ip_filter = IPFilter() 

3938 ip_filter.mode = "Enabled" 

3939 ip_filter.public_network_source = PublicNetworkSource( 

3940 allowed_ip_cidr_ranges=["203.0.113.5/32"] 

3941 ) 

3942 bucket.ip_filter = ip_filter 

3943 bucket.patch() 

3944 

3945 :setter: Set the IP Filter configuration for this bucket. 

3946 :getter: Gets the IP Filter configuration for this bucket. 

3947 

3948 :rtype: :class:`~google.cloud.storage.ip_filter.IPFilter` or ``NoneType`` 

3949 :returns: 

3950 An ``IPFilter`` object representing the configuration, or ``None`` 

3951 if no filter is configured. 

3952 """ 

3953 resource = self._properties.get(_IP_FILTER_PROPERTY) 

3954 if resource: 

3955 return IPFilter._from_api_resource(resource) 

3956 return None 

3957 

3958 @ip_filter.setter 

3959 def ip_filter(self, value): 

3960 if value is None: 

3961 self._patch_property(_IP_FILTER_PROPERTY, None) 

3962 elif isinstance(value, IPFilter): 

3963 self._patch_property(_IP_FILTER_PROPERTY, value._to_api_resource()) 

3964 else: 

3965 self._patch_property(_IP_FILTER_PROPERTY, value) 

3966 

3967 

class SoftDeletePolicy(dict):
    """Map a bucket's soft delete policy.

    See https://cloud.google.com/storage/docs/soft-delete

    :type bucket: :class:`Bucket`
    :param bucket: Bucket for which this instance is the policy.

    :type retention_duration_seconds: int
    :param retention_duration_seconds:
        (Optional) The period of time in seconds that soft-deleted objects in
        the bucket will be retained and cannot be permanently deleted.

    :type effective_time: :class:`datetime.datetime`
    :param effective_time:
        (Optional) When the bucket's soft delete policy is effective.
        This value should normally only be set by the back-end API.
    """

    def __init__(self, bucket, **kw):
        # "retentionDurationSeconds" is always present, even if unset (None).
        payload = {
            "retentionDurationSeconds": kw.get("retention_duration_seconds"),
        }

        # "effectiveTime" is only stored when supplied, serialized to RFC3339.
        effective_time = kw.get("effective_time")
        if effective_time is not None:
            payload["effectiveTime"] = _datetime_to_rfc3339(effective_time)

        super().__init__(payload)
        self._bucket = bucket

    @classmethod
    def from_api_repr(cls, resource, bucket):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :type bucket: :class:`Bucket`
        :params bucket: Bucket for which this instance is the policy.

        :rtype: :class:`SoftDeletePolicy`
        :returns: Instance created from resource.
        """
        policy = cls(bucket)
        policy.update(resource)
        return policy

    @property
    def bucket(self):
        """Bucket for which this instance is the policy.

        :rtype: :class:`Bucket`
        :returns: the instance's bucket.
        """
        return self._bucket

    @property
    def retention_duration_seconds(self):
        """Get the retention duration of the bucket's soft delete policy.

        :rtype: int or ``NoneType``
        :returns: The period of time in seconds that soft-deleted objects in
                  the bucket will be retained and cannot be permanently
                  deleted; or ``None`` if the property is not set.
        """
        raw = self.get("retentionDurationSeconds")
        # The API may return the duration as a string; normalize to int.
        return None if raw is None else int(raw)

    @retention_duration_seconds.setter
    def retention_duration_seconds(self, value):
        """Set the retention duration of the bucket's soft delete policy.

        :type value: int
        :param value:
            The period of time in seconds that soft-deleted objects in the
            bucket will be retained and cannot be permanently deleted.
        """
        self["retentionDurationSeconds"] = value
        # Mark the owning bucket's softDeletePolicy property as dirty so the
        # change is sent on the next patch/update.
        self.bucket._patch_property("softDeletePolicy", self)

    @property
    def effective_time(self):
        """Get the effective time of the bucket's soft delete policy.

        :rtype: datetime.datetime or ``NoneType``
        :returns: point-in-time at which the bucket's soft delete policy is
                  effective, or ``None`` if the property is not set.
        """
        stamp = self.get("effectiveTime")
        return None if stamp is None else _rfc3339_nanos_to_datetime(stamp)

4062 

4063 

4064def _raise_if_len_differs(expected_len, **generation_match_args): 

4065 """ 

4066 Raise an error if any generation match argument 

4067 is set and its len differs from the given value. 

4068 

4069 :type expected_len: int 

4070 :param expected_len: Expected argument length in case it's set. 

4071 

4072 :type generation_match_args: dict 

4073 :param generation_match_args: Lists, which length must be checked. 

4074 

4075 :raises: :exc:`ValueError` if any argument set, but has an unexpected length. 

4076 """ 

4077 for name, value in generation_match_args.items(): 

4078 if value is not None and len(value) != expected_len: 

4079 raise ValueError(f"'{name}' length must be the same as 'blobs' length")