Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/botocore/handlers.py: 22%


615 statements  

1# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"). You 

4# may not use this file except in compliance with the License. A copy of 

5# the License is located at 

6# 

7# http://aws.amazon.com/apache2.0/ 

8# 

9# or in the "license" file accompanying this file. This file is 

10# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 

11# ANY KIND, either express or implied. See the License for the specific 

12# language governing permissions and limitations under the License. 

13 

14"""Builtin event handlers. 

15 

16This module contains builtin handlers for events emitted by botocore. 

17""" 

18 

19import base64 

20import copy 

21import logging 

22import os 

23import re 

24import uuid 

25import warnings 

26from io import BytesIO 

27 

28import botocore 

29import botocore.auth 

30from botocore import ( 

31 retryhandler, # noqa: F401 

32 translate, # noqa: F401 

33 utils, 

34) 

35from botocore.args import ClientConfigString 

36from botocore.compat import ( 

37 MD5_AVAILABLE, # noqa: F401 

38 ETree, 

39 OrderedDict, 

40 XMLParseError, 

41 ensure_bytes, 

42 get_md5, 

43 json, 

44 quote, 

45 unquote, 

46 unquote_str, 

47 urlsplit, 

48 urlunsplit, 

49) 

50from botocore.docs.utils import ( 

51 AppendParamDocumentation, 

52 AutoPopulatedParam, 

53 HideParamFromOperations, 

54) 

55from botocore.endpoint_provider import VALID_HOST_LABEL_RE 

56from botocore.exceptions import ( 

57 AliasConflictParameterError, 

58 MissingServiceIdError, # noqa: F401 

59 ParamValidationError, 

60 UnsupportedTLSVersionWarning, 

61) 

62from botocore.regions import EndpointResolverBuiltins 

63from botocore.serialize import TIMESTAMP_PRECISION_MILLISECOND 

64from botocore.signers import ( 

65 add_dsql_generate_db_auth_token_methods, 

66 add_generate_db_auth_token, 

67 add_generate_presigned_post, 

68 add_generate_presigned_url, 

69) 

70from botocore.useragent import register_feature_id 

71from botocore.utils import ( 

72 SAFE_CHARS, 

73 SERVICE_NAME_ALIASES, # noqa: F401 

74 ArnParser, 

75 get_token_from_environment, 

76 hyphenize_service_id, # noqa: F401 

77 is_global_accesspoint, # noqa: F401 

78 percent_encode, 

79 switch_host_with_param, 

80) 

81 

82logger = logging.getLogger(__name__) 

83 

84REGISTER_FIRST = object() 

85REGISTER_LAST = object() 

86# From the S3 docs: 

87# The rules for bucket names in the US Standard region allow bucket names 

88# to be as long as 255 characters, and bucket names can contain any 

89# combination of uppercase letters, lowercase letters, numbers, periods 

90# (.), hyphens (-), and underscores (_). 

91VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$') 

92_ACCESSPOINT_ARN = ( 

93 r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]*:[0-9]{12}:accesspoint[/:]' 

94 r'[a-zA-Z0-9\-.]{1,63}$' 

95) 

96_OUTPOST_ARN = ( 

97 r'^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]' 

98 r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$' 

99) 

100VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN])) 

101# signing names used for the services s3 and s3-control, for example in 

102# botocore/data/s3/2006-03-01/endpoints-rule-set-1.json 

103S3_SIGNING_NAMES = ('s3', 's3-outposts', 's3-object-lambda', 's3express') 

104VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$') 

105 

106 

107def handle_service_name_alias(service_name, **kwargs): 

108 return SERVICE_NAME_ALIASES.get(service_name, service_name) 

109 

110 

111def add_recursion_detection_header(params, **kwargs): 

112 has_lambda_name = 'AWS_LAMBDA_FUNCTION_NAME' in os.environ 

113 trace_id = os.environ.get('_X_AMZN_TRACE_ID') 

114 if has_lambda_name and trace_id: 

115 headers = params['headers'] 

116 if 'X-Amzn-Trace-Id' not in headers: 

117 headers['X-Amzn-Trace-Id'] = quote(trace_id, safe='-=;:+&[]{}"\',') 

118 

119 

120def escape_xml_payload(params, **kwargs): 

121 # Replace \r and \n with the escaped sequence over the whole XML document 

122 # to avoid linebreak normalization modifying customer input when the 

123 # document is parsed. Ideally, we would do this in ElementTree.tostring, 

124 # but it doesn't allow us to override entity escaping for text fields. For 

125 # this operation \r and \n can only appear in the XML document if they were 

126 # passed as part of the customer input. 

127 body = params['body'] 

128 if b'\r' in body: 

129 body = body.replace(b'\r', b'&#13;') 

130 if b'\n' in body: 

131 body = body.replace(b'\n', b'&#10;') 

132 

133 params['body'] = body 

134 

135 
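# Illustrative sketch (not part of botocore): shows the effect of
# escape_xml_payload on a hypothetical request body containing both a
# carriage return and a newline in customer input.
def _example_escape_xml_payload():
    params = {'body': b'<Value>line1\r\nline2</Value>'}
    escape_xml_payload(params)
    assert params['body'] == b'<Value>line1&#13;&#10;line2</Value>'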

136def check_for_200_error(response, **kwargs): 

137 """This function has been deprecated, but is kept for backwards compatibility.""" 

138 # From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html 

139 # There are two opportunities for a copy request to return an error. One 

140 # can occur when Amazon S3 receives the copy request and the other can 

141 # occur while Amazon S3 is copying the files. If the error occurs before 

142 # the copy operation starts, you receive a standard Amazon S3 error. If the 

143 # error occurs during the copy operation, the error response is embedded in 

144 # the 200 OK response. This means that a 200 OK response can contain either 

145 # a success or an error. Make sure to design your application to parse the 

146 # contents of the response and handle it appropriately. 

147 # 

148 # So this handler checks for this case. Even though the server sends a 

149 # 200 response, conceptually this should be handled exactly like a 

150 # 500 response (with respect to raising exceptions, retries, etc.) 

151 # We're connected *before* all the other retry logic handlers, so as long 

152 # as we switch the error code to 500, we'll retry the error as expected. 

153 if response is None: 

154 # A None response can happen if an exception is raised while 

155 # trying to retrieve the response. See Endpoint._get_response(). 

156 return 

157 http_response, parsed = response 

158 if _looks_like_special_case_error( 

159 http_response.status_code, http_response.content 

160 ): 

161 logger.debug( 

162 "Error found for response with 200 status code, " 

163 "errors: %s, changing status code to " 

164 "500.", 

165 parsed, 

166 ) 

167 http_response.status_code = 500 

168 

169 

170def _looks_like_special_case_error(status_code, body): 

171 if status_code == 200 and body: 

172 try: 

173 parser = ETree.XMLParser( 

174 target=ETree.TreeBuilder(), encoding='utf-8' 

175 ) 

176 parser.feed(body) 

177 root = parser.close() 

178 except XMLParseError: 

179 # In cases of network disruptions, we may end up with a partial 

180 # streamed response from S3. We need to treat these cases as 

181 # 500 Service Errors and try again. 

182 return True 

183 if root.tag == 'Error': 

184 return True 

185 return False 

186 

187 
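# Illustrative sketch (not part of botocore): a 200 response whose body is an
# XML <Error> document is treated as an error, while a normal result body is
# not. The XML payloads below are hypothetical.
def _example_looks_like_special_case_error():
    error_body = (
        b'<?xml version="1.0" encoding="UTF-8"?>'
        b'<Error><Code>InternalError</Code>'
        b'<Message>Please try again.</Message></Error>'
    )
    assert _looks_like_special_case_error(200, error_body)
    assert not _looks_like_special_case_error(200, b'<CopyObjectResult/>')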

188def set_operation_specific_signer(context, signing_name, **kwargs): 

189 """Choose the operation-specific signer. 

190 

191 Individual operations may have a different auth type than the service as a 

192 whole. This will most often manifest as operations that should not be 

193 authenticated at all, but can include other auth modes such as sigv4 

194 without body signing. 

195 """ 

196 auth_type = context.get('auth_type') 

197 

198 # Auth type will be None if the operation doesn't have a configured auth 

199 # type. 

200 if not auth_type: 

201 return 

202 

203 # Auth type will be the string value 'none' if the operation should not 

204 # be signed at all. 

205 if auth_type == 'none': 

206 return botocore.UNSIGNED 

207 

208 if auth_type == 'bearer': 

209 return 'bearer' 

210 

211 # If the operation needs an unsigned body, we set additional context 

212 # allowing the signer to be aware of this. 

213 if context.get('unsigned_payload') or auth_type == 'v4-unsigned-body': 

214 context['payload_signing_enabled'] = False 

215 

216 if auth_type.startswith('v4'): 

217 if auth_type == 'v4-s3express': 

218 return auth_type 

219 

220 if auth_type == 'v4a': 

221 # If sigv4a is chosen, we must add additional signing config for 

222 # global signature. 

223 region = _resolve_sigv4a_region(context) 

224 signing = {'region': region, 'signing_name': signing_name} 

225 if 'signing' in context: 

226 context['signing'].update(signing) 

227 else: 

228 context['signing'] = signing 

229 signature_version = 'v4a' 

230 else: 

231 signature_version = 'v4' 

232 

233 # Signing names used by s3 and s3-control use customized signers "s3v4" 

234 # and "s3v4a". 

235 if signing_name in S3_SIGNING_NAMES: 

236 signature_version = f's3{signature_version}' 

237 

238 return signature_version 

239 

240 
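# Illustrative sketch (not part of botocore): how a few hypothetical operation
# contexts resolve to signature versions via set_operation_specific_signer.
def _example_operation_specific_signer():
    # Operations modeled with auth type "none" resolve to the UNSIGNED sentinel.
    context = {'auth_type': 'none'}
    assert set_operation_specific_signer(context, 'sts') is botocore.UNSIGNED
    # S3 signing names get the customized "s3v4" signer.
    assert set_operation_specific_signer({'auth_type': 'v4'}, 's3') == 's3v4'
    # An unsigned-body operation disables payload signing in the context.
    context = {'auth_type': 'v4-unsigned-body'}
    assert set_operation_specific_signer(context, 'ec2') == 'v4'
    assert context['payload_signing_enabled'] is False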

241def _handle_sqs_compatible_error(parsed, context, **kwargs): 

242 """ 

243 Ensures backward compatibility for SQS errors. 

244 

245 SQS's migration from the Query protocol to JSON was done prior to SDKs allowing a 

246 service to support multiple protocols. Because of this, SQS is missing the "error" 

247 key from its modeled exceptions, which is used by most query compatible services 

248 to map error codes to the proper exception. Instead, SQS uses the error's shape name, 

249 which is preserved in the QueryErrorCode key. 

250 """ 

251 parsed_error = parsed.get("Error", {}) 

252 if not parsed_error: 

253 return 

254 

255 if query_code := parsed_error.get("QueryErrorCode"): 

256 context['error_code_override'] = query_code 

257 

258 
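# Illustrative sketch (not part of botocore): a hypothetical parsed SQS error
# carrying "QueryErrorCode" has that code copied into the request context so
# the correct modeled exception can be raised.
def _example_handle_sqs_compatible_error():
    parsed = {
        'Error': {
            'Code': 'QueueDoesNotExist',
            'QueryErrorCode': 'AWS.SimpleQueueService.NonExistentQueue',
        }
    }
    context = {}
    _handle_sqs_compatible_error(parsed, context)
    assert (
        context['error_code_override']
        == 'AWS.SimpleQueueService.NonExistentQueue'
    )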

259def _resolve_sigv4a_region(context): 

260 region = None 

261 if 'client_config' in context: 

262 region = context['client_config'].sigv4a_signing_region_set 

263 if not region and context.get('signing', {}).get('region'): 

264 region = context['signing']['region'] 

265 return region or '*' 

266 

267 

268def decode_console_output(parsed, **kwargs): 

269 if 'Output' in parsed: 

270 try: 

271 # We're using 'replace' for errors because it is 

272 # possible that console output contains non string 

273 # chars we can't utf-8 decode. 

274 value = base64.b64decode( 

275 bytes(parsed['Output'], 'latin-1') 

276 ).decode('utf-8', 'replace') 

277 parsed['Output'] = value 

278 except (ValueError, TypeError, AttributeError): 

279 logger.debug('Error decoding base64', exc_info=True) 

280 

281 

282def generate_idempotent_uuid(params, model, **kwargs): 

283 for name in model.idempotent_members: 

284 if name not in params: 

285 params[name] = str(uuid.uuid4()) 

286 logger.debug( 

287 "injecting idempotency token (%s) into param '%s'.", 

288 params[name], 

289 name, 

290 ) 

291 

292 

293def decode_quoted_jsondoc(value): 

294 try: 

295 value = json.loads(unquote(value)) 

296 except (ValueError, TypeError): 

297 logger.debug('Error loading quoted JSON', exc_info=True) 

298 return value 

299 

300 

301def json_decode_template_body(parsed, **kwargs): 

302 if 'TemplateBody' in parsed: 

303 try: 

304 value = json.loads( 

305 parsed['TemplateBody'], object_pairs_hook=OrderedDict 

306 ) 

307 parsed['TemplateBody'] = value 

308 except (ValueError, TypeError): 

309 logger.debug('error loading JSON', exc_info=True) 

310 

311 

312def validate_bucket_name(params, **kwargs): 

313 if 'Bucket' not in params: 

314 return 

315 bucket = params['Bucket'] 

316 if not VALID_BUCKET.search(bucket) and not VALID_S3_ARN.search(bucket): 

317 error_msg = ( 

318 f'Invalid bucket name "{bucket}": Bucket name must match ' 

319 f'the regex "{VALID_BUCKET.pattern}" or be an ARN matching ' 

320 f'the regex "{VALID_S3_ARN.pattern}"' 

321 ) 

322 raise ParamValidationError(report=error_msg) 

323 

324 

325def sse_md5(params, **kwargs): 

326 """ 

327 S3 server-side encryption requires the encryption key to be sent to the 

328 server base64 encoded, as well as a base64-encoded MD5 hash of the 

329 encryption key. This handler does both if the MD5 has not been set by 

330 the caller. 

331 """ 

332 _sse_md5(params, 'SSECustomer') 

333 

334 

335def copy_source_sse_md5(params, **kwargs): 

336 """ 

337 S3 server-side encryption requires the encryption key to be sent to the 

338 server base64 encoded, as well as a base64-encoded MD5 hash of the 

339 encryption key. This handler does both if the MD5 has not been set by 

340 the caller specifically if the parameter is for the copy-source sse-c key. 

341 """ 

342 _sse_md5(params, 'CopySourceSSECustomer') 

343 

344 

345def _sse_md5(params, sse_member_prefix='SSECustomer'): 

346 if not _needs_s3_sse_customization(params, sse_member_prefix): 

347 return 

348 

349 sse_key_member = sse_member_prefix + 'Key' 

350 sse_md5_member = sse_member_prefix + 'KeyMD5' 

351 key_as_bytes = params[sse_key_member] 

352 if isinstance(key_as_bytes, str): 

353 key_as_bytes = key_as_bytes.encode('utf-8') 

354 md5_val = get_md5(key_as_bytes, usedforsecurity=False).digest() 

355 key_md5_str = base64.b64encode(md5_val).decode('utf-8') 

356 key_b64_encoded = base64.b64encode(key_as_bytes).decode('utf-8') 

357 params[sse_key_member] = key_b64_encoded 

358 params[sse_md5_member] = key_md5_str 

359 

360 

361def _needs_s3_sse_customization(params, sse_member_prefix): 

362 return ( 

363 params.get(sse_member_prefix + 'Key') is not None 

364 and sse_member_prefix + 'KeyMD5' not in params 

365 ) 

366 

367 
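# Illustrative sketch (not part of botocore): what _sse_md5 adds to the
# parameters for a hypothetical 256-bit customer-provided key (assumes an MD5
# implementation is available, as get_md5 requires).
def _example_sse_md5():
    import hashlib

    key = b'k' * 32
    params = {'SSECustomerKey': key}
    sse_md5(params)
    assert params['SSECustomerKey'] == base64.b64encode(key).decode('utf-8')
    expected = base64.b64encode(hashlib.md5(key).digest()).decode('utf-8')
    assert params['SSECustomerKeyMD5'] == expected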

368def disable_signing(**kwargs): 

369 """ 

370 This handler disables request signing by setting the signer 

371 name to a special sentinel value. 

372 """ 

373 return botocore.UNSIGNED 

374 

375 

376def add_expect_header(model, params, **kwargs): 

377 if model.http.get('method', '') not in ['PUT', 'POST']: 

378 return 

379 if 'body' in params: 

380 body = params['body'] 

381 if hasattr(body, 'read'): 

382 check_body = utils.ensure_boolean( 

383 os.environ.get( 

384 'BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE', 

385 False, 

386 ) 

387 ) 

388 if check_body and utils.determine_content_length(body) == 0: 

389 return 

390 # Any file like object will use an expect 100-continue 

391 # header regardless of size. 

392 logger.debug("Adding expect 100 continue header to request.") 

393 params['headers']['Expect'] = '100-continue' 

394 

395 

396class DeprecatedServiceDocumenter: 

397 def __init__(self, replacement_service_name): 

398 self._replacement_service_name = replacement_service_name 

399 

400 def inject_deprecation_notice(self, section, event_name, **kwargs): 

401 section.style.start_important() 

402 section.write('This service client is deprecated. Please use ') 

403 section.style.ref( 

404 self._replacement_service_name, 

405 self._replacement_service_name, 

406 ) 

407 section.write(' instead.') 

408 section.style.end_important() 

409 

410 

411def document_copy_source_form(section, event_name, **kwargs): 

412 if 'request-example' in event_name: 

413 parent = section.get_section('structure-value') 

414 param_line = parent.get_section('CopySource') 

415 value_portion = param_line.get_section('member-value') 

416 value_portion.clear_text() 

417 value_portion.write( 

418 "'string' or {'Bucket': 'string', " 

419 "'Key': 'string', 'VersionId': 'string'}" 

420 ) 

421 elif 'request-params' in event_name: 

422 param_section = section.get_section('CopySource') 

423 type_section = param_section.get_section('param-type') 

424 type_section.clear_text() 

425 type_section.write(':type CopySource: str or dict') 

426 doc_section = param_section.get_section('param-documentation') 

427 doc_section.clear_text() 

428 doc_section.write( 

429 "The name of the source bucket, key name of the source object, " 

430 "and optional version ID of the source object. You can either " 

431 "provide this value as a string or a dictionary. The " 

432 "string form is {bucket}/{key} or " 

433 "{bucket}/{key}?versionId={versionId} if you want to copy a " 

434 "specific version. You can also provide this value as a " 

435 "dictionary. The dictionary format is recommended over " 

436 "the string format because it is more explicit. The dictionary " 

437 "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}." 

438 " Note that the VersionId key is optional and may be omitted." 

439 " To specify an S3 access point, provide the access point" 

440 " ARN for the ``Bucket`` key in the copy source dictionary. If you" 

441 " want to provide the copy source for an S3 access point as a" 

442 " string instead of a dictionary, the ARN provided must be the" 

443 " full S3 access point object ARN" 

444 " (i.e. {accesspoint_arn}/object/{key})" 

445 ) 

446 

447 

448def handle_copy_source_param(params, **kwargs): 

449 """Convert CopySource param for CopyObject/UploadPartCopy. 

450 

451 This handler will deal with two cases: 

452 

453 * CopySource provided as a string. We'll make a best effort 

454 to URL encode the key name as required. This will require 

455 parsing the bucket and version id from the CopySource value 

456 and only encoding the key. 

457 * CopySource provided as a dict. In this case we're 

458 explicitly given the Bucket, Key, and VersionId so we're 

459 able to encode the key and ensure this value is serialized 

460 and correctly sent to S3. 

461 

462 """ 

463 source = params.get('CopySource') 

464 if source is None: 

465 # The call will eventually fail but we'll let the 

466 # param validator take care of this. It will 

467 # give a better error message. 

468 return 

469 if isinstance(source, str): 

470 params['CopySource'] = _quote_source_header(source) 

471 elif isinstance(source, dict): 

472 params['CopySource'] = _quote_source_header_from_dict(source) 

473 

474 

475def _quote_source_header_from_dict(source_dict): 

476 try: 

477 bucket = source_dict['Bucket'] 

478 key = source_dict['Key'] 

479 version_id = source_dict.get('VersionId') 

480 if VALID_S3_ARN.search(bucket): 

481 final = f'{bucket}/object/{key}' 

482 else: 

483 final = f'{bucket}/{key}' 

484 except KeyError as e: 

485 raise ParamValidationError( 

486 report=f'Missing required parameter: {str(e)}' 

487 ) 

488 final = percent_encode(final, safe=SAFE_CHARS + '/') 

489 if version_id is not None: 

490 final += f'?versionId={version_id}' 

491 return final 

492 

493 

494def _quote_source_header(value): 

495 result = VERSION_ID_SUFFIX.search(value) 

496 if result is None: 

497 return percent_encode(value, safe=SAFE_CHARS + '/') 

498 else: 

499 first, version_id = value[: result.start()], value[result.start() :] 

500 return percent_encode(first, safe=SAFE_CHARS + '/') + version_id 

501 

502 
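# Illustrative sketch (not part of botocore): the two CopySource forms
# described in handle_copy_source_param, using hypothetical bucket and key
# names. Only the key portion is percent-encoded.
def _example_handle_copy_source_param():
    params = {'CopySource': {'Bucket': 'mybucket', 'Key': 'my key+.txt'}}
    handle_copy_source_param(params)
    assert params['CopySource'] == 'mybucket/my%20key%2B.txt'

    params = {'CopySource': 'mybucket/my key.txt?versionId=abc123'}
    handle_copy_source_param(params)
    assert params['CopySource'] == 'mybucket/my%20key.txt?versionId=abc123'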

503def _get_cross_region_presigned_url( 

504 request_signer, request_dict, model, source_region, destination_region 

505): 

506 # The better way to do this is to actually get the 

507 # endpoint_resolver and get the endpoint_url given the 

508 # source region. In this specific case, we know that 

509 # we can safely replace the dest region with the source 

510 # region because of the supported EC2 regions, but in 

511 # general this is not a safe assumption to make. 

512 # I think eventually we should try to plumb through something 

513 # that allows us to resolve endpoints from regions. 

514 request_dict_copy = copy.deepcopy(request_dict) 

515 request_dict_copy['body']['DestinationRegion'] = destination_region 

516 request_dict_copy['url'] = request_dict['url'].replace( 

517 destination_region, source_region 

518 ) 

519 request_dict_copy['method'] = 'GET' 

520 request_dict_copy['headers'] = {} 

521 return request_signer.generate_presigned_url( 

522 request_dict_copy, region_name=source_region, operation_name=model.name 

523 ) 

524 

525 

526def _get_presigned_url_source_and_destination_regions(request_signer, params): 

527 # Gets the source and destination regions to be used 

528 destination_region = request_signer._region_name 

529 source_region = params.get('SourceRegion') 

530 return source_region, destination_region 

531 

532 

533def inject_presigned_url_ec2(params, request_signer, model, **kwargs): 

534 # The customer can still provide this, so we should pass if they do. 

535 if 'PresignedUrl' in params['body']: 

536 return 

537 src, dest = _get_presigned_url_source_and_destination_regions( 

538 request_signer, params['body'] 

539 ) 

540 url = _get_cross_region_presigned_url( 

541 request_signer, params, model, src, dest 

542 ) 

543 params['body']['PresignedUrl'] = url 

544 # EC2 Requires that the destination region be sent over the wire in 

545 # addition to the source region. 

546 params['body']['DestinationRegion'] = dest 

547 

548 

549def inject_presigned_url_rds(params, request_signer, model, **kwargs): 

550 # SourceRegion is not required for RDS operations, so it's possible that 

551 # it isn't set. In that case it's probably a local copy so we don't need 

552 # to do anything else. 

553 if 'SourceRegion' not in params['body']: 

554 return 

555 

556 src, dest = _get_presigned_url_source_and_destination_regions( 

557 request_signer, params['body'] 

558 ) 

559 

560 # Since SourceRegion isn't actually modeled for RDS, it needs to be 

561 # removed from the request params before we send the actual request. 

562 del params['body']['SourceRegion'] 

563 

564 if 'PreSignedUrl' in params['body']: 

565 return 

566 

567 url = _get_cross_region_presigned_url( 

568 request_signer, params, model, src, dest 

569 ) 

570 params['body']['PreSignedUrl'] = url 

571 

572 

573def json_decode_policies(parsed, model, **kwargs): 

574 # Any time an IAM operation returns a policy document 

575 # it is a string that is json that has been urlencoded, 

576 # i.e urlencode(json.dumps(policy_document)). 

577 # To give users something more useful, we will urldecode 

578 # this value and json.loads() the result so that they have 

579 # the policy document as a dictionary. 

580 output_shape = model.output_shape 

581 if output_shape is not None: 

582 _decode_policy_types(parsed, model.output_shape) 

583 

584 

585def _decode_policy_types(parsed, shape): 

586 # IAM consistently uses the policyDocumentType shape to indicate 

587 # strings that have policy documents. 

588 shape_name = 'policyDocumentType' 

589 if shape.type_name == 'structure': 

590 for member_name, member_shape in shape.members.items(): 

591 if ( 

592 member_shape.type_name == 'string' 

593 and member_shape.name == shape_name 

594 and member_name in parsed 

595 ): 

596 parsed[member_name] = decode_quoted_jsondoc( 

597 parsed[member_name] 

598 ) 

599 elif member_name in parsed: 

600 _decode_policy_types(parsed[member_name], member_shape) 

601 if shape.type_name == 'list': 

602 shape_member = shape.member 

603 for item in parsed: 

604 _decode_policy_types(item, shape_member) 

605 

606 
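# Illustrative sketch (not part of botocore): IAM policy documents arrive as
# URL-encoded JSON strings; decode_quoted_jsondoc restores the dictionary.
def _example_decode_quoted_jsondoc():
    policy = {'Version': '2012-10-17', 'Statement': []}
    encoded = quote(json.dumps(policy))
    assert decode_quoted_jsondoc(encoded) == policy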

607def parse_get_bucket_location(parsed, http_response, **kwargs): 

608 # s3.GetBucketLocation cannot be modeled properly. To 

609 # account for this we just manually parse the XML document. 

610 # The "parsed" passed in only has the ResponseMetadata 

611 # filled out. This handler will fill in the LocationConstraint 

612 # value. 

613 if http_response.raw is None: 

614 return 

615 response_body = http_response.content 

616 parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8') 

617 parser.feed(response_body) 

618 root = parser.close() 

619 region = root.text 

620 parsed['LocationConstraint'] = region 

621 

622 

623def base64_encode_user_data(params, **kwargs): 

624 if 'UserData' in params: 

625 if isinstance(params['UserData'], str): 

626 # Encode it to bytes if it is text. 

627 params['UserData'] = params['UserData'].encode('utf-8') 

628 params['UserData'] = base64.b64encode(params['UserData']).decode( 

629 'utf-8' 

630 ) 

631 

632 
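# Illustrative sketch (not part of botocore): UserData passed as text is
# encoded to UTF-8 bytes and then base64 encoded before serialization.
def _example_base64_encode_user_data():
    params = {'UserData': '#!/bin/bash\necho hello'}
    base64_encode_user_data(params)
    expected = base64.b64encode(b'#!/bin/bash\necho hello').decode('utf-8')
    assert params['UserData'] == expected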

633def document_base64_encoding(param): 

634 description = ( 

635 '**This value will be base64 encoded automatically. Do ' 

636 'not base64 encode this value prior to performing the ' 

637 'operation.**' 

638 ) 

639 append = AppendParamDocumentation(param, description) 

640 return append.append_documentation 

641 

642 

643def validate_ascii_metadata(params, **kwargs): 

644 """Verify S3 Metadata only contains ascii characters. 

645 

646 From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html 

647 

648 "Amazon S3 stores user-defined metadata in lowercase. Each name, value pair 

649 must conform to US-ASCII when using REST and UTF-8 when using SOAP or 

650 browser-based uploads via POST." 

651 

652 """ 

653 metadata = params.get('Metadata') 

654 if not metadata or not isinstance(metadata, dict): 

655 # We have to at least type check the metadata as a dict type 

656 # because this handler is called before param validation. 

657 # We'll go ahead and return because the param validator will 

658 # give a descriptive error message for us. 

659 # We might need a post-param validation event. 

660 return 

661 for key, value in metadata.items(): 

662 try: 

663 key.encode('ascii') 

664 value.encode('ascii') 

665 except UnicodeEncodeError: 

666 error_msg = ( 

667 'Non ascii characters found in S3 metadata ' 

668 f'for key "{key}", value: "{value}". \nS3 metadata can only ' 

669 'contain ASCII characters. ' 

670 ) 

671 raise ParamValidationError(report=error_msg) 

672 

673 
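# Illustrative sketch (not part of botocore): ASCII metadata passes, while a
# hypothetical non-ASCII value is rejected before the request is built.
def _example_validate_ascii_metadata():
    validate_ascii_metadata({'Metadata': {'label': 'ok'}})
    try:
        validate_ascii_metadata({'Metadata': {'label': 'caf\u00e9'}})
    except ParamValidationError:
        pass
    else:
        raise AssertionError('expected ParamValidationError')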

674def fix_route53_ids(params, model, **kwargs): 

675 """ 

676 Check for and split apart Route53 resource IDs, setting 

677 only the last piece. This allows the output of one operation 

678 (e.g. ``'foo/1234'``) to be used as input in another 

679 operation (e.g. it expects just ``'1234'``). 

680 """ 

681 input_shape = model.input_shape 

682 if not input_shape or not hasattr(input_shape, 'members'): 

683 return 

684 

685 members = [ 

686 name 

687 for (name, shape) in input_shape.members.items() 

688 if shape.name in ['ResourceId', 'DelegationSetId', 'ChangeId'] 

689 ] 

690 

691 for name in members: 

692 if name in params: 

693 orig_value = params[name] 

694 params[name] = orig_value.split('/')[-1] 

695 logger.debug('%s %s -> %s', name, orig_value, params[name]) 

696 

697 
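# Illustrative sketch (not part of botocore): a minimal stand-in for the
# operation model shows a Route53 ID like '/hostedzone/Z123' reduced to its
# last path segment. The shape and ID values are hypothetical.
def _example_fix_route53_ids():
    from types import SimpleNamespace

    resource_id_shape = SimpleNamespace(name='ResourceId')
    model = SimpleNamespace(
        input_shape=SimpleNamespace(members={'Id': resource_id_shape})
    )
    params = {'Id': '/hostedzone/Z1D633PJN98FT9'}
    fix_route53_ids(params, model)
    assert params['Id'] == 'Z1D633PJN98FT9'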

698def inject_account_id(params, **kwargs): 

699 if params.get('accountId') is None: 

700 # Glacier requires accountId, but allows you 

701 # to specify '-' for the current owners account. 

702 # We add this default value if the user does not 

703 # provide the accountId as a convenience. 

704 params['accountId'] = '-' 

705 

706 

707def add_glacier_version(model, params, **kwargs): 

708 request_dict = params 

709 request_dict['headers']['x-amz-glacier-version'] = model.metadata[ 

710 'apiVersion' 

711 ] 

712 

713 

714def add_accept_header(model, params, **kwargs): 

715 if params['headers'].get('Accept', None) is None: 

716 request_dict = params 

717 request_dict['headers']['Accept'] = 'application/json' 

718 

719 

720def add_glacier_checksums(params, **kwargs): 

721 """Add glacier checksums to the http request. 

722 

723 This will add two headers to the http request: 

724 

725 * x-amz-content-sha256 

726 * x-amz-sha256-tree-hash 

727 

728 These values will only be added if they are not present 

729 in the HTTP request. 

730 

731 """ 

732 request_dict = params 

733 headers = request_dict['headers'] 

734 body = request_dict['body'] 

735 if isinstance(body, bytes): 

736 # If the user provided a bytes type instead of a file 

737 # like object, we temporarily create a BytesIO object 

738 # so we can use the util functions to calculate the 

739 # checksums which assume file like objects. Note that 

740 # we're not actually changing the body in the request_dict. 

741 body = BytesIO(body) 

742 starting_position = body.tell() 

743 if 'x-amz-content-sha256' not in headers: 

744 headers['x-amz-content-sha256'] = utils.calculate_sha256( 

745 body, as_hex=True 

746 ) 

747 body.seek(starting_position) 

748 if 'x-amz-sha256-tree-hash' not in headers: 

749 headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body) 

750 body.seek(starting_position) 

751 

752 

753def document_glacier_tree_hash_checksum(): 

754 doc = ''' 

755 This is a required field. 

756 

757 Ideally you will want to compute this value with checksums from 

758 previously uploaded parts, using the algorithm described in 

759 `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_. 

760 

761 But if you prefer, you can also use botocore.utils.calculate_tree_hash() 

762 to compute it from raw file by:: 

763 

764 checksum = calculate_tree_hash(open('your_file.txt', 'rb')) 

765 

766 ''' 

767 return AppendParamDocumentation('checksum', doc).append_documentation 

768 

769 

770def document_cloudformation_get_template_return_type( 

771 section, event_name, **kwargs 

772): 

773 if 'response-params' in event_name: 

774 template_body_section = section.get_section('TemplateBody') 

775 type_section = template_body_section.get_section('param-type') 

776 type_section.clear_text() 

777 type_section.write('(*dict*) --') 

778 elif 'response-example' in event_name: 

779 parent = section.get_section('structure-value') 

780 param_line = parent.get_section('TemplateBody') 

781 value_portion = param_line.get_section('member-value') 

782 value_portion.clear_text() 

783 value_portion.write('{}') 

784 

785 

786def switch_host_machinelearning(request, **kwargs): 

787 switch_host_with_param(request, 'PredictEndpoint') 

788 

789 

790def check_openssl_supports_tls_version_1_2(**kwargs): 

791 import ssl 

792 

793 try: 

794 openssl_version_tuple = ssl.OPENSSL_VERSION_INFO 

795 if openssl_version_tuple < (1, 0, 1): 

796 warnings.warn( 

797 f'Currently installed openssl version: {ssl.OPENSSL_VERSION} does not ' 

798 'support TLS 1.2, which is required for use of iot-data. ' 

799 'Please use python installed with openssl version 1.0.1 or ' 

800 'higher.', 

801 UnsupportedTLSVersionWarning, 

802 ) 

803 # We cannot check the openssl version on python2.6, so we should just 

804 # pass on this convenience check. 

805 except AttributeError: 

806 pass 

807 

808 

809def change_get_to_post(request, **kwargs): 

810 # This is useful when we need to change a potentially large GET request 

811 # into a POST with x-www-form-urlencoded encoding. 

812 if request.method == 'GET' and '?' in request.url: 

813 request.headers['Content-Type'] = 'application/x-www-form-urlencoded' 

814 request.method = 'POST' 

815 request.url, request.data = request.url.split('?', 1) 

816 

817 
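# Illustrative sketch (not part of botocore): a stand-in request object shows
# the query string of a GET being moved into a form-encoded POST body.
def _example_change_get_to_post():
    from types import SimpleNamespace

    request = SimpleNamespace(
        method='GET',
        url='https://example.amazonaws.com/?Action=Describe&Version=2016-11-15',
        headers={},
        data=None,
    )
    change_get_to_post(request)
    assert request.method == 'POST'
    assert request.url == 'https://example.amazonaws.com/'
    assert request.data == 'Action=Describe&Version=2016-11-15'
    assert request.headers['Content-Type'] == 'application/x-www-form-urlencoded'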

818def set_list_objects_encoding_type_url(params, context, **kwargs): 

819 if 'EncodingType' not in params: 

820 # We set this context so that we know it wasn't the customer that 

821 # requested the encoding. 

822 context['encoding_type_auto_set'] = True 

823 params['EncodingType'] = 'url' 

824 

825 

826def decode_list_object(parsed, context, **kwargs): 

827 # This is needed because we are passing url as the encoding type. Since the 

828 # paginator is based on the key, we need to handle it before it can be 

829 # round tripped. 

830 # 

831 # From the documentation: If you specify encoding-type request parameter, 

832 # Amazon S3 includes this element in the response, and returns encoded key 

833 # name values in the following response elements: 

834 # Delimiter, Marker, Prefix, NextMarker, Key. 

835 _decode_list_object( 

836 top_level_keys=['Delimiter', 'Marker', 'NextMarker'], 

837 nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')], 

838 parsed=parsed, 

839 context=context, 

840 ) 

841 

842 

843def decode_list_object_v2(parsed, context, **kwargs): 

844 # From the documentation: If you specify encoding-type request parameter, 

845 # Amazon S3 includes this element in the response, and returns encoded key 

846 # name values in the following response elements: 

847 # Delimiter, Prefix, ContinuationToken, Key, and StartAfter. 

848 _decode_list_object( 

849 top_level_keys=['Delimiter', 'Prefix', 'StartAfter'], 

850 nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')], 

851 parsed=parsed, 

852 context=context, 

853 ) 

854 

855 

856def decode_list_object_versions(parsed, context, **kwargs): 

857 # From the documentation: If you specify encoding-type request parameter, 

858 # Amazon S3 includes this element in the response, and returns encoded key 

859 # name values in the following response elements: 

860 # KeyMarker, NextKeyMarker, Prefix, Key, and Delimiter. 

861 _decode_list_object( 

862 top_level_keys=[ 

863 'KeyMarker', 

864 'NextKeyMarker', 

865 'Prefix', 

866 'Delimiter', 

867 ], 

868 nested_keys=[ 

869 ('Versions', 'Key'), 

870 ('DeleteMarkers', 'Key'), 

871 ('CommonPrefixes', 'Prefix'), 

872 ], 

873 parsed=parsed, 

874 context=context, 

875 ) 

876 

877 

878def _decode_list_object(top_level_keys, nested_keys, parsed, context): 

879 if parsed.get('EncodingType') == 'url' and context.get( 

880 'encoding_type_auto_set' 

881 ): 

882 # URL decode top-level keys in the response if present. 

883 for key in top_level_keys: 

884 if key in parsed: 

885 parsed[key] = unquote_str(parsed[key]) 

886 # URL decode nested keys from the response if present. 

887 for top_key, child_key in nested_keys: 

888 if top_key in parsed: 

889 for member in parsed[top_key]: 

890 member[child_key] = unquote_str(member[child_key]) 

891 

892 
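# Illustrative sketch (not part of botocore): when EncodingType=url was
# injected automatically, listed keys come back percent-encoded and are
# decoded in place. The object keys below are hypothetical.
def _example_decode_list_object_v2():
    parsed = {
        'EncodingType': 'url',
        'Prefix': 'photos%2F2024%2F',
        'Contents': [{'Key': 'photos%2F2024%2Fa%20b.jpg'}],
    }
    decode_list_object_v2(parsed, context={'encoding_type_auto_set': True})
    assert parsed['Prefix'] == 'photos/2024/'
    assert parsed['Contents'][0]['Key'] == 'photos/2024/a b.jpg'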

893def convert_body_to_file_like_object(params, **kwargs): 

894 if 'Body' in params: 

895 if isinstance(params['Body'], str): 

896 params['Body'] = BytesIO(ensure_bytes(params['Body'])) 

897 elif isinstance(params['Body'], bytes): 

898 params['Body'] = BytesIO(params['Body']) 

899 

900 

901def _add_parameter_aliases(handler_list): 

902 # Mapping of original parameter to parameter alias. 

903 # The key is <service>.<operation>.parameter 

904 # The first part of the key is used for event registration. 

905 # The last part is the original parameter name and the value is the 

906 # alias to expose in documentation. 

907 aliases = { 

908 'ec2.*.Filter': 'Filters', 

909 'logs.CreateExportTask.from': 'fromTime', 

910 'cloudsearchdomain.Search.return': 'returnFields', 

911 } 

912 

913 for original, new_name in aliases.items(): 

914 event_portion, original_name = original.rsplit('.', 1) 

915 parameter_alias = ParameterAlias(original_name, new_name) 

916 

917 # Add the handlers to the list of handlers. 

918 # One handler is to handle when users provide the alias. 

919 # The other handler is to update the documentation to show only 

920 # the alias. 

921 parameter_build_event_handler_tuple = ( 

922 'before-parameter-build.' + event_portion, 

923 parameter_alias.alias_parameter_in_call, 

924 REGISTER_FIRST, 

925 ) 

926 docs_event_handler_tuple = ( 

927 'docs.*.' + event_portion + '.complete-section', 

928 parameter_alias.alias_parameter_in_documentation, 

929 ) 

930 handler_list.append(parameter_build_event_handler_tuple) 

931 handler_list.append(docs_event_handler_tuple) 

932 

933 

934class ParameterAlias: 

935 def __init__(self, original_name, alias_name): 

936 self._original_name = original_name 

937 self._alias_name = alias_name 

938 

939 def alias_parameter_in_call(self, params, model, **kwargs): 

940 if model.input_shape: 

941 # Only consider accepting the alias if it is modeled in the 

942 # input shape. 

943 if self._original_name in model.input_shape.members: 

944 if self._alias_name in params: 

945 if self._original_name in params: 

946 raise AliasConflictParameterError( 

947 original=self._original_name, 

948 alias=self._alias_name, 

949 operation=model.name, 

950 ) 

951 # Remove the alias parameter value and use the old name 

952 # instead. 

953 params[self._original_name] = params.pop(self._alias_name) 

954 

955 def alias_parameter_in_documentation(self, event_name, section, **kwargs): 

956 if event_name.startswith('docs.request-params'): 

957 if self._original_name not in section.available_sections: 

958 return 

959 # Replace the name for parameter type 

960 param_section = section.get_section(self._original_name) 

961 param_type_section = param_section.get_section('param-type') 

962 self._replace_content(param_type_section) 

963 

964 # Replace the name for the parameter description 

965 param_name_section = param_section.get_section('param-name') 

966 self._replace_content(param_name_section) 

967 elif event_name.startswith('docs.request-example'): 

968 section = section.get_section('structure-value') 

969 if self._original_name not in section.available_sections: 

970 return 

971 # Replace the name for the example 

972 param_section = section.get_section(self._original_name) 

973 self._replace_content(param_section) 

974 

975 def _replace_content(self, section): 

976 content = section.getvalue().decode('utf-8') 

977 updated_content = content.replace( 

978 self._original_name, self._alias_name 

979 ) 

980 section.clear_text() 

981 section.write(updated_content) 

982 

983 

984class ClientMethodAlias: 

985 def __init__(self, actual_name): 

986 """Aliases a non-extant method to an existing method. 

987 

988 :param actual_name: The name of the method that actually exists on 

989 the client. 

990 """ 

991 self._actual = actual_name 

992 

993 def __call__(self, client, **kwargs): 

994 return getattr(client, self._actual) 

995 

996 

997# TODO: Remove this class as it is no longer used 

998class HeaderToHostHoister: 

999 """Takes a header and moves it to the front of the hoststring.""" 

1000 

1001 _VALID_HOSTNAME = re.compile(r'(?!-)[a-z\d-]{1,63}(?<!-)$', re.IGNORECASE) 

1002 

1003 def __init__(self, header_name): 

1004 self._header_name = header_name 

1005 

1006 def hoist(self, params, **kwargs): 

1007 """Hoist a header to the hostname. 

1008 

1009 Hoist a header to the beginning of the hostname with a suffix "." after 

1010 it. The original header should be removed from the header map. This 

1011 method is intended to be used as a target for the before-call event. 

1012 """ 

1013 if self._header_name not in params['headers']: 

1014 return 

1015 header_value = params['headers'][self._header_name] 

1016 self._ensure_header_is_valid_host(header_value) 

1017 original_url = params['url'] 

1018 new_url = self._prepend_to_host(original_url, header_value) 

1019 params['url'] = new_url 

1020 

1021 def _ensure_header_is_valid_host(self, header): 

1022 match = self._VALID_HOSTNAME.match(header) 

1023 if not match: 

1024 raise ParamValidationError( 

1025 report=( 

1026 'Hostnames must contain only - and alphanumeric characters, ' 

1027 'and be between 1 and 63 characters long.' 

1028 ) 

1029 ) 

1030 

1031 def _prepend_to_host(self, url, prefix): 

1032 url_components = urlsplit(url) 

1033 parts = url_components.netloc.split('.') 

1034 parts = [prefix] + parts 

1035 new_netloc = '.'.join(parts) 

1036 new_components = ( 

1037 url_components.scheme, 

1038 new_netloc, 

1039 url_components.path, 

1040 url_components.query, 

1041 '', 

1042 ) 

1043 new_url = urlunsplit(new_components) 

1044 return new_url 

1045 

1046 

1047def inject_api_version_header_if_needed(model, params, **kwargs): 

1048 if not model.is_endpoint_discovery_operation: 

1049 return 

1050 params['headers']['x-amz-api-version'] = model.service_model.api_version 

1051 

1052 

1053def remove_lex_v2_start_conversation(class_attributes, **kwargs): 

1054 """Operation requires h2 which is currently unsupported in Python""" 

1055 if 'start_conversation' in class_attributes: 

1056 del class_attributes['start_conversation'] 

1057 

1058 

1059def remove_qbusiness_chat(class_attributes, **kwargs): 

1060 """Operation requires h2 which is currently unsupported in Python""" 

1061 if 'chat' in class_attributes: 

1062 del class_attributes['chat'] 

1063 

1064 

1065def remove_bedrock_runtime_invoke_model_with_bidirectional_stream( 

1066 class_attributes, **kwargs 

1067): 

1068 """Operation requires h2 which is currently unsupported in Python""" 

1069 if 'invoke_model_with_bidirectional_stream' in class_attributes: 

1070 del class_attributes['invoke_model_with_bidirectional_stream'] 

1071 

1072 

1073def enable_millisecond_timestamp_precision(serializer_kwargs, **kwargs): 

1074 """Event handler to enable millisecond precision""" 

1075 serializer_kwargs['timestamp_precision'] = TIMESTAMP_PRECISION_MILLISECOND 

1076 

1077 

1078def add_retry_headers(request, **kwargs): 

1079 retries_context = request.context.get('retries') 

1080 if not retries_context: 

1081 return 

1082 headers = request.headers 

1083 headers['amz-sdk-invocation-id'] = retries_context['invocation-id'] 

1084 sdk_retry_keys = ('ttl', 'attempt', 'max') 

1085 sdk_request_headers = [ 

1086 f'{key}={retries_context[key]}' 

1087 for key in sdk_retry_keys 

1088 if key in retries_context 

1089 ] 

1090 headers['amz-sdk-request'] = '; '.join(sdk_request_headers) 

1091 

1092 

1093def remove_bucket_from_url_paths_from_model(params, model, context, **kwargs): 

1094 """Strips leading `{Bucket}/` from any operations that have it. 

1095 

1096 The original value is retained in a separate "authPath" field. This is 

1097 used in the HmacV1Auth signer. See HmacV1Auth.canonical_resource in 

1098 botocore/auth.py for details. 

1099 

1100 This change is applied to the operation model during the first time the 

1101 operation is invoked and then stays in effect for the lifetime of the 

1102 client object. 

1103 

1104 When the ruleset based endpoint resolver is in effect, both the endpoint 

1105 ruleset AND the service model place the bucket name in the final URL. 

1106 The result is an invalid URL. This handler modifies the operation model to 

1107 no longer place the bucket name. Previous versions of botocore fixed the 

1108 URL after the fact when necessary. Since the introduction of ruleset based 

1109 endpoint resolution, the problem exists in ALL URLs that contain a bucket 

1110 name and can therefore be addressed before the URL gets assembled. 

1111 """ 

1112 req_uri = model.http['requestUri'] 

1113 bucket_path = '/{Bucket}' 

1114 if req_uri.startswith(bucket_path): 

1115 model.http['requestUri'] = req_uri[len(bucket_path) :] 

1116 # Strip query off the requestUri before using as authPath. The 

1117 # HmacV1Auth signer will append query params to the authPath during 

1118 # signing. 

1119 req_uri = req_uri.split('?')[0] 

1120 # If the request URI is ONLY a bucket, the auth_path must be 

1121 # terminated with a '/' character to generate a signature that the 

1122 # server will accept. 

1123 needs_slash = req_uri == bucket_path 

1124 model.http['authPath'] = f'{req_uri}/' if needs_slash else req_uri 

1125 

1126 
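# Illustrative sketch (not part of botocore): a stand-in operation model shows
# the leading '/{Bucket}' moved out of requestUri and preserved in authPath
# for the HmacV1Auth signer.
def _example_remove_bucket_from_url_paths_from_model():
    from types import SimpleNamespace

    model = SimpleNamespace(http={'requestUri': '/{Bucket}/{Key+}'})
    remove_bucket_from_url_paths_from_model(
        params=None, model=model, context=None
    )
    assert model.http['requestUri'] == '/{Key+}'
    assert model.http['authPath'] == '/{Bucket}/{Key+}'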

1127def remove_accid_host_prefix_from_model(params, model, context, **kwargs): 

1128 """Removes the `{AccountId}.` prefix from the operation model. 

1129 

1130 This change is applied to the operation model during the first time the 

1131 operation is invoked and then stays in effect for the lifetime of the 

1132 client object. 

1133 

1134 When the ruleset based endpoint resolver is in effect, both the endpoint 

1135 ruleset AND the service model place the {AccountId}. prefix in the URL. 

1136 The result is an invalid endpoint. This handler modifies the operation 

1137 model to remove the `endpoint.hostPrefix` field while leaving the 

1138 `RequiresAccountId` static context parameter in place. 

1139 """ 

1140 has_ctx_param = any( 

1141 ctx_param.name == 'RequiresAccountId' and ctx_param.value is True 

1142 for ctx_param in model.static_context_parameters 

1143 ) 

1144 if ( 

1145 model.endpoint is not None 

1146 and model.endpoint.get('hostPrefix') == '{AccountId}.' 

1147 and has_ctx_param 

1148 ): 

1149 del model.endpoint['hostPrefix'] 

1150 

1151 

1152def remove_arn_from_signing_path(request, **kwargs): 

1153 auth_path = request.auth_path 

1154 if isinstance(auth_path, str) and auth_path.startswith('/arn%3A'): 

1155 auth_path_parts = auth_path.split('/') 

1156 if len(auth_path_parts) > 1 and ArnParser.is_arn( 

1157 unquote(auth_path_parts[1]) 

1158 ): 

1159 request.auth_path = '/'.join(['', *auth_path_parts[2:]]) 

1160 

1161 

1162def customize_endpoint_resolver_builtins( 

1163 builtins, model, params, context, **kwargs 

1164): 

1165 """Modify builtin parameter values for endpoint resolver 

1166 

1167 Modifies the builtins dict in place. Changes are in effect for one call. 

1168 The corresponding event is emitted only if at least one builtin parameter 

1169 value is required for endpoint resolution for the operation. 

1170 """ 

1171 bucket_name = params.get('Bucket') 

1172 bucket_is_arn = bucket_name is not None and ArnParser.is_arn(bucket_name) 

1173 # In some situations the host will return AuthorizationHeaderMalformed 

1174 # when the signing region of a sigv4 request is not the bucket's 

1175 # region (which is likely unknown by the user of GetBucketLocation). 

1176 # Avoid this by always using path-style addressing. 

1177 if model.name == 'GetBucketLocation': 

1178 builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = True 

1179 # All situations where the bucket name is an ARN are not compatible 

1180 # with path style addressing. 

1181 elif bucket_is_arn: 

1182 builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = False 

1183 

1184 # Bucket names that are invalid host labels require path-style addressing. 

1185 # If path-style addressing was specifically requested, the default builtin 

1186 # value is already set. 

1187 path_style_required = ( 

1188 bucket_name is not None and not VALID_HOST_LABEL_RE.match(bucket_name) 

1189 ) 

1190 path_style_requested = builtins[ 

1191 EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE 

1192 ] 

1193 

1194 # Path-style addressing is incompatible with the global endpoint for 

1195 # presigned URLs. If the bucket name is an ARN, the ARN's region should be 

1196 # used in the endpoint. 

1197 if ( 

1198 context.get('use_global_endpoint') 

1199 and not path_style_required 

1200 and not path_style_requested 

1201 and not bucket_is_arn 

1202 and not utils.is_s3express_bucket(bucket_name) 

1203 ): 

1204 builtins[EndpointResolverBuiltins.AWS_REGION] = 'aws-global' 

1205 builtins[EndpointResolverBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT] = True 

1206 

1207 

1208def remove_content_type_header_for_presigning(request, **kwargs): 

1209 if ( 

1210 request.context.get('is_presign_request') is True 

1211 and 'Content-Type' in request.headers 

1212 ): 

1213 del request.headers['Content-Type'] 

1214 

1215 

1216def handle_expires_header( 

1217 operation_model, response_dict, customized_response_dict, **kwargs 

1218): 

1219 if _has_expires_shape(operation_model.output_shape): 

1220 if expires_value := response_dict.get('headers', {}).get('Expires'): 

1221 customized_response_dict['ExpiresString'] = expires_value 

1222 try: 

1223 utils.parse_timestamp(expires_value) 

1224 except (ValueError, RuntimeError): 

1225 logger.warning( 

1226 'Failed to parse the "Expires" member as a timestamp: %s. ' 

1227 'The unparsed value is available in the response under "ExpiresString".', 

1228 expires_value, 

1229 ) 

1230 del response_dict['headers']['Expires'] 

1231 

1232 

1233def _has_expires_shape(shape): 

1234 if not shape: 

1235 return False 

1236 return any( 

1237 member_shape.name == 'Expires' 

1238 and member_shape.serialization.get('name') == 'Expires' 

1239 for member_shape in shape.members.values() 

1240 ) 

1241 

1242 

1243def document_expires_shape(section, event_name, **kwargs): 

1244 # Updates the documentation for S3 operations that include the 'Expires' member 

1245 # in their response structure. Documents a synthetic member 'ExpiresString' and 

1246 # includes a deprecation notice for 'Expires'. 

1247 if 'response-example' in event_name: 

1248 if not section.has_section('structure-value'): 

1249 return 

1250 parent = section.get_section('structure-value') 

1251 if not parent.has_section('Expires'): 

1252 return 

1253 param_line = parent.get_section('Expires') 

1254 param_line.add_new_section('ExpiresString') 

1255 new_param_line = param_line.get_section('ExpiresString') 

1256 new_param_line.write("'ExpiresString': 'string',") 

1257 new_param_line.style.new_line() 

1258 elif 'response-params' in event_name: 

1259 if not section.has_section('Expires'): 

1260 return 

1261 param_section = section.get_section('Expires') 

1262 # Add a deprecation notice for the "Expires" param 

1263 doc_section = param_section.get_section('param-documentation') 

1264 doc_section.style.start_note() 

1265 doc_section.write( 

1266 'This member has been deprecated. Please use ``ExpiresString`` instead.' 

1267 ) 

1268 doc_section.style.end_note() 

1269 # Document the "ExpiresString" param 

1270 new_param_section = param_section.add_new_section('ExpiresString') 

1271 new_param_section.style.new_paragraph() 

1272 new_param_section.write('- **ExpiresString** *(string) --*') 

1273 new_param_section.style.indent() 

1274 new_param_section.style.new_paragraph() 

1275 new_param_section.write( 

1276 'The raw, unparsed value of the ``Expires`` field.' 

1277 ) 

1278 

1279 

1280def _handle_200_error(operation_model, response_dict, **kwargs): 

1281 # S3 can return a 200 response with an error embedded in the body. 

1282 # Convert the 200 to a 500 for retry resolution in ``_update_status_code``. 

1283 if not _should_handle_200_error(operation_model, response_dict): 

1284 # Operations with streaming response blobs are excluded as they 

1285 # can't be reliably distinguished from an S3 error. 

1286 return 

1287 if _looks_like_special_case_error( 

1288 response_dict['status_code'], response_dict['body'] 

1289 ): 

1290 response_dict['status_code'] = 500 

1291 logger.debug( 

1292 "Error found for response with 200 status code: %s.", 

1293 response_dict['body'], 

1294 ) 

1295 

1296 

1297def _should_handle_200_error(operation_model, response_dict): 

1298 output_shape = operation_model.output_shape 

1299 if ( 

1300 not response_dict 

1301 or operation_model.has_event_stream_output 

1302 or not output_shape 

1303 ): 

1304 return False 

1305 payload = output_shape.serialization.get('payload') 

1306 if payload is not None: 

1307 payload_shape = output_shape.members[payload] 

1308 if payload_shape.type_name in ('blob', 'string'): 

1309 return False 

1310 return True 

1311 

1312 

1313def _update_status_code(response, **kwargs): 

1314 # Update the http_response status code when the parsed response has been 

1315 # modified in a handler. This enables retries for cases like ``_handle_200_error``. 

1316 if response is None: 

1317 return 

1318 http_response, parsed = response 

1319 parsed_status_code = parsed.get('ResponseMetadata', {}).get( 

1320 'HTTPStatusCode', http_response.status_code 

1321 ) 

1322 if http_response.status_code != parsed_status_code: 

1323 http_response.status_code = parsed_status_code 

1324 

1325 

1326def _handle_request_validation_mode_member(params, model, **kwargs): 

1327 client_config = kwargs.get("context", {}).get("client_config") 

1328 if client_config is None: 

1329 return 

1330 response_checksum_validation = client_config.response_checksum_validation 

1331 http_checksum = model.http_checksum 

1332 mode_member = http_checksum.get("requestValidationModeMember") 

1333 if ( 

1334 mode_member is not None 

1335 and response_checksum_validation == "when_supported" 

1336 ): 

1337 params.setdefault(mode_member, "ENABLED") 

1338 

1339 

1340def _set_extra_headers_for_unsigned_request( 

1341 request, signature_version, **kwargs 

1342): 

1343 # When sending a checksum in the trailer of an unsigned chunked request, S3 

1344 # requires us to set the "X-Amz-Content-SHA256" header to "STREAMING-UNSIGNED-PAYLOAD-TRAILER". 

1345 checksum_context = request.context.get("checksum", {}) 

1346 algorithm = checksum_context.get("request_algorithm", {}) 

1347 in_trailer = algorithm.get("in") == "trailer" 

1348 headers = request.headers 

1349 if signature_version == botocore.UNSIGNED and in_trailer: 

1350 headers["X-Amz-Content-SHA256"] = "STREAMING-UNSIGNED-PAYLOAD-TRAILER" 

1351 

1352 

1353def _set_auth_scheme_preference_signer(context, signing_name, **kwargs): 

1354 """ 

1355 Determines the appropriate signer to use based on the client configuration, 

1356 authentication scheme preferences, and the availability of a bearer token. 

1357 """ 

1358 client_config = context.get('client_config') 

1359 if client_config is None: 

1360 return 

1361 

1362 signature_version = client_config.signature_version 

1363 auth_scheme_preference = client_config.auth_scheme_preference 

1364 auth_options = context.get('auth_options') 

1365 

1366 signature_version_set_in_code = ( 

1367 isinstance(signature_version, ClientConfigString) 

1368 or signature_version is botocore.UNSIGNED 

1369 ) 

1370 auth_preference_set_in_code = isinstance( 

1371 auth_scheme_preference, ClientConfigString 

1372 ) 

1373 has_in_code_configuration = ( 

1374 signature_version_set_in_code or auth_preference_set_in_code 

1375 ) 

1376 

1377 resolved_signature_version = signature_version 

1378 

1379 # If signature version was not set in code, but an auth scheme preference 

1380 # is available, resolve it based on the preferred schemes and supported auth 

1381 # options for this service. 

1382 if ( 

1383 not signature_version_set_in_code 

1384 and auth_scheme_preference 

1385 and auth_options 

1386 ): 

1387 preferred_schemes = auth_scheme_preference.split(',') 

1388 resolved = botocore.auth.resolve_auth_scheme_preference( 

1389 preferred_schemes, auth_options 

1390 ) 

1391 resolved_signature_version = ( 

1392 botocore.UNSIGNED if resolved == 'none' else resolved 

1393 ) 

1394 

1395 # Prefer 'bearer' signature version if a bearer token is available, and it 

1396 # is allowed for this service. This can override earlier resolution if the 

1397 # config object didn't explicitly set a signature version. 

1398 if _should_prefer_bearer_auth( 

1399 has_in_code_configuration, 

1400 signing_name, 

1401 resolved_signature_version, 

1402 auth_options, 

1403 ): 

1404 register_feature_id('BEARER_SERVICE_ENV_VARS') 

1405 resolved_signature_version = 'bearer' 

1406 

1407 if resolved_signature_version == signature_version: 

1408 return None 

1409 return resolved_signature_version 

1410 

1411 

1412def _should_prefer_bearer_auth( 

1413 has_in_code_configuration, 

1414 signing_name, 

1415 resolved_signature_version, 

1416 auth_options, 

1417): 

1418 if signing_name not in get_bearer_auth_supported_services(): 

1419 return False 

1420 

1421 if not auth_options or 'smithy.api#httpBearerAuth' not in auth_options: 

1422 return False 

1423 

1424 has_token = get_token_from_environment(signing_name) is not None 

1425 

1426 # Prefer 'bearer' if a bearer token is available, and either: 

1427 # Bearer was already resolved, or 

1428 # No auth-related values were explicitly set in code 

1429 return has_token and ( 

1430 resolved_signature_version == 'bearer' or not has_in_code_configuration 

1431 ) 

1432 

1433 

1434def get_bearer_auth_supported_services(): 

1435 """ 

1436 Returns a set of services that support bearer token authentication. 

1437 These values correspond to the service's `signingName` property as defined 

1438 in model.py, falling back to `endpointPrefix` if `signingName` is not set. 

1439 

1440 Warning: This is a private interface and is subject to abrupt breaking changes, 

1441 including removal, in any botocore release. It is not intended for external use, 

1442 and its usage outside of botocore is not advised or supported. 

1443 """ 

1444 return {'bedrock'} 

1445 

1446 
1447# This is a list of (event_name, handler).
1448# When a Session is created, everything in this list will be
1449# automatically registered with that Session.
1450 
1451BUILTIN_HANDLERS = [
1452 ('choose-service-name', handle_service_name_alias),
1453 (
1454 'getattr.mturk.list_hi_ts_for_qualification_type',
1455 ClientMethodAlias('list_hits_for_qualification_type'),
1456 ),
1457 (
1458 'getattr.socialmessaging.delete_whatsapp_media_message',
1459 ClientMethodAlias('delete_whatsapp_message_media'),
1460 ),
1461 (
1462 'before-parameter-build.s3.UploadPart',
1463 convert_body_to_file_like_object,
1464 REGISTER_LAST,
1465 ),
1466 (
1467 'before-parameter-build.s3.PutObject',
1468 convert_body_to_file_like_object,
1469 REGISTER_LAST,
1470 ),
1471 ('creating-client-class', add_generate_presigned_url),
1472 ('creating-client-class.s3', add_generate_presigned_post),
1473 ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2),
1474 ('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation),
1475 ('creating-client-class.qbusiness', remove_qbusiness_chat),
1476 (
1477 'creating-client-class.bedrock-runtime',
1478 remove_bedrock_runtime_invoke_model_with_bidirectional_stream,
1479 ),
1480 (
1481 'creating-serializer.bedrock-agentcore',
1482 enable_millisecond_timestamp_precision,
1483 ),
1484 ('after-call.iam', json_decode_policies),
1485 ('after-call.ec2.GetConsoleOutput', decode_console_output),
1486 ('after-call.cloudformation.GetTemplate', json_decode_template_body),
1487 ('after-call.s3.GetBucketLocation', parse_get_bucket_location),
1488 (
1489 'after-call.sqs.*',
1490 _handle_sqs_compatible_error,
1491 ),
1492 ('before-parse.s3.*', handle_expires_header),
1493 ('before-parse.s3.*', _handle_200_error, REGISTER_FIRST),
1494 ('before-parameter-build', generate_idempotent_uuid),
1495 ('before-parameter-build', _handle_request_validation_mode_member),
1496 ('before-parameter-build.s3', validate_bucket_name),
1497 ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model),
1498 (
1499 'before-parameter-build.s3.ListObjects',
1500 set_list_objects_encoding_type_url,
1501 ),
1502 (
1503 'before-parameter-build.s3.ListObjectsV2',
1504 set_list_objects_encoding_type_url,
1505 ),
1506 (
1507 'before-parameter-build.s3.ListObjectVersions',
1508 set_list_objects_encoding_type_url,
1509 ),
1510 ('before-parameter-build.s3.CopyObject', handle_copy_source_param),
1511 ('before-parameter-build.s3.UploadPartCopy', handle_copy_source_param),
1512 ('before-parameter-build.s3.CopyObject', validate_ascii_metadata),
1513 ('before-parameter-build.s3.PutObject', validate_ascii_metadata),
1514 (
1515 'before-parameter-build.s3.CreateMultipartUpload',
1516 validate_ascii_metadata,
1517 ),
1518 ('before-parameter-build.s3-control', remove_accid_host_prefix_from_model),
1519 ('docs.*.s3.CopyObject.complete-section', document_copy_source_form),
1520 ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form),
1521 ('docs.response-example.s3.*.complete-section', document_expires_shape),
1522 ('docs.response-params.s3.*.complete-section', document_expires_shape),
1523 ('before-endpoint-resolution.s3', customize_endpoint_resolver_builtins),
1524 ('before-call', add_recursion_detection_header),
1525 ('before-call.s3', add_expect_header),
1526 ('before-call.glacier', add_glacier_version),
1527 ('before-call.apigateway', add_accept_header),
1528 ('before-call.s3.DeleteObjects', escape_xml_payload),
1529 ('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload),
1530 ('before-call.glacier.UploadArchive', add_glacier_checksums),
1531 ('before-call.glacier.UploadMultipartPart', add_glacier_checksums),
1532 ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2),
1533 ('request-created', add_retry_headers),
1534 ('request-created.machinelearning.Predict', switch_host_machinelearning),
1535 ('needs-retry.s3.*', _update_status_code, REGISTER_FIRST),
1536 ('choose-signer.cognito-identity.GetId', disable_signing),
1537 ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
1538 ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
1539 (
1540 'choose-signer.cognito-identity.GetCredentialsForIdentity',
1541 disable_signing,
1542 ),
1543 ('choose-signer.sts.AssumeRoleWithSAML', disable_signing),
1544 ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing),
1545 ('choose-signer', set_operation_specific_signer),
1546 ('choose-signer', _set_auth_scheme_preference_signer),
1547 ('before-parameter-build.s3.HeadObject', sse_md5),
1548 ('before-parameter-build.s3.GetObject', sse_md5),
1549 ('before-parameter-build.s3.PutObject', sse_md5),
1550 ('before-parameter-build.s3.CopyObject', sse_md5),
1551 ('before-parameter-build.s3.CopyObject', copy_source_sse_md5),
1552 ('before-parameter-build.s3.CreateMultipartUpload', sse_md5),
1553 ('before-parameter-build.s3.UploadPart', sse_md5),
1554 ('before-parameter-build.s3.UploadPartCopy', sse_md5),
1555 ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5),
1556 ('before-parameter-build.s3.CompleteMultipartUpload', sse_md5),
1557 ('before-parameter-build.s3.SelectObjectContent', sse_md5),
1558 ('before-parameter-build.ec2.RunInstances', base64_encode_user_data),
1559 (
1560 'before-parameter-build.autoscaling.CreateLaunchConfiguration',
1561 base64_encode_user_data,
1562 ),
1563 ('before-parameter-build.route53', fix_route53_ids),
1564 ('before-parameter-build.glacier', inject_account_id),
1565 ('before-sign.s3', remove_arn_from_signing_path),
1566 ('before-sign.s3', _set_extra_headers_for_unsigned_request),
1567 (
1568 'before-sign.polly.SynthesizeSpeech',
1569 remove_content_type_header_for_presigning,
1570 ),
1571 ('after-call.s3.ListObjects', decode_list_object),
1572 ('after-call.s3.ListObjectsV2', decode_list_object_v2),
1573 ('after-call.s3.ListObjectVersions', decode_list_object_versions),
1574 # Cloudsearchdomain search operation will be sent by HTTP POST
1575 ('request-created.cloudsearchdomain.Search', change_get_to_post),
1576 # Glacier documentation customizations
1577 (
1578 'docs.*.glacier.*.complete-section',
1579 AutoPopulatedParam(
1580 'accountId',
1581 'Note: this parameter is set to "-" by '
1582 'default if no value is specified.',
1583 ).document_auto_populated_param,
1584 ),
1585 (
1586 'docs.*.glacier.UploadArchive.complete-section',
1587 AutoPopulatedParam('checksum').document_auto_populated_param,
1588 ),
1589 (
1590 'docs.*.glacier.UploadMultipartPart.complete-section',
1591 AutoPopulatedParam('checksum').document_auto_populated_param,
1592 ),
1593 (
1594 'docs.request-params.glacier.CompleteMultipartUpload.complete-section',
1595 document_glacier_tree_hash_checksum(),
1596 ),
1597 # Cloudformation documentation customizations
1598 (
1599 'docs.*.cloudformation.GetTemplate.complete-section',
1600 document_cloudformation_get_template_return_type,
1601 ),
1602 # UserData base64 encoding documentation customizations
1603 (
1604 'docs.*.ec2.RunInstances.complete-section',
1605 document_base64_encoding('UserData'),
1606 ),
1607 (
1608 'docs.*.autoscaling.CreateLaunchConfiguration.complete-section',
1609 document_base64_encoding('UserData'),
1610 ),
1611 # EC2 CopySnapshot documentation customizations
1612 (
1613 'docs.*.ec2.CopySnapshot.complete-section',
1614 AutoPopulatedParam('PresignedUrl').document_auto_populated_param,
1615 ),
1616 (
1617 'docs.*.ec2.CopySnapshot.complete-section',
1618 AutoPopulatedParam('DestinationRegion').document_auto_populated_param,
1619 ),
1620 # S3 SSE documentation modifications
1621 (
1622 'docs.*.s3.*.complete-section',
1623 AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param,
1624 ),
1625 # S3 SSE Copy Source documentation modifications
1626 (
1627 'docs.*.s3.*.complete-section',
1628 AutoPopulatedParam(
1629 'CopySourceSSECustomerKeyMD5'
1630 ).document_auto_populated_param,
1631 ),
1632 # Add base64 information to Lambda
1633 (
1634 'docs.*.lambda.UpdateFunctionCode.complete-section',
1635 document_base64_encoding('ZipFile'),
1636 ),
1637 # The following S3 operations cannot actually accept a ContentMD5
1638 (
1639 'docs.*.s3.*.complete-section',
1640 HideParamFromOperations(
1641 's3',
1642 'ContentMD5',
1643 [
1644 'DeleteObjects',
1645 'PutBucketAcl',
1646 'PutBucketCors',
1647 'PutBucketLifecycle',
1648 'PutBucketLogging',
1649 'PutBucketNotification',
1650 'PutBucketPolicy',
1651 'PutBucketReplication',
1652 'PutBucketRequestPayment',
1653 'PutBucketTagging',
1654 'PutBucketVersioning',
1655 'PutBucketWebsite',
1656 'PutObjectAcl',
1657 ],
1658 ).hide_param,
1659 ),
1660 #############
1661 # DSQL
1662 #############
1663 ('creating-client-class.dsql', add_dsql_generate_db_auth_token_methods),
1664 #############
1665 # RDS
1666 #############
1667 ('creating-client-class.rds', add_generate_db_auth_token),
1668 ('before-call.rds.CopyDBClusterSnapshot', inject_presigned_url_rds),
1669 ('before-call.rds.CreateDBCluster', inject_presigned_url_rds),
1670 ('before-call.rds.CopyDBSnapshot', inject_presigned_url_rds),
1671 ('before-call.rds.CreateDBInstanceReadReplica', inject_presigned_url_rds),
1672 (
1673 'before-call.rds.StartDBInstanceAutomatedBackupsReplication',
1674 inject_presigned_url_rds,
1675 ),
1676 # RDS PresignedUrl documentation customizations
1677 (
1678 'docs.*.rds.CopyDBClusterSnapshot.complete-section',
1679 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1680 ),
1681 (
1682 'docs.*.rds.CreateDBCluster.complete-section',
1683 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1684 ),
1685 (
1686 'docs.*.rds.CopyDBSnapshot.complete-section',
1687 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1688 ),
1689 (
1690 'docs.*.rds.CreateDBInstanceReadReplica.complete-section',
1691 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1692 ),
1693 (
1694 'docs.*.rds.StartDBInstanceAutomatedBackupsReplication.complete-section',
1695 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1696 ),
1697 #############
1698 # Neptune
1699 #############
1700 ('before-call.neptune.CopyDBClusterSnapshot', inject_presigned_url_rds),
1701 ('before-call.neptune.CreateDBCluster', inject_presigned_url_rds),
1702 # Neptune PresignedUrl documentation customizations
1703 (
1704 'docs.*.neptune.CopyDBClusterSnapshot.complete-section',
1705 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1706 ),
1707 (
1708 'docs.*.neptune.CreateDBCluster.complete-section',
1709 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1710 ),
1711 #############
1712 # DocDB
1713 #############
1714 ('before-call.docdb.CopyDBClusterSnapshot', inject_presigned_url_rds),
1715 ('before-call.docdb.CreateDBCluster', inject_presigned_url_rds),
1716 # DocDB PresignedUrl documentation customizations
1717 (
1718 'docs.*.docdb.CopyDBClusterSnapshot.complete-section',
1719 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1720 ),
1721 (
1722 'docs.*.docdb.CreateDBCluster.complete-section',
1723 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
1724 ),
1725 ('before-call', inject_api_version_header_if_needed),
1726]
1727_add_parameter_aliases(BUILTIN_HANDLERS)
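For context on how BUILTIN_HANDLERS is consumed: when a Session is created, each (event_name, handler) pair is registered on the session's event emitter, the optional REGISTER_FIRST / REGISTER_LAST marker controls whether a handler runs before or after other handlers for the same event, and event names can target a whole service ('before-call.s3') or a single operation ('before-call.s3.PutObject'). A brief sketch of hooking the same event system from user code follows; the log_operation handler is hypothetical, while session.register and create_client are standard botocore Session methods.

# Sketch of registering a custom handler alongside the builtins; the
# log_operation handler itself is hypothetical, not part of botocore.
import botocore.session

def log_operation(model, params, **kwargs):
    # 'before-call' handlers receive the operation model and the serialized
    # request dict (headers, body, etc.).
    print(f"About to call {model.name}", list(params.get('headers', {})))

session = botocore.session.get_session()
# BUILTIN_HANDLERS are already registered on this session; this adds one more.
session.register('before-call.s3.PutObject', log_operation)

client = session.create_client('s3', region_name='us-east-1')
# A subsequent client.put_object(...) emits 'before-call.s3.PutObject' and
# invokes log_operation before the request is sent.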