Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/botocore/handlers.py: 22%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

604 statements  

1# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"). You 

4# may not use this file except in compliance with the License. A copy of 

5# the License is located at 

6# 

7# http://aws.amazon.com/apache2.0/ 

8# 

9# or in the "license" file accompanying this file. This file is 

10# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 

11# ANY KIND, either express or implied. See the License for the specific 

12# language governing permissions and limitations under the License. 

13 

14"""Builtin event handlers. 

15 

16This module contains builtin handlers for events emitted by botocore. 

17""" 

18 

19import base64 

20import copy 

21import logging 

22import os 

23import re 

24import uuid 

25import warnings 

26from io import BytesIO 

27 

28import botocore 

29import botocore.auth 

30from botocore import ( 

31 retryhandler, # noqa: F401 

32 translate, # noqa: F401 

33 utils, 

34) 

35from botocore.args import ClientConfigString 

36from botocore.compat import ( 

37 MD5_AVAILABLE, # noqa: F401 

38 ETree, 

39 OrderedDict, 

40 XMLParseError, 

41 ensure_bytes, 

42 get_md5, 

43 json, 

44 quote, 

45 unquote, 

46 unquote_str, 

47 urlsplit, 

48 urlunsplit, 

49) 

50from botocore.docs.utils import ( 

51 AppendParamDocumentation, 

52 AutoPopulatedParam, 

53 HideParamFromOperations, 

54) 

55from botocore.endpoint_provider import VALID_HOST_LABEL_RE 

56from botocore.exceptions import ( 

57 AliasConflictParameterError, 

58 MissingServiceIdError, # noqa: F401 

59 ParamValidationError, 

60 UnsupportedTLSVersionWarning, 

61) 

62from botocore.regions import EndpointResolverBuiltins 

63from botocore.signers import ( 

64 add_dsql_generate_db_auth_token_methods, 

65 add_generate_db_auth_token, 

66 add_generate_presigned_post, 

67 add_generate_presigned_url, 

68) 

69from botocore.useragent import register_feature_id 

70from botocore.utils import ( 

71 SAFE_CHARS, 

72 SERVICE_NAME_ALIASES, # noqa: F401 

73 ArnParser, 

74 get_token_from_environment, 

75 hyphenize_service_id, # noqa: F401 

76 is_global_accesspoint, # noqa: F401 

77 percent_encode, 

78 switch_host_with_param, 

79) 

80 

logger = logging.getLogger(__name__)

# Sentinel objects used by the event system to pin a handler at the very
# beginning or very end of the handler list, regardless of when it registers.
REGISTER_FIRST = object()
REGISTER_LAST = object()
# From the S3 docs:
# The rules for bucket names in the US Standard region allow bucket names
# to be as long as 255 characters, and bucket names can contain any
# combination of uppercase letters, lowercase letters, numbers, periods
# (.), hyphens (-), and underscores (_).
VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$')
# S3 access point / S3 Object Lambda access point ARN, e.g.
# arn:aws:s3:us-west-2:123456789012:accesspoint/my-ap
_ACCESSPOINT_ARN = (
    r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]*:[0-9]{12}:accesspoint[/:]'
    r'[a-zA-Z0-9\-.]{1,63}$'
)
# S3 Outposts access point ARN, e.g.
# arn:aws:s3-outposts:us-west-2:123456789012:outpost/op-id/accesspoint/my-ap
_OUTPOST_ARN = (
    r'^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]'
    r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$'
)
# Either ARN flavor above counts as a valid S3 "bucket" value.
VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN]))
# signing names used for the services s3 and s3-control, for example in
# botocore/data/s3/2006-03-01/endpoints-rule-set-1.json
S3_SIGNING_NAMES = ('s3', 's3-outposts', 's3-object-lambda', 's3express')
# Matches a trailing "?versionId=..." suffix on a string CopySource value.
VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$')

104 

105 

def handle_service_name_alias(service_name, **kwargs):
    """Map a legacy/aliased service name onto its canonical name."""
    if service_name in SERVICE_NAME_ALIASES:
        return SERVICE_NAME_ALIASES[service_name]
    return service_name

108 

109 

def add_recursion_detection_header(params, **kwargs):
    """Propagate the X-Ray trace id header when running inside AWS Lambda.

    The header is injected only when both AWS_LAMBDA_FUNCTION_NAME and
    _X_AMZN_TRACE_ID are present in the environment, and it never
    overwrites a header the caller set themselves.
    """
    in_lambda = 'AWS_LAMBDA_FUNCTION_NAME' in os.environ
    trace_id = os.environ.get('_X_AMZN_TRACE_ID')
    if not (in_lambda and trace_id):
        return
    headers = params['headers']
    if 'X-Amzn-Trace-Id' not in headers:
        headers['X-Amzn-Trace-Id'] = quote(trace_id, safe='-=;:+&[]{}"\',')

117 

118 

def escape_xml_payload(params, **kwargs):
    """Escape bare CR/LF bytes in an outgoing XML request body.

    Replace carriage returns and newlines with their XML numeric character
    references over the whole XML document to avoid linebreak normalization
    modifying customer input when the document is parsed. Ideally, we would
    do this in ElementTree.tostring, but it doesn't allow us to override
    entity escaping for text fields. For this operation CR and LF can only
    appear in the XML document if they were passed as part of the customer
    input.
    """
    body = params['body']
    # NOTE: the replacement values are the literal 5-byte character
    # references (ampersand, hash, digits, semicolon) — not raw control
    # bytes; an HTML rendering of this file previously corrupted them.
    if b'\r' in body:
        body = body.replace(b'\r', b'&#13;')
    if b'\n' in body:
        body = body.replace(b'\n', b'&#10;')

    params['body'] = body

133 

134 

def check_for_200_error(response, **kwargs):
    """This function has been deprecated, but is kept for backwards compatibility."""
    # From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html
    # An S3 COPY can fail *after* the 200 status line has been sent, in
    # which case the error document is embedded in the 200 OK body. We
    # rewrite such responses to a 500 so the retry handlers (which run
    # after us) treat them like any other retryable server error.
    if response is None:
        # Endpoint._get_response() hands us None when an exception was
        # raised while retrieving the response.
        return
    http_response, parsed = response
    is_embedded_error = _looks_like_special_case_error(
        http_response.status_code, http_response.content
    )
    if is_embedded_error:
        logger.debug(
            "Error found for response with 200 status code, "
            "errors: %s, changing status code to "
            "500.",
            parsed,
        )
        http_response.status_code = 500

167 

168 

169def _looks_like_special_case_error(status_code, body): 

170 if status_code == 200 and body: 

171 try: 

172 parser = ETree.XMLParser( 

173 target=ETree.TreeBuilder(), encoding='utf-8' 

174 ) 

175 parser.feed(body) 

176 root = parser.close() 

177 except XMLParseError: 

178 # In cases of network disruptions, we may end up with a partial 

179 # streamed response from S3. We need to treat these cases as 

180 # 500 Service Errors and try again. 

181 return True 

182 if root.tag == 'Error': 

183 return True 

184 return False 

185 

186 

def set_operation_specific_signer(context, signing_name, **kwargs):
    """Choose the operation-specific signer.

    Individual operations may have a different auth type than the service as a
    whole. This will most often manifest as operations that should not be
    authenticated at all, but can include other auth modes such as sigv4
    without body signing.
    """
    auth_type = context.get('auth_type')
    if not auth_type:
        # No operation-level auth type configured; keep the service signer.
        return

    if auth_type == 'none':
        # The operation must not be signed at all.
        return botocore.UNSIGNED

    if auth_type == 'bearer':
        return 'bearer'

    # If the operation needs an unsigned body, we set additional context
    # allowing the signer to be aware of this.
    if context.get('unsigned_payload') or auth_type == 'v4-unsigned-body':
        context['payload_signing_enabled'] = False

    if not auth_type.startswith('v4'):
        # Non-v4 auth types fall through to the default signer.
        return

    if auth_type == 'v4-s3express':
        return auth_type

    if auth_type == 'v4a':
        # Sigv4a requires extra signing config for a global signature.
        extra_signing = {
            'region': _resolve_sigv4a_region(context),
            'signing_name': signing_name,
        }
        context.setdefault('signing', {}).update(extra_signing)
        signature_version = 'v4a'
    else:
        signature_version = 'v4'

    # Signing names used by s3 and s3-control use customized signers "s3v4"
    # and "s3v4a".
    if signing_name in S3_SIGNING_NAMES:
        signature_version = f's3{signature_version}'

    return signature_version

238 

239 

240def _resolve_sigv4a_region(context): 

241 region = None 

242 if 'client_config' in context: 

243 region = context['client_config'].sigv4a_signing_region_set 

244 if not region and context.get('signing', {}).get('region'): 

245 region = context['signing']['region'] 

246 return region or '*' 

247 

248 

def decode_console_output(parsed, **kwargs):
    """Base64-decode the 'Output' field of a console-output response in place."""
    if 'Output' not in parsed:
        return
    try:
        # 'replace' is used for decode errors because console output can
        # contain bytes that are not valid utf-8.
        raw = bytes(parsed['Output'], 'latin-1')
        parsed['Output'] = base64.b64decode(raw).decode('utf-8', 'replace')
    except (ValueError, TypeError, AttributeError):
        logger.debug('Error decoding base64', exc_info=True)

261 

262 

def generate_idempotent_uuid(params, model, **kwargs):
    """Fill in a random UUID for every idempotency-token member the caller
    did not supply themselves."""
    for member in model.idempotent_members:
        if member in params:
            continue
        params[member] = str(uuid.uuid4())
        logger.debug(
            f"injecting idempotency token ({params[member]}) into param '{member}'."
        )

270 

271 

def decode_quoted_jsondoc(value):
    """Decode a urlencoded-JSON string into a Python object.

    The input is returned unchanged when it cannot be decoded.
    """
    try:
        return json.loads(unquote(value))
    except (ValueError, TypeError):
        logger.debug('Error loading quoted JSON', exc_info=True)
        return value

278 

279 

def json_decode_template_body(parsed, **kwargs):
    """Parse CloudFormation's TemplateBody from JSON text into an
    OrderedDict, leaving it untouched when it isn't valid JSON."""
    if 'TemplateBody' not in parsed:
        return
    try:
        parsed['TemplateBody'] = json.loads(
            parsed['TemplateBody'], object_pairs_hook=OrderedDict
        )
    except (ValueError, TypeError):
        logger.debug('error loading JSON', exc_info=True)

289 

290 

def validate_bucket_name(params, **kwargs):
    """Reject malformed S3 bucket names before a request is made.

    Raises ParamValidationError unless the name matches the classic
    bucket-name pattern or a supported access point / outpost ARN.
    """
    if 'Bucket' not in params:
        return
    bucket = params['Bucket']
    if VALID_BUCKET.search(bucket) or VALID_S3_ARN.search(bucket):
        return
    error_msg = (
        f'Invalid bucket name "{bucket}": Bucket name must match '
        f'the regex "{VALID_BUCKET.pattern}" or be an ARN matching '
        f'the regex "{VALID_S3_ARN.pattern}"'
    )
    raise ParamValidationError(report=error_msg)

302 

303 

def sse_md5(params, **kwargs):
    """Fill in the SSE-C key encoding and MD5 for an S3 request.

    S3 server-side encryption requires the encryption key to be sent to the
    server base64 encoded, as well as a base64-encoded MD5 hash of the
    encryption key. Both are derived here when the caller has not already
    provided the MD5.
    """
    _sse_md5(params, 'SSECustomer')

312 

313 

def copy_source_sse_md5(params, **kwargs):
    """Fill in the copy-source SSE-C key encoding and MD5 for an S3 request.

    Identical to ``sse_md5`` but operates on the CopySource variant of the
    SSE-C parameters, deriving the base64 key encoding and its MD5 when the
    caller has not already provided the MD5.
    """
    _sse_md5(params, 'CopySourceSSECustomer')

322 

323 

def _sse_md5(params, sse_member_prefix='SSECustomer'):
    # Shared worker for sse_md5/copy_source_sse_md5: base64-encodes the
    # customer-provided key in place and adds the matching base64 MD5.
    if not _needs_s3_sse_customization(params, sse_member_prefix):
        return

    key_member = sse_member_prefix + 'Key'
    md5_member = sse_member_prefix + 'KeyMD5'
    key_bytes = params[key_member]
    if isinstance(key_bytes, str):
        key_bytes = key_bytes.encode('utf-8')
    # usedforsecurity=False: the MD5 here is an integrity header, not a
    # cryptographic protection.
    digest = get_md5(key_bytes, usedforsecurity=False).digest()
    params[key_member] = base64.b64encode(key_bytes).decode('utf-8')
    params[md5_member] = base64.b64encode(digest).decode('utf-8')

338 

339 

340def _needs_s3_sse_customization(params, sse_member_prefix): 

341 return ( 

342 params.get(sse_member_prefix + 'Key') is not None 

343 and sse_member_prefix + 'KeyMD5' not in params 

344 ) 

345 

346 

def disable_signing(**kwargs):
    """Disable request signing by returning the sentinel signer name that
    tells botocore to skip signing entirely."""
    return botocore.UNSIGNED

353 

354 

def add_expect_header(model, params, **kwargs):
    """Add 'Expect: 100-continue' to streaming PUT/POST request bodies."""
    if model.http.get('method', '') not in ('PUT', 'POST'):
        return
    body = params.get('body')
    if body is None or not hasattr(body, 'read'):
        return
    skip_empty = utils.ensure_boolean(
        os.environ.get(
            'BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE',
            False,
        )
    )
    # Experimental opt-out: skip the header for provably empty bodies.
    if skip_empty and utils.determine_content_length(body) == 0:
        return
    # Any file like object will use an expect 100-continue
    # header regardless of size.
    logger.debug("Adding expect 100 continue header to request.")
    params['headers']['Expect'] = '100-continue'

373 

374 

class DeprecatedServiceDocumenter:
    """Injects a deprecation notice into a deprecated service's generated
    documentation, pointing readers at the replacement client."""

    def __init__(self, replacement_service_name):
        # Name of the client users should migrate to.
        self._replacement_service_name = replacement_service_name

    def inject_deprecation_notice(self, section, event_name, **kwargs):
        replacement = self._replacement_service_name
        section.style.start_important()
        section.write('This service client is deprecated. Please use ')
        section.style.ref(replacement, replacement)
        section.write(' instead.')
        section.style.end_important()

388 

389 

def document_copy_source_form(section, event_name, **kwargs):
    """Rewrite generated docs for the CopySource parameter so they describe
    both its string form and its dict form."""
    if 'request-example' in event_name:
        value_portion = (
            section.get_section('structure-value')
            .get_section('CopySource')
            .get_section('member-value')
        )
        value_portion.clear_text()
        value_portion.write(
            "'string' or {'Bucket': 'string', "
            "'Key': 'string', 'VersionId': 'string'}"
        )
    elif 'request-params' in event_name:
        param_section = section.get_section('CopySource')
        type_section = param_section.get_section('param-type')
        type_section.clear_text()
        type_section.write(':type CopySource: str or dict')
        doc_section = param_section.get_section('param-documentation')
        doc_section.clear_text()
        doc_section.write(
            "The name of the source bucket, key name of the source object, "
            "and optional version ID of the source object. You can either "
            "provide this value as a string or a dictionary. The "
            "string form is {bucket}/{key} or "
            "{bucket}/{key}?versionId={versionId} if you want to copy a "
            "specific version. You can also provide this value as a "
            "dictionary. The dictionary format is recommended over "
            "the string format because it is more explicit. The dictionary "
            "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}."
            " Note that the VersionId key is optional and may be omitted."
            " To specify an S3 access point, provide the access point"
            " ARN for the ``Bucket`` key in the copy source dictionary. If you"
            " want to provide the copy source for an S3 access point as a"
            " string instead of a dictionary, the ARN provided must be the"
            " full S3 access point object ARN"
            " (i.e. {accesspoint_arn}/object/{key})"
        )

425 

426 

def handle_copy_source_param(params, **kwargs):
    """Convert CopySource param for CopyObject/UploadPartCopy.

    Two forms are supported:

    * CopySource as a string: we make a best effort to URL encode only the
      key portion, which requires first splitting out the bucket and
      version id.
    * CopySource as a dict: Bucket, Key, and VersionId are given
      explicitly, so we encode the key and serialize the value into the
      single string S3 expects.
    """
    source = params.get('CopySource')
    if source is None:
        # The call will eventually fail, but the param validator gives a
        # better error message than we could here.
        return
    if isinstance(source, str):
        params['CopySource'] = _quote_source_header(source)
    elif isinstance(source, dict):
        params['CopySource'] = _quote_source_header_from_dict(source)

452 

453 

def _quote_source_header_from_dict(source_dict):
    # Serialize {'Bucket', 'Key', 'VersionId'} into the single header
    # string S3 expects, percent-encoding the path portion only.
    try:
        bucket = source_dict['Bucket']
        key = source_dict['Key']
        version_id = source_dict.get('VersionId')
    except KeyError as e:
        raise ParamValidationError(
            report=f'Missing required parameter: {str(e)}'
        )
    if VALID_S3_ARN.search(bucket):
        # Access point ARNs address objects via ".../object/<key>".
        final = f'{bucket}/object/{key}'
    else:
        final = f'{bucket}/{key}'
    final = percent_encode(final, safe=SAFE_CHARS + '/')
    if version_id is not None:
        final += f'?versionId={version_id}'
    return final

471 

472 

def _quote_source_header(value):
    # Percent-encode a string CopySource, leaving any trailing
    # "?versionId=..." suffix untouched.
    match = VERSION_ID_SUFFIX.search(value)
    if match is None:
        return percent_encode(value, safe=SAFE_CHARS + '/')
    head = value[: match.start()]
    suffix = value[match.start() :]
    return percent_encode(head, safe=SAFE_CHARS + '/') + suffix

480 

481 

def _get_cross_region_presigned_url(
    request_signer, request_dict, model, source_region, destination_region
):
    # The better way to do this is to actually get the
    # endpoint_resolver and get the endpoint_url given the
    # source region. In this specific case, we know that
    # we can safely replace the dest region with the source
    # region because of the supported EC2 regions, but in
    # general this is not a safe assumption to make.
    # I think eventually we should try to plumb through something
    # that allows us to resolve endpoints from regions.
    presign_dict = copy.deepcopy(request_dict)
    presign_dict['body']['DestinationRegion'] = destination_region
    presign_dict['url'] = request_dict['url'].replace(
        destination_region, source_region
    )
    presign_dict['method'] = 'GET'
    presign_dict['headers'] = {}
    return request_signer.generate_presigned_url(
        presign_dict, region_name=source_region, operation_name=model.name
    )

503 

504 

505def _get_presigned_url_source_and_destination_regions(request_signer, params): 

506 # Gets the source and destination regions to be used 

507 destination_region = request_signer._region_name 

508 source_region = params.get('SourceRegion') 

509 return source_region, destination_region 

510 

511 

def inject_presigned_url_ec2(params, request_signer, model, **kwargs):
    """Generate and inject the PresignedUrl for EC2 cross-region copies."""
    body = params['body']
    # The customer can still provide this, so we should pass if they do.
    if 'PresignedUrl' in body:
        return
    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )
    body['PresignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )
    # EC2 Requires that the destination region be sent over the wire in
    # addition to the source region.
    body['DestinationRegion'] = dest

526 

527 

def inject_presigned_url_rds(params, request_signer, model, **kwargs):
    """Generate and inject the PreSignedUrl for RDS cross-region copies."""
    body = params['body']
    # SourceRegion is not required for RDS operations, so it's possible that
    # it isn't set. In that case it's probably a local copy so we don't need
    # to do anything else.
    if 'SourceRegion' not in body:
        return

    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )

    # Since SourceRegion isn't actually modeled for RDS, it needs to be
    # removed from the request params before we send the actual request.
    del body['SourceRegion']

    if 'PreSignedUrl' in body:
        # The customer supplied their own URL; keep it.
        return

    body['PreSignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )

550 

551 

def json_decode_policies(parsed, model, **kwargs):
    # Any time an IAM operation returns a policy document
    # it is a string that is json that has been urlencoded,
    # i.e urlencode(json.dumps(policy_document)).
    # To give users something more useful, we will urldecode
    # this value and json.loads() the result so that they have
    # the policy document as a dictionary.
    output_shape = model.output_shape
    if output_shape is None:
        return
    _decode_policy_types(parsed, output_shape)

562 

563 

def _decode_policy_types(parsed, shape):
    # IAM consistently uses the policyDocumentType shape to indicate
    # strings that hold policy documents; walk the output shape and decode
    # every such member that appears in the parsed response.
    policy_shape_name = 'policyDocumentType'
    if shape.type_name == 'structure':
        for member_name, member_shape in shape.members.items():
            if member_name not in parsed:
                continue
            is_policy_string = (
                member_shape.type_name == 'string'
                and member_shape.name == policy_shape_name
            )
            if is_policy_string:
                parsed[member_name] = decode_quoted_jsondoc(
                    parsed[member_name]
                )
            else:
                _decode_policy_types(parsed[member_name], member_shape)
    if shape.type_name == 'list':
        element_shape = shape.member
        for item in parsed:
            _decode_policy_types(item, element_shape)

584 

585 

def parse_get_bucket_location(parsed, http_response, **kwargs):
    # s3.GetBucketLocation cannot be modeled properly. To
    # account for this we just manually parse the XML document.
    # The "parsed" passed in only has the ResponseMetadata
    # filled out. This handler will fill in the LocationConstraint
    # value.
    if http_response.raw is None:
        return
    parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8')
    parser.feed(http_response.content)
    root = parser.close()
    parsed['LocationConstraint'] = root.text

600 

601 

def base64_encode_user_data(params, **kwargs):
    """Base64-encode the UserData parameter in place, accepting str or bytes."""
    if 'UserData' not in params:
        return
    user_data = params['UserData']
    if isinstance(user_data, str):
        # Text must be encoded to bytes before base64-encoding.
        user_data = user_data.encode('utf-8')
    params['UserData'] = base64.b64encode(user_data).decode('utf-8')

610 

611 

def document_base64_encoding(param):
    """Build a doc-appender noting that *param* is base64 encoded
    automatically by botocore."""
    notice = (
        '**This value will be base64 encoded automatically. Do '
        'not base64 encode this value prior to performing the '
        'operation.**'
    )
    return AppendParamDocumentation(param, notice).append_documentation

620 

621 

def validate_ascii_metadata(params, **kwargs):
    """Verify S3 Metadata only contains ascii characters.

    From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html

    "Amazon S3 stores user-defined metadata in lowercase. Each name, value pair
    must conform to US-ASCII when using REST and UTF-8 when using SOAP or
    browser-based uploads via POST."

    """
    metadata = params.get('Metadata')
    if not isinstance(metadata, dict) or not metadata:
        # This handler runs before param validation, so we only sanity
        # check the type here and let the validator produce descriptive
        # errors for anything else.
        return
    for key, value in metadata.items():
        try:
            key.encode('ascii')
            value.encode('ascii')
        except UnicodeEncodeError:
            error_msg = (
                'Non ascii characters found in S3 metadata '
                f'for key "{key}", value: "{value}". \nS3 metadata can only '
                'contain ASCII characters. '
            )
            raise ParamValidationError(report=error_msg)

651 

652 

def fix_route53_ids(params, model, **kwargs):
    """
    Check for and split apart Route53 resource IDs, setting
    only the last piece. This allows the output of one operation
    (e.g. ``'foo/1234'``) to be used as input in another
    operation (e.g. it expects just ``'1234'``).
    """
    input_shape = model.input_shape
    if not input_shape or not hasattr(input_shape, 'members'):
        return

    id_shape_names = ('ResourceId', 'DelegationSetId', 'ChangeId')
    for name, shape in input_shape.members.items():
        if shape.name not in id_shape_names:
            continue
        if name not in params:
            continue
        orig_value = params[name]
        # Keep only the part after the final '/', e.g. 'hostedzone/ABC' -> 'ABC'.
        params[name] = orig_value.split('/')[-1]
        logger.debug('%s %s -> %s', name, orig_value, params[name])

675 

676 

def inject_account_id(params, **kwargs):
    """Default Glacier's required accountId to '-' (the current account).

    Glacier requires accountId but accepts '-' to mean the credential
    owner's account; we supply that default as a convenience when the
    caller does not provide one.
    """
    if params.get('accountId') is None:
        params['accountId'] = '-'

684 

685 

def add_glacier_version(model, params, **kwargs):
    """Set the x-amz-glacier-version header from the service model's
    apiVersion metadata."""
    api_version = model.metadata['apiVersion']
    params['headers']['x-amz-glacier-version'] = api_version

691 

692 

def add_accept_header(model, params, **kwargs):
    """Default the Accept header to application/json when not already set."""
    headers = params['headers']
    if headers.get('Accept') is None:
        headers['Accept'] = 'application/json'

697 

698 

def add_glacier_checksums(params, **kwargs):
    """Add glacier checksums to the http request.

    This will add two headers to the http request:

    * x-amz-content-sha256
    * x-amz-sha256-tree-hash

    These values will only be added if they are not present
    in the HTTP request.

    """
    headers = params['headers']
    body = params['body']
    if isinstance(body, bytes):
        # If the user provided a bytes type instead of a file
        # like object, we're temporarily create a BytesIO object
        # so we can use the util functions to calculate the
        # checksums which assume file like objects. Note that
        # we're not actually changing the body in the request_dict.
        body = BytesIO(body)
    start = body.tell()
    if 'x-amz-content-sha256' not in headers:
        headers['x-amz-content-sha256'] = utils.calculate_sha256(
            body, as_hex=True
        )
        # Rewind so the body can be read again for the next checksum/send.
        body.seek(start)
    if 'x-amz-sha256-tree-hash' not in headers:
        headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
        body.seek(start)

730 

731 

def document_glacier_tree_hash_checksum():
    # Doc-appender: explains how callers should compute the Glacier
    # tree-hash checksum for multipart uploads.
    doc = '''
    This is a required field.

    Ideally you will want to compute this value with checksums from
    previous uploaded parts, using the algorithm described in
    `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_.

    But if you prefer, you can also use botocore.utils.calculate_tree_hash()
    to compute it from raw file by::

        checksum = calculate_tree_hash(open('your_file.txt', 'rb'))

    '''
    return AppendParamDocumentation('checksum', doc).append_documentation

747 

748 

def document_cloudformation_get_template_return_type(
    section, event_name, **kwargs
):
    """Fix the documented return type of GetTemplate's TemplateBody, which
    is decoded to a dict rather than the modeled string."""
    if 'response-params' in event_name:
        type_section = section.get_section('TemplateBody').get_section(
            'param-type'
        )
        type_section.clear_text()
        type_section.write('(*dict*) --')
    elif 'response-example' in event_name:
        value_portion = (
            section.get_section('structure-value')
            .get_section('TemplateBody')
            .get_section('member-value')
        )
        value_portion.clear_text()
        value_portion.write('{}')

763 

764 

def switch_host_machinelearning(request, **kwargs):
    """Redirect Machine Learning Predict calls to the endpoint named by the
    request's PredictEndpoint parameter."""
    switch_host_with_param(request, 'PredictEndpoint')

767 

768 

def check_openssl_supports_tls_version_1_2(**kwargs):
    """Warn when the linked OpenSSL is too old to support TLS 1.2."""
    import ssl

    try:
        too_old = ssl.OPENSSL_VERSION_INFO < (1, 0, 1)
    except AttributeError:
        # Very old Pythons don't expose OPENSSL_VERSION_INFO; skip this
        # convenience check there.
        return
    if too_old:
        warnings.warn(
            f'Currently installed openssl version: {ssl.OPENSSL_VERSION} does not '
            'support TLS 1.2, which is required for use of iot-data. '
            'Please use python installed with openssl version 1.0.1 or '
            'higher.',
            UnsupportedTLSVersionWarning,
        )

786 

787 

def change_get_to_post(request, **kwargs):
    # This is useful when we need to change a potentially large GET request
    # into a POST with x-www-form-urlencoded encoding.
    if request.method != 'GET' or '?' not in request.url:
        return
    request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
    request.method = 'POST'
    # Move the query string into the POST body.
    request.url, request.data = request.url.split('?', 1)

795 

796 

def set_list_objects_encoding_type_url(params, context, **kwargs):
    """Default EncodingType to 'url' for S3 list operations.

    We also record in the context that it was us — not the customer — who
    requested the encoding, so the response handlers know it is safe to
    transparently decode.
    """
    if 'EncodingType' in params:
        return
    context['encoding_type_auto_set'] = True
    params['EncodingType'] = 'url'

803 

804 

def decode_list_object(parsed, context, **kwargs):
    # This is needed because we are passing url as the encoding type. Since the
    # paginator is based on the key, we need to handle it before it can be
    # round tripped.
    #
    # From the documentation: If you specify encoding-type request parameter,
    # Amazon S3 includes this element in the response, and returns encoded key
    # name values in the following response elements:
    # Delimiter, Marker, Prefix, NextMarker, Key.
    encoded_top_level = ['Delimiter', 'Marker', 'NextMarker']
    encoded_nested = [('Contents', 'Key'), ('CommonPrefixes', 'Prefix')]
    _decode_list_object(
        top_level_keys=encoded_top_level,
        nested_keys=encoded_nested,
        parsed=parsed,
        context=context,
    )

820 

821 

def decode_list_object_v2(parsed, context, **kwargs):
    # From the documentation: If you specify encoding-type request parameter,
    # Amazon S3 includes this element in the response, and returns encoded key
    # name values in the following response elements:
    # Delimiter, Prefix, ContinuationToken, Key, and StartAfter.
    encoded_top_level = ['Delimiter', 'Prefix', 'StartAfter']
    encoded_nested = [('Contents', 'Key'), ('CommonPrefixes', 'Prefix')]
    _decode_list_object(
        top_level_keys=encoded_top_level,
        nested_keys=encoded_nested,
        parsed=parsed,
        context=context,
    )

833 

834 

def decode_list_object_versions(parsed, context, **kwargs):
    # From the documentation: If you specify encoding-type request parameter,
    # Amazon S3 includes this element in the response, and returns encoded key
    # name values in the following response elements:
    # KeyMarker, NextKeyMarker, Prefix, Key, and Delimiter.
    encoded_top_level = [
        'KeyMarker',
        'NextKeyMarker',
        'Prefix',
        'Delimiter',
    ]
    encoded_nested = [
        ('Versions', 'Key'),
        ('DeleteMarkers', 'Key'),
        ('CommonPrefixes', 'Prefix'),
    ]
    _decode_list_object(
        top_level_keys=encoded_top_level,
        nested_keys=encoded_nested,
        parsed=parsed,
        context=context,
    )

855 

856 

857def _decode_list_object(top_level_keys, nested_keys, parsed, context): 

858 if parsed.get('EncodingType') == 'url' and context.get( 

859 'encoding_type_auto_set' 

860 ): 

861 # URL decode top-level keys in the response if present. 

862 for key in top_level_keys: 

863 if key in parsed: 

864 parsed[key] = unquote_str(parsed[key]) 

865 # URL decode nested keys from the response if present. 

866 for top_key, child_key in nested_keys: 

867 if top_key in parsed: 

868 for member in parsed[top_key]: 

869 member[child_key] = unquote_str(member[child_key]) 

870 

871 

def convert_body_to_file_like_object(params, **kwargs):
    """Wrap a str/bytes ``Body`` parameter in a ``BytesIO`` stream.

    Leaves ``params`` untouched when there is no ``Body`` or when it is
    already file-like.
    """
    body = params.get('Body')
    if isinstance(body, str):
        params['Body'] = BytesIO(ensure_bytes(body))
    elif isinstance(body, bytes):
        params['Body'] = BytesIO(body)

878 

879 

def _add_parameter_aliases(handler_list):
    """Register handlers that expose parameter aliases.

    The mapping key is ``<service>.<operation>.<parameter>``; the
    service/operation portion drives event registration, the final
    component is the original (modeled) parameter name, and the value
    is the alias exposed in documentation and accepted in calls.
    """
    aliases = {
        'ec2.*.Filter': 'Filters',
        'logs.CreateExportTask.from': 'fromTime',
        'cloudsearchdomain.Search.return': 'returnFields',
    }
    for original, alias in aliases.items():
        event_portion, original_name = original.rsplit('.', 1)
        param_alias = ParameterAlias(original_name, alias)
        # One handler rewrites the alias back to the modeled name when
        # users call the operation; the other swaps the name shown in
        # the generated documentation.
        handler_list.append(
            (
                'before-parameter-build.' + event_portion,
                param_alias.alias_parameter_in_call,
                REGISTER_FIRST,
            )
        )
        handler_list.append(
            (
                'docs.*.' + event_portion + '.complete-section',
                param_alias.alias_parameter_in_documentation,
            )
        )

911 

912 

class ParameterAlias:
    """Expose an alternate name for a modeled operation parameter."""

    def __init__(self, original_name, alias_name):
        self._original_name = original_name
        self._alias_name = alias_name

    def alias_parameter_in_call(self, params, model, **kwargs):
        """Translate the alias back to the modeled name before a call.

        Raises AliasConflictParameterError when the caller supplied
        both the alias and the original name.
        """
        input_shape = model.input_shape
        # Only honor the alias when the original name is modeled.
        if not input_shape or self._original_name not in input_shape.members:
            return
        if self._alias_name not in params:
            return
        if self._original_name in params:
            raise AliasConflictParameterError(
                original=self._original_name,
                alias=self._alias_name,
                operation=model.name,
            )
        # Move the value over to the name the serializer expects.
        params[self._original_name] = params.pop(self._alias_name)

    def alias_parameter_in_documentation(self, event_name, section, **kwargs):
        """Swap the original parameter name for the alias in the docs."""
        if event_name.startswith('docs.request-params'):
            if self._original_name not in section.available_sections:
                return
            param_section = section.get_section(self._original_name)
            # Rewrite both the type line and the description line.
            self._replace_content(param_section.get_section('param-type'))
            self._replace_content(param_section.get_section('param-name'))
        elif event_name.startswith('docs.request-example'):
            section = section.get_section('structure-value')
            if self._original_name not in section.available_sections:
                return
            # Rewrite the name shown in the request example.
            self._replace_content(section.get_section(self._original_name))

    def _replace_content(self, section):
        # Sections hold UTF-8 bytes; round-trip through str to perform
        # the substitution, then rewrite the section in place.
        text = section.getvalue().decode('utf-8')
        section.clear_text()
        section.write(text.replace(self._original_name, self._alias_name))

961 

962 

class ClientMethodAlias:
    """Aliases a non-extant method to an existing method.

    :param actual_name: The name of the method that actually exists on
        the client.
    """

    def __init__(self, actual_name):
        self._actual = actual_name

    def __call__(self, client, **kwargs):
        # Registered against "getattr.<service>.<alias>" events; the
        # returned attribute is the method the alias should resolve to.
        return getattr(client, self._actual)

974 

975 

# TODO: Remove this class as it is no longer used
class HeaderToHostHoister:
    """Takes a header and moves it to the front of the hoststring."""

    # 1-63 letters/digits/hyphens, no leading or trailing hyphen.
    _VALID_HOSTNAME = re.compile(r'(?!-)[a-z\d-]{1,63}(?<!-)$', re.IGNORECASE)

    def __init__(self, header_name):
        self._header_name = header_name

    def hoist(self, params, **kwargs):
        """Prepend the header's value (plus ".") to the request hostname.

        Intended as a target for the before-call event. The request is
        left untouched when the header is absent; the header itself is
        left in the header map. Raises ParamValidationError when the
        value is not a valid host label.
        """
        if self._header_name not in params['headers']:
            return
        header_value = params['headers'][self._header_name]
        self._ensure_header_is_valid_host(header_value)
        params['url'] = self._prepend_to_host(params['url'], header_value)

    def _ensure_header_is_valid_host(self, header):
        # Reject anything that cannot legally appear as a host label.
        if not self._VALID_HOSTNAME.match(header):
            raise ParamValidationError(
                report=(
                    'Hostnames must contain only - and alphanumeric characters, '
                    'and between 1 and 63 characters long.'
                )
            )

    def _prepend_to_host(self, url, prefix):
        # Rebuild the URL with the prefix as the left-most host label;
        # any fragment is intentionally dropped.
        parts = urlsplit(url)
        hoisted_netloc = '.'.join([prefix, *parts.netloc.split('.')])
        return urlunsplit(
            (parts.scheme, hoisted_netloc, parts.path, parts.query, '')
        )

1024 

1025 

def inject_api_version_header_if_needed(model, params, **kwargs):
    """Add the x-amz-api-version header for endpoint discovery operations."""
    if model.is_endpoint_discovery_operation:
        api_version = model.service_model.api_version
        params['headers']['x-amz-api-version'] = api_version

1030 

1031 

def remove_lex_v2_start_conversation(class_attributes, **kwargs):
    """Operation requires h2 which is currently unsupported in Python"""
    # pop() with a default replaces the membership-check + del pair and
    # is safe when the attribute is already absent.
    class_attributes.pop('start_conversation', None)

1036 

1037 

def remove_qbusiness_chat(class_attributes, **kwargs):
    """Operation requires h2 which is currently unsupported in Python"""
    # Drop the h2-only operation from the generated client class.
    if 'chat' in class_attributes:
        class_attributes.pop('chat')

1042 

1043 

def remove_bedrock_runtime_invoke_model_with_bidirectional_stream(
    class_attributes, **kwargs
):
    """Operation requires h2 which is currently unsupported in Python"""
    # Drop the h2-only operation from the generated client class.
    attr_name = 'invoke_model_with_bidirectional_stream'
    if attr_name in class_attributes:
        del class_attributes[attr_name]

1050 

1051 

def add_retry_headers(request, **kwargs):
    """Attach the amz-sdk-invocation-id and amz-sdk-request headers.

    No-op when the request carries no retry context. Only the retry
    attributes actually present in the context (ttl, attempt, max) are
    serialized into the amz-sdk-request header.
    """
    retries_context = request.context.get('retries')
    if not retries_context:
        return
    headers = request.headers
    headers['amz-sdk-invocation-id'] = retries_context['invocation-id']
    attribute_pairs = []
    for key in ('ttl', 'attempt', 'max'):
        if key in retries_context:
            attribute_pairs.append(f'{key}={retries_context[key]}')
    headers['amz-sdk-request'] = '; '.join(attribute_pairs)

1065 

1066 

def remove_bucket_from_url_paths_from_model(params, model, context, **kwargs):
    """Strips leading `{Bucket}/` from any operations that have it.

    With ruleset based endpoint resolution, both the endpoint ruleset
    AND the service model would place the bucket name in the final URL,
    producing an invalid URL. This handler rewrites the operation model
    so it no longer contributes the bucket. The original value is kept
    in a separate "authPath" field, used by HmacV1Auth.canonical_resource
    in botocore/auth.py.

    The model mutation happens the first time the operation is invoked
    and stays in effect for the lifetime of the client object.
    """
    bucket_path = '/{Bucket}'
    req_uri = model.http['requestUri']
    if not req_uri.startswith(bucket_path):
        return
    model.http['requestUri'] = req_uri[len(bucket_path):]
    # The HmacV1Auth signer appends query params to the authPath itself,
    # so strip the query off before storing it.
    auth_path = req_uri.split('?')[0]
    if auth_path == bucket_path:
        # A bucket-only URI must be signed with a trailing '/' to
        # generate a signature the server accepts.
        auth_path += '/'
    model.http['authPath'] = auth_path

1099 

1100 

def remove_accid_host_prefix_from_model(params, model, context, **kwargs):
    """Removes the `{AccountId}.` prefix from the operation model.

    With ruleset based endpoint resolution, both the endpoint ruleset
    AND the service model would add the `{AccountId}.` prefix to the
    URL, producing an invalid endpoint. This handler deletes the
    `endpoint.hostPrefix` field while leaving the `RequiresAccountId`
    static context parameter in place.

    The model mutation happens the first time the operation is invoked
    and stays in effect for the lifetime of the client object.
    """
    requires_account_id = any(
        param.name == 'RequiresAccountId' and param.value is True
        for param in model.static_context_parameters
    )
    endpoint = model.endpoint
    if (
        requires_account_id
        and endpoint is not None
        and endpoint.get('hostPrefix') == '{AccountId}.'
    ):
        del endpoint['hostPrefix']

1124 

1125 

def remove_arn_from_signing_path(request, **kwargs):
    """Strip a leading URL-encoded ARN segment from the signing path.

    When the auth path begins with a percent-encoded ARN (``/arn%3A``)
    and that first segment decodes to a valid ARN, the segment is
    removed so it does not take part in signing.
    """
    auth_path = request.auth_path
    if not isinstance(auth_path, str):
        return
    if not auth_path.startswith('/arn%3A'):
        return
    segments = auth_path.split('/')
    if len(segments) > 1 and ArnParser.is_arn(unquote(segments[1])):
        # Rebuild the path without the ARN segment; segments[0] is ''.
        request.auth_path = '/'.join(['', *segments[2:]])

1134 

1135 

def customize_endpoint_resolver_builtins(
    builtins, model, params, context, **kwargs
):
    """Modify builtin parameter values for endpoint resolver

    Modifies the builtins dict in place. Changes are in effect for one call.
    The corresponding event is emitted only if at least one builtin parameter
    value is required for endpoint resolution for the operation.

    :param builtins: mapping of EndpointResolverBuiltins keys to values,
        mutated in place.
    :param model: the operation model for the current call.
    :param params: the call's (validated) parameters.
    :param context: the request context dict.
    """
    bucket_name = params.get('Bucket')
    bucket_is_arn = bucket_name is not None and ArnParser.is_arn(bucket_name)
    # In some situations the host will return AuthorizationHeaderMalformed
    # when the signing region of a sigv4 request is not the bucket's
    # region (which is likely unknown by the user of GetBucketLocation).
    # Avoid this by always using path-style addressing.
    if model.name == 'GetBucketLocation':
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = True
    # All situations where the bucket name is an ARN are not compatible
    # with path style addressing.
    elif bucket_is_arn:
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = False

    # Bucket names that are invalid host labels require path-style addressing.
    # If path-style addressing was specifically requested, the default builtin
    # value is already set.
    path_style_required = (
        bucket_name is not None and not VALID_HOST_LABEL_RE.match(bucket_name)
    )
    # NOTE: read AFTER the branch above so any value forced there (for
    # GetBucketLocation or an ARN bucket) is honored below.
    path_style_requested = builtins[
        EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE
    ]

    # Path-style addressing is incompatible with the global endpoint for
    # presigned URLs. If the bucket name is an ARN, the ARN's region should be
    # used in the endpoint.
    if (
        context.get('use_global_endpoint')
        and not path_style_required
        and not path_style_requested
        and not bucket_is_arn
        and not utils.is_s3express_bucket(bucket_name)
    ):
        builtins[EndpointResolverBuiltins.AWS_REGION] = 'aws-global'
        builtins[EndpointResolverBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT] = True

1180 

1181 

def remove_content_type_header_for_presigning(request, **kwargs):
    """Drop the Content-Type header when generating a presigned request."""
    is_presign = request.context.get('is_presign_request') is True
    if is_presign and 'Content-Type' in request.headers:
        del request.headers['Content-Type']

1188 

1189 

def handle_expires_header(
    operation_model, response_dict, customized_response_dict, **kwargs
):
    """Expose the raw Expires response header as ExpiresString.

    The raw header value is always copied into ExpiresString when the
    operation models an Expires member. When the value cannot be parsed
    as a timestamp, a warning is logged and the Expires header is
    removed so response parsing does not fail on it.
    """
    if not _has_expires_shape(operation_model.output_shape):
        return
    expires_value = response_dict.get('headers', {}).get('Expires')
    if not expires_value:
        return
    customized_response_dict['ExpiresString'] = expires_value
    try:
        utils.parse_timestamp(expires_value)
    except (ValueError, RuntimeError):
        logger.warning(
            f'Failed to parse the "Expires" member as a timestamp: {expires_value}. '
            f'The unparsed value is available in the response under "ExpiresString".'
        )
        del response_dict['headers']['Expires']

1204 

1205 

1206def _has_expires_shape(shape): 

1207 if not shape: 

1208 return False 

1209 return any( 

1210 member_shape.name == 'Expires' 

1211 and member_shape.serialization.get('name') == 'Expires' 

1212 for member_shape in shape.members.values() 

1213 ) 

1214 

1215 

def document_expires_shape(section, event_name, **kwargs):
    """Document the synthetic ``ExpiresString`` member for S3 responses.

    Updates the documentation for S3 operations that include the 'Expires'
    member in their response structure: documents a synthetic member
    'ExpiresString' and includes a deprecation notice for 'Expires'.
    """
    if 'response-example' in event_name:
        if not section.has_section('structure-value'):
            return
        parent = section.get_section('structure-value')
        if not parent.has_section('Expires'):
            return
        # Insert "'ExpiresString': 'string'," right after the Expires line.
        param_line = parent.get_section('Expires')
        param_line.add_new_section('ExpiresString')
        new_param_line = param_line.get_section('ExpiresString')
        new_param_line.write("'ExpiresString': 'string',")
        new_param_line.style.new_line()
    elif 'response-params' in event_name:
        if not section.has_section('Expires'):
            return
        param_section = section.get_section('Expires')
        # Add a deprecation notice for the "Expires" param
        doc_section = param_section.get_section('param-documentation')
        doc_section.style.start_note()
        doc_section.write(
            'This member has been deprecated. Please use ``ExpiresString`` instead.'
        )
        doc_section.style.end_note()
        # Document the "ExpiresString" param
        new_param_section = param_section.add_new_section('ExpiresString')
        new_param_section.style.new_paragraph()
        new_param_section.write('- **ExpiresString** *(string) --*')
        new_param_section.style.indent()
        new_param_section.style.new_paragraph()
        new_param_section.write(
            'The raw, unparsed value of the ``Expires`` field.'
        )

1251 

1252 

def _handle_200_error(operation_model, response_dict, **kwargs):
    """Convert an S3 "200 with embedded error" response into a 500.

    S3 can return a 200 response whose body carries an error document.
    Rewriting the status code to 500 lets ``_update_status_code`` and
    retry resolution treat it as a failure. Operations with streaming
    response blobs are excluded (via ``_should_handle_200_error``)
    because their bodies can't be reliably distinguished from an S3
    error document.
    """
    if not _should_handle_200_error(operation_model, response_dict):
        return
    status_code = response_dict['status_code']
    if _looks_like_special_case_error(status_code, response_dict['body']):
        response_dict['status_code'] = 500
        logger.debug(
            f"Error found for response with 200 status code: {response_dict['body']}."
        )

1267 

1268 

1269def _should_handle_200_error(operation_model, response_dict): 

1270 output_shape = operation_model.output_shape 

1271 if ( 

1272 not response_dict 

1273 or operation_model.has_event_stream_output 

1274 or not output_shape 

1275 ): 

1276 return False 

1277 payload = output_shape.serialization.get('payload') 

1278 if payload is not None: 

1279 payload_shape = output_shape.members[payload] 

1280 if payload_shape.type_name in ('blob', 'string'): 

1281 return False 

1282 return True 

1283 

1284 

1285def _update_status_code(response, **kwargs): 

1286 # Update the http_response status code when the parsed response has been 

1287 # modified in a handler. This enables retries for cases like ``_handle_200_error``. 

1288 if response is None: 

1289 return 

1290 http_response, parsed = response 

1291 parsed_status_code = parsed.get('ResponseMetadata', {}).get( 

1292 'HTTPStatusCode', http_response.status_code 

1293 ) 

1294 if http_response.status_code != parsed_status_code: 

1295 http_response.status_code = parsed_status_code 

1296 

1297 

1298def _handle_request_validation_mode_member(params, model, **kwargs): 

1299 client_config = kwargs.get("context", {}).get("client_config") 

1300 if client_config is None: 

1301 return 

1302 response_checksum_validation = client_config.response_checksum_validation 

1303 http_checksum = model.http_checksum 

1304 mode_member = http_checksum.get("requestValidationModeMember") 

1305 if ( 

1306 mode_member is not None 

1307 and response_checksum_validation == "when_supported" 

1308 ): 

1309 params.setdefault(mode_member, "ENABLED") 

1310 

1311 

def _set_extra_headers_for_unsigned_request(
    request, signature_version, **kwargs
):
    """Set the streaming-unsigned-trailer content hash header.

    When a checksum is sent in the trailer of an unsigned chunked
    request, S3 requires the "X-Amz-Content-SHA256" header to carry
    "STREAMING-UNSIGNED-PAYLOAD-TRAILER".
    """
    checksum_context = request.context.get("checksum", {})
    request_algorithm = checksum_context.get("request_algorithm", {})
    checksum_in_trailer = request_algorithm.get("in") == "trailer"
    if checksum_in_trailer and signature_version == botocore.UNSIGNED:
        request.headers["X-Amz-Content-SHA256"] = (
            "STREAMING-UNSIGNED-PAYLOAD-TRAILER"
        )

1323 

1324 

def _set_auth_scheme_preference_signer(context, signing_name, **kwargs):
    """
    Determines the appropriate signer to use based on the client configuration,
    authentication scheme preferences, and the availability of a bearer token.

    Returns the resolved signature version when it differs from the
    configured one; returns None to leave the current signer in place.
    """
    client_config = context.get('client_config')
    if client_config is None:
        return

    signature_version = client_config.signature_version
    auth_scheme_preference = client_config.auth_scheme_preference
    auth_options = context.get('auth_options')

    # ClientConfigString marks values the user set explicitly in code,
    # as opposed to defaults or environment/config-file values.
    signature_version_set_in_code = (
        isinstance(signature_version, ClientConfigString)
        or signature_version is botocore.UNSIGNED
    )
    auth_preference_set_in_code = isinstance(
        auth_scheme_preference, ClientConfigString
    )
    has_in_code_configuration = (
        signature_version_set_in_code or auth_preference_set_in_code
    )

    resolved_signature_version = signature_version

    # If signature version was not set in code, but an auth scheme preference
    # is available, resolve it based on the preferred schemes and supported auth
    # options for this service.
    if (
        not signature_version_set_in_code
        and auth_scheme_preference
        and auth_options
    ):
        preferred_schemes = auth_scheme_preference.split(',')
        resolved = botocore.auth.resolve_auth_scheme_preference(
            preferred_schemes, auth_options
        )
        # A resolved scheme of 'none' maps to sending unsigned requests.
        resolved_signature_version = (
            botocore.UNSIGNED if resolved == 'none' else resolved
        )

    # Prefer 'bearer' signature version if a bearer token is available, and it
    # is allowed for this service. This can override earlier resolution if the
    # config object didn't explicitly set a signature version.
    if _should_prefer_bearer_auth(
        has_in_code_configuration, signing_name, resolved_signature_version
    ):
        register_feature_id('BEARER_SERVICE_ENV_VARS')
        resolved_signature_version = 'bearer'

    # Returning None keeps whatever signer was already chosen.
    if resolved_signature_version == signature_version:
        return None
    return resolved_signature_version

1379 

1380 

def _should_prefer_bearer_auth(
    has_in_code_configuration, signing_name, resolved_signature_version
):
    """Return True when the bearer signature version should win.

    Bearer auth is preferred only for services that support it and for
    which an environment bearer token exists, and then only when either
    bearer was already the resolved scheme or no auth-related values
    were explicitly set in code.
    """
    if signing_name not in get_bearer_auth_supported_services():
        return False
    if get_token_from_environment(signing_name) is None:
        return False
    if resolved_signature_version == 'bearer':
        return True
    return not has_in_code_configuration

1395 

1396 

def get_bearer_auth_supported_services():
    """Return the set of signing names that allow bearer token auth.

    Entries correspond to each service's `signingName` property as
    defined in model.py (falling back to `endpointPrefix` when
    `signingName` is not set).

    Warning: This is a private interface and is subject to abrupt
    breaking changes, including removal, in any botocore release. It is
    not intended for external use, and its usage outside of botocore is
    not advised or supported.
    """
    return {'bedrock'}

1408 

1409 

1410# This is a list of (event_name, handler). 

1411# When a Session is created, everything in this list will be 

1412# automatically registered with that Session. 

1413 

1414BUILTIN_HANDLERS = [ 

1415 ('choose-service-name', handle_service_name_alias), 

1416 ( 

1417 'getattr.mturk.list_hi_ts_for_qualification_type', 

1418 ClientMethodAlias('list_hits_for_qualification_type'), 

1419 ), 

1420 ( 

1421 'getattr.socialmessaging.delete_whatsapp_media_message', 

1422 ClientMethodAlias('delete_whatsapp_message_media'), 

1423 ), 

1424 ( 

1425 'before-parameter-build.s3.UploadPart', 

1426 convert_body_to_file_like_object, 

1427 REGISTER_LAST, 

1428 ), 

1429 ( 

1430 'before-parameter-build.s3.PutObject', 

1431 convert_body_to_file_like_object, 

1432 REGISTER_LAST, 

1433 ), 

1434 ('creating-client-class', add_generate_presigned_url), 

1435 ('creating-client-class.s3', add_generate_presigned_post), 

1436 ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2), 

1437 ('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation), 

1438 ('creating-client-class.qbusiness', remove_qbusiness_chat), 

1439 ( 

1440 'creating-client-class.bedrock-runtime', 

1441 remove_bedrock_runtime_invoke_model_with_bidirectional_stream, 

1442 ), 

1443 ('after-call.iam', json_decode_policies), 

1444 ('after-call.ec2.GetConsoleOutput', decode_console_output), 

1445 ('after-call.cloudformation.GetTemplate', json_decode_template_body), 

1446 ('after-call.s3.GetBucketLocation', parse_get_bucket_location), 

1447 ('before-parse.s3.*', handle_expires_header), 

1448 ('before-parse.s3.*', _handle_200_error, REGISTER_FIRST), 

1449 ('before-parameter-build', generate_idempotent_uuid), 

1450 ('before-parameter-build', _handle_request_validation_mode_member), 

1451 ('before-parameter-build.s3', validate_bucket_name), 

1452 ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model), 

1453 ( 

1454 'before-parameter-build.s3.ListObjects', 

1455 set_list_objects_encoding_type_url, 

1456 ), 

1457 ( 

1458 'before-parameter-build.s3.ListObjectsV2', 

1459 set_list_objects_encoding_type_url, 

1460 ), 

1461 ( 

1462 'before-parameter-build.s3.ListObjectVersions', 

1463 set_list_objects_encoding_type_url, 

1464 ), 

1465 ('before-parameter-build.s3.CopyObject', handle_copy_source_param), 

1466 ('before-parameter-build.s3.UploadPartCopy', handle_copy_source_param), 

1467 ('before-parameter-build.s3.CopyObject', validate_ascii_metadata), 

1468 ('before-parameter-build.s3.PutObject', validate_ascii_metadata), 

1469 ( 

1470 'before-parameter-build.s3.CreateMultipartUpload', 

1471 validate_ascii_metadata, 

1472 ), 

1473 ('before-parameter-build.s3-control', remove_accid_host_prefix_from_model), 

1474 ('docs.*.s3.CopyObject.complete-section', document_copy_source_form), 

1475 ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form), 

1476 ('docs.response-example.s3.*.complete-section', document_expires_shape), 

1477 ('docs.response-params.s3.*.complete-section', document_expires_shape), 

1478 ('before-endpoint-resolution.s3', customize_endpoint_resolver_builtins), 

1479 ('before-call', add_recursion_detection_header), 

1480 ('before-call.s3', add_expect_header), 

1481 ('before-call.glacier', add_glacier_version), 

1482 ('before-call.apigateway', add_accept_header), 

1483 ('before-call.s3.DeleteObjects', escape_xml_payload), 

1484 ('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload), 

1485 ('before-call.glacier.UploadArchive', add_glacier_checksums), 

1486 ('before-call.glacier.UploadMultipartPart', add_glacier_checksums), 

1487 ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2), 

1488 ('request-created', add_retry_headers), 

1489 ('request-created.machinelearning.Predict', switch_host_machinelearning), 

1490 ('needs-retry.s3.*', _update_status_code, REGISTER_FIRST), 

1491 ('choose-signer.cognito-identity.GetId', disable_signing), 

1492 ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing), 

1493 ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing), 

1494 ( 

1495 'choose-signer.cognito-identity.GetCredentialsForIdentity', 

1496 disable_signing, 

1497 ), 

1498 ('choose-signer.sts.AssumeRoleWithSAML', disable_signing), 

1499 ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing), 

1500 ('choose-signer', set_operation_specific_signer), 

1501 ('choose-signer', _set_auth_scheme_preference_signer), 

1502 ('before-parameter-build.s3.HeadObject', sse_md5), 

1503 ('before-parameter-build.s3.GetObject', sse_md5), 

1504 ('before-parameter-build.s3.PutObject', sse_md5), 

1505 ('before-parameter-build.s3.CopyObject', sse_md5), 

1506 ('before-parameter-build.s3.CopyObject', copy_source_sse_md5), 

1507 ('before-parameter-build.s3.CreateMultipartUpload', sse_md5), 

1508 ('before-parameter-build.s3.UploadPart', sse_md5), 

1509 ('before-parameter-build.s3.UploadPartCopy', sse_md5), 

1510 ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5), 

1511 ('before-parameter-build.s3.CompleteMultipartUpload', sse_md5), 

1512 ('before-parameter-build.s3.SelectObjectContent', sse_md5), 

1513 ('before-parameter-build.ec2.RunInstances', base64_encode_user_data), 

1514 ( 

1515 'before-parameter-build.autoscaling.CreateLaunchConfiguration', 

1516 base64_encode_user_data, 

1517 ), 

1518 ('before-parameter-build.route53', fix_route53_ids), 

1519 ('before-parameter-build.glacier', inject_account_id), 

1520 ('before-sign.s3', remove_arn_from_signing_path), 

1521 ('before-sign.s3', _set_extra_headers_for_unsigned_request), 

1522 ( 

1523 'before-sign.polly.SynthesizeSpeech', 

1524 remove_content_type_header_for_presigning, 

1525 ), 

1526 ('after-call.s3.ListObjects', decode_list_object), 

1527 ('after-call.s3.ListObjectsV2', decode_list_object_v2), 

1528 ('after-call.s3.ListObjectVersions', decode_list_object_versions), 

1529 # Cloudsearchdomain search operation will be sent by HTTP POST 

1530 ('request-created.cloudsearchdomain.Search', change_get_to_post), 

1531 # Glacier documentation customizations 

1532 ( 

1533 'docs.*.glacier.*.complete-section', 

1534 AutoPopulatedParam( 

1535 'accountId', 

1536 'Note: this parameter is set to "-" by' 

1537 'default if no value is not specified.', 

1538 ).document_auto_populated_param, 

1539 ), 

1540 ( 

1541 'docs.*.glacier.UploadArchive.complete-section', 

1542 AutoPopulatedParam('checksum').document_auto_populated_param, 

1543 ), 

1544 ( 

1545 'docs.*.glacier.UploadMultipartPart.complete-section', 

1546 AutoPopulatedParam('checksum').document_auto_populated_param, 

1547 ), 

1548 ( 

1549 'docs.request-params.glacier.CompleteMultipartUpload.complete-section', 

1550 document_glacier_tree_hash_checksum(), 

1551 ), 

1552 # Cloudformation documentation customizations 

1553 ( 

1554 'docs.*.cloudformation.GetTemplate.complete-section', 

1555 document_cloudformation_get_template_return_type, 

1556 ), 

1557 # UserData base64 encoding documentation customizations 

1558 ( 

1559 'docs.*.ec2.RunInstances.complete-section', 

1560 document_base64_encoding('UserData'), 

1561 ), 

1562 ( 

1563 'docs.*.autoscaling.CreateLaunchConfiguration.complete-section', 

1564 document_base64_encoding('UserData'), 

1565 ), 

1566 # EC2 CopySnapshot documentation customizations 

1567 ( 

1568 'docs.*.ec2.CopySnapshot.complete-section', 

1569 AutoPopulatedParam('PresignedUrl').document_auto_populated_param, 

1570 ), 

1571 ( 

1572 'docs.*.ec2.CopySnapshot.complete-section', 

1573 AutoPopulatedParam('DestinationRegion').document_auto_populated_param, 

1574 ), 

1575 # S3 SSE documentation modifications 

1576 ( 

1577 'docs.*.s3.*.complete-section', 

1578 AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param, 

1579 ), 

1580 # S3 SSE Copy Source documentation modifications 

1581 ( 

1582 'docs.*.s3.*.complete-section', 

1583 AutoPopulatedParam( 

1584 'CopySourceSSECustomerKeyMD5' 

1585 ).document_auto_populated_param, 

1586 ), 

1587 # Add base64 information to Lambda 

1588 ( 

1589 'docs.*.lambda.UpdateFunctionCode.complete-section', 

1590 document_base64_encoding('ZipFile'), 

1591 ), 

1592 # The following S3 operations cannot actually accept a ContentMD5 

1593 ( 

1594 'docs.*.s3.*.complete-section', 

1595 HideParamFromOperations( 

1596 's3', 

1597 'ContentMD5', 

1598 [ 

1599 'DeleteObjects', 

1600 'PutBucketAcl', 

1601 'PutBucketCors', 

1602 'PutBucketLifecycle', 

1603 'PutBucketLogging', 

1604 'PutBucketNotification', 

1605 'PutBucketPolicy', 

1606 'PutBucketReplication', 

1607 'PutBucketRequestPayment', 

1608 'PutBucketTagging', 

1609 'PutBucketVersioning', 

1610 'PutBucketWebsite', 

1611 'PutObjectAcl', 

1612 ], 

1613 ).hide_param, 

1614 ), 

1615 ############# 

1616 # DSQL 

1617 ############# 

1618 ('creating-client-class.dsql', add_dsql_generate_db_auth_token_methods), 

1619 ############# 

1620 # RDS 

1621 ############# 

1622 ('creating-client-class.rds', add_generate_db_auth_token), 

1623 ('before-call.rds.CopyDBClusterSnapshot', inject_presigned_url_rds), 

1624 ('before-call.rds.CreateDBCluster', inject_presigned_url_rds), 

1625 ('before-call.rds.CopyDBSnapshot', inject_presigned_url_rds), 

1626 ('before-call.rds.CreateDBInstanceReadReplica', inject_presigned_url_rds), 

1627 ( 

1628 'before-call.rds.StartDBInstanceAutomatedBackupsReplication', 

1629 inject_presigned_url_rds, 

1630 ), 

1631 # RDS PresignedUrl documentation customizations 

1632 ( 

1633 'docs.*.rds.CopyDBClusterSnapshot.complete-section', 

1634 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1635 ), 

1636 ( 

1637 'docs.*.rds.CreateDBCluster.complete-section', 

1638 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1639 ), 

1640 ( 

1641 'docs.*.rds.CopyDBSnapshot.complete-section', 

1642 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1643 ), 

1644 ( 

1645 'docs.*.rds.CreateDBInstanceReadReplica.complete-section', 

1646 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1647 ), 

1648 ( 

1649 'docs.*.rds.StartDBInstanceAutomatedBackupsReplication.complete-section', 

1650 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1651 ), 

1652 ############# 

1653 # Neptune 

1654 ############# 

1655 ('before-call.neptune.CopyDBClusterSnapshot', inject_presigned_url_rds), 

1656 ('before-call.neptune.CreateDBCluster', inject_presigned_url_rds), 

1657 # Neptune PresignedUrl documentation customizations 

1658 ( 

1659 'docs.*.neptune.CopyDBClusterSnapshot.complete-section', 

1660 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1661 ), 

1662 ( 

1663 'docs.*.neptune.CreateDBCluster.complete-section', 

1664 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1665 ), 

1666 ############# 

1667 # DocDB 

1668 ############# 

1669 ('before-call.docdb.CopyDBClusterSnapshot', inject_presigned_url_rds), 

1670 ('before-call.docdb.CreateDBCluster', inject_presigned_url_rds), 

1671 # DocDB PresignedUrl documentation customizations 

1672 ( 

1673 'docs.*.docdb.CopyDBClusterSnapshot.complete-section', 

1674 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1675 ), 

1676 ( 

1677 'docs.*.docdb.CreateDBCluster.complete-section', 

1678 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1679 ), 

1680 ('before-call', inject_api_version_header_if_needed), 

1681] 

# Extend BUILTIN_HANDLERS with the parameter-alias handlers at import time.
# NOTE(review): presumably mutates the list in place so every client created
# afterwards sees the aliases — confirm against the _add_parameter_aliases
# definition earlier in this file.
_add_parameter_aliases(BUILTIN_HANDLERS)