Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/botocore/handlers.py: 22%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

612 statements  

1# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"). You 

4# may not use this file except in compliance with the License. A copy of 

5# the License is located at 

6# 

7# http://aws.amazon.com/apache2.0/ 

8# 

9# or in the "license" file accompanying this file. This file is 

10# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 

11# ANY KIND, either express or implied. See the License for the specific 

12# language governing permissions and limitations under the License. 

13 

14"""Builtin event handlers. 

15 

16This module contains builtin handlers for events emitted by botocore. 

17""" 

18 

19import base64 

20import copy 

21import logging 

22import os 

23import re 

24import uuid 

25import warnings 

26from io import BytesIO 

27 

28import botocore 

29import botocore.auth 

30from botocore import ( 

31 retryhandler, # noqa: F401 

32 translate, # noqa: F401 

33 utils, 

34) 

35from botocore.args import ClientConfigString 

36from botocore.compat import ( 

37 MD5_AVAILABLE, # noqa: F401 

38 ETree, 

39 OrderedDict, 

40 XMLParseError, 

41 ensure_bytes, 

42 get_md5, 

43 json, 

44 quote, 

45 unquote, 

46 unquote_str, 

47 urlsplit, 

48 urlunsplit, 

49) 

50from botocore.docs.utils import ( 

51 AppendParamDocumentation, 

52 AutoPopulatedParam, 

53 HideParamFromOperations, 

54) 

55from botocore.endpoint_provider import VALID_HOST_LABEL_RE 

56from botocore.exceptions import ( 

57 AliasConflictParameterError, 

58 MissingServiceIdError, # noqa: F401 

59 ParamValidationError, 

60 UnsupportedTLSVersionWarning, 

61) 

62from botocore.regions import EndpointResolverBuiltins 

63from botocore.signers import ( 

64 add_dsql_generate_db_auth_token_methods, 

65 add_generate_db_auth_token, 

66 add_generate_presigned_post, 

67 add_generate_presigned_url, 

68) 

69from botocore.useragent import register_feature_id 

70from botocore.utils import ( 

71 SAFE_CHARS, 

72 SERVICE_NAME_ALIASES, # noqa: F401 

73 ArnParser, 

74 get_token_from_environment, 

75 hyphenize_service_id, # noqa: F401 

76 is_global_accesspoint, # noqa: F401 

77 percent_encode, 

78 switch_host_with_param, 

79) 

80 

logger = logging.getLogger(__name__)

# Sentinels used by register() callers to pin a handler to the front or
# back of an event's handler list.
REGISTER_FIRST = object()
REGISTER_LAST = object()
# From the S3 docs:
# The rules for bucket names in the US Standard region allow bucket names
# to be as long as 255 characters, and bucket names can contain any
# combination of uppercase letters, lowercase letters, numbers, periods
# (.), hyphens (-), and underscores (_).
VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$')
# S3 / S3 Object Lambda access point ARN, e.g.
# arn:aws:s3:us-west-2:123456789012:accesspoint/my-ap
_ACCESSPOINT_ARN = (
    r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]*:[0-9]{12}:accesspoint[/:]'
    r'[a-zA-Z0-9\-.]{1,63}$'
)
# S3 on Outposts access point ARN, e.g.
# arn:aws:s3-outposts:us-west-2:123456789012:outpost/op-123/accesspoint/my-ap
_OUTPOST_ARN = (
    r'^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]'
    r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$'
)
# Accepts either of the two ARN shapes above.
VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN]))
# signing names used for the services s3 and s3-control, for example in
# botocore/data/s3/2006-03-01/endpoints-rule-set-1.json
S3_SIGNING_NAMES = ('s3', 's3-outposts', 's3-object-lambda', 's3express')
# Matches a trailing "?versionId=..." suffix on a CopySource string.
VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$')

104 

105 

def handle_service_name_alias(service_name, **kwargs):
    """Resolve a deprecated/aliased service name to its canonical form."""
    if service_name in SERVICE_NAME_ALIASES:
        return SERVICE_NAME_ALIASES[service_name]
    return service_name

108 

109 

def add_recursion_detection_header(params, **kwargs):
    """Propagate the X-Ray trace id on requests made from inside Lambda.

    Only acts when both AWS_LAMBDA_FUNCTION_NAME and _X_AMZN_TRACE_ID are
    present in the environment, and never overwrites a trace header the
    caller already supplied.
    """
    if 'AWS_LAMBDA_FUNCTION_NAME' not in os.environ:
        return
    trace_id = os.environ.get('_X_AMZN_TRACE_ID')
    if not trace_id:
        return
    headers = params['headers']
    if 'X-Amzn-Trace-Id' not in headers:
        headers['X-Amzn-Trace-Id'] = quote(trace_id, safe='-=;:+&[]{}"\',')

117 

118 

def escape_xml_payload(params, **kwargs):
    """Escape raw CR/LF bytes in an XML request body as character references."""
    # Replace \r and \n with the escaped sequence over the whole XML document
    # to avoid linebreak normalization modifying customer input when the
    # document is parsed. Ideally, we would do this in ElementTree.tostring,
    # but it doesn't allow us to override entity escaping for text fields. For
    # this operation \r and \n can only appear in the XML document if they were
    # passed as part of the customer input.
    body = params['body']
    # NOTE: the replacement targets must be the *escaped* character
    # references; replacing a byte with itself would be a no-op and the
    # literal control characters would survive into the document.
    if b'\r' in body:
        body = body.replace(b'\r', b'&#xD;')
    if b'\n' in body:
        body = body.replace(b'\n', b'&#xA;')

    params['body'] = body

133 

134 

def check_for_200_error(response, **kwargs):
    """This function has been deprecated, but is kept for backwards compatibility."""
    # Per http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html
    # a copy request can fail *after* S3 has returned 200 OK; in that case
    # the error document is embedded in the 200 response body.  Conceptually
    # such a response must be handled exactly like a 500 (retries,
    # exceptions, etc.).  Since this handler is connected before the other
    # retry logic handlers, flipping the status code to 500 is enough to
    # trigger the normal retry path.
    if response is None:
        # A None response can happen if an exception is raised while
        # trying to retrieve the response.  See Endpoint._get_response().
        return
    http_response, parsed = response
    is_embedded_error = _looks_like_special_case_error(
        http_response.status_code, http_response.content
    )
    if is_embedded_error:
        logger.debug(
            "Error found for response with 200 status code, "
            "errors: %s, changing status code to "
            "500.",
            parsed,
        )
        http_response.status_code = 500

167 

168 

def _looks_like_special_case_error(status_code, body):
    """Return True when a 200 response body is actually an S3 error document."""
    if status_code != 200 or not body:
        return False
    try:
        parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8')
        parser.feed(body)
        root = parser.close()
    except XMLParseError:
        # In cases of network disruptions, we may end up with a partial
        # streamed response from S3.  We need to treat these cases as
        # 500 Service Errors and try again.
        return True
    return root.tag == 'Error'

185 

186 

def set_operation_specific_signer(context, signing_name, **kwargs):
    """Choose the operation-specific signer.

    Individual operations may have a different auth type than the service as a
    whole. This will most often manifest as operations that should not be
    authenticated at all, but can include other auth modes such as sigv4
    without body signing.

    Returns the signature version string to use, botocore.UNSIGNED, or None
    to fall back to the service-level default.  May also mutate ``context``
    (payload signing flag, sigv4a signing config) as a side effect.
    """
    auth_type = context.get('auth_type')

    # Auth type will be None if the operation doesn't have a configured auth
    # type.
    if not auth_type:
        return

    # Auth type will be the string value 'none' if the operation should not
    # be signed at all.
    if auth_type == 'none':
        return botocore.UNSIGNED

    if auth_type == 'bearer':
        return 'bearer'

    # If the operation needs an unsigned body, we set additional context
    # allowing the signer to be aware of this.
    if context.get('unsigned_payload') or auth_type == 'v4-unsigned-body':
        context['payload_signing_enabled'] = False

    if auth_type.startswith('v4'):
        if auth_type == 'v4-s3express':
            return auth_type

        if auth_type == 'v4a':
            # If sigv4a is chosen, we must add additional signing config for
            # global signature.
            region = _resolve_sigv4a_region(context)
            signing = {'region': region, 'signing_name': signing_name}
            if 'signing' in context:
                # Merge rather than replace so previously-set signing
                # context (if any) is preserved.
                context['signing'].update(signing)
            else:
                context['signing'] = signing
            signature_version = 'v4a'
        else:
            signature_version = 'v4'

        # Signing names used by s3 and s3-control use customized signers "s3v4"
        # and "s3v4a".
        if signing_name in S3_SIGNING_NAMES:
            signature_version = f's3{signature_version}'

        return signature_version

238 

239 

def _handle_sqs_compatible_error(parsed, context, **kwargs):
    """
    Ensures backward compatibility for SQS errors.

    SQS's migration from the Query protocol to JSON was done prior to SDKs allowing a
    service to support multiple protocols. Because of this, SQS is missing the "error"
    key from its modeled exceptions, which is used by most query compatible services
    to map error codes to the proper exception. Instead, SQS uses the error's shape name,
    which is preserved in the QueryErrorCode key.
    """
    error = parsed.get("Error")
    if not error:
        return
    query_code = error.get("QueryErrorCode")
    if query_code:
        context['error_code_override'] = query_code

256 

257 

def _resolve_sigv4a_region(context):
    """Pick the sigv4a signing region set.

    Preference order: the client config's sigv4a_signing_region_set, then a
    region already present in the signing context, then the '*' wildcard.
    """
    region = None
    if 'client_config' in context:
        region = context['client_config'].sigv4a_signing_region_set
    if not region:
        signing_region = context.get('signing', {}).get('region')
        if signing_region:
            region = signing_region
    return region or '*'

265 

266 

def decode_console_output(parsed, **kwargs):
    """Base64-decode the EC2 console 'Output' field in place."""
    if 'Output' not in parsed:
        return
    try:
        # We're using 'replace' for errors because it is possible that
        # console output contains non string chars we can't utf-8 decode.
        raw = bytes(parsed['Output'], 'latin-1')
        parsed['Output'] = base64.b64decode(raw).decode('utf-8', 'replace')
    except (ValueError, TypeError, AttributeError):
        logger.debug('Error decoding base64', exc_info=True)

279 

280 

def generate_idempotent_uuid(params, model, **kwargs):
    """Fill in a random UUID for any idempotency-token member left unset."""
    for member in model.idempotent_members:
        if member in params:
            continue
        token = str(uuid.uuid4())
        params[member] = token
        logger.debug(
            "injecting idempotency token (%s) into param '%s'.",
            token,
            member,
        )

290 

291 

def decode_quoted_jsondoc(value):
    """Decode a URL-quoted JSON document, returning the input unchanged
    when it does not parse."""
    try:
        decoded = json.loads(unquote(value))
    except (ValueError, TypeError):
        logger.debug('Error loading quoted JSON', exc_info=True)
        return value
    return decoded

298 

299 

def json_decode_template_body(parsed, **kwargs):
    """Parse CloudFormation's TemplateBody string into an OrderedDict,
    leaving it untouched when it is not valid JSON."""
    if 'TemplateBody' not in parsed:
        return
    try:
        decoded = json.loads(
            parsed['TemplateBody'], object_pairs_hook=OrderedDict
        )
    except (ValueError, TypeError):
        logger.debug('error loading JSON', exc_info=True)
        return
    parsed['TemplateBody'] = decoded

309 

310 

def validate_bucket_name(params, **kwargs):
    """Reject Bucket values that are neither a valid bucket name nor a
    supported S3 access point / outposts ARN.

    Raises ParamValidationError on an invalid value; no-op when Bucket is
    absent.
    """
    if 'Bucket' not in params:
        return
    bucket = params['Bucket']
    if VALID_BUCKET.search(bucket) or VALID_S3_ARN.search(bucket):
        return
    error_msg = (
        f'Invalid bucket name "{bucket}": Bucket name must match '
        f'the regex "{VALID_BUCKET.pattern}" or be an ARN matching '
        f'the regex "{VALID_S3_ARN.pattern}"'
    )
    raise ParamValidationError(report=error_msg)

322 

323 

def sse_md5(params, **kwargs):
    """Populate the SSE-C key encoding and MD5 members when missing.

    S3 server-side encryption with customer keys requires the key to be
    sent base64 encoded along with a base64-encoded MD5 hash of the key;
    this fills in both if the caller has not already set the MD5.
    """
    _sse_md5(params, 'SSECustomer')

332 

333 

def copy_source_sse_md5(params, **kwargs):
    """Populate the copy-source SSE-C key encoding and MD5 when missing.

    Identical to ``sse_md5`` but operates on the CopySourceSSECustomer*
    members used by copy operations.
    """
    _sse_md5(params, 'CopySourceSSECustomer')

342 

343 

def _sse_md5(params, sse_member_prefix='SSECustomer'):
    """Base64-encode params[<prefix>Key] and fill in <prefix>KeyMD5."""
    if not _needs_s3_sse_customization(params, sse_member_prefix):
        return

    key_member = sse_member_prefix + 'Key'
    md5_member = sse_member_prefix + 'KeyMD5'
    raw_key = params[key_member]
    if isinstance(raw_key, str):
        raw_key = raw_key.encode('utf-8')
    # usedforsecurity=False: the MD5 here is an integrity check, not a
    # cryptographic protection, so it is allowed on FIPS builds.
    digest = get_md5(raw_key, usedforsecurity=False).digest()
    params[key_member] = base64.b64encode(raw_key).decode('utf-8')
    params[md5_member] = base64.b64encode(digest).decode('utf-8')

358 

359 

def _needs_s3_sse_customization(params, sse_member_prefix):
    """True when an SSE-C key is present but its MD5 member is not."""
    has_key = params.get(sse_member_prefix + 'Key') is not None
    has_md5 = sse_member_prefix + 'KeyMD5' in params
    return has_key and not has_md5

365 

366 

def disable_signing(**kwargs):
    """Return the sentinel that tells botocore to skip request signing."""
    return botocore.UNSIGNED

373 

374 

def add_expect_header(model, params, **kwargs):
    """Add Expect: 100-continue for streaming PUT/POST request bodies.

    When the experimental BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE environment
    variable is truthy, bodies with a known length of zero are skipped.
    """
    if model.http.get('method', '') not in ('PUT', 'POST'):
        return
    if 'body' not in params:
        return
    body = params['body']
    if not hasattr(body, 'read'):
        return
    skip_if_empty = utils.ensure_boolean(
        os.environ.get(
            'BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE',
            False,
        )
    )
    if skip_if_empty and utils.determine_content_length(body) == 0:
        return
    # Any file like object will use an expect 100-continue
    # header regardless of size.
    logger.debug("Adding expect 100 continue header to request.")
    params['headers']['Expect'] = '100-continue'

393 

394 

class DeprecatedServiceDocumenter:
    """Injects a deprecation notice pointing at a replacement service."""

    def __init__(self, replacement_service_name):
        # Name shown (and linked) in the generated deprecation notice.
        self._replacement_service_name = replacement_service_name

    def inject_deprecation_notice(self, section, event_name, **kwargs):
        """Write an 'important' admonition naming the replacement client."""
        replacement = self._replacement_service_name
        section.style.start_important()
        section.write('This service client is deprecated. Please use ')
        section.style.ref(replacement, replacement)
        section.write(' instead.')
        section.style.end_important()

408 

409 

def document_copy_source_form(section, event_name, **kwargs):
    # Rewrite the generated CopyObject/UploadPartCopy docs so the CopySource
    # parameter is documented as accepting either a string or a
    # {'Bucket', 'Key', 'VersionId'} dictionary, matching
    # handle_copy_source_param's runtime behavior.
    if 'request-example' in event_name:
        parent = section.get_section('structure-value')
        param_line = parent.get_section('CopySource')
        value_portion = param_line.get_section('member-value')
        value_portion.clear_text()
        value_portion.write(
            "'string' or {'Bucket': 'string', "
            "'Key': 'string', 'VersionId': 'string'}"
        )
    elif 'request-params' in event_name:
        param_section = section.get_section('CopySource')
        type_section = param_section.get_section('param-type')
        type_section.clear_text()
        type_section.write(':type CopySource: str or dict')
        doc_section = param_section.get_section('param-documentation')
        doc_section.clear_text()
        doc_section.write(
            "The name of the source bucket, key name of the source object, "
            "and optional version ID of the source object. You can either "
            "provide this value as a string or a dictionary. The "
            "string form is {bucket}/{key} or "
            "{bucket}/{key}?versionId={versionId} if you want to copy a "
            "specific version. You can also provide this value as a "
            "dictionary. The dictionary format is recommended over "
            "the string format because it is more explicit. The dictionary "
            "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}."
            " Note that the VersionId key is optional and may be omitted."
            " To specify an S3 access point, provide the access point"
            " ARN for the ``Bucket`` key in the copy source dictionary. If you"
            " want to provide the copy source for an S3 access point as a"
            " string instead of a dictionary, the ARN provided must be the"
            " full S3 access point object ARN"
            " (i.e. {accesspoint_arn}/object/{key})"
        )

445 

446 

def handle_copy_source_param(params, **kwargs):
    """Convert CopySource param for CopyObject/UploadPartCopy.

    A string CopySource gets its key portion percent-encoded (any trailing
    versionId suffix is preserved verbatim); a dict CopySource is
    serialized from its Bucket/Key/VersionId members with the key encoded.
    A missing or unexpectedly-typed value is left alone so the parameter
    validator can produce its (better) error message.
    """
    source = params.get('CopySource')
    if source is None:
        # The call will eventually fail but we'll let the
        # param validator take care of this.  It will
        # give a better error message.
        return
    if isinstance(source, str):
        params['CopySource'] = _quote_source_header(source)
    elif isinstance(source, dict):
        params['CopySource'] = _quote_source_header_from_dict(source)

472 

473 

def _quote_source_header_from_dict(source_dict):
    """Serialize a {'Bucket', 'Key', 'VersionId'} copy source into the
    header string S3 expects, percent-encoding everything except '/'.

    Raises ParamValidationError when Bucket or Key is missing.
    """
    try:
        bucket = source_dict['Bucket']
        key = source_dict['Key']
        version_id = source_dict.get('VersionId')
    except KeyError as e:
        raise ParamValidationError(
            report=f'Missing required parameter: {str(e)}'
        )
    # Access point ARNs address the object through an '/object/' segment.
    if VALID_S3_ARN.search(bucket):
        final = f'{bucket}/object/{key}'
    else:
        final = f'{bucket}/{key}'
    final = percent_encode(final, safe=SAFE_CHARS + '/')
    if version_id is not None:
        final += f'?versionId={version_id}'
    return final

491 

492 

def _quote_source_header(value):
    """Percent-encode a string copy source, preserving any trailing
    '?versionId=...' suffix verbatim."""
    match = VERSION_ID_SUFFIX.search(value)
    if match is None:
        return percent_encode(value, safe=SAFE_CHARS + '/')
    prefix = value[: match.start()]
    suffix = value[match.start() :]
    return percent_encode(prefix, safe=SAFE_CHARS + '/') + suffix

500 

501 

def _get_cross_region_presigned_url(
    request_signer, request_dict, model, source_region, destination_region
):
    """Generate a presigned GET for a copy of request_dict signed for
    source_region.

    The better way to do this is to actually get the endpoint_resolver and
    get the endpoint_url given the source region.  In this specific case we
    know that we can safely replace the dest region with the source region
    because of the supported EC2 regions, but in general this is not a safe
    assumption to make.  Eventually we should plumb through something that
    allows us to resolve endpoints from regions.
    """
    presign_dict = copy.deepcopy(request_dict)
    presign_dict['body']['DestinationRegion'] = destination_region
    presign_dict['url'] = request_dict['url'].replace(
        destination_region, source_region
    )
    presign_dict['method'] = 'GET'
    presign_dict['headers'] = {}
    return request_signer.generate_presigned_url(
        presign_dict, region_name=source_region, operation_name=model.name
    )

523 

524 

def _get_presigned_url_source_and_destination_regions(request_signer, params):
    """Return (source_region, destination_region) for a cross-region copy.

    The signer's configured region is the destination; the source comes
    from the request parameters and may be None.
    """
    return params.get('SourceRegion'), request_signer._region_name

530 

531 

def inject_presigned_url_ec2(params, request_signer, model, **kwargs):
    """Auto-populate PresignedUrl (and DestinationRegion) for EC2
    cross-region copy operations."""
    body = params['body']
    # The customer can still provide this, so we should pass if they do.
    if 'PresignedUrl' in body:
        return
    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )
    body['PresignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )
    # EC2 Requires that the destination region be sent over the wire in
    # addition to the source region.
    body['DestinationRegion'] = dest

546 

547 

def inject_presigned_url_rds(params, request_signer, model, **kwargs):
    """Auto-populate PreSignedUrl for RDS cross-region copy operations."""
    body = params['body']
    # SourceRegion is not required for RDS operations, so it's possible that
    # it isn't set.  In that case it's probably a local copy so we don't
    # need to do anything else.
    if 'SourceRegion' not in body:
        return

    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )

    # Since SourceRegion isn't actually modeled for RDS, it needs to be
    # removed from the request params before we send the actual request.
    del body['SourceRegion']

    # The customer may have supplied their own presigned URL.
    if 'PreSignedUrl' in body:
        return

    body['PreSignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )

570 

571 

def json_decode_policies(parsed, model, **kwargs):
    """Decode urlencoded-JSON IAM policy documents into dictionaries.

    Any time an IAM operation returns a policy document it is a string of
    JSON that has been urlencoded, i.e. urlencode(json.dumps(doc)).  To
    give users something more useful, urldecode and json.loads() every
    such member so they get the policy document as a dictionary.
    """
    output_shape = model.output_shape
    if output_shape is None:
        return
    _decode_policy_types(parsed, output_shape)

582 

583 

def _decode_policy_types(parsed, shape):
    # Recursively walk the parsed response alongside its output shape and
    # JSON-decode every string member whose shape is policyDocumentType.
    # IAM consistently uses the policyDocumentType shape to indicate
    # strings that have policy documents.
    shape_name = 'policyDocumentType'
    if shape.type_name == 'structure':
        for member_name, member_shape in shape.members.items():
            if (
                member_shape.type_name == 'string'
                and member_shape.name == shape_name
                and member_name in parsed
            ):
                parsed[member_name] = decode_quoted_jsondoc(
                    parsed[member_name]
                )
            elif member_name in parsed:
                # Non-policy member: keep descending in case a policy
                # document is nested deeper.
                _decode_policy_types(parsed[member_name], member_shape)
    if shape.type_name == 'list':
        shape_member = shape.member
        for item in parsed:
            _decode_policy_types(item, shape_member)

604 

605 

def parse_get_bucket_location(parsed, http_response, **kwargs):
    """Manually fill in LocationConstraint for s3.GetBucketLocation.

    GetBucketLocation cannot be modeled properly, so the "parsed" argument
    only has ResponseMetadata filled out.  Parse the raw XML body here and
    set the LocationConstraint value ourselves.
    """
    if http_response.raw is None:
        return
    xml_parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8')
    xml_parser.feed(http_response.content)
    root = xml_parser.close()
    parsed['LocationConstraint'] = root.text

620 

621 

def base64_encode_user_data(params, **kwargs):
    """Base64-encode the UserData parameter, UTF-8 encoding text first."""
    if 'UserData' not in params:
        return
    user_data = params['UserData']
    if isinstance(user_data, str):
        # Encode it to bytes if it is text.
        user_data = user_data.encode('utf-8')
    params['UserData'] = base64.b64encode(user_data).decode('utf-8')

630 

631 

def document_base64_encoding(param):
    """Return a doc handler that appends the automatic-base64 note to a
    parameter's documentation."""
    description = (
        '**This value will be base64 encoded automatically. Do '
        'not base64 encode this value prior to performing the '
        'operation.**'
    )
    return AppendParamDocumentation(param, description).append_documentation

640 

641 

def validate_ascii_metadata(params, **kwargs):
    """Verify S3 Metadata only contains ascii characters.

    From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html

    "Amazon S3 stores user-defined metadata in lowercase. Each name, value pair
    must conform to US-ASCII when using REST and UTF-8 when using SOAP or
    browser-based uploads via POST."
    """
    metadata = params.get('Metadata')
    if not metadata or not isinstance(metadata, dict):
        # We have to at least type check the metadata as a dict type
        # because this handler is called before param validation.
        # We'll go ahead and return because the param validator will
        # give a descriptive error message for us.
        # We might need a post-param validation event.
        return
    for key, value in metadata.items():
        try:
            key.encode('ascii')
            value.encode('ascii')
        except UnicodeEncodeError:
            raise ParamValidationError(
                report=(
                    'Non ascii characters found in S3 metadata '
                    f'for key "{key}", value: "{value}". \nS3 metadata can only '
                    'contain ASCII characters. '
                )
            )

671 

672 

def fix_route53_ids(params, model, **kwargs):
    """
    Check for and split apart Route53 resource IDs, setting
    only the last piece. This allows the output of one operation
    (e.g. ``'foo/1234'``) to be used as input in another
    operation (e.g. it expects just ``'1234'``).
    """
    input_shape = model.input_shape
    if not input_shape or not hasattr(input_shape, 'members'):
        return

    id_shape_names = ('ResourceId', 'DelegationSetId', 'ChangeId')
    for param_name, member_shape in input_shape.members.items():
        if member_shape.name not in id_shape_names:
            continue
        if param_name not in params:
            continue
        orig_value = params[param_name]
        params[param_name] = orig_value.split('/')[-1]
        logger.debug('%s %s -> %s', param_name, orig_value, params[param_name])

695 

696 

def inject_account_id(params, **kwargs):
    """Default Glacier's required accountId parameter to '-'.

    Glacier requires accountId but accepts '-' to mean the calling
    account; add that default as a convenience when the user omits it.
    """
    if params.get('accountId') is None:
        params['accountId'] = '-'

704 

705 

def add_glacier_version(model, params, **kwargs):
    """Set the x-amz-glacier-version header from the service model."""
    api_version = model.metadata['apiVersion']
    params['headers']['x-amz-glacier-version'] = api_version

711 

712 

def add_accept_header(model, params, **kwargs):
    """Default the Accept header to application/json when unset."""
    headers = params['headers']
    if headers.get('Accept', None) is None:
        headers['Accept'] = 'application/json'

717 

718 

def add_glacier_checksums(params, **kwargs):
    """Add glacier checksums to the http request.

    This will add two headers to the http request:

        * x-amz-content-sha256
        * x-amz-sha256-tree-hash

    These values will only be added if they are not present
    in the HTTP request.

    """
    request_dict = params
    headers = request_dict['headers']
    body = request_dict['body']
    if isinstance(body, bytes):
        # If the user provided a bytes type instead of a file
        # like object, we're temporarily create a BytesIO object
        # so we can use the util functions to calculate the
        # checksums which assume file like objects.  Note that
        # we're not actually changing the body in the request_dict.
        body = BytesIO(body)
    # Remember where the stream started so it can be rewound after each
    # full read below; the request machinery still needs to read the body.
    starting_position = body.tell()
    if 'x-amz-content-sha256' not in headers:
        headers['x-amz-content-sha256'] = utils.calculate_sha256(
            body, as_hex=True
        )
    body.seek(starting_position)
    if 'x-amz-sha256-tree-hash' not in headers:
        headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
    body.seek(starting_position)

750 

751 

def document_glacier_tree_hash_checksum():
    # Returns a doc handler that appends usage guidance for Glacier's
    # tree-hash checksum parameter.
    doc = '''
        This is a required field.

        Ideally you will want to compute this value with checksums from
        previous uploaded parts, using the algorithm described in
        `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_.

        But if you prefer, you can also use botocore.utils.calculate_tree_hash()
        to compute it from raw file by::

            checksum = calculate_tree_hash(open('your_file.txt', 'rb'))

    '''
    return AppendParamDocumentation('checksum', doc).append_documentation

767 

768 

def document_cloudformation_get_template_return_type(
    section, event_name, **kwargs
):
    """Fix generated docs so GetTemplate's TemplateBody reads as a dict
    (it is JSON-decoded by json_decode_template_body at runtime)."""
    if 'response-params' in event_name:
        type_section = section.get_section('TemplateBody').get_section(
            'param-type'
        )
        type_section.clear_text()
        type_section.write('(*dict*) --')
    elif 'response-example' in event_name:
        value_section = (
            section.get_section('structure-value')
            .get_section('TemplateBody')
            .get_section('member-value')
        )
        value_section.clear_text()
        value_section.write('{}')

783 

784 

def switch_host_machinelearning(request, **kwargs):
    # MachineLearning Predict calls must be sent to the host named in the
    # request's PredictEndpoint parameter rather than the default endpoint.
    switch_host_with_param(request, 'PredictEndpoint')

787 

788 

def check_openssl_supports_tls_version_1_2(**kwargs):
    """Warn when the linked OpenSSL is too old to speak TLS 1.2."""
    import ssl

    try:
        openssl_version_tuple = ssl.OPENSSL_VERSION_INFO
    except AttributeError:
        # We cannot check the openssl version on python2.6, so we should
        # just pass on this conveniency check.
        return
    if openssl_version_tuple < (1, 0, 1):
        warnings.warn(
            f'Currently installed openssl version: {ssl.OPENSSL_VERSION} does not '
            'support TLS 1.2, which is required for use of iot-data. '
            'Please use python installed with openssl version 1.0.1 or '
            'higher.',
            UnsupportedTLSVersionWarning,
        )

806 

807 

def change_get_to_post(request, **kwargs):
    """Convert a query-string GET into a form-encoded POST.

    This is useful when we need to change a potentially large GET request
    into a POST with x-www-form-urlencoded encoding.
    """
    if request.method != 'GET' or '?' not in request.url:
        return
    request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
    request.method = 'POST'
    request.url, request.data = request.url.split('?', 1)

815 

816 

def set_list_objects_encoding_type_url(params, context, **kwargs):
    """Ask S3 for url-encoded keys when the caller didn't choose an encoding.

    Records in the context that the encoding was auto-set, so the response
    handlers know it is safe to decode the keys back.
    """
    if 'EncodingType' in params:
        return
    # We set this context so that we know it wasn't the customer that
    # requested the encoding.
    context['encoding_type_auto_set'] = True
    params['EncodingType'] = 'url'

823 

824 

def decode_list_object(parsed, context, **kwargs):
    """URL-decode the encoded members of a ListObjects response.

    Needed because botocore passes url as the encoding type: the paginator
    is keyed off key names, so they must be decoded before being round
    tripped.  Per the S3 docs, with encoding-type set the encoded response
    elements are Delimiter, Marker, Prefix, NextMarker and Key.
    """
    _decode_list_object(
        top_level_keys=['Delimiter', 'Marker', 'NextMarker'],
        nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
        parsed=parsed,
        context=context,
    )

840 

841 

def decode_list_object_v2(parsed, context, **kwargs):
    """URL-decode the encoded members of a ListObjectsV2 response.

    Per the S3 docs, with encoding-type set the encoded response elements
    are Delimiter, Prefix, ContinuationToken, Key and StartAfter.
    """
    _decode_list_object(
        top_level_keys=['Delimiter', 'Prefix', 'StartAfter'],
        nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
        parsed=parsed,
        context=context,
    )

853 

854 

def decode_list_object_versions(parsed, context, **kwargs):
    """URL-decode a ListObjectVersions response whose encoding botocore injected.

    Per the S3 documentation, with encoding-type=url the encoded response
    elements are KeyMarker, NextKeyMarker, Prefix, Key, and Delimiter.
    """
    _decode_list_object(
        ['KeyMarker', 'NextKeyMarker', 'Prefix', 'Delimiter'],
        [
            ('Versions', 'Key'),
            ('DeleteMarkers', 'Key'),
            ('CommonPrefixes', 'Prefix'),
        ],
        parsed,
        context,
    )

875 

876 

877def _decode_list_object(top_level_keys, nested_keys, parsed, context): 

878 if parsed.get('EncodingType') == 'url' and context.get( 

879 'encoding_type_auto_set' 

880 ): 

881 # URL decode top-level keys in the response if present. 

882 for key in top_level_keys: 

883 if key in parsed: 

884 parsed[key] = unquote_str(parsed[key]) 

885 # URL decode nested keys from the response if present. 

886 for top_key, child_key in nested_keys: 

887 if top_key in parsed: 

888 for member in parsed[top_key]: 

889 member[child_key] = unquote_str(member[child_key]) 

890 

891 

def convert_body_to_file_like_object(params, **kwargs):
    """Wrap a str or bytes ``Body`` parameter in a ``BytesIO`` stream.

    Strings are converted to bytes via ``ensure_bytes`` first; file-like
    bodies (and an absent ``Body``) are left untouched.
    """
    body = params.get('Body')
    if isinstance(body, str):
        params['Body'] = BytesIO(ensure_bytes(body))
    elif isinstance(body, bytes):
        params['Body'] = BytesIO(body)

898 

899 

def _add_parameter_aliases(handler_list):
    """Append handlers that expose friendlier aliases for parameters.

    The alias map keys are ``<service>.<operation>.<parameter>`` (service
    and operation may be ``*``); the first two parts select the event, the
    last is the original parameter name, and the value is the alias to
    expose. For each alias two handlers are appended: one rewrites the
    alias back to the modeled name when the user calls the operation, the
    other rewrites generated documentation to show only the alias.
    """
    aliases = {
        'ec2.*.Filter': 'Filters',
        'logs.CreateExportTask.from': 'fromTime',
        'cloudsearchdomain.Search.return': 'returnFields',
    }

    for original, new_name in aliases.items():
        event_portion, original_name = original.rsplit('.', 1)
        alias = ParameterAlias(original_name, new_name)
        # Accept the alias in user calls.
        handler_list.append(
            (
                f'before-parameter-build.{event_portion}',
                alias.alias_parameter_in_call,
                REGISTER_FIRST,
            )
        )
        # Show only the alias in the generated documentation.
        handler_list.append(
            (
                f'docs.*.{event_portion}.complete-section',
                alias.alias_parameter_in_documentation,
            )
        )

931 

932 

class ParameterAlias:
    """Expose an alternate (alias) name for a modeled parameter."""

    def __init__(self, original_name, alias_name):
        self._original_name = original_name
        self._alias_name = alias_name

    def alias_parameter_in_call(self, params, model, **kwargs):
        """Move a value supplied under the alias to the modeled name.

        Raises ``AliasConflictParameterError`` when the caller supplied
        both the alias and the original name.
        """
        input_shape = model.input_shape
        if not input_shape:
            return
        # Only honor the alias when the original name is actually modeled.
        if self._original_name not in input_shape.members:
            return
        if self._alias_name not in params:
            return
        if self._original_name in params:
            raise AliasConflictParameterError(
                original=self._original_name,
                alias=self._alias_name,
                operation=model.name,
            )
        # Remove the alias parameter value and use the old name instead.
        params[self._original_name] = params.pop(self._alias_name)

    def alias_parameter_in_documentation(self, event_name, section, **kwargs):
        """Rewrite generated docs so only the alias name is shown."""
        if event_name.startswith('docs.request-params'):
            if self._original_name not in section.available_sections:
                return
            param_section = section.get_section(self._original_name)
            # Replace the name in both the type and description lines.
            self._replace_content(param_section.get_section('param-type'))
            self._replace_content(param_section.get_section('param-name'))
        elif event_name.startswith('docs.request-example'):
            section = section.get_section('structure-value')
            if self._original_name not in section.available_sections:
                return
            # Replace the name in the request example.
            self._replace_content(section.get_section(self._original_name))

    def _replace_content(self, section):
        # NOTE: replaces every occurrence of the original name in the
        # section's rendered text before writing the section back.
        text = section.getvalue().decode('utf-8')
        section.clear_text()
        section.write(text.replace(self._original_name, self._alias_name))

981 

982 

class ClientMethodAlias:
    """Resolve a non-existent client method name to an existing one.

    :param actual_name: The name of the method that actually exists on
        the client.
    """

    def __init__(self, actual_name):
        self._actual = actual_name

    def __call__(self, client, **kwargs):
        # Registered on getattr.<service>.<alias> events; returning the
        # real attribute makes the alias behave like the actual method.
        return getattr(client, self._actual)

994 

995 

996# TODO: Remove this class as it is no longer used 

class HeaderToHostHoister:
    """Takes a header and moves it to the front of the hoststring."""

    _VALID_HOSTNAME = re.compile(r'(?!-)[a-z\d-]{1,63}(?<!-)$', re.IGNORECASE)

    def __init__(self, header_name):
        self._header_name = header_name

    def hoist(self, params, **kwargs):
        """Prepend the header's value (plus '.') to the request hostname.

        The header value must itself be a valid host label. Intended to be
        used as a target for the before-call event.
        """
        headers = params['headers']
        if self._header_name not in headers:
            return
        value = headers[self._header_name]
        self._ensure_header_is_valid_host(value)
        params['url'] = self._prepend_to_host(params['url'], value)

    def _ensure_header_is_valid_host(self, header):
        # Reject values that would not form a legal DNS label.
        if self._VALID_HOSTNAME.match(header):
            return
        raise ParamValidationError(
            report=(
                'Hostnames must contain only - and alphanumeric characters, '
                'and between 1 and 63 characters long.'
            )
        )

    def _prepend_to_host(self, url, prefix):
        # Rebuild the URL with "<prefix>." spliced onto the netloc; the
        # fragment is always dropped.
        parts = urlsplit(url)
        new_netloc = '.'.join([prefix, *parts.netloc.split('.')])
        return urlunsplit(
            (parts.scheme, new_netloc, parts.path, parts.query, '')
        )

1044 

1045 

def inject_api_version_header_if_needed(model, params, **kwargs):
    """Add the x-amz-api-version header for endpoint discovery operations."""
    if not model.is_endpoint_discovery_operation:
        return
    api_version = model.service_model.api_version
    params['headers']['x-amz-api-version'] = api_version

1050 

1051 

def remove_lex_v2_start_conversation(class_attributes, **kwargs):
    """Operation requires h2 which is currently unsupported in Python"""
    class_attributes.pop('start_conversation', None)

1056 

1057 

def remove_qbusiness_chat(class_attributes, **kwargs):
    """Operation requires h2 which is currently unsupported in Python"""
    class_attributes.pop('chat', None)

1062 

1063 

def remove_bedrock_runtime_invoke_model_with_bidirectional_stream(
    class_attributes, **kwargs
):
    """Operation requires h2 which is currently unsupported in Python"""
    class_attributes.pop('invoke_model_with_bidirectional_stream', None)

1070 

1071 

def add_retry_headers(request, **kwargs):
    """Attach the standard retry-information headers to a request.

    Adds ``amz-sdk-invocation-id`` and ``amz-sdk-request`` (any of
    ttl/attempt/max present in the retry context) when a retry context
    exists on the request.
    """
    retries_context = request.context.get('retries')
    if not retries_context:
        return
    request.headers['amz-sdk-invocation-id'] = retries_context[
        'invocation-id'
    ]
    request.headers['amz-sdk-request'] = '; '.join(
        f'{key}={retries_context[key]}'
        for key in ('ttl', 'attempt', 'max')
        if key in retries_context
    )

1085 

1086 

def remove_bucket_from_url_paths_from_model(params, model, context, **kwargs):
    """Strips leading `{Bucket}/` from any operations that have it.

    The original value is retained in a separate "authPath" field. This is
    used in the HmacV1Auth signer. See HmacV1Auth.canonical_resource in
    botocore/auth.py for details.

    This change is applied to the operation model the first time the
    operation is invoked and then stays in effect for the lifetime of the
    client object.

    When the ruleset based endpoint resolver is in effect, both the
    endpoint ruleset AND the service model place the bucket name in the
    final URL, producing an invalid URL. This handler modifies the
    operation model so it no longer places the bucket name; since the
    introduction of ruleset based endpoint resolution the problem exists
    in ALL URLs that contain a bucket name and can therefore be addressed
    before the URL gets assembled.
    """
    bucket_prefix = '/{Bucket}'
    request_uri = model.http['requestUri']
    if not request_uri.startswith(bucket_prefix):
        return
    model.http['requestUri'] = request_uri[len(bucket_prefix):]
    # Strip the query off before using the URI as authPath; the HmacV1Auth
    # signer appends query params to the authPath itself during signing.
    auth_path, _, _ = request_uri.partition('?')
    # A bucket-only URI must be terminated with '/' to generate a
    # signature the server will accept.
    if auth_path == bucket_prefix:
        auth_path += '/'
    model.http['authPath'] = auth_path

1119 

1120 

def remove_accid_host_prefix_from_model(params, model, context, **kwargs):
    """Removes the `{AccountId}.` prefix from the operation model.

    Applied the first time the operation is invoked; stays in effect for
    the lifetime of the client object.

    When the ruleset based endpoint resolver is in effect, both the
    endpoint ruleset AND the service model place the {AccountId}. prefix
    in the URL, producing an invalid endpoint. This handler removes the
    `endpoint.hostPrefix` field while leaving the `RequiresAccountId`
    static context parameter in place.
    """
    requires_account_id = any(
        param.name == 'RequiresAccountId' and param.value is True
        for param in model.static_context_parameters
    )
    if not requires_account_id:
        return
    endpoint = model.endpoint
    if endpoint is not None and endpoint.get('hostPrefix') == '{AccountId}.':
        del endpoint['hostPrefix']

1144 

1145 

def remove_arn_from_signing_path(request, **kwargs):
    """Drop a leading URL-encoded ARN segment from the signing path.

    When the first path segment of ``auth_path`` decodes to an ARN, it
    must not participate in signing, so it is removed.
    """
    auth_path = request.auth_path
    if not (isinstance(auth_path, str) and auth_path.startswith('/arn%3A')):
        return
    segments = auth_path.split('/')
    if len(segments) > 1 and ArnParser.is_arn(unquote(segments[1])):
        request.auth_path = '/'.join(['', *segments[2:]])

1154 

1155 

def customize_endpoint_resolver_builtins(
    builtins, model, params, context, **kwargs
):
    """Modify builtin parameter values for endpoint resolver

    Modifies the builtins dict in place. Changes are in effect for one call.
    The corresponding event is emitted only if at least one builtin parameter
    value is required for endpoint resolution for the operation.

    :param builtins: mapping of EndpointResolverBuiltins keys to values;
        mutated in place.
    :param model: the operation model for the call being made.
    :param params: the call's parameters; only 'Bucket' is consulted here.
    :param context: the request context; 'use_global_endpoint' is read.
    """
    bucket_name = params.get('Bucket')
    bucket_is_arn = bucket_name is not None and ArnParser.is_arn(bucket_name)
    # In some situations the host will return AuthorizationHeaderMalformed
    # when the signing region of a sigv4 request is not the bucket's
    # region (which is likely unknown by the user of GetBucketLocation).
    # Avoid this by always using path-style addressing.
    if model.name == 'GetBucketLocation':
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = True
    # All situations where the bucket name is an ARN are not compatible
    # with path style addressing.
    elif bucket_is_arn:
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = False

    # Bucket names that are invalid host labels require path-style addressing.
    # If path-style addressing was specifically requested, the default builtin
    # value is already set.
    path_style_required = (
        bucket_name is not None and not VALID_HOST_LABEL_RE.match(bucket_name)
    )
    # Read back the (possibly just-updated) force-path-style builtin.
    path_style_requested = builtins[
        EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE
    ]

    # Path-style addressing is incompatible with the global endpoint for
    # presigned URLs. If the bucket name is an ARN, the ARN's region should be
    # used in the endpoint.
    if (
        context.get('use_global_endpoint')
        and not path_style_required
        and not path_style_requested
        and not bucket_is_arn
        and not utils.is_s3express_bucket(bucket_name)
    ):
        builtins[EndpointResolverBuiltins.AWS_REGION] = 'aws-global'
        builtins[EndpointResolverBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT] = True

1200 

1201 

def remove_content_type_header_for_presigning(request, **kwargs):
    """Remove the Content-Type header when building a presigned request."""
    is_presign = request.context.get('is_presign_request') is True
    if is_presign and 'Content-Type' in request.headers:
        del request.headers['Content-Type']

1208 

1209 

def handle_expires_header(
    operation_model, response_dict, customized_response_dict, **kwargs
):
    """Preserve S3's raw 'Expires' header and drop it when unparseable.

    The raw header value is always copied to 'ExpiresString'. If it
    cannot be parsed as a timestamp, the 'Expires' header is removed so
    response parsing does not fail, and a warning is logged.
    """
    if not _has_expires_shape(operation_model.output_shape):
        return
    expires_value = response_dict.get('headers', {}).get('Expires')
    if not expires_value:
        return
    customized_response_dict['ExpiresString'] = expires_value
    try:
        utils.parse_timestamp(expires_value)
    except (ValueError, RuntimeError):
        logger.warning(
            'Failed to parse the "Expires" member as a timestamp: %s. '
            'The unparsed value is available in the response under "ExpiresString".',
            expires_value,
        )
        del response_dict['headers']['Expires']

1225 

1226 

1227def _has_expires_shape(shape): 

1228 if not shape: 

1229 return False 

1230 return any( 

1231 member_shape.name == 'Expires' 

1232 and member_shape.serialization.get('name') == 'Expires' 

1233 for member_shape in shape.members.values() 

1234 ) 

1235 

1236 

def document_expires_shape(section, event_name, **kwargs):
    """Document the synthetic ``ExpiresString`` member for S3 responses.

    Updates the documentation for S3 operations that include the 'Expires'
    member in their response structure: documents a synthetic member
    'ExpiresString' and includes a deprecation notice for 'Expires'.

    :param section: the documentation section being completed.
    :param event_name: the docs event; selects whether the response
        example or the response params section is being edited.
    """
    if 'response-example' in event_name:
        if not section.has_section('structure-value'):
            return
        parent = section.get_section('structure-value')
        if not parent.has_section('Expires'):
            return
        param_line = parent.get_section('Expires')
        # Insert an 'ExpiresString' example line right after 'Expires'.
        param_line.add_new_section('ExpiresString')
        new_param_line = param_line.get_section('ExpiresString')
        new_param_line.write("'ExpiresString': 'string',")
        new_param_line.style.new_line()
    elif 'response-params' in event_name:
        if not section.has_section('Expires'):
            return
        param_section = section.get_section('Expires')
        # Add a deprecation notice for the "Expires" param
        doc_section = param_section.get_section('param-documentation')
        doc_section.style.start_note()
        doc_section.write(
            'This member has been deprecated. Please use ``ExpiresString`` instead.'
        )
        doc_section.style.end_note()
        # Document the "ExpiresString" param
        new_param_section = param_section.add_new_section('ExpiresString')
        new_param_section.style.new_paragraph()
        new_param_section.write('- **ExpiresString** *(string) --*')
        new_param_section.style.indent()
        new_param_section.style.new_paragraph()
        new_param_section.write(
            'The raw, unparsed value of the ``Expires`` field.'
        )

1272 

1273 

def _handle_200_error(operation_model, response_dict, **kwargs):
    """Turn an S3 "200 with error body" response into a 500.

    S3 can return a 200 response with an error embedded in the body;
    rewriting the status code to 500 lets ``_update_status_code`` drive
    normal retry resolution. Operations with streaming response blobs are
    excluded as they can't be reliably distinguished from an S3 error.
    """
    if not _should_handle_200_error(operation_model, response_dict):
        return
    looks_like_error = _looks_like_special_case_error(
        response_dict['status_code'], response_dict['body']
    )
    if looks_like_error:
        response_dict['status_code'] = 500
        logger.debug(
            "Error found for response with 200 status code: %s.",
            response_dict['body'],
        )

1289 

1290 

1291def _should_handle_200_error(operation_model, response_dict): 

1292 output_shape = operation_model.output_shape 

1293 if ( 

1294 not response_dict 

1295 or operation_model.has_event_stream_output 

1296 or not output_shape 

1297 ): 

1298 return False 

1299 payload = output_shape.serialization.get('payload') 

1300 if payload is not None: 

1301 payload_shape = output_shape.members[payload] 

1302 if payload_shape.type_name in ('blob', 'string'): 

1303 return False 

1304 return True 

1305 

1306 

1307def _update_status_code(response, **kwargs): 

1308 # Update the http_response status code when the parsed response has been 

1309 # modified in a handler. This enables retries for cases like ``_handle_200_error``. 

1310 if response is None: 

1311 return 

1312 http_response, parsed = response 

1313 parsed_status_code = parsed.get('ResponseMetadata', {}).get( 

1314 'HTTPStatusCode', http_response.status_code 

1315 ) 

1316 if http_response.status_code != parsed_status_code: 

1317 http_response.status_code = parsed_status_code 

1318 

1319 

1320def _handle_request_validation_mode_member(params, model, **kwargs): 

1321 client_config = kwargs.get("context", {}).get("client_config") 

1322 if client_config is None: 

1323 return 

1324 response_checksum_validation = client_config.response_checksum_validation 

1325 http_checksum = model.http_checksum 

1326 mode_member = http_checksum.get("requestValidationModeMember") 

1327 if ( 

1328 mode_member is not None 

1329 and response_checksum_validation == "when_supported" 

1330 ): 

1331 params.setdefault(mode_member, "ENABLED") 

1332 

1333 

def _set_extra_headers_for_unsigned_request(
    request, signature_version, **kwargs
):
    """Mark unsigned chunked uploads that carry a trailer checksum.

    When sending a checksum in the trailer of an unsigned chunked
    request, S3 requires the "X-Amz-Content-SHA256" header to be set to
    "STREAMING-UNSIGNED-PAYLOAD-TRAILER".
    """
    checksum_context = request.context.get("checksum", {})
    request_algorithm = checksum_context.get("request_algorithm", {})
    in_trailer = request_algorithm.get("in") == "trailer"
    if in_trailer and signature_version == botocore.UNSIGNED:
        request.headers["X-Amz-Content-SHA256"] = (
            "STREAMING-UNSIGNED-PAYLOAD-TRAILER"
        )

1345 

1346 

def _set_auth_scheme_preference_signer(context, signing_name, **kwargs):
    """
    Determines the appropriate signer to use based on the client configuration,
    authentication scheme preferences, and the availability of a bearer token.

    Returns the resolved signature version (a string, or ``botocore.UNSIGNED``)
    only when it differs from the configured one; otherwise returns ``None``
    so the originally configured signer remains in effect.
    """
    client_config = context.get('client_config')
    if client_config is None:
        return

    signature_version = client_config.signature_version
    auth_scheme_preference = client_config.auth_scheme_preference
    auth_options = context.get('auth_options')

    # ClientConfigString instances (and an explicit UNSIGNED) are treated
    # as values set explicitly in code on the Config object.
    signature_version_set_in_code = (
        isinstance(signature_version, ClientConfigString)
        or signature_version is botocore.UNSIGNED
    )
    auth_preference_set_in_code = isinstance(
        auth_scheme_preference, ClientConfigString
    )
    has_in_code_configuration = (
        signature_version_set_in_code or auth_preference_set_in_code
    )

    resolved_signature_version = signature_version

    # If signature version was not set in code, but an auth scheme preference
    # is available, resolve it based on the preferred schemes and supported auth
    # options for this service.
    if (
        not signature_version_set_in_code
        and auth_scheme_preference
        and auth_options
    ):
        preferred_schemes = auth_scheme_preference.split(',')
        resolved = botocore.auth.resolve_auth_scheme_preference(
            preferred_schemes, auth_options
        )
        # A resolved scheme of 'none' maps to an unsigned request.
        resolved_signature_version = (
            botocore.UNSIGNED if resolved == 'none' else resolved
        )

    # Prefer 'bearer' signature version if a bearer token is available, and it
    # is allowed for this service. This can override earlier resolution if the
    # config object didn't explicitly set a signature version.
    if _should_prefer_bearer_auth(
        has_in_code_configuration,
        signing_name,
        resolved_signature_version,
        auth_options,
    ):
        register_feature_id('BEARER_SERVICE_ENV_VARS')
        resolved_signature_version = 'bearer'

    # No change from the configured version -> let other handlers decide.
    if resolved_signature_version == signature_version:
        return None
    return resolved_signature_version

1404 

1405 

def _should_prefer_bearer_auth(
    has_in_code_configuration,
    signing_name,
    resolved_signature_version,
    auth_options,
):
    """Whether bearer auth should win over the resolved signature version.

    Bearer is preferred only when the service supports it, the service's
    auth options include smithy.api#httpBearerAuth, a bearer token is
    available from the environment, and either bearer was already
    resolved or no auth-related values were explicitly set in code.
    """
    if signing_name not in get_bearer_auth_supported_services():
        return False
    if not auth_options:
        return False
    if 'smithy.api#httpBearerAuth' not in auth_options:
        return False
    if get_token_from_environment(signing_name) is None:
        return False
    if resolved_signature_version == 'bearer':
        return True
    return not has_in_code_configuration

1426 

1427 

def get_bearer_auth_supported_services():
    """
    Returns the set of services that support bearer token authentication.

    Values correspond to each service's `signingName` property as defined
    in model.py, falling back to `endpointPrefix` when `signingName` is
    not set.

    Warning: This is a private interface and is subject to abrupt breaking
    changes, including removal, in any botocore release. It is not intended
    for external use, and its usage outside of botocore is not advised or
    supported.
    """
    return {'bedrock'}

1439 

1440 

1441# This is a list of (event_name, handler). 

1442# When a Session is created, everything in this list will be 

1443# automatically registered with that Session. 

1444 

1445BUILTIN_HANDLERS = [ 

1446 ('choose-service-name', handle_service_name_alias), 

1447 ( 

1448 'getattr.mturk.list_hi_ts_for_qualification_type', 

1449 ClientMethodAlias('list_hits_for_qualification_type'), 

1450 ), 

1451 ( 

1452 'getattr.socialmessaging.delete_whatsapp_media_message', 

1453 ClientMethodAlias('delete_whatsapp_message_media'), 

1454 ), 

1455 ( 

1456 'before-parameter-build.s3.UploadPart', 

1457 convert_body_to_file_like_object, 

1458 REGISTER_LAST, 

1459 ), 

1460 ( 

1461 'before-parameter-build.s3.PutObject', 

1462 convert_body_to_file_like_object, 

1463 REGISTER_LAST, 

1464 ), 

1465 ('creating-client-class', add_generate_presigned_url), 

1466 ('creating-client-class.s3', add_generate_presigned_post), 

1467 ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2), 

1468 ('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation), 

1469 ('creating-client-class.qbusiness', remove_qbusiness_chat), 

1470 ( 

1471 'creating-client-class.bedrock-runtime', 

1472 remove_bedrock_runtime_invoke_model_with_bidirectional_stream, 

1473 ), 

1474 ('after-call.iam', json_decode_policies), 

1475 ('after-call.ec2.GetConsoleOutput', decode_console_output), 

1476 ('after-call.cloudformation.GetTemplate', json_decode_template_body), 

1477 ('after-call.s3.GetBucketLocation', parse_get_bucket_location), 

1478 ( 

1479 'after-call.sqs.*', 

1480 _handle_sqs_compatible_error, 

1481 ), 

1482 ('before-parse.s3.*', handle_expires_header), 

1483 ('before-parse.s3.*', _handle_200_error, REGISTER_FIRST), 

1484 ('before-parameter-build', generate_idempotent_uuid), 

1485 ('before-parameter-build', _handle_request_validation_mode_member), 

1486 ('before-parameter-build.s3', validate_bucket_name), 

1487 ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model), 

1488 ( 

1489 'before-parameter-build.s3.ListObjects', 

1490 set_list_objects_encoding_type_url, 

1491 ), 

1492 ( 

1493 'before-parameter-build.s3.ListObjectsV2', 

1494 set_list_objects_encoding_type_url, 

1495 ), 

1496 ( 

1497 'before-parameter-build.s3.ListObjectVersions', 

1498 set_list_objects_encoding_type_url, 

1499 ), 

1500 ('before-parameter-build.s3.CopyObject', handle_copy_source_param), 

1501 ('before-parameter-build.s3.UploadPartCopy', handle_copy_source_param), 

1502 ('before-parameter-build.s3.CopyObject', validate_ascii_metadata), 

1503 ('before-parameter-build.s3.PutObject', validate_ascii_metadata), 

1504 ( 

1505 'before-parameter-build.s3.CreateMultipartUpload', 

1506 validate_ascii_metadata, 

1507 ), 

1508 ('before-parameter-build.s3-control', remove_accid_host_prefix_from_model), 

1509 ('docs.*.s3.CopyObject.complete-section', document_copy_source_form), 

1510 ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form), 

1511 ('docs.response-example.s3.*.complete-section', document_expires_shape), 

1512 ('docs.response-params.s3.*.complete-section', document_expires_shape), 

1513 ('before-endpoint-resolution.s3', customize_endpoint_resolver_builtins), 

1514 ('before-call', add_recursion_detection_header), 

1515 ('before-call.s3', add_expect_header), 

1516 ('before-call.glacier', add_glacier_version), 

1517 ('before-call.apigateway', add_accept_header), 

1518 ('before-call.s3.DeleteObjects', escape_xml_payload), 

1519 ('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload), 

1520 ('before-call.glacier.UploadArchive', add_glacier_checksums), 

1521 ('before-call.glacier.UploadMultipartPart', add_glacier_checksums), 

1522 ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2), 

1523 ('request-created', add_retry_headers), 

1524 ('request-created.machinelearning.Predict', switch_host_machinelearning), 

1525 ('needs-retry.s3.*', _update_status_code, REGISTER_FIRST), 

1526 ('choose-signer.cognito-identity.GetId', disable_signing), 

1527 ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing), 

1528 ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing), 

1529 ( 

1530 'choose-signer.cognito-identity.GetCredentialsForIdentity', 

1531 disable_signing, 

1532 ), 

1533 ('choose-signer.sts.AssumeRoleWithSAML', disable_signing), 

1534 ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing), 

1535 ('choose-signer', set_operation_specific_signer), 

1536 ('choose-signer', _set_auth_scheme_preference_signer), 

1537 ('before-parameter-build.s3.HeadObject', sse_md5), 

1538 ('before-parameter-build.s3.GetObject', sse_md5), 

1539 ('before-parameter-build.s3.PutObject', sse_md5), 

1540 ('before-parameter-build.s3.CopyObject', sse_md5), 

1541 ('before-parameter-build.s3.CopyObject', copy_source_sse_md5), 

1542 ('before-parameter-build.s3.CreateMultipartUpload', sse_md5), 

1543 ('before-parameter-build.s3.UploadPart', sse_md5), 

1544 ('before-parameter-build.s3.UploadPartCopy', sse_md5), 

1545 ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5), 

1546 ('before-parameter-build.s3.CompleteMultipartUpload', sse_md5), 

1547 ('before-parameter-build.s3.SelectObjectContent', sse_md5), 

1548 ('before-parameter-build.ec2.RunInstances', base64_encode_user_data), 

1549 ( 

1550 'before-parameter-build.autoscaling.CreateLaunchConfiguration', 

1551 base64_encode_user_data, 

1552 ), 

1553 ('before-parameter-build.route53', fix_route53_ids), 

1554 ('before-parameter-build.glacier', inject_account_id), 

1555 ('before-sign.s3', remove_arn_from_signing_path), 

1556 ('before-sign.s3', _set_extra_headers_for_unsigned_request), 

1557 ( 

1558 'before-sign.polly.SynthesizeSpeech', 

1559 remove_content_type_header_for_presigning, 

1560 ), 

1561 ('after-call.s3.ListObjects', decode_list_object), 

1562 ('after-call.s3.ListObjectsV2', decode_list_object_v2), 

1563 ('after-call.s3.ListObjectVersions', decode_list_object_versions), 

1564 # Cloudsearchdomain search operation will be sent by HTTP POST 

1565 ('request-created.cloudsearchdomain.Search', change_get_to_post), 

1566 # Glacier documentation customizations 

1567 ( 

1568 'docs.*.glacier.*.complete-section', 

1569 AutoPopulatedParam( 

1570 'accountId', 

1571 'Note: this parameter is set to "-" by' 

1572 'default if no value is not specified.', 

1573 ).document_auto_populated_param, 

1574 ), 

1575 ( 

1576 'docs.*.glacier.UploadArchive.complete-section', 

1577 AutoPopulatedParam('checksum').document_auto_populated_param, 

1578 ), 

1579 ( 

1580 'docs.*.glacier.UploadMultipartPart.complete-section', 

1581 AutoPopulatedParam('checksum').document_auto_populated_param, 

1582 ), 

1583 ( 

1584 'docs.request-params.glacier.CompleteMultipartUpload.complete-section', 

1585 document_glacier_tree_hash_checksum(), 

1586 ), 

1587 # Cloudformation documentation customizations 

1588 ( 

1589 'docs.*.cloudformation.GetTemplate.complete-section', 

1590 document_cloudformation_get_template_return_type, 

1591 ), 

1592 # UserData base64 encoding documentation customizations 

1593 ( 

1594 'docs.*.ec2.RunInstances.complete-section', 

1595 document_base64_encoding('UserData'), 

1596 ), 

1597 ( 

1598 'docs.*.autoscaling.CreateLaunchConfiguration.complete-section', 

1599 document_base64_encoding('UserData'), 

1600 ), 

1601 # EC2 CopySnapshot documentation customizations 

1602 ( 

1603 'docs.*.ec2.CopySnapshot.complete-section', 

1604 AutoPopulatedParam('PresignedUrl').document_auto_populated_param, 

1605 ), 

1606 ( 

1607 'docs.*.ec2.CopySnapshot.complete-section', 

1608 AutoPopulatedParam('DestinationRegion').document_auto_populated_param, 

1609 ), 

1610 # S3 SSE documentation modifications 

1611 ( 

1612 'docs.*.s3.*.complete-section', 

1613 AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param, 

1614 ), 

1615 # S3 SSE Copy Source documentation modifications 

1616 ( 

1617 'docs.*.s3.*.complete-section', 

1618 AutoPopulatedParam( 

1619 'CopySourceSSECustomerKeyMD5' 

1620 ).document_auto_populated_param, 

1621 ), 

1622 # Add base64 information to Lambda 

1623 ( 

1624 'docs.*.lambda.UpdateFunctionCode.complete-section', 

1625 document_base64_encoding('ZipFile'), 

1626 ), 

1627 # The following S3 operations cannot actually accept a ContentMD5 

1628 ( 

1629 'docs.*.s3.*.complete-section', 

1630 HideParamFromOperations( 

1631 's3', 

1632 'ContentMD5', 

1633 [ 

1634 'DeleteObjects', 

1635 'PutBucketAcl', 

1636 'PutBucketCors', 

1637 'PutBucketLifecycle', 

1638 'PutBucketLogging', 

1639 'PutBucketNotification', 

1640 'PutBucketPolicy', 

1641 'PutBucketReplication', 

1642 'PutBucketRequestPayment', 

1643 'PutBucketTagging', 

1644 'PutBucketVersioning', 

1645 'PutBucketWebsite', 

1646 'PutObjectAcl', 

1647 ], 

1648 ).hide_param, 

1649 ), 

1650 ############# 

1651 # DSQL 

1652 ############# 

1653 ('creating-client-class.dsql', add_dsql_generate_db_auth_token_methods), 

1654 ############# 

1655 # RDS 

1656 ############# 

1657 ('creating-client-class.rds', add_generate_db_auth_token), 

1658 ('before-call.rds.CopyDBClusterSnapshot', inject_presigned_url_rds), 

1659 ('before-call.rds.CreateDBCluster', inject_presigned_url_rds), 

1660 ('before-call.rds.CopyDBSnapshot', inject_presigned_url_rds), 

1661 ('before-call.rds.CreateDBInstanceReadReplica', inject_presigned_url_rds), 

1662 ( 

1663 'before-call.rds.StartDBInstanceAutomatedBackupsReplication', 

1664 inject_presigned_url_rds, 

1665 ), 

1666 # RDS PresignedUrl documentation customizations 

1667 ( 

1668 'docs.*.rds.CopyDBClusterSnapshot.complete-section', 

1669 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1670 ), 

1671 ( 

1672 'docs.*.rds.CreateDBCluster.complete-section', 

1673 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1674 ), 

1675 ( 

1676 'docs.*.rds.CopyDBSnapshot.complete-section', 

1677 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1678 ), 

1679 ( 

1680 'docs.*.rds.CreateDBInstanceReadReplica.complete-section', 

1681 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1682 ), 

1683 ( 

1684 'docs.*.rds.StartDBInstanceAutomatedBackupsReplication.complete-section', 

1685 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1686 ), 

1687 ############# 

1688 # Neptune 

1689 ############# 

1690 ('before-call.neptune.CopyDBClusterSnapshot', inject_presigned_url_rds), 

1691 ('before-call.neptune.CreateDBCluster', inject_presigned_url_rds), 

1692 # Neptune PresignedUrl documentation customizations 

1693 ( 

1694 'docs.*.neptune.CopyDBClusterSnapshot.complete-section', 

1695 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1696 ), 

1697 ( 

1698 'docs.*.neptune.CreateDBCluster.complete-section', 

1699 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1700 ), 

1701 ############# 

1702 # DocDB 

1703 ############# 

1704 ('before-call.docdb.CopyDBClusterSnapshot', inject_presigned_url_rds), 

1705 ('before-call.docdb.CreateDBCluster', inject_presigned_url_rds), 

1706 # DocDB PresignedUrl documentation customizations 

1707 ( 

1708 'docs.*.docdb.CopyDBClusterSnapshot.complete-section', 

1709 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1710 ), 

1711 ( 

1712 'docs.*.docdb.CreateDBCluster.complete-section', 

1713 AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, 

1714 ), 

1715 ('before-call', inject_api_version_header_if_needed), 

1716] 

# Run at import time: presumably appends the parameter-alias handlers
# (defined earlier in this module) onto the builtin handler list so that
# aliased request parameters are translated/documented like the rest of
# BUILTIN_HANDLERS. NOTE(review): _add_parameter_aliases is not visible in
# this chunk — confirm it only mutates the list passed in.
_add_parameter_aliases(BUILTIN_HANDLERS)