1# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License"). You
4# may not use this file except in compliance with the License. A copy of
5# the License is located at
6#
7# http://aws.amazon.com/apache2.0/
8#
9# or in the "license" file accompanying this file. This file is
10# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11# ANY KIND, either express or implied. See the License for the specific
12# language governing permissions and limitations under the License.
13
14"""Builtin event handlers.
15
16This module contains builtin handlers for events emitted by botocore.
17"""
18
19import base64
20import copy
21import logging
22import os
23import re
24import uuid
25import warnings
26from io import BytesIO
27
28import botocore
29import botocore.auth
30from botocore import utils
31from botocore.compat import (
32 ETree,
33 OrderedDict,
34 XMLParseError,
35 ensure_bytes,
36 get_md5,
37 json,
38 quote,
39 unquote,
40 unquote_str,
41 urlsplit,
42 urlunsplit,
43)
44from botocore.docs.utils import (
45 AppendParamDocumentation,
46 AutoPopulatedParam,
47 HideParamFromOperations,
48)
49from botocore.endpoint_provider import VALID_HOST_LABEL_RE
50from botocore.exceptions import (
51 AliasConflictParameterError,
52 ParamValidationError,
53 UnsupportedTLSVersionWarning,
54)
55from botocore.regions import EndpointResolverBuiltins
56from botocore.signers import (
57 add_generate_db_auth_token,
58 add_generate_presigned_post,
59 add_generate_presigned_url,
60)
61from botocore.utils import (
62 SAFE_CHARS,
63 ArnParser,
64 conditionally_calculate_checksum,
65 conditionally_calculate_md5,
66 percent_encode,
67 switch_host_with_param,
68)
69
70# Keep these imported. There's pre-existing code that uses them.
71from botocore import retryhandler # noqa
72from botocore import translate # noqa
73from botocore.compat import MD5_AVAILABLE # noqa
74from botocore.exceptions import MissingServiceIdError # noqa
75from botocore.utils import hyphenize_service_id # noqa
76from botocore.utils import is_global_accesspoint # noqa
77from botocore.utils import SERVICE_NAME_ALIASES # noqa
78
79
logger = logging.getLogger(__name__)

# Sentinels used when registering handlers to request that a handler be
# placed at the very beginning or very end of an event's handler list.
REGISTER_FIRST = object()
REGISTER_LAST = object()
# From the S3 docs:
# The rules for bucket names in the US Standard region allow bucket names
# to be as long as 255 characters, and bucket names can contain any
# combination of uppercase letters, lowercase letters, numbers, periods
# (.), hyphens (-), and underscores (_).
VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$')
# Matches S3 access point and S3 Object Lambda access point ARNs.
_ACCESSPOINT_ARN = (
    r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]*:[0-9]{12}:accesspoint[/:]'
    r'[a-zA-Z0-9\-.]{1,63}$'
)
# Matches S3 on Outposts access point ARNs.
_OUTPOST_ARN = (
    r'^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]'
    r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$'
)
# A "Bucket" value is accepted if it is a valid bucket name OR any of the
# supported ARN forms above.
VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN]))
# signing names used for the services s3 and s3-control, for example in
# botocore/data/s3/2006-03-01/endpoints-rule-set-1.json
S3_SIGNING_NAMES = ('s3', 's3-outposts', 's3-object-lambda', 's3express')
# Matches a trailing '?versionId=...' suffix on a CopySource string.
VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$')
103
104
def handle_service_name_alias(service_name, **kwargs):
    """Return the canonical service name for a known alias.

    Falls back to ``service_name`` unchanged when no alias is registered
    in ``SERVICE_NAME_ALIASES``.
    """
    return SERVICE_NAME_ALIASES.get(service_name, service_name)
107
108
def add_recursion_detection_header(params, **kwargs):
    """Propagate the trace id header when running inside AWS Lambda.

    The header is injected only when both the Lambda function name and
    the trace id are present in the environment, and an existing
    ``X-Amzn-Trace-Id`` header is never overwritten.
    """
    if 'AWS_LAMBDA_FUNCTION_NAME' not in os.environ:
        return
    trace_id = os.environ.get('_X_AMZN_TRACE_ID')
    if not trace_id:
        return
    headers = params['headers']
    if 'X-Amzn-Trace-Id' not in headers:
        headers['X-Amzn-Trace-Id'] = quote(trace_id, safe='-=;:+&[]{}"\',')
116
117
def escape_xml_payload(params, **kwargs):
    """Replace CR/LF bytes in an XML body with XML character references.

    XML parsers normalize line endings, so raw carriage-return and
    newline bytes surviving in the document would be modified by the
    service when it parses the document.  Encoding them as ``&#13;`` and
    ``&#10;`` preserves the customer's original input.  Ideally this
    would happen in ElementTree.tostring, but it does not allow
    overriding entity escaping for text fields; these bytes can only be
    present here if they were part of the customer input.

    NOTE: the previous revision had the escaped entities corrupted into
    literal CR/LF characters embedded inside the byte-string literals,
    which made the replacement a no-op (and the literals malformed);
    restored the ``&#13;`` / ``&#10;`` character references.
    """
    body = params['body']
    if b'\r' in body:
        body = body.replace(b'\r', b'&#13;')
    if b'\n' in body:
        body = body.replace(b'\n', b'&#10;')

    params['body'] = body
132
133
def check_for_200_error(response, **kwargs):
    """Convert S3 "200 OK with embedded error" responses into 500s.

    Per the S3 CopyObject documentation, an error that occurs while S3
    is copying the data is embedded in the body of a 200 response, so a
    200 can contain either a success or an error.  Conceptually such a
    response should be handled exactly like a 500 (raising exceptions,
    retries, etc.).  This handler runs before the retry handlers, so
    flipping the status code to 500 is enough to get the error retried
    as expected.
    """
    if response is None:
        # A None response can happen if an exception is raised while
        # trying to retrieve the response. See Endpoint._get_response().
        return
    http_response, parsed = response
    if not _looks_like_special_case_error(http_response):
        return
    logger.debug(
        "Error found for response with 200 status code, "
        "errors: %s, changing status code to "
        "500.",
        parsed,
    )
    http_response.status_code = 500
163
164
165def _looks_like_special_case_error(http_response):
166 if http_response.status_code == 200:
167 try:
168 parser = ETree.XMLParser(
169 target=ETree.TreeBuilder(), encoding='utf-8'
170 )
171 parser.feed(http_response.content)
172 root = parser.close()
173 except XMLParseError:
174 # In cases of network disruptions, we may end up with a partial
175 # streamed response from S3. We need to treat these cases as
176 # 500 Service Errors and try again.
177 return True
178 if root.tag == 'Error':
179 return True
180 return False
181
182
def set_operation_specific_signer(context, signing_name, **kwargs):
    """Choose the operation-specific signer.

    Individual operations may have a different auth type than the
    service as a whole; most often operations that should not be signed
    at all, but also modes like sigv4 without body signing.  Returns the
    signature version to use, ``botocore.UNSIGNED``, or None to keep the
    service default.
    """
    auth_type = context.get('auth_type')

    # No operation-level auth type configured; use the service default.
    if not auth_type:
        return

    # The string value 'none' means the operation is not signed at all.
    if auth_type == 'none':
        return botocore.UNSIGNED

    if auth_type == 'bearer':
        return 'bearer'

    # For unsigned-body operations, record that in the context so the
    # signer is aware of it.
    if context.get('unsigned_payload') or auth_type == 'v4-unsigned-body':
        context['payload_signing_enabled'] = False

    if not auth_type.startswith('v4'):
        return

    if auth_type == 'v4-s3express':
        return auth_type

    if auth_type == 'v4a':
        # sigv4a needs additional signing config for global signature.
        signing_config = {
            'region': _resolve_sigv4a_region(context),
            'signing_name': signing_name,
        }
        context.setdefault('signing', {}).update(signing_config)
        signature_version = 'v4a'
    else:
        signature_version = 'v4'

    # s3 and s3-control signing names use the customized "s3v4"/"s3v4a"
    # signers.
    if signing_name in S3_SIGNING_NAMES:
        signature_version = f's3{signature_version}'

    return signature_version
234
235
236def _resolve_sigv4a_region(context):
237 region = None
238 if 'client_config' in context:
239 region = context['client_config'].sigv4a_signing_region_set
240 if not region and context.get('signing', {}).get('region'):
241 region = context['signing']['region']
242 return region or '*'
243
244
def decode_console_output(parsed, **kwargs):
    """Base64-decode the console 'Output' value in place.

    Decoding failures are logged and the original value is left alone.
    """
    if 'Output' not in parsed:
        return
    try:
        raw = bytes(parsed['Output'], 'latin-1')
        # 'replace' because console output may contain bytes that are
        # not valid utf-8.
        parsed['Output'] = base64.b64decode(raw).decode('utf-8', 'replace')
    except (ValueError, TypeError, AttributeError):
        logger.debug('Error decoding base64', exc_info=True)
257
258
def generate_idempotent_uuid(params, model, **kwargs):
    """Fill in a random UUID for every missing idempotency-token member."""
    for member in model.idempotent_members:
        if member in params:
            continue
        token = str(uuid.uuid4())
        params[member] = token
        logger.debug(
            f"injecting idempotency token ({token}) into param '{member}'."
        )
266
267
def decode_quoted_jsondoc(value):
    """URL-decode and JSON-parse *value*; return it unchanged on failure."""
    try:
        return json.loads(unquote(value))
    except (ValueError, TypeError):
        logger.debug('Error loading quoted JSON', exc_info=True)
        return value
274
275
def json_decode_template_body(parsed, **kwargs):
    """Parse a JSON TemplateBody into an OrderedDict, in place.

    Parse failures are logged and the original string is kept.
    """
    if 'TemplateBody' not in parsed:
        return
    try:
        parsed['TemplateBody'] = json.loads(
            parsed['TemplateBody'], object_pairs_hook=OrderedDict
        )
    except (ValueError, TypeError):
        logger.debug('error loading JSON', exc_info=True)
285
286
def validate_bucket_name(params, **kwargs):
    """Reject Bucket values that are neither valid names nor S3 ARNs."""
    if 'Bucket' not in params:
        return
    bucket = params['Bucket']
    if VALID_BUCKET.search(bucket) or VALID_S3_ARN.search(bucket):
        return
    error_msg = (
        f'Invalid bucket name "{bucket}": Bucket name must match '
        f'the regex "{VALID_BUCKET.pattern}" or be an ARN matching '
        f'the regex "{VALID_S3_ARN.pattern}"'
    )
    raise ParamValidationError(report=error_msg)
298
299
def sse_md5(params, **kwargs):
    """Fill in the base64 SSE-C key and its MD5 digest when missing.

    S3 server-side encryption requires the encryption key to be sent
    base64 encoded along with a base64-encoded MD5 hash of the key; this
    computes both unless the caller already set the MD5.
    """
    _sse_md5(params, 'SSECustomer')
308
309
def copy_source_sse_md5(params, **kwargs):
    """Fill in the copy-source SSE-C key encoding and MD5 when missing.

    Same behavior as ``sse_md5`` but applied to the
    ``CopySourceSSECustomer*`` members used by copy operations.
    """
    _sse_md5(params, 'CopySourceSSECustomer')
318
319
320def _sse_md5(params, sse_member_prefix='SSECustomer'):
321 if not _needs_s3_sse_customization(params, sse_member_prefix):
322 return
323
324 sse_key_member = sse_member_prefix + 'Key'
325 sse_md5_member = sse_member_prefix + 'KeyMD5'
326 key_as_bytes = params[sse_key_member]
327 if isinstance(key_as_bytes, str):
328 key_as_bytes = key_as_bytes.encode('utf-8')
329 key_md5_str = base64.b64encode(get_md5(key_as_bytes).digest()).decode(
330 'utf-8'
331 )
332 key_b64_encoded = base64.b64encode(key_as_bytes).decode('utf-8')
333 params[sse_key_member] = key_b64_encoded
334 params[sse_md5_member] = key_md5_str
335
336
337def _needs_s3_sse_customization(params, sse_member_prefix):
338 return (
339 params.get(sse_member_prefix + 'Key') is not None
340 and sse_member_prefix + 'KeyMD5' not in params
341 )
342
343
def disable_signing(**kwargs):
    """Return the sentinel that tells the signer to skip signing entirely."""
    return botocore.UNSIGNED
350
351
def add_expect_header(model, params, **kwargs):
    """Add an ``Expect: 100-continue`` header for file-like PUT/POST bodies.

    Setting BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE suppresses the header
    for bodies whose content length is known to be zero.
    """
    if model.http.get('method', '') not in ('PUT', 'POST'):
        return
    body = params.get('body')
    if body is None or not hasattr(body, 'read'):
        return
    skip_empty = utils.ensure_boolean(
        os.environ.get(
            'BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE',
            False,
        )
    )
    if skip_empty and utils.determine_content_length(body) == 0:
        return
    # Any file like object will use an expect 100-continue
    # header regardless of size.
    logger.debug("Adding expect 100 continue header to request.")
    params['headers']['Expect'] = '100-continue'
370
371
class DeprecatedServiceDocumenter:
    """Injects a deprecation notice pointing users at a replacement client."""

    def __init__(self, replacement_service_name):
        # The service name users should migrate to.
        self._replacement_service_name = replacement_service_name

    def inject_deprecation_notice(self, section, event_name, **kwargs):
        """Write an 'important' docs block directing users to the new client."""
        replacement = self._replacement_service_name
        section.style.start_important()
        section.write('This service client is deprecated. Please use ')
        section.style.ref(replacement, replacement)
        section.write(' instead.')
        section.style.end_important()
385
386
def document_copy_source_form(section, event_name, **kwargs):
    """Rewrite the generated CopySource docs to show both accepted forms.

    Registered for the request-example and request-params documentation
    events; replaces the generated content for the ``CopySource``
    parameter with text covering both the string and dictionary forms.
    """
    if 'request-example' in event_name:
        parent = section.get_section('structure-value')
        param_line = parent.get_section('CopySource')
        value_portion = param_line.get_section('member-value')
        value_portion.clear_text()
        value_portion.write(
            "'string' or {'Bucket': 'string', "
            "'Key': 'string', 'VersionId': 'string'}"
        )
    elif 'request-params' in event_name:
        param_section = section.get_section('CopySource')
        type_section = param_section.get_section('param-type')
        type_section.clear_text()
        type_section.write(':type CopySource: str or dict')
        doc_section = param_section.get_section('param-documentation')
        doc_section.clear_text()
        doc_section.write(
            "The name of the source bucket, key name of the source object, "
            "and optional version ID of the source object. You can either "
            "provide this value as a string or a dictionary. The "
            "string form is {bucket}/{key} or "
            "{bucket}/{key}?versionId={versionId} if you want to copy a "
            "specific version. You can also provide this value as a "
            "dictionary. The dictionary format is recommended over "
            "the string format because it is more explicit. The dictionary "
            "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}."
            " Note that the VersionId key is optional and may be omitted."
            " To specify an S3 access point, provide the access point"
            " ARN for the ``Bucket`` key in the copy source dictionary. If you"
            " want to provide the copy source for an S3 access point as a"
            " string instead of a dictionary, the ARN provided must be the"
            " full S3 access point object ARN"
            " (i.e. {accesspoint_arn}/object/{key})"
        )
422
423
def handle_copy_source_param(params, **kwargs):
    """Convert CopySource param for CopyObject/UploadPartCopy.

    Two cases are handled:

    * CopySource given as a string: best-effort URL encode the key name,
      which requires splitting off the bucket and version id portions so
      only the key is encoded.
    * CopySource given as a dict: Bucket, Key, and VersionId are given
      explicitly, so the key can be encoded and the value serialized
      correctly for S3.

    A missing CopySource is left for the param validator, which produces
    a better error message.
    """
    source = params.get('CopySource')
    if isinstance(source, str):
        params['CopySource'] = _quote_source_header(source)
    elif isinstance(source, dict):
        params['CopySource'] = _quote_source_header_from_dict(source)
449
450
def _quote_source_header_from_dict(source_dict):
    """Serialize a CopySource dict into S3's percent-encoded header string."""
    try:
        bucket = source_dict['Bucket']
        key = source_dict['Key']
    except KeyError as e:
        raise ParamValidationError(
            report=f'Missing required parameter: {str(e)}'
        )
    version_id = source_dict.get('VersionId')
    if VALID_S3_ARN.search(bucket):
        # Access point ARNs address the object through an /object/ prefix.
        final = f'{bucket}/object/{key}'
    else:
        final = f'{bucket}/{key}'
    final = percent_encode(final, safe=SAFE_CHARS + '/')
    if version_id is not None:
        final += f'?versionId={version_id}'
    return final
468
469
def _quote_source_header(value):
    """Percent-encode a CopySource string, leaving a versionId suffix alone."""
    match = VERSION_ID_SUFFIX.search(value)
    if match is None:
        return percent_encode(value, safe=SAFE_CHARS + '/')
    head = value[: match.start()]
    suffix = value[match.start() :]
    return percent_encode(head, safe=SAFE_CHARS + '/') + suffix
477
478
479def _get_cross_region_presigned_url(
480 request_signer, request_dict, model, source_region, destination_region
481):
482 # The better way to do this is to actually get the
483 # endpoint_resolver and get the endpoint_url given the
484 # source region. In this specific case, we know that
485 # we can safely replace the dest region with the source
486 # region because of the supported EC2 regions, but in
487 # general this is not a safe assumption to make.
488 # I think eventually we should try to plumb through something
489 # that allows us to resolve endpoints from regions.
490 request_dict_copy = copy.deepcopy(request_dict)
491 request_dict_copy['body']['DestinationRegion'] = destination_region
492 request_dict_copy['url'] = request_dict['url'].replace(
493 destination_region, source_region
494 )
495 request_dict_copy['method'] = 'GET'
496 request_dict_copy['headers'] = {}
497 return request_signer.generate_presigned_url(
498 request_dict_copy, region_name=source_region, operation_name=model.name
499 )
500
501
502def _get_presigned_url_source_and_destination_regions(request_signer, params):
503 # Gets the source and destination regions to be used
504 destination_region = request_signer._region_name
505 source_region = params.get('SourceRegion')
506 return source_region, destination_region
507
508
def inject_presigned_url_ec2(params, request_signer, model, **kwargs):
    """Generate and inject a cross-region PresignedUrl for EC2 requests."""
    body = params['body']
    # Respect a customer-provided presigned URL.
    if 'PresignedUrl' in body:
        return
    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )
    body['PresignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )
    # EC2 requires the destination region on the wire in addition to the
    # source region.
    body['DestinationRegion'] = dest
523
524
def inject_presigned_url_rds(params, request_signer, model, **kwargs):
    """Generate and inject a cross-region PreSignedUrl for RDS requests."""
    body = params['body']
    # SourceRegion is optional for RDS operations; without it this is
    # probably a local copy and nothing needs to be done.
    if 'SourceRegion' not in body:
        return

    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )

    # SourceRegion isn't actually modeled for RDS, so it must be removed
    # from the request params before the actual request is sent.
    del body['SourceRegion']

    # Respect a customer-provided presigned URL.
    if 'PreSignedUrl' in body:
        return

    body['PreSignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )
547
548
def json_decode_policies(parsed, model, **kwargs):
    """Decode urlencoded-JSON IAM policy documents into dictionaries.

    IAM returns policy documents as urlencode(json.dumps(policy)); walk
    the operation's output shape and decode every policy-document string
    in place so users get a dictionary instead.
    """
    if model.output_shape is not None:
        _decode_policy_types(parsed, model.output_shape)
559
560
561def _decode_policy_types(parsed, shape):
562 # IAM consistently uses the policyDocumentType shape to indicate
563 # strings that have policy documents.
564 shape_name = 'policyDocumentType'
565 if shape.type_name == 'structure':
566 for member_name, member_shape in shape.members.items():
567 if (
568 member_shape.type_name == 'string'
569 and member_shape.name == shape_name
570 and member_name in parsed
571 ):
572 parsed[member_name] = decode_quoted_jsondoc(
573 parsed[member_name]
574 )
575 elif member_name in parsed:
576 _decode_policy_types(parsed[member_name], member_shape)
577 if shape.type_name == 'list':
578 shape_member = shape.member
579 for item in parsed:
580 _decode_policy_types(item, shape_member)
581
582
def parse_get_bucket_location(parsed, http_response, **kwargs):
    """Fill in LocationConstraint by manually parsing the XML body.

    s3.GetBucketLocation cannot be modeled properly, so the "parsed"
    response only carries ResponseMetadata; the region is read straight
    out of the raw XML document here.
    """
    if http_response.raw is None:
        return
    parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8')
    parser.feed(http_response.content)
    parsed['LocationConstraint'] = parser.close().text
597
598
def base64_encode_user_data(params, **kwargs):
    """Base64 encode UserData in place, utf-8 encoding text input first."""
    if 'UserData' not in params:
        return
    user_data = params['UserData']
    if isinstance(user_data, str):
        # Encode it to bytes if it is text.
        user_data = user_data.encode('utf-8')
    params['UserData'] = base64.b64encode(user_data).decode('utf-8')
607
608
def document_base64_encoding(param):
    """Return a doc handler noting that *param* is auto base64 encoded."""
    description = (
        '**This value will be base64 encoded automatically. Do '
        'not base64 encode this value prior to performing the '
        'operation.**'
    )
    return AppendParamDocumentation(param, description).append_documentation
617
618
def validate_ascii_metadata(params, **kwargs):
    """Verify S3 Metadata only contains ascii characters.

    From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html

    "Amazon S3 stores user-defined metadata in lowercase. Each name, value
    pair must conform to US-ASCII when using REST and UTF-8 when using SOAP
    or browser-based uploads via POST."

    Missing or non-dict metadata is ignored here: this handler runs
    before param validation, which produces a descriptive error for
    those cases.
    """
    metadata = params.get('Metadata')
    if not isinstance(metadata, dict) or not metadata:
        return
    for key, value in metadata.items():
        try:
            key.encode('ascii')
            value.encode('ascii')
        except UnicodeEncodeError:
            error_msg = (
                'Non ascii characters found in S3 metadata '
                f'for key "{key}", value: "{value}". \nS3 metadata can only '
                'contain ASCII characters. '
            )
            raise ParamValidationError(report=error_msg)
648
649
def fix_route53_ids(params, model, **kwargs):
    """Strip path prefixes from Route53 resource ids.

    Route53 operations emit ids like ``'foo/1234'`` while other
    operations expect just the last piece (``'1234'``); rewriting the
    matching members lets the output of one operation be used directly
    as input to another.
    """
    input_shape = model.input_shape
    if not input_shape or not hasattr(input_shape, 'members'):
        return

    id_shape_names = ('ResourceId', 'DelegationSetId', 'ChangeId')
    for name, shape in input_shape.members.items():
        if shape.name not in id_shape_names or name not in params:
            continue
        orig_value = params[name]
        params[name] = orig_value.split('/')[-1]
        logger.debug('%s %s -> %s', name, orig_value, params[name])
672
673
def inject_account_id(params, **kwargs):
    """Default Glacier's accountId to '-' (the current account) when unset.

    Glacier requires accountId but allows '-' to mean the current
    owner's account; this fills that in as a convenience.
    """
    if params.get('accountId') is None:
        params['accountId'] = '-'
681
682
def add_glacier_version(model, params, **kwargs):
    """Stamp the modeled apiVersion header onto the Glacier request."""
    headers = params['headers']
    headers['x-amz-glacier-version'] = model.metadata['apiVersion']
688
689
def add_accept_header(model, params, **kwargs):
    """Default the Accept header to application/json when not supplied."""
    headers = params['headers']
    if headers.get('Accept', None) is None:
        headers['Accept'] = 'application/json'
694
695
def add_glacier_checksums(params, **kwargs):
    """Add glacier checksum headers to the http request.

    Two headers are added, each only if not already present:

    * x-amz-content-sha256
    * x-amz-sha256-tree-hash

    A bytes body is temporarily wrapped in a BytesIO so the checksum
    utilities, which expect file-like objects, can be reused; the actual
    body in the request dict is left untouched.
    """
    headers = params['headers']
    body = params['body']
    if isinstance(body, bytes):
        body = BytesIO(body)
    start = body.tell()
    if 'x-amz-content-sha256' not in headers:
        headers['x-amz-content-sha256'] = utils.calculate_sha256(
            body, as_hex=True
        )
        body.seek(start)
    if 'x-amz-sha256-tree-hash' not in headers:
        headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
        body.seek(start)
727
728
def document_glacier_tree_hash_checksum():
    """Return a docs handler appending tree-hash guidance to ``checksum``."""
    doc = '''
    This is a required field.

    Ideally you will want to compute this value with checksums from
    previous uploaded parts, using the algorithm described in
    `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_.

    But if you prefer, you can also use botocore.utils.calculate_tree_hash()
    to compute it from raw file by::

        checksum = calculate_tree_hash(open('your_file.txt', 'rb'))

    '''
    return AppendParamDocumentation('checksum', doc).append_documentation
744
745
def document_cloudformation_get_template_return_type(
    section, event_name, **kwargs
):
    """Document GetTemplate's TemplateBody as a dict rather than a string.

    Registered for the response-params and response-example doc events;
    rewrites the generated type/value text for ``TemplateBody``.
    """
    if 'response-params' in event_name:
        template_body_section = section.get_section('TemplateBody')
        type_section = template_body_section.get_section('param-type')
        type_section.clear_text()
        type_section.write('(*dict*) --')
    elif 'response-example' in event_name:
        parent = section.get_section('structure-value')
        param_line = parent.get_section('TemplateBody')
        value_portion = param_line.get_section('member-value')
        value_portion.clear_text()
        value_portion.write('{}')
760
761
def switch_host_machinelearning(request, **kwargs):
    # Route the request to the host given by the request's
    # 'PredictEndpoint' parameter instead of the default endpoint.
    switch_host_with_param(request, 'PredictEndpoint')
764
765
def check_openssl_supports_tls_version_1_2(**kwargs):
    """Warn when the linked OpenSSL is too old to support TLS 1.2."""
    import ssl

    try:
        if ssl.OPENSSL_VERSION_INFO < (1, 0, 1):
            warnings.warn(
                f'Currently installed openssl version: {ssl.OPENSSL_VERSION} does not '
                'support TLS 1.2, which is required for use of iot-data. '
                'Please use python installed with openssl version 1.0.1 or '
                'higher.',
                UnsupportedTLSVersionWarning,
            )
    except AttributeError:
        # The openssl version cannot be checked on python2.6, so this
        # convenience check is simply skipped there.
        pass
783
784
def change_get_to_post(request, **kwargs):
    """Rewrite a query-string GET as a form-encoded POST.

    Useful when a potentially large GET request must be converted into a
    POST with x-www-form-urlencoded encoding.
    """
    if request.method != 'GET' or '?' not in request.url:
        return
    request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
    request.method = 'POST'
    request.url, request.data = request.url.split('?', 1)
792
793
def set_list_objects_encoding_type_url(params, context, **kwargs):
    """Default EncodingType to 'url', remembering that we set it.

    The context flag lets the response decoders know the encoding was
    not requested by the customer, so keys should be decoded back.
    """
    if 'EncodingType' in params:
        return
    context['encoding_type_auto_set'] = True
    params['EncodingType'] = 'url'
800
801
def decode_list_object(parsed, context, **kwargs):
    """URL-decode ListObjects keys that S3 encoded on our behalf.

    Needed because 'url' is auto-requested as the encoding type and the
    paginator depends on the decoded key.  Per the documentation, S3
    encodes these response elements when encoding-type is given:
    Delimiter, Marker, Prefix, NextMarker, Key.
    """
    _decode_list_object(
        ['Delimiter', 'Marker', 'NextMarker'],
        [('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
        parsed,
        context,
    )
817
818
def decode_list_object_v2(parsed, context, **kwargs):
    """URL-decode ListObjectsV2 keys that S3 encoded on our behalf.

    Per the documentation, S3 encodes these response elements when
    encoding-type is given: Delimiter, Prefix, ContinuationToken, Key,
    and StartAfter.
    """
    _decode_list_object(
        ['Delimiter', 'Prefix', 'StartAfter'],
        [('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
        parsed,
        context,
    )
830
831
def decode_list_object_versions(parsed, context, **kwargs):
    """URL-decode ListObjectVersions keys that S3 encoded on our behalf.

    Per the documentation, S3 encodes these response elements when
    encoding-type is given: KeyMarker, NextKeyMarker, Prefix, Key, and
    Delimiter.
    """
    _decode_list_object(
        ['KeyMarker', 'NextKeyMarker', 'Prefix', 'Delimiter'],
        [
            ('Versions', 'Key'),
            ('DeleteMarkers', 'Key'),
            ('CommonPrefixes', 'Prefix'),
        ],
        parsed,
        context,
    )
852
853
854def _decode_list_object(top_level_keys, nested_keys, parsed, context):
855 if parsed.get('EncodingType') == 'url' and context.get(
856 'encoding_type_auto_set'
857 ):
858 # URL decode top-level keys in the response if present.
859 for key in top_level_keys:
860 if key in parsed:
861 parsed[key] = unquote_str(parsed[key])
862 # URL decode nested keys from the response if present.
863 for top_key, child_key in nested_keys:
864 if top_key in parsed:
865 for member in parsed[top_key]:
866 member[child_key] = unquote_str(member[child_key])
867
868
def convert_body_to_file_like_object(params, **kwargs):
    """Wrap a str/bytes Body in BytesIO so it can be streamed.

    Text bodies are utf-8 encoded first; anything that is not str or
    bytes (including an already file-like body) is left untouched.
    """
    body = params.get('Body')
    if isinstance(body, str):
        params['Body'] = BytesIO(body.encode('utf-8'))
    elif isinstance(body, bytes):
        params['Body'] = BytesIO(body)
875
876
def _add_parameter_aliases(handler_list):
    """Register handlers that expose friendlier parameter aliases.

    Keys are ``<service>.<operation>.parameter``: the first portion
    drives event registration, the last is the original parameter name,
    and the value is the alias exposed in documentation.
    """
    aliases = {
        'ec2.*.Filter': 'Filters',
        'logs.CreateExportTask.from': 'fromTime',
        'cloudsearchdomain.Search.return': 'returnFields',
    }

    for original, new_name in aliases.items():
        event_portion, original_name = original.rsplit('.', 1)
        alias = ParameterAlias(original_name, new_name)
        # One handler accepts the alias when users provide it in a call;
        # the other rewrites the documentation to show only the alias.
        handler_list.append(
            (
                'before-parameter-build.' + event_portion,
                alias.alias_parameter_in_call,
                REGISTER_FIRST,
            )
        )
        handler_list.append(
            (
                'docs.*.' + event_portion + '.complete-section',
                alias.alias_parameter_in_documentation,
            )
        )
908
909
class ParameterAlias:
    """Exposes an alternate user-facing name for a modeled parameter."""

    def __init__(self, original_name, alias_name):
        # Name from the service model / name surfaced to users.
        self._original_name = original_name
        self._alias_name = alias_name

    def alias_parameter_in_call(self, params, model, **kwargs):
        """Translate the alias in ``params`` back to the modeled name.

        Raises AliasConflictParameterError when both names are given.
        """
        input_shape = model.input_shape
        if not input_shape:
            return
        # Only honor the alias when the original name is actually part
        # of the operation's input shape.
        if self._original_name not in input_shape.members:
            return
        if self._alias_name not in params:
            return
        if self._original_name in params:
            raise AliasConflictParameterError(
                original=self._original_name,
                alias=self._alias_name,
                operation=model.name,
            )
        # Move the value from the alias over to the modeled name.
        params[self._original_name] = params.pop(self._alias_name)

    def alias_parameter_in_documentation(self, event_name, section, **kwargs):
        """Swap the modeled name for the alias in generated docs."""
        if event_name.startswith('docs.request-params'):
            if self._original_name not in section.available_sections:
                return
            param_section = section.get_section(self._original_name)
            # Rename in both the type line and the description line.
            self._replace_content(param_section.get_section('param-type'))
            self._replace_content(param_section.get_section('param-name'))
        elif event_name.startswith('docs.request-example'):
            example = section.get_section('structure-value')
            if self._original_name not in example.available_sections:
                return
            # Rename the parameter inside the request example snippet.
            self._replace_content(example.get_section(self._original_name))

    def _replace_content(self, section):
        # Rewrite the section's buffered text with the alias substituted.
        text = section.getvalue().decode('utf-8')
        section.clear_text()
        section.write(text.replace(self._original_name, self._alias_name))
958
959
class ClientMethodAlias:
    """Aliases a non-extant method to an existing method.

    :param actual_name: The name of the method that actually exists on
        the client.
    """

    def __init__(self, actual_name):
        self._actual = actual_name

    def __call__(self, client, **kwargs):
        # Handlers registered for 'getattr.<service>.<alias>' must
        # return the attribute that backs the aliased name.
        return getattr(client, self._actual)
971
972
# TODO: Remove this class as it is no longer used
class HeaderToHostHoister:
    """Takes a header and moves it to the front of the hoststring."""

    _VALID_HOSTNAME = re.compile(r'(?!-)[a-z\d-]{1,63}(?<!-)$', re.IGNORECASE)

    def __init__(self, header_name):
        self._header_name = header_name

    def hoist(self, params, **kwargs):
        """Hoist a header to the hostname.

        Hoist a header to the beginning of the hostname with a suffix "." after
        it. The original header should be removed from the header map. This
        method is intended to be used as a target for the before-call event.
        """
        headers = params['headers']
        if self._header_name not in headers:
            return
        value = headers[self._header_name]
        self._ensure_header_is_valid_host(value)
        params['url'] = self._prepend_to_host(params['url'], value)

    def _ensure_header_is_valid_host(self, header):
        # Reject values that could not legally appear as a host label.
        if not self._VALID_HOSTNAME.match(header):
            raise ParamValidationError(
                report=(
                    'Hostnames must contain only - and alphanumeric characters, '
                    'and between 1 and 63 characters long.'
                )
            )

    def _prepend_to_host(self, url, prefix):
        # Rebuild the URL with the prefixed host; any fragment is
        # dropped, matching the original behavior.
        components = urlsplit(url)
        return urlunsplit(
            (
                components.scheme,
                f'{prefix}.{components.netloc}',
                components.path,
                components.query,
                '',
            )
        )
1021
1022
def inject_api_version_header_if_needed(model, params, **kwargs):
    """Send the service API version header for endpoint-discovery ops."""
    if model.is_endpoint_discovery_operation:
        params['headers']['x-amz-api-version'] = (
            model.service_model.api_version
        )
1027
1028
def remove_lex_v2_start_conversation(class_attributes, **kwargs):
    """Operation requires h2 which is currently unsupported in Python"""
    class_attributes.pop('start_conversation', None)
1033
1034
def remove_qbusiness_chat(class_attributes, **kwargs):
    """Operation requires h2 which is currently unsupported in Python"""
    class_attributes.pop('chat', None)
1039
1040
def add_retry_headers(request, **kwargs):
    """Attach the standard retry-information headers to a request.

    Sets ``amz-sdk-invocation-id`` and summarizes any of ttl/attempt/max
    from the retry context in ``amz-sdk-request``. No-op when the
    request carries no retry context.
    """
    retries_context = request.context.get('retries')
    if not retries_context:
        return
    headers = request.headers
    headers['amz-sdk-invocation-id'] = retries_context['invocation-id']
    request_parts = []
    # Keys are emitted in this fixed order when present.
    for key in ('ttl', 'attempt', 'max'):
        if key in retries_context:
            request_parts.append(f'{key}={retries_context[key]}')
    headers['amz-sdk-request'] = '; '.join(request_parts)
1054
1055
def remove_bucket_from_url_paths_from_model(params, model, context, **kwargs):
    """Strips leading `{Bucket}/` from any operations that have it.

    The stripped value is kept in a separate "authPath" field, which the
    HmacV1Auth signer consumes (see HmacV1Auth.canonical_resource in
    botocore/auth.py). The modification happens the first time the
    operation is invoked and persists for the lifetime of the client.

    With ruleset based endpoint resolution, the endpoint ruleset AND the
    service model would each place the bucket name in the final URL,
    producing an invalid URL. Removing the bucket from the operation
    model here fixes every affected URL before assembly, instead of
    patching URLs after the fact as older botocore versions did.
    """
    bucket_segment = '/{Bucket}'
    request_uri = model.http['requestUri']
    if not request_uri.startswith(bucket_segment):
        return
    model.http['requestUri'] = request_uri[len(bucket_segment):]
    # The HmacV1Auth signer appends query params itself, so the query
    # string must not be part of the authPath.
    auth_path = request_uri.split('?')[0]
    if auth_path == bucket_segment:
        # A bucket-only URI needs a trailing '/' for the server to
        # accept the generated signature.
        auth_path += '/'
    model.http['authPath'] = auth_path
1088
1089
def remove_accid_host_prefix_from_model(params, model, context, **kwargs):
    """Removes the `{AccountId}.` prefix from the operation model.

    Applied the first time the operation is invoked; the change then
    persists for the lifetime of the client object.

    With ruleset based endpoint resolution, the endpoint ruleset AND the
    service model would each add the `{AccountId}.` prefix, producing an
    invalid endpoint. Deleting `endpoint.hostPrefix` here avoids that,
    while the `RequiresAccountId` static context parameter is kept.
    """
    requires_account_id = any(
        param.name == 'RequiresAccountId' and param.value is True
        for param in model.static_context_parameters
    )
    endpoint = model.endpoint
    if (
        requires_account_id
        and endpoint is not None
        and endpoint.get('hostPrefix') == '{AccountId}.'
    ):
        del endpoint['hostPrefix']
1113
1114
def remove_arn_from_signing_path(request, **kwargs):
    """Drop a leading URL-encoded ARN segment from the signing path.

    When the first path segment decodes to a valid ARN, it is removed
    from ``request.auth_path`` so it does not participate in signing.
    """
    auth_path = request.auth_path
    if not (isinstance(auth_path, str) and auth_path.startswith('/arn%3A')):
        return
    segments = auth_path.split('/')
    if len(segments) > 1 and ArnParser.is_arn(unquote(segments[1])):
        # Rejoin everything after the ARN segment, keeping the leading '/'.
        request.auth_path = '/'.join(['', *segments[2:]])
1123
1124
def customize_endpoint_resolver_builtins(
    builtins, model, params, context, **kwargs
):
    """Modify builtin parameter values for endpoint resolver

    Changes are made in place on the ``builtins`` dict and last for a
    single call. The corresponding event fires only when endpoint
    resolution for the operation requires at least one builtin value.
    """
    bucket = params.get('Bucket')
    bucket_is_arn = bucket is not None and ArnParser.is_arn(bucket)

    if model.name == 'GetBucketLocation':
        # The bucket's region is usually unknown to a caller of
        # GetBucketLocation, and a sigv4 request signed for the wrong
        # region can come back as AuthorizationHeaderMalformed.
        # Path-style addressing avoids that.
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = True
    elif bucket_is_arn:
        # Bucket-name ARNs are never compatible with path style.
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = False

    # Bucket names that are invalid host labels require path-style
    # addressing; an explicit path-style request is already reflected in
    # the builtin default.
    path_style_required = bucket is not None and not VALID_HOST_LABEL_RE.match(
        bucket
    )
    path_style_requested = builtins[
        EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE
    ]

    # Path style is incompatible with the global endpoint for presigned
    # URLs, ARN buckets resolve via the ARN's region, and S3 Express
    # buckets are excluded as well.
    use_global = (
        context.get('use_global_endpoint')
        and not path_style_required
        and not path_style_requested
        and not bucket_is_arn
        and not utils.is_s3express_bucket(bucket)
    )
    if use_global:
        builtins[EndpointResolverBuiltins.AWS_REGION] = 'aws-global'
        builtins[EndpointResolverBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT] = True
1169
1170
def remove_content_type_header_for_presigning(request, **kwargs):
    """Remove the Content-Type header when building a presigned request."""
    if request.context.get('is_presign_request') is not True:
        return
    if 'Content-Type' in request.headers:
        del request.headers['Content-Type']
1177
1178
def handle_expires_header(
    operation_model, response_dict, customized_response_dict, **kwargs
):
    """Surface the raw ``Expires`` header as ``ExpiresString``.

    The raw header value is always copied to ``ExpiresString``; when it
    cannot be parsed as a timestamp, a warning is logged and ``Expires``
    is removed from the response headers so parsing does not fail.
    """
    if not _has_expires_shape(operation_model.output_shape):
        return
    expires_value = response_dict.get('headers', {}).get('Expires')
    if not expires_value:
        return
    customized_response_dict['ExpiresString'] = expires_value
    try:
        utils.parse_timestamp(expires_value)
    except (ValueError, RuntimeError):
        logger.warning(
            f'Failed to parse the "Expires" member as a timestamp: {expires_value}. '
            f'The unparsed value is available in the response under "ExpiresString".'
        )
        del response_dict['headers']['Expires']
1193
1194
1195def _has_expires_shape(shape):
1196 if not shape:
1197 return False
1198 return any(
1199 member_shape.name == 'Expires'
1200 and member_shape.serialization.get('name') == 'Expires'
1201 for member_shape in shape.members.values()
1202 )
1203
1204
def document_expires_shape(section, event_name, **kwargs):
    # Documents the synthetic 'ExpiresString' member for S3 operations whose
    # responses include 'Expires', and adds a deprecation notice to the
    # original 'Expires' member.
    if 'response-example' in event_name:
        if not section.has_section('structure-value'):
            return
        structure = section.get_section('structure-value')
        if not structure.has_section('Expires'):
            return
        # Insert the synthetic member right after the 'Expires' line in
        # the example response.
        expires_line = structure.get_section('Expires')
        expires_line.add_new_section('ExpiresString')
        string_line = expires_line.get_section('ExpiresString')
        string_line.write("'ExpiresString': 'string',")
        string_line.style.new_line()
    elif 'response-params' in event_name:
        if not section.has_section('Expires'):
            return
        expires_section = section.get_section('Expires')
        # Deprecation notice on the modeled 'Expires' member.
        note = expires_section.get_section('param-documentation')
        note.style.start_note()
        note.write(
            'This member has been deprecated. Please use ``ExpiresString`` instead.'
        )
        note.style.end_note()
        # Describe the synthetic 'ExpiresString' member.
        string_section = expires_section.add_new_section('ExpiresString')
        string_section.style.new_paragraph()
        string_section.write('- **ExpiresString** *(string) --*')
        string_section.style.indent()
        string_section.style.new_paragraph()
        string_section.write(
            'The raw, unparsed value of the ``Expires`` field.'
        )
1240
1241
# This is a list of (event_name, handler).
# When a Session is created, everything in this list will be
# automatically registered with that Session.
# Entries may optionally carry a third element (REGISTER_FIRST/REGISTER_LAST)
# controlling handler ordering.

BUILTIN_HANDLERS = [
    ('choose-service-name', handle_service_name_alias),
    (
        'getattr.mturk.list_hi_ts_for_qualification_type',
        ClientMethodAlias('list_hits_for_qualification_type'),
    ),
    (
        'before-parameter-build.s3.UploadPart',
        convert_body_to_file_like_object,
        REGISTER_LAST,
    ),
    (
        'before-parameter-build.s3.PutObject',
        convert_body_to_file_like_object,
        REGISTER_LAST,
    ),
    ('creating-client-class', add_generate_presigned_url),
    ('creating-client-class.s3', add_generate_presigned_post),
    ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2),
    ('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation),
    ('creating-client-class.qbusiness', remove_qbusiness_chat),
    ('after-call.iam', json_decode_policies),
    ('after-call.ec2.GetConsoleOutput', decode_console_output),
    ('after-call.cloudformation.GetTemplate', json_decode_template_body),
    ('after-call.s3.GetBucketLocation', parse_get_bucket_location),
    ('before-parse.s3.*', handle_expires_header),
    ('before-parameter-build', generate_idempotent_uuid),
    ('before-parameter-build.s3', validate_bucket_name),
    ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model),
    (
        'before-parameter-build.s3.ListObjects',
        set_list_objects_encoding_type_url,
    ),
    (
        'before-parameter-build.s3.ListObjectsV2',
        set_list_objects_encoding_type_url,
    ),
    (
        'before-parameter-build.s3.ListObjectVersions',
        set_list_objects_encoding_type_url,
    ),
    ('before-parameter-build.s3.CopyObject', handle_copy_source_param),
    ('before-parameter-build.s3.UploadPartCopy', handle_copy_source_param),
    ('before-parameter-build.s3.CopyObject', validate_ascii_metadata),
    ('before-parameter-build.s3.PutObject', validate_ascii_metadata),
    (
        'before-parameter-build.s3.CreateMultipartUpload',
        validate_ascii_metadata,
    ),
    ('before-parameter-build.s3-control', remove_accid_host_prefix_from_model),
    ('docs.*.s3.CopyObject.complete-section', document_copy_source_form),
    ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form),
    ('docs.response-example.s3.*.complete-section', document_expires_shape),
    ('docs.response-params.s3.*.complete-section', document_expires_shape),
    ('before-endpoint-resolution.s3', customize_endpoint_resolver_builtins),
    ('before-call', add_recursion_detection_header),
    ('before-call.s3', add_expect_header),
    ('before-call.glacier', add_glacier_version),
    ('before-call.apigateway', add_accept_header),
    ('before-call.s3.PutObject', conditionally_calculate_checksum),
    ('before-call.s3.UploadPart', conditionally_calculate_md5),
    ('before-call.s3.DeleteObjects', escape_xml_payload),
    ('before-call.s3.DeleteObjects', conditionally_calculate_checksum),
    ('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload),
    ('before-call.glacier.UploadArchive', add_glacier_checksums),
    ('before-call.glacier.UploadMultipartPart', add_glacier_checksums),
    ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2),
    ('request-created', add_retry_headers),
    ('request-created.machinelearning.Predict', switch_host_machinelearning),
    ('needs-retry.s3.UploadPartCopy', check_for_200_error, REGISTER_FIRST),
    ('needs-retry.s3.CopyObject', check_for_200_error, REGISTER_FIRST),
    (
        'needs-retry.s3.CompleteMultipartUpload',
        check_for_200_error,
        REGISTER_FIRST,
    ),
    ('choose-signer.cognito-identity.GetId', disable_signing),
    ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
    ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
    (
        'choose-signer.cognito-identity.GetCredentialsForIdentity',
        disable_signing,
    ),
    ('choose-signer.sts.AssumeRoleWithSAML', disable_signing),
    ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing),
    ('choose-signer', set_operation_specific_signer),
    ('before-parameter-build.s3.HeadObject', sse_md5),
    ('before-parameter-build.s3.GetObject', sse_md5),
    ('before-parameter-build.s3.PutObject', sse_md5),
    ('before-parameter-build.s3.CopyObject', sse_md5),
    ('before-parameter-build.s3.CopyObject', copy_source_sse_md5),
    ('before-parameter-build.s3.CreateMultipartUpload', sse_md5),
    ('before-parameter-build.s3.UploadPart', sse_md5),
    ('before-parameter-build.s3.UploadPartCopy', sse_md5),
    ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5),
    ('before-parameter-build.s3.CompleteMultipartUpload', sse_md5),
    ('before-parameter-build.s3.SelectObjectContent', sse_md5),
    ('before-parameter-build.ec2.RunInstances', base64_encode_user_data),
    (
        'before-parameter-build.autoscaling.CreateLaunchConfiguration',
        base64_encode_user_data,
    ),
    ('before-parameter-build.route53', fix_route53_ids),
    ('before-parameter-build.glacier', inject_account_id),
    ('before-sign.s3', remove_arn_from_signing_path),
    (
        'before-sign.polly.SynthesizeSpeech',
        remove_content_type_header_for_presigning,
    ),
    ('after-call.s3.ListObjects', decode_list_object),
    ('after-call.s3.ListObjectsV2', decode_list_object_v2),
    ('after-call.s3.ListObjectVersions', decode_list_object_versions),
    # Cloudsearchdomain search operation will be sent by HTTP POST
    ('request-created.cloudsearchdomain.Search', change_get_to_post),
    # Glacier documentation customizations
    (
        'docs.*.glacier.*.complete-section',
        AutoPopulatedParam(
            'accountId',
            # BUGFIX: the adjacent literals previously concatenated to
            # "bydefault" (missing space) and read "if no value is not
            # specified" (double negative).
            'Note: this parameter is set to "-" by '
            'default if no value is specified.',
        ).document_auto_populated_param,
    ),
    (
        'docs.*.glacier.UploadArchive.complete-section',
        AutoPopulatedParam('checksum').document_auto_populated_param,
    ),
    (
        'docs.*.glacier.UploadMultipartPart.complete-section',
        AutoPopulatedParam('checksum').document_auto_populated_param,
    ),
    (
        'docs.request-params.glacier.CompleteMultipartUpload.complete-section',
        document_glacier_tree_hash_checksum(),
    ),
    # Cloudformation documentation customizations
    (
        'docs.*.cloudformation.GetTemplate.complete-section',
        document_cloudformation_get_template_return_type,
    ),
    # UserData base64 encoding documentation customizations
    (
        'docs.*.ec2.RunInstances.complete-section',
        document_base64_encoding('UserData'),
    ),
    (
        'docs.*.autoscaling.CreateLaunchConfiguration.complete-section',
        document_base64_encoding('UserData'),
    ),
    # EC2 CopySnapshot documentation customizations
    (
        'docs.*.ec2.CopySnapshot.complete-section',
        AutoPopulatedParam('PresignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.ec2.CopySnapshot.complete-section',
        AutoPopulatedParam('DestinationRegion').document_auto_populated_param,
    ),
    # S3 SSE documentation modifications
    (
        'docs.*.s3.*.complete-section',
        AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param,
    ),
    # S3 SSE Copy Source documentation modifications
    (
        'docs.*.s3.*.complete-section',
        AutoPopulatedParam(
            'CopySourceSSECustomerKeyMD5'
        ).document_auto_populated_param,
    ),
    # Add base64 information to Lambda
    (
        'docs.*.lambda.UpdateFunctionCode.complete-section',
        document_base64_encoding('ZipFile'),
    ),
    # The following S3 operations cannot actually accept a ContentMD5
    (
        'docs.*.s3.*.complete-section',
        HideParamFromOperations(
            's3',
            'ContentMD5',
            [
                'DeleteObjects',
                'PutBucketAcl',
                'PutBucketCors',
                'PutBucketLifecycle',
                'PutBucketLogging',
                'PutBucketNotification',
                'PutBucketPolicy',
                'PutBucketReplication',
                'PutBucketRequestPayment',
                'PutBucketTagging',
                'PutBucketVersioning',
                'PutBucketWebsite',
                'PutObjectAcl',
            ],
        ).hide_param,
    ),
    #############
    # RDS
    #############
    ('creating-client-class.rds', add_generate_db_auth_token),
    ('before-call.rds.CopyDBClusterSnapshot', inject_presigned_url_rds),
    ('before-call.rds.CreateDBCluster', inject_presigned_url_rds),
    ('before-call.rds.CopyDBSnapshot', inject_presigned_url_rds),
    ('before-call.rds.CreateDBInstanceReadReplica', inject_presigned_url_rds),
    (
        'before-call.rds.StartDBInstanceAutomatedBackupsReplication',
        inject_presigned_url_rds,
    ),
    # RDS PresignedUrl documentation customizations
    (
        'docs.*.rds.CopyDBClusterSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.CreateDBCluster.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.CopyDBSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.CreateDBInstanceReadReplica.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.StartDBInstanceAutomatedBackupsReplication.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    #############
    # Neptune
    #############
    ('before-call.neptune.CopyDBClusterSnapshot', inject_presigned_url_rds),
    ('before-call.neptune.CreateDBCluster', inject_presigned_url_rds),
    # Neptune PresignedUrl documentation customizations
    (
        'docs.*.neptune.CopyDBClusterSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.neptune.CreateDBCluster.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    #############
    # DocDB
    #############
    ('before-call.docdb.CopyDBClusterSnapshot', inject_presigned_url_rds),
    ('before-call.docdb.CreateDBCluster', inject_presigned_url_rds),
    # DocDB PresignedUrl documentation customizations
    (
        'docs.*.docdb.CopyDBClusterSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.docdb.CreateDBCluster.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    ('before-call', inject_api_version_header_if_needed),
]
# Register the parameter-alias handlers (e.g. ec2 Filter -> Filters).
_add_parameter_aliases(BUILTIN_HANDLERS)