Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/botocore/handlers.py: 26%
484 statements
« prev ^ index » next coverage.py v7.3.2, created at 2023-12-08 06:51 +0000
1# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License"). You
4# may not use this file except in compliance with the License. A copy of
5# the License is located at
6#
7# http://aws.amazon.com/apache2.0/
8#
9# or in the "license" file accompanying this file. This file is
10# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11# ANY KIND, either express or implied. See the License for the specific
12# language governing permissions and limitations under the License.
14"""Builtin event handlers.
16This module contains builtin handlers for events emitted by botocore.
17"""
19import base64
20import copy
21import logging
22import os
23import re
24import uuid
25import warnings
26from io import BytesIO
28import botocore
29import botocore.auth
30from botocore import utils
31from botocore.compat import (
32 ETree,
33 OrderedDict,
34 XMLParseError,
35 ensure_bytes,
36 get_md5,
37 json,
38 quote,
39 unquote,
40 unquote_str,
41 urlsplit,
42 urlunsplit,
43)
44from botocore.docs.utils import (
45 AppendParamDocumentation,
46 AutoPopulatedParam,
47 HideParamFromOperations,
48)
49from botocore.endpoint_provider import VALID_HOST_LABEL_RE
50from botocore.exceptions import (
51 AliasConflictParameterError,
52 ParamValidationError,
53 UnsupportedTLSVersionWarning,
54)
55from botocore.regions import EndpointResolverBuiltins
56from botocore.signers import (
57 add_generate_db_auth_token,
58 add_generate_presigned_post,
59 add_generate_presigned_url,
60)
61from botocore.utils import (
62 SAFE_CHARS,
63 ArnParser,
64 conditionally_calculate_checksum,
65 conditionally_calculate_md5,
66 percent_encode,
67 switch_host_with_param,
68)
70# Keep these imported. There's pre-existing code that uses them.
71from botocore import retryhandler # noqa
72from botocore import translate # noqa
73from botocore.compat import MD5_AVAILABLE # noqa
74from botocore.exceptions import MissingServiceIdError # noqa
75from botocore.utils import hyphenize_service_id # noqa
76from botocore.utils import is_global_accesspoint # noqa
77from botocore.utils import SERVICE_NAME_ALIASES # noqa
80logger = logging.getLogger(__name__)
82REGISTER_FIRST = object()
83REGISTER_LAST = object()
84# From the S3 docs:
85# The rules for bucket names in the US Standard region allow bucket names
86# to be as long as 255 characters, and bucket names can contain any
87# combination of uppercase letters, lowercase letters, numbers, periods
88# (.), hyphens (-), and underscores (_).
89VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$')
90_ACCESSPOINT_ARN = (
91 r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]*:[0-9]{12}:accesspoint[/:]'
92 r'[a-zA-Z0-9\-.]{1,63}$'
93)
94_OUTPOST_ARN = (
95 r'^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]'
96 r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$'
97)
98VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN]))
99# signing names used for the services s3 and s3-control, for example in
100# botocore/data/s3/2006-03-01/endpoints-rule-set-1.json
101S3_SIGNING_NAMES = ('s3', 's3-outposts', 's3-object-lambda', 's3express')
102VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$')
def handle_service_name_alias(service_name, **kwargs):
    """Resolve a service name to its canonical name, if an alias exists."""
    if service_name in SERVICE_NAME_ALIASES:
        return SERVICE_NAME_ALIASES[service_name]
    return service_name
def add_recursion_detection_header(params, **kwargs):
    """Propagate the X-Ray trace id header when running inside AWS Lambda.

    The header is injected only when both the Lambda function-name and the
    trace-id environment variables are present, and a header already set by
    the caller is never overwritten.
    """
    trace_id = os.environ.get('_X_AMZN_TRACE_ID')
    if 'AWS_LAMBDA_FUNCTION_NAME' not in os.environ or not trace_id:
        return
    headers = params['headers']
    if 'X-Amzn-Trace-Id' not in headers:
        headers['X-Amzn-Trace-Id'] = quote(trace_id, safe='-=;:+&[]{}"\',')
def escape_xml_payload(params, **kwargs):
    """Escape CR/LF in an XML request body before it is sent.

    Replace \r and \n with the escaped numeric character references over the
    whole XML document to avoid linebreak normalization modifying customer
    input when the document is parsed. Ideally, we would do this in
    ElementTree.tostring, but it doesn't allow us to override entity escaping
    for text fields. For this operation \r and \n can only appear in the XML
    document if they were passed as part of the customer input.

    NOTE: the byte literals below were corrupted by HTML extraction into raw
    CR/LF characters, which made the replacements no-ops; restored to the
    XML numeric entities ``&#13;`` and ``&#10;``.
    """
    body = params['body']
    if b'\r' in body:
        body = body.replace(b'\r', b'&#13;')
    if b'\n' in body:
        body = body.replace(b'\n', b'&#10;')
    params['body'] = body
def check_for_200_error(response, **kwargs):
    """Demote S3 copy responses that hide an error inside a 200 OK.

    From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html
    a copy can fail *during* the copy operation, in which case S3 embeds the
    error in a 200 OK response body. Conceptually such a response should be
    handled exactly like a 500 (raising exceptions, retries, etc.). Since
    this handler runs before the retry-logic handlers, switching the status
    code to 500 is enough to get the error retried as expected.
    """
    if response is None:
        # No HTTP response at all: an exception was raised while trying to
        # retrieve it (see Endpoint._get_response()), nothing to inspect.
        return
    http_response, parsed = response
    if not _looks_like_special_case_error(http_response):
        return
    logger.debug(
        "Error found for response with 200 status code, "
        "errors: %s, changing status code to 500.",
        parsed,
    )
    http_response.status_code = 500
165def _looks_like_special_case_error(http_response):
166 if http_response.status_code == 200:
167 try:
168 parser = ETree.XMLParser(
169 target=ETree.TreeBuilder(), encoding='utf-8'
170 )
171 parser.feed(http_response.content)
172 root = parser.close()
173 except XMLParseError:
174 # In cases of network disruptions, we may end up with a partial
175 # streamed response from S3. We need to treat these cases as
176 # 500 Service Errors and try again.
177 return True
178 if root.tag == 'Error':
179 return True
180 return False
def set_operation_specific_signer(context, signing_name, **kwargs):
    """Choose the operation-specific signer.

    Individual operations may have a different auth type than the service as
    a whole. Most often that means the operation should not be authenticated
    at all, but other modes such as sigv4 without body signing occur too.
    Returns None to keep the service default.
    """
    auth_type = context.get('auth_type')
    if not auth_type:
        # No operation-level auth type configured.
        return None
    if auth_type == 'none':
        # Operation must not be signed at all.
        return botocore.UNSIGNED
    if auth_type == 'bearer':
        return 'bearer'
    if not auth_type.startswith('v4'):
        return None
    if auth_type == 'v4-s3express':
        return auth_type
    if auth_type == 'v4a':
        # sigv4a requires additional signing config for a global signature.
        context.setdefault('signing', {}).update(
            {'region': '*', 'signing_name': signing_name}
        )
        signature_version = 'v4a'
    else:
        signature_version = 'v4'
    if auth_type == 'v4-unsigned-body':
        # Let the signer know the payload must not be signed.
        context['payload_signing_enabled'] = False
    if signing_name in S3_SIGNING_NAMES:
        # s3 / s3-control use the customized "s3v4" / "s3v4a" signers.
        signature_version = f's3{signature_version}'
    return signature_version
def decode_console_output(parsed, **kwargs):
    """Base64-decode the EC2 console 'Output' field in place."""
    if 'Output' not in parsed:
        return
    try:
        raw = bytes(parsed['Output'], 'latin-1')
        # 'replace' because console output may contain bytes that are not
        # valid utf-8 and we still want a best-effort string back.
        parsed['Output'] = base64.b64decode(raw).decode('utf-8', 'replace')
    except (ValueError, TypeError, AttributeError):
        logger.debug('Error decoding base64', exc_info=True)
def generate_idempotent_uuid(params, model, **kwargs):
    """Fill in a random UUID for any idempotency member the caller omitted."""
    for name in model.idempotent_members:
        if name in params:
            # The caller supplied their own token; keep it.
            continue
        params[name] = str(uuid.uuid4())
        logger.debug(
            "injecting idempotency token (%s) into param '%s'."
            % (params[name], name)
        )
def decode_quoted_jsondoc(value):
    """Decode a urlencoded JSON document; return the input unchanged on failure."""
    try:
        return json.loads(unquote(value))
    except (ValueError, TypeError):
        logger.debug('Error loading quoted JSON', exc_info=True)
        return value
def json_decode_template_body(parsed, **kwargs):
    """Parse CloudFormation's TemplateBody JSON string into an OrderedDict."""
    if 'TemplateBody' not in parsed:
        return
    try:
        decoded = json.loads(
            parsed['TemplateBody'], object_pairs_hook=OrderedDict
        )
    except (ValueError, TypeError):
        # Leave the raw string in place if it isn't valid JSON.
        logger.debug('error loading JSON', exc_info=True)
    else:
        parsed['TemplateBody'] = decoded
def validate_bucket_name(params, **kwargs):
    """Raise ParamValidationError when the Bucket param matches neither the
    plain bucket-name regex nor the accesspoint/outpost ARN regexes."""
    if 'Bucket' not in params:
        return
    bucket = params['Bucket']
    if VALID_BUCKET.search(bucket) or VALID_S3_ARN.search(bucket):
        return
    raise ParamValidationError(
        report=(
            f'Invalid bucket name "{bucket}": Bucket name must match '
            f'the regex "{VALID_BUCKET.pattern}" or be an ARN matching '
            f'the regex "{VALID_S3_ARN.pattern}"'
        )
    )
def sse_md5(params, **kwargs):
    """Base64-encode the SSE-C key and add its MD5 digest when missing.

    S3 server-side encryption with customer keys requires the key to be sent
    base64 encoded along with a base64-encoded MD5 hash of the key; both are
    filled in here if the caller has not already set them.
    """
    _sse_md5(params, 'SSECustomer')
def copy_source_sse_md5(params, **kwargs):
    """Base64-encode the copy-source SSE-C key and add its MD5 when missing.

    Same contract as ``sse_md5`` but for the CopySource variant of the
    customer-provided encryption key parameters.
    """
    _sse_md5(params, 'CopySourceSSECustomer')
def _sse_md5(params, sse_member_prefix='SSECustomer'):
    """Encode <prefix>Key as base64 and populate <prefix>KeyMD5 in place."""
    if not _needs_s3_sse_customization(params, sse_member_prefix):
        return
    key_member = sse_member_prefix + 'Key'
    md5_member = sse_member_prefix + 'KeyMD5'
    key_bytes = params[key_member]
    if isinstance(key_bytes, str):
        key_bytes = key_bytes.encode('utf-8')
    params[key_member] = base64.b64encode(key_bytes).decode('utf-8')
    params[md5_member] = base64.b64encode(
        get_md5(key_bytes).digest()
    ).decode('utf-8')
328def _needs_s3_sse_customization(params, sse_member_prefix):
329 return (
330 params.get(sse_member_prefix + 'Key') is not None
331 and sse_member_prefix + 'KeyMD5' not in params
332 )
def disable_signing(**kwargs):
    """Disable request signing.

    Returns the special UNSIGNED sentinel, which the signing machinery
    interprets as "do not sign this request at all".
    """
    return botocore.UNSIGNED
def add_expect_header(model, params, **kwargs):
    """Add 'Expect: 100-continue' for PUT/POST requests with file-like bodies."""
    if model.http.get('method', '') not in ('PUT', 'POST'):
        return
    body = params.get('body')
    if hasattr(body, 'read'):
        # Any file-like object gets expect-continue regardless of size.
        logger.debug("Adding expect 100 continue header to request.")
        params['headers']['Expect'] = '100-continue'
class DeprecatedServiceDocumenter:
    """Injects a deprecation notice pointing users at a replacement client."""

    def __init__(self, replacement_service_name):
        # Name of the client users should migrate to.
        self._replacement_service_name = replacement_service_name

    def inject_deprecation_notice(self, section, event_name, **kwargs):
        """Write the deprecation admonition into *section*."""
        section.style.start_important()
        section.write('This service client is deprecated. Please use ')
        section.style.ref(
            self._replacement_service_name,
            self._replacement_service_name,
        )
        section.write(' instead.')
        section.style.end_important()
def document_copy_source_form(section, event_name, **kwargs):
    """Rewrite the generated CopySource docs to show the string-or-dict form."""
    if 'request-example' in event_name:
        value_portion = (
            section.get_section('structure-value')
            .get_section('CopySource')
            .get_section('member-value')
        )
        value_portion.clear_text()
        value_portion.write(
            "'string' or {'Bucket': 'string', "
            "'Key': 'string', 'VersionId': 'string'}"
        )
    elif 'request-params' in event_name:
        param_section = section.get_section('CopySource')
        type_section = param_section.get_section('param-type')
        type_section.clear_text()
        type_section.write(':type CopySource: str or dict')
        doc_section = param_section.get_section('param-documentation')
        doc_section.clear_text()
        doc_section.write(
            "The name of the source bucket, key name of the source object, "
            "and optional version ID of the source object. You can either "
            "provide this value as a string or a dictionary. The "
            "string form is {bucket}/{key} or "
            "{bucket}/{key}?versionId={versionId} if you want to copy a "
            "specific version. You can also provide this value as a "
            "dictionary. The dictionary format is recommended over "
            "the string format because it is more explicit. The dictionary "
            "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}."
            " Note that the VersionId key is optional and may be omitted."
            " To specify an S3 access point, provide the access point"
            " ARN for the ``Bucket`` key in the copy source dictionary. If you"
            " want to provide the copy source for an S3 access point as a"
            " string instead of a dictionary, the ARN provided must be the"
            " full S3 access point object ARN"
            " (i.e. {accesspoint_arn}/object/{key})"
        )
def handle_copy_source_param(params, **kwargs):
    """Convert CopySource for CopyObject/UploadPartCopy.

    Two forms are accepted:

    * a string — the key portion is URL encoded on a best-effort basis,
      which requires splitting off the bucket and any version id first;
    * a dict — Bucket, Key and VersionId are given explicitly, so the key
      is encoded and the value serialized deterministically for S3.
    """
    source = params.get('CopySource')
    if source is None:
        # Missing value: let the param validator raise its (better) error.
        return
    if isinstance(source, dict):
        params['CopySource'] = _quote_source_header_from_dict(source)
    elif isinstance(source, str):
        params['CopySource'] = _quote_source_header(source)
def _quote_source_header_from_dict(source_dict):
    """Serialize a {'Bucket','Key','VersionId'} CopySource dict to a header value."""
    version_id = source_dict.get('VersionId')
    try:
        bucket = source_dict['Bucket']
        key = source_dict['Key']
    except KeyError as e:
        raise ParamValidationError(
            report=f'Missing required parameter: {str(e)}'
        )
    if VALID_S3_ARN.search(bucket):
        # Access point ARNs address objects via '<arn>/object/<key>'.
        path = f'{bucket}/object/{key}'
    else:
        path = f'{bucket}/{key}'
    encoded = percent_encode(path, safe=SAFE_CHARS + '/')
    if version_id is not None:
        encoded += '?versionId=%s' % version_id
    return encoded
def _quote_source_header(value):
    """Percent-encode a string CopySource, leaving any ?versionId= suffix intact."""
    match = VERSION_ID_SUFFIX.search(value)
    if match is None:
        return percent_encode(value, safe=SAFE_CHARS + '/')
    head = value[: match.start()]
    suffix = value[match.start() :]
    return percent_encode(head, safe=SAFE_CHARS + '/') + suffix
462def _get_cross_region_presigned_url(
463 request_signer, request_dict, model, source_region, destination_region
464):
465 # The better way to do this is to actually get the
466 # endpoint_resolver and get the endpoint_url given the
467 # source region. In this specific case, we know that
468 # we can safely replace the dest region with the source
469 # region because of the supported EC2 regions, but in
470 # general this is not a safe assumption to make.
471 # I think eventually we should try to plumb through something
472 # that allows us to resolve endpoints from regions.
473 request_dict_copy = copy.deepcopy(request_dict)
474 request_dict_copy['body']['DestinationRegion'] = destination_region
475 request_dict_copy['url'] = request_dict['url'].replace(
476 destination_region, source_region
477 )
478 request_dict_copy['method'] = 'GET'
479 request_dict_copy['headers'] = {}
480 return request_signer.generate_presigned_url(
481 request_dict_copy, region_name=source_region, operation_name=model.name
482 )
485def _get_presigned_url_source_and_destination_regions(request_signer, params):
486 # Gets the source and destination regions to be used
487 destination_region = request_signer._region_name
488 source_region = params.get('SourceRegion')
489 return source_region, destination_region
def inject_presigned_url_ec2(params, request_signer, model, **kwargs):
    """Auto-populate PresignedUrl for EC2 cross-region copy requests."""
    body = params['body']
    if 'PresignedUrl' in body:
        # The customer supplied their own presigned URL; leave it alone.
        return
    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )
    body['PresignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )
    # EC2 requires the destination region on the wire in addition to the
    # source region embedded in the presigned URL.
    body['DestinationRegion'] = dest
def inject_presigned_url_rds(params, request_signer, model, **kwargs):
    """Auto-populate PreSignedUrl for RDS cross-region copy requests."""
    body = params['body']
    if 'SourceRegion' not in body:
        # SourceRegion is optional for RDS; without it this is probably a
        # local copy and there is nothing to presign.
        return
    src, dest = _get_presigned_url_source_and_destination_regions(
        request_signer, body
    )
    # SourceRegion isn't actually modeled for RDS, so strip it before the
    # real request is serialized.
    del body['SourceRegion']
    if 'PreSignedUrl' in body:
        # A customer-provided URL always wins.
        return
    body['PreSignedUrl'] = _get_cross_region_presigned_url(
        request_signer, params, model, src, dest
    )
def json_decode_policies(parsed, model, **kwargs):
    """Decode urlencoded-JSON IAM policy documents into dictionaries.

    IAM returns policy documents as urlencode(json.dumps(policy_document));
    urldecode and json-load them so callers get real dicts instead of the
    doubly-encoded strings.
    """
    output_shape = model.output_shape
    if output_shape is None:
        return
    _decode_policy_types(parsed, output_shape)
def _decode_policy_types(parsed, shape):
    """Recursively decode any policyDocumentType string members in *parsed*."""
    # IAM consistently uses this shape name for strings holding policies.
    policy_shape_name = 'policyDocumentType'
    if shape.type_name == 'structure':
        for member_name, member_shape in shape.members.items():
            if member_name not in parsed:
                continue
            is_policy_doc = (
                member_shape.type_name == 'string'
                and member_shape.name == policy_shape_name
            )
            if is_policy_doc:
                parsed[member_name] = decode_quoted_jsondoc(
                    parsed[member_name]
                )
            else:
                _decode_policy_types(parsed[member_name], member_shape)
    if shape.type_name == 'list':
        item_shape = shape.member
        for item in parsed:
            _decode_policy_types(item, item_shape)
def parse_get_bucket_location(parsed, http_response, **kwargs):
    """Manually fill in LocationConstraint from GetBucketLocation's XML body.

    s3.GetBucketLocation cannot be modeled properly, so the "parsed" dict
    arrives with only ResponseMetadata populated; the XML document is parsed
    here and its root text becomes the LocationConstraint value.
    """
    if http_response.raw is None:
        return
    parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8')
    parser.feed(http_response.content)
    root = parser.close()
    parsed['LocationConstraint'] = root.text
def base64_encode_user_data(params, **kwargs):
    """Base64-encode the EC2 UserData parameter in place."""
    if 'UserData' not in params:
        return
    user_data = params['UserData']
    if isinstance(user_data, str):
        # Text must become bytes before it can be base64 encoded.
        user_data = user_data.encode('utf-8')
    params['UserData'] = base64.b64encode(user_data).decode('utf-8')
def document_base64_encoding(param):
    """Return a doc handler that appends the auto-base64 notice to *param*."""
    note = (
        '**This value will be base64 encoded automatically. Do '
        'not base64 encode this value prior to performing the '
        'operation.**'
    )
    return AppendParamDocumentation(param, note).append_documentation
def validate_ascii_metadata(params, **kwargs):
    """Verify S3 Metadata only contains ascii characters.

    From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html

    "Amazon S3 stores user-defined metadata in lowercase. Each name, value
    pair must conform to US-ASCII when using REST and UTF-8 when using SOAP
    or browser-based uploads via POST."
    """
    metadata = params.get('Metadata')
    if not isinstance(metadata, dict) or not metadata:
        # This handler runs before param validation, so type-check here and
        # return; the validator will produce the descriptive error for a
        # bad or missing value.
        return
    for key, value in metadata.items():
        try:
            key.encode('ascii')
            value.encode('ascii')
        except UnicodeEncodeError:
            raise ParamValidationError(
                report=(
                    'Non ascii characters found in S3 metadata '
                    'for key "%s", value: "%s". \nS3 metadata can only '
                    'contain ASCII characters. ' % (key, value)
                )
            )
def fix_route53_ids(params, model, **kwargs):
    """Strip the 'type/' prefix from Route53 resource ids.

    Lets the output of one operation (e.g. ``'foo/1234'``) be fed directly
    into another that expects just the id part (``'1234'``).
    """
    input_shape = model.input_shape
    if not input_shape or not hasattr(input_shape, 'members'):
        return
    id_shape_names = ('ResourceId', 'DelegationSetId', 'ChangeId')
    for name, shape in input_shape.members.items():
        if shape.name not in id_shape_names or name not in params:
            continue
        orig_value = params[name]
        params[name] = orig_value.split('/')[-1]
        logger.debug('%s %s -> %s', name, orig_value, params[name])
def inject_account_id(params, **kwargs):
    """Default Glacier's accountId parameter to '-' when unset.

    Glacier requires accountId but accepts '-' to mean "the account that
    owns the credentials", so it is filled in as a convenience.
    """
    if params.get('accountId') is None:
        params['accountId'] = '-'
def add_glacier_version(model, params, **kwargs):
    """Stamp the modeled API version into the x-amz-glacier-version header."""
    api_version = model.metadata['apiVersion']
    params['headers']['x-amz-glacier-version'] = api_version
def add_accept_header(model, params, **kwargs):
    """Default the Accept header to application/json when unset."""
    headers = params['headers']
    if headers.get('Accept', None) is None:
        headers['Accept'] = 'application/json'
def add_glacier_checksums(params, **kwargs):
    """Add glacier checksums to the http request.

    Two headers are added, each only when not already present:

    * x-amz-content-sha256
    * x-amz-sha256-tree-hash
    """
    request_dict = params
    headers = request_dict['headers']
    body = request_dict['body']
    if isinstance(body, bytes):
        # Wrap raw bytes in a temporary BytesIO so the checksum utils,
        # which expect file-like objects, can be reused. The body stored
        # in the request dict itself is left untouched.
        body = BytesIO(body)
    starting_position = body.tell()
    if 'x-amz-content-sha256' not in headers:
        headers['x-amz-content-sha256'] = utils.calculate_sha256(
            body, as_hex=True
        )
        body.seek(starting_position)
    if 'x-amz-sha256-tree-hash' not in headers:
        headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
        body.seek(starting_position)
def document_glacier_tree_hash_checksum():
    """Return a doc handler appending tree-hash guidance to 'checksum'."""
    doc = '''
    This is a required field.
    Ideally you will want to compute this value with checksums from
    previous uploaded parts, using the algorithm described in
    `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_.
    But if you prefer, you can also use botocore.utils.calculate_tree_hash()
    to compute it from raw file by::
    checksum = calculate_tree_hash(open('your_file.txt', 'rb'))
    '''
    return AppendParamDocumentation('checksum', doc).append_documentation
def document_cloudformation_get_template_return_type(
    section, event_name, **kwargs
):
    """Document TemplateBody as a dict (it is json-decoded by a handler)."""
    if 'response-params' in event_name:
        type_section = section.get_section('TemplateBody').get_section(
            'param-type'
        )
        type_section.clear_text()
        type_section.write('(*dict*) --')
    elif 'response-example' in event_name:
        value_portion = (
            section.get_section('structure-value')
            .get_section('TemplateBody')
            .get_section('member-value')
        )
        value_portion.clear_text()
        value_portion.write('{}')
def switch_host_machinelearning(request, **kwargs):
    """Redirect the request host to the endpoint named by PredictEndpoint."""
    switch_host_with_param(request, 'PredictEndpoint')
def check_openssl_supports_tls_version_1_2(**kwargs):
    """Warn when the linked OpenSSL is too old to speak TLS 1.2."""
    import ssl

    try:
        # OPENSSL_VERSION_INFO is missing on some very old Pythons
        # (e.g. 2.6); in that case we simply skip this convenience check.
        version_info = ssl.OPENSSL_VERSION_INFO
    except AttributeError:
        return
    if version_info < (1, 0, 1):
        warnings.warn(
            'Currently installed openssl version: %s does not '
            'support TLS 1.2, which is required for use of iot-data. '
            'Please use python installed with openssl version 1.0.1 or '
            'higher.' % (ssl.OPENSSL_VERSION),
            UnsupportedTLSVersionWarning,
        )
def change_get_to_post(request, **kwargs):
    """Rewrite a query-string GET into a form-encoded POST.

    Useful when a potentially large GET request needs to be sent as a POST
    with x-www-form-urlencoded encoding instead.
    """
    if request.method != 'GET' or '?' not in request.url:
        return
    request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
    request.method = 'POST'
    request.url, request.data = request.url.split('?', 1)
def set_list_objects_encoding_type_url(params, context, **kwargs):
    """Default EncodingType to 'url', recording that we (not the user) set it."""
    if 'EncodingType' in params:
        return
    # Record that the encoding was injected by botocore rather than
    # requested by the customer, so the response decoder knows to undo it.
    context['encoding_type_auto_set'] = True
    params['EncodingType'] = 'url'
def decode_list_object(parsed, context, **kwargs):
    """URL-decode ListObjects response keys when botocore set EncodingType.

    The paginator is keyed on these values, so they must be decoded before
    they can be round-tripped. Per the S3 docs, when encoding-type is given
    the encoded response elements are: Delimiter, Marker, Prefix,
    NextMarker, Key.
    """
    _decode_list_object(
        top_level_keys=['Delimiter', 'Marker', 'NextMarker'],
        nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
        parsed=parsed,
        context=context,
    )
def decode_list_object_v2(parsed, context, **kwargs):
    """URL-decode ListObjectsV2 response keys when botocore set EncodingType.

    Per the S3 docs, when encoding-type is given the encoded response
    elements are: Delimiter, Prefix, ContinuationToken, Key, and StartAfter.
    """
    _decode_list_object(
        top_level_keys=['Delimiter', 'Prefix', 'StartAfter'],
        nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
        parsed=parsed,
        context=context,
    )
def decode_list_object_versions(parsed, context, **kwargs):
    """URL-decode ListObjectVersions keys when botocore set EncodingType.

    Per the S3 docs, when encoding-type is given the encoded response
    elements are: KeyMarker, NextKeyMarker, Prefix, Key, and Delimiter.
    """
    _decode_list_object(
        top_level_keys=[
            'KeyMarker',
            'NextKeyMarker',
            'Prefix',
            'Delimiter',
        ],
        nested_keys=[
            ('Versions', 'Key'),
            ('DeleteMarkers', 'Key'),
            ('CommonPrefixes', 'Prefix'),
        ],
        parsed=parsed,
        context=context,
    )
837def _decode_list_object(top_level_keys, nested_keys, parsed, context):
838 if parsed.get('EncodingType') == 'url' and context.get(
839 'encoding_type_auto_set'
840 ):
841 # URL decode top-level keys in the response if present.
842 for key in top_level_keys:
843 if key in parsed:
844 parsed[key] = unquote_str(parsed[key])
845 # URL decode nested keys from the response if present.
846 for top_key, child_key in nested_keys:
847 if top_key in parsed:
848 for member in parsed[top_key]:
849 member[child_key] = unquote_str(member[child_key])
def convert_body_to_file_like_object(params, **kwargs):
    """Wrap a str/bytes Body param in a BytesIO so it can be streamed."""
    body = params.get('Body')
    if isinstance(body, str):
        params['Body'] = BytesIO(ensure_bytes(body))
    elif isinstance(body, bytes):
        params['Body'] = BytesIO(body)
def _add_parameter_aliases(handler_list):
    """Register call-time and doc-time handlers for aliased parameters.

    Mapping keys are '<service>.<operation>.<original param>'; values are
    the alias exposed in documentation. The event portion of the key drives
    handler registration and the final portion is the original parameter
    name.
    """
    aliases = {
        'ec2.*.Filter': 'Filters',
        'logs.CreateExportTask.from': 'fromTime',
        'cloudsearchdomain.Search.return': 'returnFields',
    }
    for original, new_name in aliases.items():
        event_portion, original_name = original.rsplit('.', 1)
        parameter_alias = ParameterAlias(original_name, new_name)
        # Two handlers per alias: one accepts the alias when users provide
        # it at call time, the other rewrites the generated documentation
        # to show only the alias.
        handler_list.append(
            (
                'before-parameter-build.' + event_portion,
                parameter_alias.alias_parameter_in_call,
                REGISTER_FIRST,
            )
        )
        handler_list.append(
            (
                'docs.*.' + event_portion + '.complete-section',
                parameter_alias.alias_parameter_in_documentation,
            )
        )
class ParameterAlias:
    """Exposes an aliased name for a modeled parameter.

    At call time the alias is accepted (and translated back to the original
    modeled name); in generated documentation only the alias is shown.
    """

    def __init__(self, original_name, alias_name):
        self._original_name = original_name
        self._alias_name = alias_name

    def alias_parameter_in_call(self, params, model, **kwargs):
        """Translate the alias in *params* back to the modeled name."""
        input_shape = model.input_shape
        if not input_shape:
            return
        # Only accept the alias when the original is actually modeled.
        if self._original_name not in input_shape.members:
            return
        if self._alias_name not in params:
            return
        if self._original_name in params:
            # Both spellings supplied at once is ambiguous.
            raise AliasConflictParameterError(
                original=self._original_name,
                alias=self._alias_name,
                operation=model.name,
            )
        # Move the aliased value over to the modeled parameter name.
        params[self._original_name] = params.pop(self._alias_name)

    def alias_parameter_in_documentation(self, event_name, section, **kwargs):
        """Rewrite doc sections so only the alias name is shown."""
        if event_name.startswith('docs.request-params'):
            if self._original_name not in section.available_sections:
                return
            param_section = section.get_section(self._original_name)
            # Replace the name in both the type line and the description.
            self._replace_content(param_section.get_section('param-type'))
            self._replace_content(param_section.get_section('param-name'))
        elif event_name.startswith('docs.request-example'):
            section = section.get_section('structure-value')
            if self._original_name not in section.available_sections:
                return
            self._replace_content(section.get_section(self._original_name))

    def _replace_content(self, section):
        """Swap the original name for the alias inside *section*'s text."""
        content = section.getvalue().decode('utf-8')
        section.clear_text()
        section.write(content.replace(self._original_name, self._alias_name))
class ClientMethodAlias:
    """Redirects a non-existent client method name to a real one."""

    def __init__(self, actual_name):
        """Aliases a non-extant method to an existing method.

        :param actual_name: The name of the method that actually exists on
            the client.
        """
        self._actual = actual_name

    def __call__(self, client, **kwargs):
        # Used as a ``getattr.<service>.<alias>`` handler: returns the
        # bound method that lives under the real name.
        return getattr(client, self._actual)
# TODO: Remove this class as it is no longer used
class HeaderToHostHoister:
    """Takes a header and moves it to the front of the hoststring."""

    # A single hostname label: 1-63 alphanumeric/hyphen characters with
    # no leading or trailing hyphen.
    _VALID_HOSTNAME = re.compile(r'(?!-)[a-z\d-]{1,63}(?<!-)$', re.IGNORECASE)

    def __init__(self, header_name):
        self._header_name = header_name

    def hoist(self, params, **kwargs):
        """Hoist a header to the hostname.

        Moves the configured header's value to the beginning of the
        hostname, followed by a ".". Intended as a target for the
        before-call event.
        """
        headers = params['headers']
        if self._header_name not in headers:
            return
        value = headers[self._header_name]
        self._ensure_header_is_valid_host(value)
        params['url'] = self._prepend_to_host(params['url'], value)

    def _ensure_header_is_valid_host(self, header):
        # Reject values that would not form a valid hostname label.
        if self._VALID_HOSTNAME.match(header) is None:
            raise ParamValidationError(
                report=(
                    'Hostnames must contain only - and alphanumeric characters, '
                    'and between 1 and 63 characters long.'
                )
            )

    def _prepend_to_host(self, url, prefix):
        # Rebuild the URL with the prefix as the new leading host label;
        # any fragment is dropped (last component is '').
        scheme, netloc, path, query, _ = urlsplit(url)
        return urlunsplit((scheme, f'{prefix}.{netloc}', path, query, ''))
def inject_api_version_header_if_needed(model, params, **kwargs):
    """Add the x-amz-api-version header for endpoint discovery operations."""
    if model.is_endpoint_discovery_operation:
        params['headers'][
            'x-amz-api-version'
        ] = model.service_model.api_version
def remove_lex_v2_start_conversation(class_attributes, **kwargs):
    """Operation requires h2 which is currently unsupported in Python"""
    class_attributes.pop('start_conversation', None)
def add_retry_headers(request, **kwargs):
    """Attach SDK retry-tracking headers from the request's retry context.

    Sets ``amz-sdk-invocation-id`` and ``amz-sdk-request``; does nothing
    when no retry context is present.
    """
    retries_context = request.context.get('retries')
    if not retries_context:
        return
    headers = request.headers
    headers['amz-sdk-invocation-id'] = retries_context['invocation-id']
    # Only the retry keys actually present in the context are reported.
    pairs = []
    for key in ('ttl', 'attempt', 'max'):
        if key in retries_context:
            pairs.append(f'{key}={retries_context[key]}')
    headers['amz-sdk-request'] = '; '.join(pairs)
def remove_bucket_from_url_paths_from_model(params, model, context, **kwargs):
    """Strips leading `{Bucket}/` from any operations that have it.

    The original value is retained in a separate "authPath" field. This is
    used in the HmacV1Auth signer. See HmacV1Auth.canonical_resource in
    botocore/auth.py for details.

    This change is applied to the operation model during the first time the
    operation is invoked and then stays in effect for the lifetime of the
    client object.

    When the ruleset based endpoint resolver is in effect, both the endpoint
    ruleset AND the service model place the bucket name in the final URL.
    The result is an invalid URL. This handler modifies the operation model to
    no longer place the bucket name. Previous versions of botocore fixed the
    URL after the fact when necessary. Since the introduction of ruleset based
    endpoint resolution, the problem exists in ALL URLs that contain a bucket
    name and can therefore be addressed before the URL gets assembled.
    """
    bucket_path = '/{Bucket}'
    req_uri = model.http['requestUri']
    if not req_uri.startswith(bucket_path):
        return
    model.http['requestUri'] = req_uri[len(bucket_path):]
    # authPath must not include the query string; the HmacV1Auth signer
    # appends query params to the authPath itself during signing.
    auth_path, _, _ = req_uri.partition('?')
    # A bucket-only URI needs a trailing '/' to produce a signature the
    # server will accept.
    if auth_path == bucket_path:
        auth_path += '/'
    model.http['authPath'] = auth_path
def remove_accid_host_prefix_from_model(params, model, context, **kwargs):
    """Removes the `{AccountId}.` prefix from the operation model.

    This change is applied to the operation model during the first time the
    operation is invoked and then stays in effect for the lifetime of the
    client object.

    When the ruleset based endpoint resolver is in effect, both the endpoint
    ruleset AND the service model place the {AccountId}. prefix in the URL.
    The result is an invalid endpoint. This handler modifies the operation
    model to remove the `endpoint.hostPrefix` field while leaving the
    `RequiresAccountId` static context parameter in place.
    """
    requires_account_id = any(
        p.name == 'RequiresAccountId' and p.value is True
        for p in model.static_context_parameters
    )
    if not requires_account_id:
        return
    endpoint = model.endpoint
    if endpoint is not None and endpoint.get('hostPrefix') == '{AccountId}.':
        del endpoint['hostPrefix']
def remove_arn_from_signing_path(request, **kwargs):
    """Strip a URL-encoded leading ARN segment from the signing path.

    Only acts when the auth path's first segment decodes to a valid ARN;
    the leading '/' is preserved.
    """
    auth_path = request.auth_path
    if not (isinstance(auth_path, str) and auth_path.startswith('/arn%3A')):
        return
    segments = auth_path.split('/')
    if len(segments) > 1 and ArnParser.is_arn(unquote(segments[1])):
        request.auth_path = '/'.join(['', *segments[2:]])
def customize_endpoint_resolver_builtins(
    builtins, model, params, context, **kwargs
):
    """Modify builtin parameter values for endpoint resolver

    Modifies the builtins dict in place. Changes are in effect for one call.
    The corresponding event is emitted only if at least one builtin parameter
    value is required for endpoint resolution for the operation.
    """
    bucket_name = params.get('Bucket')
    bucket_is_arn = bucket_name is not None and ArnParser.is_arn(bucket_name)

    if model.name == 'GetBucketLocation':
        # The host may return AuthorizationHeaderMalformed when the signing
        # region of a sigv4 request is not the bucket's region (which is
        # likely unknown by the caller of GetBucketLocation). Path-style
        # addressing avoids this.
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = True
    elif bucket_is_arn:
        # ARN bucket names are never compatible with path-style addressing.
        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = False

    # Bucket names that are invalid host labels require path-style
    # addressing. An explicit path-style request is already reflected in
    # the builtin's default value.
    path_style_required = bucket_name is not None and not (
        VALID_HOST_LABEL_RE.match(bucket_name)
    )
    path_style_requested = builtins[
        EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE
    ]

    # Path-style addressing is incompatible with the global endpoint for
    # presigned URLs. If the bucket name is an ARN, the ARN's region should
    # be used in the endpoint.
    use_global = (
        context.get('use_global_endpoint')
        and not path_style_required
        and not path_style_requested
        and not bucket_is_arn
        and not utils.is_s3express_bucket(bucket_name)
    )
    if use_global:
        builtins[EndpointResolverBuiltins.AWS_REGION] = 'aws-global'
        builtins[EndpointResolverBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT] = True
def remove_content_type_header_for_presigning(request, **kwargs):
    """Drop the Content-Type header when building a presign request."""
    is_presign = request.context.get('is_presign_request') is True
    if is_presign and 'Content-Type' in request.headers:
        del request.headers['Content-Type']
# This is a list of (event_name, handler).
# When a Session is created, everything in this list will be
# automatically registered with that Session.

BUILTIN_HANDLERS = [
    ('choose-service-name', handle_service_name_alias),
    (
        'getattr.mturk.list_hi_ts_for_qualification_type',
        ClientMethodAlias('list_hits_for_qualification_type'),
    ),
    (
        'before-parameter-build.s3.UploadPart',
        convert_body_to_file_like_object,
        REGISTER_LAST,
    ),
    (
        'before-parameter-build.s3.PutObject',
        convert_body_to_file_like_object,
        REGISTER_LAST,
    ),
    ('creating-client-class', add_generate_presigned_url),
    ('creating-client-class.s3', add_generate_presigned_post),
    ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2),
    ('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation),
    ('after-call.iam', json_decode_policies),
    ('after-call.ec2.GetConsoleOutput', decode_console_output),
    ('after-call.cloudformation.GetTemplate', json_decode_template_body),
    ('after-call.s3.GetBucketLocation', parse_get_bucket_location),
    ('before-parameter-build', generate_idempotent_uuid),
    ('before-parameter-build.s3', validate_bucket_name),
    ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model),
    (
        'before-parameter-build.s3.ListObjects',
        set_list_objects_encoding_type_url,
    ),
    (
        'before-parameter-build.s3.ListObjectsV2',
        set_list_objects_encoding_type_url,
    ),
    (
        'before-parameter-build.s3.ListObjectVersions',
        set_list_objects_encoding_type_url,
    ),
    ('before-parameter-build.s3.CopyObject', handle_copy_source_param),
    ('before-parameter-build.s3.UploadPartCopy', handle_copy_source_param),
    ('before-parameter-build.s3.CopyObject', validate_ascii_metadata),
    ('before-parameter-build.s3.PutObject', validate_ascii_metadata),
    (
        'before-parameter-build.s3.CreateMultipartUpload',
        validate_ascii_metadata,
    ),
    ('before-parameter-build.s3-control', remove_accid_host_prefix_from_model),
    ('docs.*.s3.CopyObject.complete-section', document_copy_source_form),
    ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form),
    ('before-endpoint-resolution.s3', customize_endpoint_resolver_builtins),
    ('before-call', add_recursion_detection_header),
    ('before-call.s3', add_expect_header),
    ('before-call.glacier', add_glacier_version),
    ('before-call.apigateway', add_accept_header),
    ('before-call.s3.PutObject', conditionally_calculate_checksum),
    ('before-call.s3.UploadPart', conditionally_calculate_md5),
    ('before-call.s3.DeleteObjects', escape_xml_payload),
    ('before-call.s3.DeleteObjects', conditionally_calculate_checksum),
    ('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload),
    ('before-call.glacier.UploadArchive', add_glacier_checksums),
    ('before-call.glacier.UploadMultipartPart', add_glacier_checksums),
    ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2),
    ('request-created', add_retry_headers),
    ('request-created.machinelearning.Predict', switch_host_machinelearning),
    ('needs-retry.s3.UploadPartCopy', check_for_200_error, REGISTER_FIRST),
    ('needs-retry.s3.CopyObject', check_for_200_error, REGISTER_FIRST),
    (
        'needs-retry.s3.CompleteMultipartUpload',
        check_for_200_error,
        REGISTER_FIRST,
    ),
    ('choose-signer.cognito-identity.GetId', disable_signing),
    ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
    ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
    (
        'choose-signer.cognito-identity.GetCredentialsForIdentity',
        disable_signing,
    ),
    ('choose-signer.sts.AssumeRoleWithSAML', disable_signing),
    ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing),
    ('choose-signer', set_operation_specific_signer),
    ('before-parameter-build.s3.HeadObject', sse_md5),
    ('before-parameter-build.s3.GetObject', sse_md5),
    ('before-parameter-build.s3.PutObject', sse_md5),
    ('before-parameter-build.s3.CopyObject', sse_md5),
    ('before-parameter-build.s3.CopyObject', copy_source_sse_md5),
    ('before-parameter-build.s3.CreateMultipartUpload', sse_md5),
    ('before-parameter-build.s3.UploadPart', sse_md5),
    ('before-parameter-build.s3.UploadPartCopy', sse_md5),
    ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5),
    ('before-parameter-build.s3.CompleteMultipartUpload', sse_md5),
    ('before-parameter-build.s3.SelectObjectContent', sse_md5),
    ('before-parameter-build.ec2.RunInstances', base64_encode_user_data),
    (
        'before-parameter-build.autoscaling.CreateLaunchConfiguration',
        base64_encode_user_data,
    ),
    ('before-parameter-build.route53', fix_route53_ids),
    ('before-parameter-build.glacier', inject_account_id),
    ('before-sign.s3', remove_arn_from_signing_path),
    (
        'before-sign.polly.SynthesizeSpeech',
        remove_content_type_header_for_presigning,
    ),
    ('after-call.s3.ListObjects', decode_list_object),
    ('after-call.s3.ListObjectsV2', decode_list_object_v2),
    ('after-call.s3.ListObjectVersions', decode_list_object_versions),
    # Cloudsearchdomain search operation will be sent by HTTP POST
    ('request-created.cloudsearchdomain.Search', change_get_to_post),
    # Glacier documentation customizations
    (
        'docs.*.glacier.*.complete-section',
        AutoPopulatedParam(
            'accountId',
            # Fixed doc text: the previous implicit concatenation produced
            # "bydefault" and a double negative ("if no value is not
            # specified").
            'Note: this parameter is set to "-" by '
            'default if no value is specified.',
        ).document_auto_populated_param,
    ),
    (
        'docs.*.glacier.UploadArchive.complete-section',
        AutoPopulatedParam('checksum').document_auto_populated_param,
    ),
    (
        'docs.*.glacier.UploadMultipartPart.complete-section',
        AutoPopulatedParam('checksum').document_auto_populated_param,
    ),
    (
        'docs.request-params.glacier.CompleteMultipartUpload.complete-section',
        document_glacier_tree_hash_checksum(),
    ),
    # Cloudformation documentation customizations
    (
        'docs.*.cloudformation.GetTemplate.complete-section',
        document_cloudformation_get_template_return_type,
    ),
    # UserData base64 encoding documentation customizations
    (
        'docs.*.ec2.RunInstances.complete-section',
        document_base64_encoding('UserData'),
    ),
    (
        'docs.*.autoscaling.CreateLaunchConfiguration.complete-section',
        document_base64_encoding('UserData'),
    ),
    # EC2 CopySnapshot documentation customizations
    (
        'docs.*.ec2.CopySnapshot.complete-section',
        AutoPopulatedParam('PresignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.ec2.CopySnapshot.complete-section',
        AutoPopulatedParam('DestinationRegion').document_auto_populated_param,
    ),
    # S3 SSE documentation modifications
    (
        'docs.*.s3.*.complete-section',
        AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param,
    ),
    # S3 SSE Copy Source documentation modifications
    (
        'docs.*.s3.*.complete-section',
        AutoPopulatedParam(
            'CopySourceSSECustomerKeyMD5'
        ).document_auto_populated_param,
    ),
    # Add base64 information to Lambda
    (
        'docs.*.lambda.UpdateFunctionCode.complete-section',
        document_base64_encoding('ZipFile'),
    ),
    # The following S3 operations cannot actually accept a ContentMD5
    (
        'docs.*.s3.*.complete-section',
        HideParamFromOperations(
            's3',
            'ContentMD5',
            [
                'DeleteObjects',
                'PutBucketAcl',
                'PutBucketCors',
                'PutBucketLifecycle',
                'PutBucketLogging',
                'PutBucketNotification',
                'PutBucketPolicy',
                'PutBucketReplication',
                'PutBucketRequestPayment',
                'PutBucketTagging',
                'PutBucketVersioning',
                'PutBucketWebsite',
                'PutObjectAcl',
            ],
        ).hide_param,
    ),
    #############
    # RDS
    #############
    ('creating-client-class.rds', add_generate_db_auth_token),
    ('before-call.rds.CopyDBClusterSnapshot', inject_presigned_url_rds),
    ('before-call.rds.CreateDBCluster', inject_presigned_url_rds),
    ('before-call.rds.CopyDBSnapshot', inject_presigned_url_rds),
    ('before-call.rds.CreateDBInstanceReadReplica', inject_presigned_url_rds),
    (
        'before-call.rds.StartDBInstanceAutomatedBackupsReplication',
        inject_presigned_url_rds,
    ),
    # RDS PresignedUrl documentation customizations
    (
        'docs.*.rds.CopyDBClusterSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.CreateDBCluster.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.CopyDBSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.CreateDBInstanceReadReplica.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.rds.StartDBInstanceAutomatedBackupsReplication.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    #############
    # Neptune
    #############
    ('before-call.neptune.CopyDBClusterSnapshot', inject_presigned_url_rds),
    ('before-call.neptune.CreateDBCluster', inject_presigned_url_rds),
    # Neptune PresignedUrl documentation customizations
    (
        'docs.*.neptune.CopyDBClusterSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.neptune.CreateDBCluster.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    #############
    # DocDB
    #############
    ('before-call.docdb.CopyDBClusterSnapshot', inject_presigned_url_rds),
    ('before-call.docdb.CreateDBCluster', inject_presigned_url_rds),
    # DocDB PresignedUrl documentation customizations
    (
        'docs.*.docdb.CopyDBClusterSnapshot.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    (
        'docs.*.docdb.CreateDBCluster.complete-section',
        AutoPopulatedParam('PreSignedUrl').document_auto_populated_param,
    ),
    ('before-call', inject_api_version_header_if_needed),
]
_add_parameter_aliases(BUILTIN_HANDLERS)