author     anoh <anoh@yandex-team.ru>  2022-02-10 16:48:27 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:48:27 +0300
commit     f49c77cb796fe12b4fb7dbdbdf472955017a87b1 (patch)
tree       5d5cb817648f650d76cf1076100726fd9b8448e8
parent     5ecbea34cc22f2d6d3c7f2b634131656807921d1 (diff)
download   ydb-f49c77cb796fe12b4fb7dbdbdf472955017a87b1.tar.gz

Restoring authorship annotation for <anoh@yandex-team.ru>. Commit 2 of 2.
-rw-r--r--  contrib/python/boto3/boto3/__init__.py  202
-rw-r--r--  contrib/python/boto3/boto3/compat.py  76
-rw-r--r--  contrib/python/boto3/boto3/docs/__init__.py  78
-rw-r--r--  contrib/python/boto3/boto3/docs/action.py  296
-rw-r--r--  contrib/python/boto3/boto3/docs/attr.py  108
-rw-r--r--  contrib/python/boto3/boto3/docs/base.py  62
-rw-r--r--  contrib/python/boto3/boto3/docs/client.py  54
-rw-r--r--  contrib/python/boto3/boto3/docs/collection.py  464
-rw-r--r--  contrib/python/boto3/boto3/docs/docstring.py  148
-rw-r--r--  contrib/python/boto3/boto3/docs/method.py  142
-rw-r--r--  contrib/python/boto3/boto3/docs/resource.py  518
-rw-r--r--  contrib/python/boto3/boto3/docs/service.py  250
-rw-r--r--  contrib/python/boto3/boto3/docs/subresource.py  224
-rw-r--r--  contrib/python/boto3/boto3/docs/utils.py  284
-rw-r--r--  contrib/python/boto3/boto3/docs/waiter.py  182
-rw-r--r--  contrib/python/boto3/boto3/dynamodb/__init__.py  24
-rw-r--r--  contrib/python/boto3/boto3/dynamodb/conditions.py  826
-rw-r--r--  contrib/python/boto3/boto3/dynamodb/table.py  308
-rw-r--r--  contrib/python/boto3/boto3/dynamodb/transform.py  582
-rw-r--r--  contrib/python/boto3/boto3/dynamodb/types.py  582
-rw-r--r--  contrib/python/boto3/boto3/ec2/__init__.py  24
-rw-r--r--  contrib/python/boto3/boto3/ec2/createtags.py  80
-rw-r--r--  contrib/python/boto3/boto3/ec2/deletetags.py  68
-rw-r--r--  contrib/python/boto3/boto3/exceptions.py  216
-rw-r--r--  contrib/python/boto3/boto3/resources/action.py  484
-rw-r--r--  contrib/python/boto3/boto3/resources/base.py  296
-rw-r--r--  contrib/python/boto3/boto3/resources/collection.py  1050
-rw-r--r--  contrib/python/boto3/boto3/resources/factory.py  1078
-rw-r--r--  contrib/python/boto3/boto3/resources/model.py  1242
-rw-r--r--  contrib/python/boto3/boto3/resources/params.py  334
-rw-r--r--  contrib/python/boto3/boto3/resources/response.py  600
-rw-r--r--  contrib/python/boto3/boto3/s3/__init__.py  24
-rw-r--r--  contrib/python/boto3/boto3/s3/inject.py  1280
-rw-r--r--  contrib/python/boto3/boto3/s3/transfer.py  658
-rw-r--r--  contrib/python/boto3/boto3/session.py  892
-rw-r--r--  contrib/python/boto3/boto3/utils.py  186
-rw-r--r--  contrib/python/boto3/ya.make  38
-rw-r--r--  contrib/python/botocore/botocore/__init__.py  172
-rw-r--r--  contrib/python/botocore/botocore/args.py  304
-rw-r--r--  contrib/python/botocore/botocore/auth.py  1556
-rw-r--r--  contrib/python/botocore/botocore/awsrequest.py  614
-rw-r--r--  contrib/python/botocore/botocore/client.py  1422
-rw-r--r--  contrib/python/botocore/botocore/compat.py  628
-rw-r--r--  contrib/python/botocore/botocore/config.py  402
-rw-r--r--  contrib/python/botocore/botocore/configloader.py  530
-rw-r--r--  contrib/python/botocore/botocore/credentials.py  3008
-rw-r--r--  contrib/python/botocore/botocore/docs/__init__.py  76
-rw-r--r--  contrib/python/botocore/botocore/docs/bcdoc/__init__.py  26
-rw-r--r--  contrib/python/botocore/botocore/docs/bcdoc/docstringparser.py  400
-rw-r--r--  contrib/python/botocore/botocore/docs/bcdoc/restdoc.py  436
-rw-r--r--  contrib/python/botocore/botocore/docs/bcdoc/style.py  804
-rw-r--r--  contrib/python/botocore/botocore/docs/client.py  226
-rw-r--r--  contrib/python/botocore/botocore/docs/docstring.py  192
-rw-r--r--  contrib/python/botocore/botocore/docs/example.py  396
-rw-r--r--  contrib/python/botocore/botocore/docs/method.py  532
-rw-r--r--  contrib/python/botocore/botocore/docs/paginator.py  346
-rw-r--r--  contrib/python/botocore/botocore/docs/params.py  434
-rw-r--r--  contrib/python/botocore/botocore/docs/service.py  180
-rw-r--r--  contrib/python/botocore/botocore/docs/shape.py  222
-rw-r--r--  contrib/python/botocore/botocore/docs/sharedexample.py  446
-rw-r--r--  contrib/python/botocore/botocore/docs/utils.py  394
-rw-r--r--  contrib/python/botocore/botocore/docs/waiter.py  254
-rw-r--r--  contrib/python/botocore/botocore/endpoint.py  418
-rw-r--r--  contrib/python/botocore/botocore/errorfactory.py  164
-rw-r--r--  contrib/python/botocore/botocore/exceptions.py  856
-rw-r--r--  contrib/python/botocore/botocore/handlers.py  1712
-rw-r--r--  contrib/python/botocore/botocore/history.py  94
-rw-r--r--  contrib/python/botocore/botocore/hooks.py  998
-rw-r--r--  contrib/python/botocore/botocore/loaders.py  918
-rw-r--r--  contrib/python/botocore/botocore/model.py  1394
-rw-r--r--  contrib/python/botocore/botocore/paginate.py  1346
-rw-r--r--  contrib/python/botocore/botocore/parsers.py  1538
-rw-r--r--  contrib/python/botocore/botocore/regions.py  370
-rw-r--r--  contrib/python/botocore/botocore/response.py  228
-rw-r--r--  contrib/python/botocore/botocore/retryhandler.py  708
-rw-r--r--  contrib/python/botocore/botocore/serialize.py  1232
-rw-r--r--  contrib/python/botocore/botocore/session.py  1582
-rw-r--r--  contrib/python/botocore/botocore/signers.py  1372
-rw-r--r--  contrib/python/botocore/botocore/stub.py  720
-rw-r--r--  contrib/python/botocore/botocore/translate.py  152
-rw-r--r--  contrib/python/botocore/botocore/utils.py  1944
-rw-r--r--  contrib/python/botocore/botocore/validate.py  560
-rw-r--r--  contrib/python/botocore/botocore/waiter.py  644
-rw-r--r--  contrib/python/botocore/ya.make  36
-rw-r--r--  contrib/python/jmespath/jmespath/__init__.py  22
-rw-r--r--  contrib/python/jmespath/jmespath/ast.py  180
-rw-r--r--  contrib/python/jmespath/jmespath/compat.py  130
-rw-r--r--  contrib/python/jmespath/jmespath/exceptions.py  244
-rw-r--r--  contrib/python/jmespath/jmespath/functions.py  724
-rw-r--r--  contrib/python/jmespath/jmespath/lexer.py  416
-rw-r--r--  contrib/python/jmespath/jmespath/parser.py  1052
-rw-r--r--  contrib/python/jmespath/jmespath/visitor.py  650
-rw-r--r--  contrib/python/jmespath/ya.make  24
-rw-r--r--  contrib/python/ya.make  14
94 files changed, 25201 insertions, 25201 deletions
diff --git a/contrib/python/boto3/boto3/__init__.py b/contrib/python/boto3/boto3/__init__.py
index 36f33b67ad..df9e65e296 100644
--- a/contrib/python/boto3/boto3/__init__.py
+++ b/contrib/python/boto3/boto3/__init__.py
@@ -1,48 +1,48 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import logging
-
-from boto3.session import Session
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import logging
+
+from boto3.session import Session
from boto3.compat import _warn_deprecated_python
-
-
-__author__ = 'Amazon Web Services'
+
+
+__author__ = 'Amazon Web Services'
__version__ = '1.17.112'
-
-
-# The default Boto3 session; autoloaded when needed.
-DEFAULT_SESSION = None
-
-
-def setup_default_session(**kwargs):
- """
- Set up a default session, passing through any parameters to the session
- constructor. There is no need to call this unless you wish to pass custom
- parameters, because a default session will be created for you.
- """
- global DEFAULT_SESSION
- DEFAULT_SESSION = Session(**kwargs)
-
-
-def set_stream_logger(name='boto3', level=logging.DEBUG, format_string=None):
- """
- Add a stream handler for the given name and level to the logging module.
- By default, this logs all boto3 messages to ``stdout``.
-
- >>> import boto3
- >>> boto3.set_stream_logger('boto3.resources', logging.INFO)
-
+
+
+# The default Boto3 session; autoloaded when needed.
+DEFAULT_SESSION = None
+
+
+def setup_default_session(**kwargs):
+ """
+ Set up a default session, passing through any parameters to the session
+ constructor. There is no need to call this unless you wish to pass custom
+ parameters, because a default session will be created for you.
+ """
+ global DEFAULT_SESSION
+ DEFAULT_SESSION = Session(**kwargs)
+
+
+def set_stream_logger(name='boto3', level=logging.DEBUG, format_string=None):
+ """
+ Add a stream handler for the given name and level to the logging module.
+ By default, this logs all boto3 messages to ``stdout``.
+
+ >>> import boto3
+ >>> boto3.set_stream_logger('boto3.resources', logging.INFO)
+
For debugging purposes a good choice is to set the stream logger to ``''``
which is equivalent to saying "log everything".
@@ -51,62 +51,62 @@ def set_stream_logger(name='boto3', level=logging.DEBUG, format_string=None):
trace will appear in your logs. If your payloads contain sensitive data
this should not be used in production.
- :type name: string
- :param name: Log name
- :type level: int
- :param level: Logging level, e.g. ``logging.INFO``
- :type format_string: str
- :param format_string: Log message format
- """
- if format_string is None:
- format_string = "%(asctime)s %(name)s [%(levelname)s] %(message)s"
-
- logger = logging.getLogger(name)
- logger.setLevel(level)
- handler = logging.StreamHandler()
- handler.setLevel(level)
- formatter = logging.Formatter(format_string)
- handler.setFormatter(formatter)
- logger.addHandler(handler)
-
-
-def _get_default_session():
- """
- Get the default session, creating one if needed.
-
- :rtype: :py:class:`~boto3.session.Session`
- :return: The default session
- """
- if DEFAULT_SESSION is None:
- setup_default_session()
+ :type name: string
+ :param name: Log name
+ :type level: int
+ :param level: Logging level, e.g. ``logging.INFO``
+ :type format_string: str
+ :param format_string: Log message format
+ """
+ if format_string is None:
+ format_string = "%(asctime)s %(name)s [%(levelname)s] %(message)s"
+
+ logger = logging.getLogger(name)
+ logger.setLevel(level)
+ handler = logging.StreamHandler()
+ handler.setLevel(level)
+ formatter = logging.Formatter(format_string)
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+
+
+def _get_default_session():
+ """
+ Get the default session, creating one if needed.
+
+ :rtype: :py:class:`~boto3.session.Session`
+ :return: The default session
+ """
+ if DEFAULT_SESSION is None:
+ setup_default_session()
_warn_deprecated_python()
-
- return DEFAULT_SESSION
-
-
-def client(*args, **kwargs):
- """
- Create a low-level service client by name using the default session.
-
- See :py:meth:`boto3.session.Session.client`.
- """
- return _get_default_session().client(*args, **kwargs)
-
-
-def resource(*args, **kwargs):
- """
- Create a resource service client by name using the default session.
-
- See :py:meth:`boto3.session.Session.resource`.
- """
- return _get_default_session().resource(*args, **kwargs)
-
-
-# Set up logging to ``/dev/null`` like a library is supposed to.
-# http://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
-class NullHandler(logging.Handler):
- def emit(self, record):
- pass
-
-
-logging.getLogger('boto3').addHandler(NullHandler())
+
+ return DEFAULT_SESSION
+
+
+def client(*args, **kwargs):
+ """
+ Create a low-level service client by name using the default session.
+
+ See :py:meth:`boto3.session.Session.client`.
+ """
+ return _get_default_session().client(*args, **kwargs)
+
+
+def resource(*args, **kwargs):
+ """
+ Create a resource service client by name using the default session.
+
+ See :py:meth:`boto3.session.Session.resource`.
+ """
+ return _get_default_session().resource(*args, **kwargs)
+
+
+# Set up logging to ``/dev/null`` like a library is supposed to.
+# http://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
+class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+
+logging.getLogger('boto3').addHandler(NullHandler())
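For context, a minimal usage sketch of the module-level helpers defined in this file (the 's3' service name and the region are illustrative; real calls assume configured AWS credentials):

    import logging

    import boto3

    # Route messages from the 'boto3.resources' logger namespace to stdout.
    boto3.set_stream_logger('boto3.resources', logging.INFO)

    # Only needed when the default session requires custom parameters;
    # otherwise client()/resource() create DEFAULT_SESSION lazily.
    boto3.setup_default_session(region_name='us-east-1')

    client = boto3.client('s3')   # low-level service client
    s3 = boto3.resource('s3')     # higher-level resource interface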
diff --git a/contrib/python/boto3/boto3/compat.py b/contrib/python/boto3/boto3/compat.py
index 0e1e812a43..5610a0ac93 100644
--- a/contrib/python/boto3/boto3/compat.py
+++ b/contrib/python/boto3/boto3/compat.py
@@ -1,53 +1,53 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
import sys
import os
import errno
-import socket
+import socket
import warnings
-
+
from botocore.compat import six
from boto3.exceptions import PythonDeprecationWarning
-if six.PY3:
- # In python3, socket.error is OSError, which is too general
- # for what we want (i.e FileNotFoundError is a subclass of OSError).
- # In py3 all the socket related errors are in a newly created
- # ConnectionError
- SOCKET_ERROR = ConnectionError
-else:
- SOCKET_ERROR = socket.error
-
+if six.PY3:
+ # In python3, socket.error is OSError, which is too general
+ # for what we want (i.e FileNotFoundError is a subclass of OSError).
+ # In py3 all the socket related errors are in a newly created
+ # ConnectionError
+ SOCKET_ERROR = ConnectionError
+else:
+ SOCKET_ERROR = socket.error
+
if six.PY3:
import collections.abc as collections_abc
else:
import collections as collections_abc
-
-if sys.platform.startswith('win'):
- def rename_file(current_filename, new_filename):
- try:
- os.remove(new_filename)
- except OSError as e:
- if not e.errno == errno.ENOENT:
- # We only want to ignore trying to remove
- # a file that does not exist. If it fails
- # for any other reason we should be propagating
- # that exception.
- raise
- os.rename(current_filename, new_filename)
-else:
- rename_file = os.rename
+
+if sys.platform.startswith('win'):
+ def rename_file(current_filename, new_filename):
+ try:
+ os.remove(new_filename)
+ except OSError as e:
+ if not e.errno == errno.ENOENT:
+ # We only want to ignore trying to remove
+ # a file that does not exist. If it fails
+ # for any other reason we should be propagating
+ # that exception.
+ raise
+ os.rename(current_filename, new_filename)
+else:
+ rename_file = os.rename
def filter_python_deprecation_warnings():
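As a side note on the compat shim above: os.rename on Windows fails when the destination already exists, which is why rename_file removes the target first and ignores a missing-file error. A small sketch (the temporary file names are illustrative):

    import tempfile

    from boto3.compat import rename_file

    src = tempfile.NamedTemporaryFile(delete=False).name
    dst = tempfile.NamedTemporaryFile(delete=False).name  # destination exists

    # Plain os.rename on POSIX; on Windows the wrapper deletes dst first,
    # then renames, so the call succeeds on both platforms.
    rename_file(src, dst)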
diff --git a/contrib/python/boto3/boto3/docs/__init__.py b/contrib/python/boto3/boto3/docs/__init__.py
index a6f9e7c07f..c18c86e12e 100644
--- a/contrib/python/boto3/boto3/docs/__init__.py
+++ b/contrib/python/boto3/boto3/docs/__init__.py
@@ -1,39 +1,39 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import os
-
-from boto3.docs.service import ServiceDocumenter
-
-
-def generate_docs(root_dir, session):
- """Generates the reference documentation for botocore
-
- This will go through every available AWS service and output ReSTructured
- text files documenting each service.
-
- :param root_dir: The directory to write the reference files to. Each
- service's reference documentation is located at
- root_dir/reference/services/service-name.rst
-
- :param session: The boto3 session
- """
- services_doc_path = os.path.join(root_dir, 'reference', 'services')
- if not os.path.exists(services_doc_path):
- os.makedirs(services_doc_path)
-
- for service_name in session.get_available_services():
- docs = ServiceDocumenter(service_name, session).document_service()
- service_doc_path = os.path.join(
- services_doc_path, service_name + '.rst')
- with open(service_doc_path, 'wb') as f:
- f.write(docs)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+from boto3.docs.service import ServiceDocumenter
+
+
+def generate_docs(root_dir, session):
+ """Generates the reference documentation for botocore
+
+ This will go through every available AWS service and output ReSTructured
+ text files documenting each service.
+
+ :param root_dir: The directory to write the reference files to. Each
+ service's reference documentation is located at
+ root_dir/reference/services/service-name.rst
+
+ :param session: The boto3 session
+ """
+ services_doc_path = os.path.join(root_dir, 'reference', 'services')
+ if not os.path.exists(services_doc_path):
+ os.makedirs(services_doc_path)
+
+ for service_name in session.get_available_services():
+ docs = ServiceDocumenter(service_name, session).document_service()
+ service_doc_path = os.path.join(
+ services_doc_path, service_name + '.rst')
+ with open(service_doc_path, 'wb') as f:
+ f.write(docs)
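Presumably generate_docs is driven along these lines when the reference documentation is built (a sketch; the output directory is arbitrary):

    import boto3.session

    from boto3.docs import generate_docs

    # Writes one ReSTructured text file per available service under
    # <root_dir>/reference/services/<service-name>.rst.
    generate_docs('/tmp/boto3-docs', boto3.session.Session())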
diff --git a/contrib/python/boto3/boto3/docs/action.py b/contrib/python/boto3/boto3/docs/action.py
index 1ce3563a51..a1a51ccd7e 100644
--- a/contrib/python/boto3/boto3/docs/action.py
+++ b/contrib/python/boto3/boto3/docs/action.py
@@ -1,148 +1,148 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore import xform_name
-from botocore.model import OperationModel
-from botocore.utils import get_service_module_name
-from botocore.docs.method import document_model_driven_method
-from botocore.docs.method import document_custom_method
-
-from boto3.docs.base import BaseDocumenter
-from boto3.docs.method import document_model_driven_resource_method
-from boto3.docs.utils import get_resource_ignore_params
-from boto3.docs.utils import get_resource_public_actions
-from boto3.docs.utils import add_resource_type_overview
-
-
-class ActionDocumenter(BaseDocumenter):
- def document_actions(self, section):
- modeled_actions_list = self._resource_model.actions
- modeled_actions = {}
- for modeled_action in modeled_actions_list:
- modeled_actions[modeled_action.name] = modeled_action
- resource_actions = get_resource_public_actions(
- self._resource.__class__)
- self.member_map['actions'] = sorted(resource_actions)
- add_resource_type_overview(
- section=section,
- resource_type='Actions',
- description=(
- 'Actions call operations on resources. They may '
- 'automatically handle the passing in of arguments set '
- 'from identifiers and some attributes.'),
- intro_link='actions_intro')
-
- for action_name in sorted(resource_actions):
- action_section = section.add_new_section(action_name)
- if action_name in ['load', 'reload'] and self._resource_model.load:
- document_load_reload_action(
- section=action_section,
- action_name=action_name,
- resource_name=self._resource_name,
- event_emitter=self._resource.meta.client.meta.events,
- load_model=self._resource_model.load,
- service_model=self._service_model
- )
- elif action_name in modeled_actions:
- document_action(
- section=action_section,
- resource_name=self._resource_name,
- event_emitter=self._resource.meta.client.meta.events,
- action_model=modeled_actions[action_name],
- service_model=self._service_model,
- )
- else:
- document_custom_method(
- action_section, action_name, resource_actions[action_name])
-
-
-def document_action(section, resource_name, event_emitter, action_model,
- service_model, include_signature=True):
- """Documents a resource action
-
- :param section: The section to write to
-
- :param resource_name: The name of the resource
-
- :param event_emitter: The event emitter to use to emit events
-
- :param action_model: The model of the action
-
- :param service_model: The model of the service
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- operation_model = service_model.operation_model(
- action_model.request.operation)
- ignore_params = get_resource_ignore_params(action_model.request.params)
-
- example_return_value = 'response'
- if action_model.resource:
- example_return_value = xform_name(action_model.resource.type)
- example_resource_name = xform_name(resource_name)
- if service_model.service_name == resource_name:
- example_resource_name = resource_name
- example_prefix = '%s = %s.%s' % (
- example_return_value, example_resource_name, action_model.name)
- document_model_driven_resource_method(
- section=section, method_name=action_model.name,
- operation_model=operation_model,
- event_emitter=event_emitter,
- method_description=operation_model.documentation,
- example_prefix=example_prefix,
- exclude_input=ignore_params,
- resource_action_model=action_model,
- include_signature=include_signature
- )
-
-
-def document_load_reload_action(section, action_name, resource_name,
- event_emitter, load_model, service_model,
- include_signature=True):
- """Documents the resource load action
-
- :param section: The section to write to
-
- :param action_name: The name of the loading action should be load or reload
-
- :param resource_name: The name of the resource
-
- :param event_emitter: The event emitter to use to emit events
-
- :param load_model: The model of the load action
-
- :param service_model: The model of the service
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- description = (
- 'Calls :py:meth:`%s.Client.%s` to update the attributes of the'
- ' %s resource. Note that the load and reload methods are '
- 'the same method and can be used interchangeably.' % (
- get_service_module_name(service_model),
- xform_name(load_model.request.operation),
- resource_name)
- )
- example_resource_name = xform_name(resource_name)
- if service_model.service_name == resource_name:
- example_resource_name = resource_name
- example_prefix = '%s.%s' % (example_resource_name, action_name)
- document_model_driven_method(
- section=section, method_name=action_name,
- operation_model=OperationModel({}, service_model),
- event_emitter=event_emitter,
- method_description=description,
- example_prefix=example_prefix,
- include_signature=include_signature
- )
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore import xform_name
+from botocore.model import OperationModel
+from botocore.utils import get_service_module_name
+from botocore.docs.method import document_model_driven_method
+from botocore.docs.method import document_custom_method
+
+from boto3.docs.base import BaseDocumenter
+from boto3.docs.method import document_model_driven_resource_method
+from boto3.docs.utils import get_resource_ignore_params
+from boto3.docs.utils import get_resource_public_actions
+from boto3.docs.utils import add_resource_type_overview
+
+
+class ActionDocumenter(BaseDocumenter):
+ def document_actions(self, section):
+ modeled_actions_list = self._resource_model.actions
+ modeled_actions = {}
+ for modeled_action in modeled_actions_list:
+ modeled_actions[modeled_action.name] = modeled_action
+ resource_actions = get_resource_public_actions(
+ self._resource.__class__)
+ self.member_map['actions'] = sorted(resource_actions)
+ add_resource_type_overview(
+ section=section,
+ resource_type='Actions',
+ description=(
+ 'Actions call operations on resources. They may '
+ 'automatically handle the passing in of arguments set '
+ 'from identifiers and some attributes.'),
+ intro_link='actions_intro')
+
+ for action_name in sorted(resource_actions):
+ action_section = section.add_new_section(action_name)
+ if action_name in ['load', 'reload'] and self._resource_model.load:
+ document_load_reload_action(
+ section=action_section,
+ action_name=action_name,
+ resource_name=self._resource_name,
+ event_emitter=self._resource.meta.client.meta.events,
+ load_model=self._resource_model.load,
+ service_model=self._service_model
+ )
+ elif action_name in modeled_actions:
+ document_action(
+ section=action_section,
+ resource_name=self._resource_name,
+ event_emitter=self._resource.meta.client.meta.events,
+ action_model=modeled_actions[action_name],
+ service_model=self._service_model,
+ )
+ else:
+ document_custom_method(
+ action_section, action_name, resource_actions[action_name])
+
+
+def document_action(section, resource_name, event_emitter, action_model,
+ service_model, include_signature=True):
+ """Documents a resource action
+
+ :param section: The section to write to
+
+ :param resource_name: The name of the resource
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param action_model: The model of the action
+
+ :param service_model: The model of the service
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ operation_model = service_model.operation_model(
+ action_model.request.operation)
+ ignore_params = get_resource_ignore_params(action_model.request.params)
+
+ example_return_value = 'response'
+ if action_model.resource:
+ example_return_value = xform_name(action_model.resource.type)
+ example_resource_name = xform_name(resource_name)
+ if service_model.service_name == resource_name:
+ example_resource_name = resource_name
+ example_prefix = '%s = %s.%s' % (
+ example_return_value, example_resource_name, action_model.name)
+ document_model_driven_resource_method(
+ section=section, method_name=action_model.name,
+ operation_model=operation_model,
+ event_emitter=event_emitter,
+ method_description=operation_model.documentation,
+ example_prefix=example_prefix,
+ exclude_input=ignore_params,
+ resource_action_model=action_model,
+ include_signature=include_signature
+ )
+
+
+def document_load_reload_action(section, action_name, resource_name,
+ event_emitter, load_model, service_model,
+ include_signature=True):
+ """Documents the resource load action
+
+ :param section: The section to write to
+
+ :param action_name: The name of the loading action should be load or reload
+
+ :param resource_name: The name of the resource
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param load_model: The model of the load action
+
+ :param service_model: The model of the service
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ description = (
+ 'Calls :py:meth:`%s.Client.%s` to update the attributes of the'
+ ' %s resource. Note that the load and reload methods are '
+ 'the same method and can be used interchangeably.' % (
+ get_service_module_name(service_model),
+ xform_name(load_model.request.operation),
+ resource_name)
+ )
+ example_resource_name = xform_name(resource_name)
+ if service_model.service_name == resource_name:
+ example_resource_name = resource_name
+ example_prefix = '%s.%s' % (example_resource_name, action_name)
+ document_model_driven_method(
+ section=section, method_name=action_name,
+ operation_model=OperationModel({}, service_model),
+ event_emitter=event_emitter,
+ method_description=description,
+ example_prefix=example_prefix,
+ include_signature=include_signature
+ )
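Note that xform_name, used throughout this module to build example prefixes, converts the CamelCase operation and resource names from the service model into snake_case; roughly:

    from botocore import xform_name

    assert xform_name('CreateTags') == 'create_tags'
    # so a modeled action renders an example prefix such as
    # 'tag = instance.create_tags' rather than 'instance.CreateTags'.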
diff --git a/contrib/python/boto3/boto3/docs/attr.py b/contrib/python/boto3/boto3/docs/attr.py
index 49a9a5d6f6..4ebbc7d57e 100644
--- a/contrib/python/boto3/boto3/docs/attr.py
+++ b/contrib/python/boto3/boto3/docs/attr.py
@@ -1,54 +1,54 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.docs.params import ResponseParamsDocumenter
-
-from boto3.docs.utils import get_identifier_description
-
-
-class ResourceShapeDocumenter(ResponseParamsDocumenter):
- EVENT_NAME = 'resource-shape'
-
-
-def document_attribute(section, service_name, resource_name, attr_name,
- event_emitter, attr_model, include_signature=True):
- if include_signature:
- section.style.start_sphinx_py_attr(attr_name)
- # Note that an attribute may have one, may have many, or may have no
- # operations that back the resource's shape. So we just set the
- # operation_name to the resource name if we ever to hook in and modify
- # a particular attribute.
- ResourceShapeDocumenter(
- service_name=service_name, operation_name=resource_name,
- event_emitter=event_emitter).document_params(
- section=section,
- shape=attr_model)
-
-
-def document_identifier(section, resource_name, identifier_model,
- include_signature=True):
- if include_signature:
- section.style.start_sphinx_py_attr(identifier_model.name)
- description = get_identifier_description(
- resource_name, identifier_model.name)
- description = '*(string)* ' + description
- section.write(description)
-
-
-def document_reference(section, reference_model, include_signature=True):
- if include_signature:
- section.style.start_sphinx_py_attr(reference_model.name)
- reference_type = '(:py:class:`%s`) ' % reference_model.resource.type
- section.write(reference_type)
- section.include_doc_string(
- 'The related %s if set, otherwise ``None``.' % reference_model.name
- )
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.params import ResponseParamsDocumenter
+
+from boto3.docs.utils import get_identifier_description
+
+
+class ResourceShapeDocumenter(ResponseParamsDocumenter):
+ EVENT_NAME = 'resource-shape'
+
+
+def document_attribute(section, service_name, resource_name, attr_name,
+ event_emitter, attr_model, include_signature=True):
+ if include_signature:
+ section.style.start_sphinx_py_attr(attr_name)
+ # Note that an attribute may have one, may have many, or may have no
+ # operations that back the resource's shape. So we just set the
+ # operation_name to the resource name if we ever to hook in and modify
+ # a particular attribute.
+ ResourceShapeDocumenter(
+ service_name=service_name, operation_name=resource_name,
+ event_emitter=event_emitter).document_params(
+ section=section,
+ shape=attr_model)
+
+
+def document_identifier(section, resource_name, identifier_model,
+ include_signature=True):
+ if include_signature:
+ section.style.start_sphinx_py_attr(identifier_model.name)
+ description = get_identifier_description(
+ resource_name, identifier_model.name)
+ description = '*(string)* ' + description
+ section.write(description)
+
+
+def document_reference(section, reference_model, include_signature=True):
+ if include_signature:
+ section.style.start_sphinx_py_attr(reference_model.name)
+ reference_type = '(:py:class:`%s`) ' % reference_model.resource.type
+ section.write(reference_type)
+ section.include_doc_string(
+ 'The related %s if set, otherwise ``None``.' % reference_model.name
+ )
diff --git a/contrib/python/boto3/boto3/docs/base.py b/contrib/python/boto3/boto3/docs/base.py
index e7364d59ee..1ebd4222e2 100644
--- a/contrib/python/boto3/boto3/docs/base.py
+++ b/contrib/python/boto3/boto3/docs/base.py
@@ -1,31 +1,31 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.compat import OrderedDict
-
-
-class BaseDocumenter(object):
- def __init__(self, resource):
- self._resource = resource
- self._client = self._resource.meta.client
- self._resource_model = self._resource.meta.resource_model
- self._service_model = self._client.meta.service_model
- self._resource_name = self._resource.meta.resource_model.name
- self._service_name = self._service_model.service_name
- self._service_docs_name = self._client.__class__.__name__
- self.member_map = OrderedDict()
- self.represents_service_resource = (
- self._service_name == self._resource_name)
-
- @property
- def class_name(self):
- return '%s.%s' % (self._service_docs_name, self._resource_name)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.compat import OrderedDict
+
+
+class BaseDocumenter(object):
+ def __init__(self, resource):
+ self._resource = resource
+ self._client = self._resource.meta.client
+ self._resource_model = self._resource.meta.resource_model
+ self._service_model = self._client.meta.service_model
+ self._resource_name = self._resource.meta.resource_model.name
+ self._service_name = self._service_model.service_name
+ self._service_docs_name = self._client.__class__.__name__
+ self.member_map = OrderedDict()
+ self.represents_service_resource = (
+ self._service_name == self._resource_name)
+
+ @property
+ def class_name(self):
+ return '%s.%s' % (self._service_docs_name, self._resource_name)
diff --git a/contrib/python/boto3/boto3/docs/client.py b/contrib/python/boto3/boto3/docs/client.py
index 064457908f..7db8c1f1e2 100644
--- a/contrib/python/boto3/boto3/docs/client.py
+++ b/contrib/python/boto3/boto3/docs/client.py
@@ -1,27 +1,27 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.docs.client import ClientDocumenter
-
-
-class Boto3ClientDocumenter(ClientDocumenter):
- def _add_client_creation_example(self, section):
- section.style.start_codeblock()
- section.style.new_line()
- section.write('import boto3')
- section.style.new_line()
- section.style.new_line()
- section.write(
- 'client = boto3.client(\'{service}\')'.format(
- service=self._service_name)
- )
- section.style.end_codeblock()
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.client import ClientDocumenter
+
+
+class Boto3ClientDocumenter(ClientDocumenter):
+ def _add_client_creation_example(self, section):
+ section.style.start_codeblock()
+ section.style.new_line()
+ section.write('import boto3')
+ section.style.new_line()
+ section.style.new_line()
+ section.write(
+ 'client = boto3.client(\'{service}\')'.format(
+ service=self._service_name)
+ )
+ section.style.end_codeblock()
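For a given service, the _add_client_creation_example override above renders a snippet of exactly this shape (using 's3' as an illustrative service name):

    import boto3

    client = boto3.client('s3')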
diff --git a/contrib/python/boto3/boto3/docs/collection.py b/contrib/python/boto3/boto3/docs/collection.py
index 9aeb2e230e..6400532c49 100644
--- a/contrib/python/boto3/boto3/docs/collection.py
+++ b/contrib/python/boto3/boto3/docs/collection.py
@@ -1,242 +1,242 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore import xform_name
-from botocore.docs.method import get_instance_public_methods
-from botocore.docs.utils import DocumentedShape
-
-from boto3.docs.base import BaseDocumenter
-from boto3.docs.utils import get_resource_ignore_params
-from boto3.docs.method import document_model_driven_resource_method
-from boto3.docs.utils import add_resource_type_overview
-
-
-class CollectionDocumenter(BaseDocumenter):
- def document_collections(self, section):
- collections = self._resource.meta.resource_model.collections
- collections_list = []
- add_resource_type_overview(
- section=section,
- resource_type='Collections',
- description=(
- 'Collections provide an interface to iterate over and '
- 'manipulate groups of resources. '),
- intro_link='guide_collections')
- self.member_map['collections'] = collections_list
- for collection in collections:
- collection_section = section.add_new_section(collection.name)
- collections_list.append(collection.name)
- self._document_collection(collection_section, collection)
-
- def _document_collection(self, section, collection):
- methods = get_instance_public_methods(
- getattr(self._resource, collection.name))
- document_collection_object(section, collection)
- batch_actions = {}
- for batch_action in collection.batch_actions:
- batch_actions[batch_action.name] = batch_action
-
- for method in sorted(methods):
- method_section = section.add_new_section(method)
- if method in batch_actions:
- document_batch_action(
- section=method_section,
- resource_name=self._resource_name,
- event_emitter=self._resource.meta.client.meta.events,
- batch_action_model=batch_actions[method],
- collection_model=collection,
- service_model=self._resource.meta.client.meta.service_model
- )
- else:
- document_collection_method(
- section=method_section,
- resource_name=self._resource_name,
- action_name=method,
- event_emitter=self._resource.meta.client.meta.events,
- collection_model=collection,
- service_model=self._resource.meta.client.meta.service_model
- )
-
-
-def document_collection_object(section, collection_model,
- include_signature=True):
- """Documents a collection resource object
-
- :param section: The section to write to
-
- :param collection_model: The model of the collection
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- if include_signature:
- section.style.start_sphinx_py_attr(collection_model.name)
- section.include_doc_string(
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore import xform_name
+from botocore.docs.method import get_instance_public_methods
+from botocore.docs.utils import DocumentedShape
+
+from boto3.docs.base import BaseDocumenter
+from boto3.docs.utils import get_resource_ignore_params
+from boto3.docs.method import document_model_driven_resource_method
+from boto3.docs.utils import add_resource_type_overview
+
+
+class CollectionDocumenter(BaseDocumenter):
+ def document_collections(self, section):
+ collections = self._resource.meta.resource_model.collections
+ collections_list = []
+ add_resource_type_overview(
+ section=section,
+ resource_type='Collections',
+ description=(
+ 'Collections provide an interface to iterate over and '
+ 'manipulate groups of resources. '),
+ intro_link='guide_collections')
+ self.member_map['collections'] = collections_list
+ for collection in collections:
+ collection_section = section.add_new_section(collection.name)
+ collections_list.append(collection.name)
+ self._document_collection(collection_section, collection)
+
+ def _document_collection(self, section, collection):
+ methods = get_instance_public_methods(
+ getattr(self._resource, collection.name))
+ document_collection_object(section, collection)
+ batch_actions = {}
+ for batch_action in collection.batch_actions:
+ batch_actions[batch_action.name] = batch_action
+
+ for method in sorted(methods):
+ method_section = section.add_new_section(method)
+ if method in batch_actions:
+ document_batch_action(
+ section=method_section,
+ resource_name=self._resource_name,
+ event_emitter=self._resource.meta.client.meta.events,
+ batch_action_model=batch_actions[method],
+ collection_model=collection,
+ service_model=self._resource.meta.client.meta.service_model
+ )
+ else:
+ document_collection_method(
+ section=method_section,
+ resource_name=self._resource_name,
+ action_name=method,
+ event_emitter=self._resource.meta.client.meta.events,
+ collection_model=collection,
+ service_model=self._resource.meta.client.meta.service_model
+ )
+
+
+def document_collection_object(section, collection_model,
+ include_signature=True):
+ """Documents a collection resource object
+
+ :param section: The section to write to
+
+ :param collection_model: The model of the collection
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ if include_signature:
+ section.style.start_sphinx_py_attr(collection_model.name)
+ section.include_doc_string(
'A collection of %s resources.' % collection_model.resource.type)
section.include_doc_string(
'A %s Collection will include all resources by default, '
'and extreme caution should be taken when performing '
'actions on all resources.' % collection_model.resource.type)
-
-
-def document_batch_action(section, resource_name, event_emitter,
- batch_action_model, service_model, collection_model,
- include_signature=True):
- """Documents a collection's batch action
-
- :param section: The section to write to
-
- :param resource_name: The name of the resource
-
- :param action_name: The name of collection action. Currently only
- can be all, filter, limit, or page_size
-
- :param event_emitter: The event emitter to use to emit events
-
- :param batch_action_model: The model of the batch action
-
- :param collection_model: The model of the collection
-
- :param service_model: The model of the service
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- operation_model = service_model.operation_model(
- batch_action_model.request.operation)
- ignore_params = get_resource_ignore_params(
- batch_action_model.request.params)
-
- example_return_value = 'response'
- if batch_action_model.resource:
- example_return_value = xform_name(batch_action_model.resource.type)
-
- example_resource_name = xform_name(resource_name)
- if service_model.service_name == resource_name:
- example_resource_name = resource_name
- example_prefix = '%s = %s.%s.%s' % (
- example_return_value, example_resource_name,
- collection_model.name, batch_action_model.name
- )
- document_model_driven_resource_method(
- section=section, method_name=batch_action_model.name,
- operation_model=operation_model,
- event_emitter=event_emitter,
- method_description=operation_model.documentation,
- example_prefix=example_prefix,
- exclude_input=ignore_params,
- resource_action_model=batch_action_model,
- include_signature=include_signature
- )
-
-
-def document_collection_method(section, resource_name, action_name,
- event_emitter, collection_model, service_model,
- include_signature=True):
- """Documents a collection method
-
- :param section: The section to write to
-
- :param resource_name: The name of the resource
-
- :param action_name: The name of collection action. Currently only
- can be all, filter, limit, or page_size
-
- :param event_emitter: The event emitter to use to emit events
-
- :param collection_model: The model of the collection
-
- :param service_model: The model of the service
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- operation_model = service_model.operation_model(
- collection_model.request.operation)
-
- underlying_operation_members = []
- if operation_model.input_shape:
- underlying_operation_members = operation_model.input_shape.members
-
- example_resource_name = xform_name(resource_name)
- if service_model.service_name == resource_name:
- example_resource_name = resource_name
-
- custom_action_info_dict = {
- 'all': {
- 'method_description': (
- 'Creates an iterable of all %s resources '
- 'in the collection.' % collection_model.resource.type),
- 'example_prefix': '%s_iterator = %s.%s.all' % (
- xform_name(collection_model.resource.type),
- example_resource_name, collection_model.name),
- 'exclude_input': underlying_operation_members
- },
- 'filter': {
- 'method_description': (
- 'Creates an iterable of all %s resources '
- 'in the collection filtered by kwargs passed to '
+
+
+def document_batch_action(section, resource_name, event_emitter,
+ batch_action_model, service_model, collection_model,
+ include_signature=True):
+ """Documents a collection's batch action
+
+ :param section: The section to write to
+
+ :param resource_name: The name of the resource
+
+ :param action_name: The name of collection action. Currently only
+ can be all, filter, limit, or page_size
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param batch_action_model: The model of the batch action
+
+ :param collection_model: The model of the collection
+
+ :param service_model: The model of the service
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ operation_model = service_model.operation_model(
+ batch_action_model.request.operation)
+ ignore_params = get_resource_ignore_params(
+ batch_action_model.request.params)
+
+ example_return_value = 'response'
+ if batch_action_model.resource:
+ example_return_value = xform_name(batch_action_model.resource.type)
+
+ example_resource_name = xform_name(resource_name)
+ if service_model.service_name == resource_name:
+ example_resource_name = resource_name
+ example_prefix = '%s = %s.%s.%s' % (
+ example_return_value, example_resource_name,
+ collection_model.name, batch_action_model.name
+ )
+ document_model_driven_resource_method(
+ section=section, method_name=batch_action_model.name,
+ operation_model=operation_model,
+ event_emitter=event_emitter,
+ method_description=operation_model.documentation,
+ example_prefix=example_prefix,
+ exclude_input=ignore_params,
+ resource_action_model=batch_action_model,
+ include_signature=include_signature
+ )
+
+
+def document_collection_method(section, resource_name, action_name,
+ event_emitter, collection_model, service_model,
+ include_signature=True):
+ """Documents a collection method
+
+ :param section: The section to write to
+
+ :param resource_name: The name of the resource
+
+ :param action_name: The name of collection action. Currently only
+ can be all, filter, limit, or page_size
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param collection_model: The model of the collection
+
+ :param service_model: The model of the service
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ operation_model = service_model.operation_model(
+ collection_model.request.operation)
+
+ underlying_operation_members = []
+ if operation_model.input_shape:
+ underlying_operation_members = operation_model.input_shape.members
+
+ example_resource_name = xform_name(resource_name)
+ if service_model.service_name == resource_name:
+ example_resource_name = resource_name
+
+ custom_action_info_dict = {
+ 'all': {
+ 'method_description': (
+ 'Creates an iterable of all %s resources '
+ 'in the collection.' % collection_model.resource.type),
+ 'example_prefix': '%s_iterator = %s.%s.all' % (
+ xform_name(collection_model.resource.type),
+ example_resource_name, collection_model.name),
+ 'exclude_input': underlying_operation_members
+ },
+ 'filter': {
+ 'method_description': (
+ 'Creates an iterable of all %s resources '
+ 'in the collection filtered by kwargs passed to '
'method.' % collection_model.resource.type +
'A %s collection will include all resources by '
'default if no filters are provided, and extreme '
'caution should be taken when performing actions '
'on all resources.'% collection_model.resource.type),
- 'example_prefix': '%s_iterator = %s.%s.filter' % (
- xform_name(collection_model.resource.type),
- example_resource_name, collection_model.name),
- 'exclude_input': get_resource_ignore_params(
- collection_model.request.params)
- },
- 'limit': {
- 'method_description': (
- 'Creates an iterable up to a specified amount of '
- '%s resources in the collection.' %
- collection_model.resource.type),
- 'example_prefix': '%s_iterator = %s.%s.limit' % (
- xform_name(collection_model.resource.type),
- example_resource_name, collection_model.name),
- 'include_input': [
- DocumentedShape(
- name='count', type_name='integer',
- documentation=(
- 'The limit to the number of resources '
- 'in the iterable.'))],
- 'exclude_input': underlying_operation_members
- },
- 'page_size': {
- 'method_description': (
- 'Creates an iterable of all %s resources '
- 'in the collection, but limits the number of '
- 'items returned by each service call by the specified '
- 'amount.' % collection_model.resource.type),
- 'example_prefix': '%s_iterator = %s.%s.page_size' % (
- xform_name(collection_model.resource.type),
- example_resource_name, collection_model.name),
- 'include_input': [
- DocumentedShape(
- name='count', type_name='integer',
- documentation=(
- 'The number of items returned by each '
- 'service call'))],
- 'exclude_input': underlying_operation_members
- }
- }
- if action_name in custom_action_info_dict:
- action_info = custom_action_info_dict[action_name]
- document_model_driven_resource_method(
- section=section, method_name=action_name,
- operation_model=operation_model,
- event_emitter=event_emitter,
- resource_action_model=collection_model,
- include_signature=include_signature,
- **action_info
- )
+ 'example_prefix': '%s_iterator = %s.%s.filter' % (
+ xform_name(collection_model.resource.type),
+ example_resource_name, collection_model.name),
+ 'exclude_input': get_resource_ignore_params(
+ collection_model.request.params)
+ },
+ 'limit': {
+ 'method_description': (
+ 'Creates an iterable up to a specified amount of '
+ '%s resources in the collection.' %
+ collection_model.resource.type),
+ 'example_prefix': '%s_iterator = %s.%s.limit' % (
+ xform_name(collection_model.resource.type),
+ example_resource_name, collection_model.name),
+ 'include_input': [
+ DocumentedShape(
+ name='count', type_name='integer',
+ documentation=(
+ 'The limit to the number of resources '
+ 'in the iterable.'))],
+ 'exclude_input': underlying_operation_members
+ },
+ 'page_size': {
+ 'method_description': (
+ 'Creates an iterable of all %s resources '
+ 'in the collection, but limits the number of '
+ 'items returned by each service call by the specified '
+ 'amount.' % collection_model.resource.type),
+ 'example_prefix': '%s_iterator = %s.%s.page_size' % (
+ xform_name(collection_model.resource.type),
+ example_resource_name, collection_model.name),
+ 'include_input': [
+ DocumentedShape(
+ name='count', type_name='integer',
+ documentation=(
+ 'The number of items returned by each '
+ 'service call'))],
+ 'exclude_input': underlying_operation_members
+ }
+ }
+ if action_name in custom_action_info_dict:
+ action_info = custom_action_info_dict[action_name]
+ document_model_driven_resource_method(
+ section=section, method_name=action_name,
+ operation_model=operation_model,
+ event_emitter=event_emitter,
+ resource_action_model=collection_model,
+ include_signature=include_signature,
+ **action_info
+ )
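The four custom actions documented above (all, filter, limit and page_size) make up the standard collection interface; a usage sketch against EC2 instances (the service and filter values are illustrative):

    import boto3

    ec2 = boto3.resource('ec2')

    # Iterates over every instance; as the docstring warns, use with caution.
    for instance in ec2.instances.all():
        print(instance.id)

    # Same iterable, filtered via kwargs of the underlying operation.
    running = ec2.instances.filter(
        Filters=[{'Name': 'instance-state-name', 'Values': ['running']}])

    # Caps the total number of resources yielded.
    first_ten = ec2.instances.limit(count=10)

    # Controls how many items each underlying service call returns.
    paged = ec2.instances.page_size(count=100)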
diff --git a/contrib/python/boto3/boto3/docs/docstring.py b/contrib/python/boto3/boto3/docs/docstring.py
index ffaf5c164e..9dd4d396b5 100644
--- a/contrib/python/boto3/boto3/docs/docstring.py
+++ b/contrib/python/boto3/boto3/docs/docstring.py
@@ -1,74 +1,74 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.docs.docstring import LazyLoadedDocstring
-
-from boto3.docs.action import document_action
-from boto3.docs.action import document_load_reload_action
-from boto3.docs.subresource import document_sub_resource
-from boto3.docs.attr import document_attribute
-from boto3.docs.attr import document_identifier
-from boto3.docs.attr import document_reference
-from boto3.docs.collection import document_collection_object
-from boto3.docs.collection import document_collection_method
-from boto3.docs.collection import document_batch_action
-from boto3.docs.waiter import document_resource_waiter
-
-
-class ActionDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_action(*args, **kwargs)
-
-
-class LoadReloadDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_load_reload_action(*args, **kwargs)
-
-
-class SubResourceDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_sub_resource(*args, **kwargs)
-
-
-class AttributeDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_attribute(*args, **kwargs)
-
-
-class IdentifierDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_identifier(*args, **kwargs)
-
-
-class ReferenceDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_reference(*args, **kwargs)
-
-
-class CollectionDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_collection_object(*args, **kwargs)
-
-
-class CollectionMethodDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_collection_method(*args, **kwargs)
-
-
-class BatchActionDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_batch_action(*args, **kwargs)
-
-
-class ResourceWaiterDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_resource_waiter(*args, **kwargs)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.docstring import LazyLoadedDocstring
+
+from boto3.docs.action import document_action
+from boto3.docs.action import document_load_reload_action
+from boto3.docs.subresource import document_sub_resource
+from boto3.docs.attr import document_attribute
+from boto3.docs.attr import document_identifier
+from boto3.docs.attr import document_reference
+from boto3.docs.collection import document_collection_object
+from boto3.docs.collection import document_collection_method
+from boto3.docs.collection import document_batch_action
+from boto3.docs.waiter import document_resource_waiter
+
+
+class ActionDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_action(*args, **kwargs)
+
+
+class LoadReloadDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_load_reload_action(*args, **kwargs)
+
+
+class SubResourceDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_sub_resource(*args, **kwargs)
+
+
+class AttributeDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_attribute(*args, **kwargs)
+
+
+class IdentifierDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_identifier(*args, **kwargs)
+
+
+class ReferenceDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_reference(*args, **kwargs)
+
+
+class CollectionDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_collection_object(*args, **kwargs)
+
+
+class CollectionMethodDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_collection_method(*args, **kwargs)
+
+
+class BatchActionDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_batch_action(*args, **kwargs)
+
+
+class ResourceWaiterDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_resource_waiter(*args, **kwargs)
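
Each docstring class above supplies only a _write_docstring implementation; the deferral itself comes from botocore's LazyLoadedDocstring. A minimal sketch of that pattern follows, as an illustrative reimplementation rather than the botocore class itself:

class LazyDocstring(str):
    """Defers docstring generation until help() actually renders it."""

    def __new__(cls, *args, **kwargs):
        # Keep the underlying str value empty; content is produced on demand.
        return super(LazyDocstring, cls).__new__(cls)

    def __init__(self, writer, *args, **kwargs):
        self._writer = writer   # e.g. a function like document_action
        self._args = args
        self._kwargs = kwargs

    def expandtabs(self, tabsize=8):
        # pydoc calls expandtabs() while formatting help(), so nothing is
        # generated unless documentation is actually requested.
        return self._writer(*self._args, **self._kwargs).expandtabs(tabsize)

def write_docs(subject):
    return 'Documents %s on demand.' % subject

lazy = LazyDocstring(write_docs, 'an action')
print(lazy.expandtabs())  # the writer runs only at this point
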
diff --git a/contrib/python/boto3/boto3/docs/method.py b/contrib/python/boto3/boto3/docs/method.py
index d56381e366..fcc398c730 100644
--- a/contrib/python/boto3/boto3/docs/method.py
+++ b/contrib/python/boto3/boto3/docs/method.py
@@ -1,71 +1,71 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.docs.method import document_model_driven_method
-
-
-def document_model_driven_resource_method(
- section, method_name, operation_model, event_emitter,
- method_description=None, example_prefix=None, include_input=None,
- include_output=None, exclude_input=None, exclude_output=None,
- document_output=True, resource_action_model=None,
- include_signature=True):
-
- document_model_driven_method(
- section=section, method_name=method_name,
- operation_model=operation_model,
- event_emitter=event_emitter,
- method_description=method_description,
- example_prefix=example_prefix,
- include_input=include_input,
- include_output=include_output,
- exclude_input=exclude_input,
- exclude_output=exclude_output,
- document_output=document_output,
- include_signature=include_signature
- )
-
-    # If this action returns a resource, modify the return example to
- # appropriately reflect that.
- if resource_action_model.resource:
- if 'return' in section.available_sections:
- section.delete_section('return')
- resource_type = resource_action_model.resource.type
-
- new_return_section = section.add_new_section('return')
- return_resource_type = '%s.%s' % (
- operation_model.service_model.service_name,
- resource_type)
-
- return_type = ':py:class:`%s`' % return_resource_type
- return_description = '%s resource' % (resource_type)
-
- if _method_returns_resource_list(resource_action_model.resource):
- return_type = 'list(%s)' % return_type
- return_description = 'A list of %s resources' % (
- resource_type)
-
- new_return_section.style.new_line()
- new_return_section.write(
- ':rtype: %s' % return_type)
- new_return_section.style.new_line()
- new_return_section.write(
- ':returns: %s' % return_description)
- new_return_section.style.new_line()
-
-
-def _method_returns_resource_list(resource):
- for identifier in resource.identifiers:
- if identifier.path and '[]' in identifier.path:
- return True
-
- return False
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.method import document_model_driven_method
+
+
+def document_model_driven_resource_method(
+ section, method_name, operation_model, event_emitter,
+ method_description=None, example_prefix=None, include_input=None,
+ include_output=None, exclude_input=None, exclude_output=None,
+ document_output=True, resource_action_model=None,
+ include_signature=True):
+
+ document_model_driven_method(
+ section=section, method_name=method_name,
+ operation_model=operation_model,
+ event_emitter=event_emitter,
+ method_description=method_description,
+ example_prefix=example_prefix,
+ include_input=include_input,
+ include_output=include_output,
+ exclude_input=exclude_input,
+ exclude_output=exclude_output,
+ document_output=document_output,
+ include_signature=include_signature
+ )
+
+    # If this action returns a resource, modify the return example to
+ # appropriately reflect that.
+ if resource_action_model.resource:
+ if 'return' in section.available_sections:
+ section.delete_section('return')
+ resource_type = resource_action_model.resource.type
+
+ new_return_section = section.add_new_section('return')
+ return_resource_type = '%s.%s' % (
+ operation_model.service_model.service_name,
+ resource_type)
+
+ return_type = ':py:class:`%s`' % return_resource_type
+ return_description = '%s resource' % (resource_type)
+
+ if _method_returns_resource_list(resource_action_model.resource):
+ return_type = 'list(%s)' % return_type
+ return_description = 'A list of %s resources' % (
+ resource_type)
+
+ new_return_section.style.new_line()
+ new_return_section.write(
+ ':rtype: %s' % return_type)
+ new_return_section.style.new_line()
+ new_return_section.write(
+ ':returns: %s' % return_description)
+ new_return_section.style.new_line()
+
+
+def _method_returns_resource_list(resource):
+ for identifier in resource.identifiers:
+ if identifier.path and '[]' in identifier.path:
+ return True
+
+ return False
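
The '[]' test in _method_returns_resource_list above relies on JMESPath semantics: a flattening projection in an identifier path yields one value per element, so the action returns a list of resources. A small self-contained illustration, with Identifier as a hypothetical stand-in for the resource-model class:

from collections import namedtuple

Identifier = namedtuple('Identifier', ['name', 'path'])

def returns_resource_list(identifiers):
    # 'Contents[].Key' extracts one key per list element, so the action
    # materializes a list of resources rather than a single one.
    return any(i.path and '[]' in i.path for i in identifiers)

print(returns_resource_list([Identifier('Key', 'Contents[].Key')]))  # True
print(returns_resource_list([Identifier('Name', 'Name')]))           # False
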
diff --git a/contrib/python/boto3/boto3/docs/resource.py b/contrib/python/boto3/boto3/docs/resource.py
index 968b3efef3..17ecc0aed7 100644
--- a/contrib/python/boto3/boto3/docs/resource.py
+++ b/contrib/python/boto3/boto3/docs/resource.py
@@ -1,259 +1,259 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore import xform_name
-from botocore.docs.utils import get_official_service_name
-
-from boto3.docs.base import BaseDocumenter
-from boto3.docs.action import ActionDocumenter
-from boto3.docs.waiter import WaiterResourceDocumenter
-from boto3.docs.collection import CollectionDocumenter
-from boto3.docs.subresource import SubResourceDocumenter
-from boto3.docs.attr import document_attribute
-from boto3.docs.attr import document_identifier
-from boto3.docs.attr import document_reference
-from boto3.docs.utils import get_identifier_args_for_signature
-from boto3.docs.utils import get_identifier_values_for_example
-from boto3.docs.utils import get_identifier_description
-from boto3.docs.utils import add_resource_type_overview
-
-
-class ResourceDocumenter(BaseDocumenter):
- def __init__(self, resource, botocore_session):
- super(ResourceDocumenter, self).__init__(resource)
- self._botocore_session = botocore_session
-
- def document_resource(self, section):
- self._add_title(section)
- self._add_intro(section)
- overview_section = section.add_new_section('member-overview')
- self._add_identifiers(section)
- self._add_attributes(section)
- self._add_references(section)
- self._add_actions(section)
- self._add_sub_resources(section)
- self._add_collections(section)
- self._add_waiters(section)
- self._add_overview_of_members(overview_section)
-
- def _add_title(self, section):
- section.style.h2(self._resource_name)
-
- def _add_intro(self, section):
- identifier_names = []
- if self._resource_model.identifiers:
- for identifier in self._resource_model.identifiers:
- identifier_names.append(identifier.name)
-
- # Write out the class signature.
- class_args = get_identifier_args_for_signature(identifier_names)
- section.style.start_sphinx_py_class(
- class_name='%s(%s)' % (self.class_name, class_args))
-
-        # Add a short description about the resource
- description_section = section.add_new_section('description')
- self._add_description(description_section)
-
- # Add an example of how to instantiate the resource
- example_section = section.add_new_section('example')
- self._add_example(example_section, identifier_names)
-
- # Add the description for the parameters to instantiate the
- # resource.
- param_section = section.add_new_section('params')
- self._add_params_description(param_section, identifier_names)
-
- def _add_description(self, section):
- official_service_name = get_official_service_name(
- self._service_model)
- section.write(
- 'A resource representing an %s %s' % (
- official_service_name, self._resource_name))
-
- def _add_example(self, section, identifier_names):
- section.style.start_codeblock()
- section.style.new_line()
- section.write('import boto3')
- section.style.new_line()
- section.style.new_line()
- section.write(
- '%s = boto3.resource(\'%s\')' % (
- self._service_name, self._service_name)
- )
- section.style.new_line()
- example_values = get_identifier_values_for_example(identifier_names)
- section.write(
- '%s = %s.%s(%s)' % (
- xform_name(self._resource_name), self._service_name,
- self._resource_name, example_values))
- section.style.end_codeblock()
-
- def _add_params_description(self, section, identifier_names):
- for identifier_name in identifier_names:
- description = get_identifier_description(
- self._resource_name, identifier_name)
- section.write(':type %s: string' % identifier_name)
- section.style.new_line()
- section.write(':param %s: %s' % (
- identifier_name, description))
- section.style.new_line()
-
- def _add_overview_of_members(self, section):
- for resource_member_type in self.member_map:
- section.style.new_line()
- section.write('These are the resource\'s available %s:' % (
- resource_member_type))
- section.style.new_line()
- for member in self.member_map[resource_member_type]:
- if resource_member_type in ['identifiers', 'attributes',
- 'references', 'collections']:
- section.style.li(':py:attr:`%s`' % member)
- else:
- section.style.li(':py:meth:`%s()`' % member)
-
- def _add_identifiers(self, section):
- identifiers = self._resource.meta.resource_model.identifiers
- section = section.add_new_section('identifiers')
- member_list = []
- if identifiers:
- self.member_map['identifiers'] = member_list
- add_resource_type_overview(
- section=section,
- resource_type='Identifiers',
- description=(
- 'Identifiers are properties of a resource that are '
-                    'set upon instantiation of the resource.'),
- intro_link='identifiers_attributes_intro')
- for identifier in identifiers:
- identifier_section = section.add_new_section(identifier.name)
- member_list.append(identifier.name)
- document_identifier(
- section=identifier_section,
- resource_name=self._resource_name,
- identifier_model=identifier
- )
-
- def _add_attributes(self, section):
- service_model = self._resource.meta.client.meta.service_model
- attributes = {}
- if self._resource.meta.resource_model.shape:
- shape = service_model.shape_for(
- self._resource.meta.resource_model.shape)
- attributes = self._resource.meta.resource_model.get_attributes(
- shape)
- section = section.add_new_section('attributes')
- attribute_list = []
- if attributes:
- add_resource_type_overview(
- section=section,
- resource_type='Attributes',
- description=(
- 'Attributes provide access'
- ' to the properties of a resource. Attributes are lazy-'
- 'loaded the first time one is accessed via the'
- ' :py:meth:`load` method.'),
- intro_link='identifiers_attributes_intro')
- self.member_map['attributes'] = attribute_list
- for attr_name in sorted(attributes):
- _, attr_shape = attributes[attr_name]
- attribute_section = section.add_new_section(attr_name)
- attribute_list.append(attr_name)
- document_attribute(
- section=attribute_section,
- service_name=self._service_name,
- resource_name=self._resource_name,
- attr_name=attr_name,
- event_emitter=self._resource.meta.client.meta.events,
- attr_model=attr_shape
- )
-
- def _add_references(self, section):
- section = section.add_new_section('references')
- references = self._resource.meta.resource_model.references
- reference_list = []
- if references:
- add_resource_type_overview(
- section=section,
- resource_type='References',
- description=(
- 'References are related resource instances that have '
- 'a belongs-to relationship.'),
- intro_link='references_intro')
- self.member_map['references'] = reference_list
- for reference in references:
- reference_section = section.add_new_section(reference.name)
- reference_list.append(reference.name)
- document_reference(
- section=reference_section,
- reference_model=reference
- )
-
- def _add_actions(self, section):
- section = section.add_new_section('actions')
- actions = self._resource.meta.resource_model.actions
- if actions:
- documenter = ActionDocumenter(self._resource)
- documenter.member_map = self.member_map
- documenter.document_actions(section)
-
- def _add_sub_resources(self, section):
- section = section.add_new_section('sub-resources')
- sub_resources = self._resource.meta.resource_model.subresources
- if sub_resources:
- documenter = SubResourceDocumenter(self._resource)
- documenter.member_map = self.member_map
- documenter.document_sub_resources(section)
-
- def _add_collections(self, section):
- section = section.add_new_section('collections')
- collections = self._resource.meta.resource_model.collections
- if collections:
- documenter = CollectionDocumenter(self._resource)
- documenter.member_map = self.member_map
- documenter.document_collections(section)
-
- def _add_waiters(self, section):
- section = section.add_new_section('waiters')
- waiters = self._resource.meta.resource_model.waiters
- if waiters:
- service_waiter_model = self._botocore_session.get_waiter_model(
- self._service_name)
- documenter = WaiterResourceDocumenter(
- self._resource, service_waiter_model)
- documenter.member_map = self.member_map
- documenter.document_resource_waiters(section)
-
-
-class ServiceResourceDocumenter(ResourceDocumenter):
- @property
- def class_name(self):
- return '%s.ServiceResource' % self._service_docs_name
-
- def _add_title(self, section):
- section.style.h2('Service Resource')
-
- def _add_description(self, section):
- official_service_name = get_official_service_name(
- self._service_model)
- section.write(
- 'A resource representing %s' % official_service_name)
-
- def _add_example(self, section, identifier_names):
- section.style.start_codeblock()
- section.style.new_line()
- section.write('import boto3')
- section.style.new_line()
- section.style.new_line()
- section.write(
- '%s = boto3.resource(\'%s\')' % (
- self._service_name, self._service_name))
- section.style.end_codeblock()
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore import xform_name
+from botocore.docs.utils import get_official_service_name
+
+from boto3.docs.base import BaseDocumenter
+from boto3.docs.action import ActionDocumenter
+from boto3.docs.waiter import WaiterResourceDocumenter
+from boto3.docs.collection import CollectionDocumenter
+from boto3.docs.subresource import SubResourceDocumenter
+from boto3.docs.attr import document_attribute
+from boto3.docs.attr import document_identifier
+from boto3.docs.attr import document_reference
+from boto3.docs.utils import get_identifier_args_for_signature
+from boto3.docs.utils import get_identifier_values_for_example
+from boto3.docs.utils import get_identifier_description
+from boto3.docs.utils import add_resource_type_overview
+
+
+class ResourceDocumenter(BaseDocumenter):
+ def __init__(self, resource, botocore_session):
+ super(ResourceDocumenter, self).__init__(resource)
+ self._botocore_session = botocore_session
+
+ def document_resource(self, section):
+ self._add_title(section)
+ self._add_intro(section)
+ overview_section = section.add_new_section('member-overview')
+ self._add_identifiers(section)
+ self._add_attributes(section)
+ self._add_references(section)
+ self._add_actions(section)
+ self._add_sub_resources(section)
+ self._add_collections(section)
+ self._add_waiters(section)
+ self._add_overview_of_members(overview_section)
+
+ def _add_title(self, section):
+ section.style.h2(self._resource_name)
+
+ def _add_intro(self, section):
+ identifier_names = []
+ if self._resource_model.identifiers:
+ for identifier in self._resource_model.identifiers:
+ identifier_names.append(identifier.name)
+
+ # Write out the class signature.
+ class_args = get_identifier_args_for_signature(identifier_names)
+ section.style.start_sphinx_py_class(
+ class_name='%s(%s)' % (self.class_name, class_args))
+
+        # Add a short description about the resource
+ description_section = section.add_new_section('description')
+ self._add_description(description_section)
+
+ # Add an example of how to instantiate the resource
+ example_section = section.add_new_section('example')
+ self._add_example(example_section, identifier_names)
+
+ # Add the description for the parameters to instantiate the
+ # resource.
+ param_section = section.add_new_section('params')
+ self._add_params_description(param_section, identifier_names)
+
+ def _add_description(self, section):
+ official_service_name = get_official_service_name(
+ self._service_model)
+ section.write(
+ 'A resource representing an %s %s' % (
+ official_service_name, self._resource_name))
+
+ def _add_example(self, section, identifier_names):
+ section.style.start_codeblock()
+ section.style.new_line()
+ section.write('import boto3')
+ section.style.new_line()
+ section.style.new_line()
+ section.write(
+ '%s = boto3.resource(\'%s\')' % (
+ self._service_name, self._service_name)
+ )
+ section.style.new_line()
+ example_values = get_identifier_values_for_example(identifier_names)
+ section.write(
+ '%s = %s.%s(%s)' % (
+ xform_name(self._resource_name), self._service_name,
+ self._resource_name, example_values))
+ section.style.end_codeblock()
+
+ def _add_params_description(self, section, identifier_names):
+ for identifier_name in identifier_names:
+ description = get_identifier_description(
+ self._resource_name, identifier_name)
+ section.write(':type %s: string' % identifier_name)
+ section.style.new_line()
+ section.write(':param %s: %s' % (
+ identifier_name, description))
+ section.style.new_line()
+
+ def _add_overview_of_members(self, section):
+ for resource_member_type in self.member_map:
+ section.style.new_line()
+ section.write('These are the resource\'s available %s:' % (
+ resource_member_type))
+ section.style.new_line()
+ for member in self.member_map[resource_member_type]:
+ if resource_member_type in ['identifiers', 'attributes',
+ 'references', 'collections']:
+ section.style.li(':py:attr:`%s`' % member)
+ else:
+ section.style.li(':py:meth:`%s()`' % member)
+
+ def _add_identifiers(self, section):
+ identifiers = self._resource.meta.resource_model.identifiers
+ section = section.add_new_section('identifiers')
+ member_list = []
+ if identifiers:
+ self.member_map['identifiers'] = member_list
+ add_resource_type_overview(
+ section=section,
+ resource_type='Identifiers',
+ description=(
+ 'Identifiers are properties of a resource that are '
+                    'set upon instantiation of the resource.'),
+ intro_link='identifiers_attributes_intro')
+ for identifier in identifiers:
+ identifier_section = section.add_new_section(identifier.name)
+ member_list.append(identifier.name)
+ document_identifier(
+ section=identifier_section,
+ resource_name=self._resource_name,
+ identifier_model=identifier
+ )
+
+ def _add_attributes(self, section):
+ service_model = self._resource.meta.client.meta.service_model
+ attributes = {}
+ if self._resource.meta.resource_model.shape:
+ shape = service_model.shape_for(
+ self._resource.meta.resource_model.shape)
+ attributes = self._resource.meta.resource_model.get_attributes(
+ shape)
+ section = section.add_new_section('attributes')
+ attribute_list = []
+ if attributes:
+ add_resource_type_overview(
+ section=section,
+ resource_type='Attributes',
+ description=(
+ 'Attributes provide access'
+ ' to the properties of a resource. Attributes are lazy-'
+ 'loaded the first time one is accessed via the'
+ ' :py:meth:`load` method.'),
+ intro_link='identifiers_attributes_intro')
+ self.member_map['attributes'] = attribute_list
+ for attr_name in sorted(attributes):
+ _, attr_shape = attributes[attr_name]
+ attribute_section = section.add_new_section(attr_name)
+ attribute_list.append(attr_name)
+ document_attribute(
+ section=attribute_section,
+ service_name=self._service_name,
+ resource_name=self._resource_name,
+ attr_name=attr_name,
+ event_emitter=self._resource.meta.client.meta.events,
+ attr_model=attr_shape
+ )
+
+ def _add_references(self, section):
+ section = section.add_new_section('references')
+ references = self._resource.meta.resource_model.references
+ reference_list = []
+ if references:
+ add_resource_type_overview(
+ section=section,
+ resource_type='References',
+ description=(
+ 'References are related resource instances that have '
+ 'a belongs-to relationship.'),
+ intro_link='references_intro')
+ self.member_map['references'] = reference_list
+ for reference in references:
+ reference_section = section.add_new_section(reference.name)
+ reference_list.append(reference.name)
+ document_reference(
+ section=reference_section,
+ reference_model=reference
+ )
+
+ def _add_actions(self, section):
+ section = section.add_new_section('actions')
+ actions = self._resource.meta.resource_model.actions
+ if actions:
+ documenter = ActionDocumenter(self._resource)
+ documenter.member_map = self.member_map
+ documenter.document_actions(section)
+
+ def _add_sub_resources(self, section):
+ section = section.add_new_section('sub-resources')
+ sub_resources = self._resource.meta.resource_model.subresources
+ if sub_resources:
+ documenter = SubResourceDocumenter(self._resource)
+ documenter.member_map = self.member_map
+ documenter.document_sub_resources(section)
+
+ def _add_collections(self, section):
+ section = section.add_new_section('collections')
+ collections = self._resource.meta.resource_model.collections
+ if collections:
+ documenter = CollectionDocumenter(self._resource)
+ documenter.member_map = self.member_map
+ documenter.document_collections(section)
+
+ def _add_waiters(self, section):
+ section = section.add_new_section('waiters')
+ waiters = self._resource.meta.resource_model.waiters
+ if waiters:
+ service_waiter_model = self._botocore_session.get_waiter_model(
+ self._service_name)
+ documenter = WaiterResourceDocumenter(
+ self._resource, service_waiter_model)
+ documenter.member_map = self.member_map
+ documenter.document_resource_waiters(section)
+
+
+class ServiceResourceDocumenter(ResourceDocumenter):
+ @property
+ def class_name(self):
+ return '%s.ServiceResource' % self._service_docs_name
+
+ def _add_title(self, section):
+ section.style.h2('Service Resource')
+
+ def _add_description(self, section):
+ official_service_name = get_official_service_name(
+ self._service_model)
+ section.write(
+ 'A resource representing %s' % official_service_name)
+
+ def _add_example(self, section, identifier_names):
+ section.style.start_codeblock()
+ section.style.new_line()
+ section.write('import boto3')
+ section.style.new_line()
+ section.style.new_line()
+ section.write(
+ '%s = boto3.resource(\'%s\')' % (
+ self._service_name, self._service_name))
+ section.style.end_codeblock()
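
The _add_example methods above emit instantiation snippets of the following shape. A concrete instance for S3, with hypothetical identifier values:

import boto3

s3 = boto3.resource('s3')                     # ServiceResourceDocumenter's example
obj = s3.Object('example-bucket', 'key.txt')  # ResourceDocumenter's example, one
                                              # value per identifier
print(obj.bucket_name, obj.key)
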
diff --git a/contrib/python/boto3/boto3/docs/service.py b/contrib/python/boto3/boto3/docs/service.py
index 3f4185dc29..d2c173602d 100644
--- a/contrib/python/boto3/boto3/docs/service.py
+++ b/contrib/python/boto3/boto3/docs/service.py
@@ -1,131 +1,131 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import os
-
-import boto3
-from botocore.exceptions import DataNotFoundError
-from botocore.docs.service import ServiceDocumenter as BaseServiceDocumenter
-from botocore.docs.bcdoc.restdoc import DocumentStructure
-
-from boto3.utils import ServiceContext
-from boto3.docs.client import Boto3ClientDocumenter
-from boto3.docs.resource import ResourceDocumenter
-from boto3.docs.resource import ServiceResourceDocumenter
-
-
-class ServiceDocumenter(BaseServiceDocumenter):
- # The path used to find examples
- EXAMPLE_PATH = os.path.join(os.path.dirname(boto3.__file__), 'examples')
-
- def __init__(self, service_name, session):
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+import boto3
+from botocore.exceptions import DataNotFoundError
+from botocore.docs.service import ServiceDocumenter as BaseServiceDocumenter
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+
+from boto3.utils import ServiceContext
+from boto3.docs.client import Boto3ClientDocumenter
+from boto3.docs.resource import ResourceDocumenter
+from boto3.docs.resource import ServiceResourceDocumenter
+
+
+class ServiceDocumenter(BaseServiceDocumenter):
+ # The path used to find examples
+ EXAMPLE_PATH = os.path.join(os.path.dirname(boto3.__file__), 'examples')
+
+ def __init__(self, service_name, session):
super(ServiceDocumenter, self).__init__(
service_name=service_name,
# I know that this is an internal attribute, but the botocore session
# is needed to load the paginator and waiter models.
session=session._session,
)
- self._boto3_session = session
- self._client = self._boto3_session.client(service_name)
- self._service_resource = None
- if self._service_name in self._boto3_session.get_available_resources():
- self._service_resource = self._boto3_session.resource(service_name)
- self.sections = [
- 'title',
- 'table-of-contents',
- 'client',
- 'paginators',
- 'waiters',
- 'service-resource',
- 'resources',
- 'examples'
- ]
-
- def document_service(self):
- """Documents an entire service.
-
-        :returns: The reStructuredText of the documented service.
- """
- doc_structure = DocumentStructure(
- self._service_name, section_names=self.sections,
- target='html')
- self.title(doc_structure.get_section('title'))
- self.table_of_contents(doc_structure.get_section('table-of-contents'))
-
- self.client_api(doc_structure.get_section('client'))
- self.paginator_api(doc_structure.get_section('paginators'))
- self.waiter_api(doc_structure.get_section('waiters'))
- if self._service_resource:
- self._document_service_resource(
- doc_structure.get_section('service-resource'))
- self._document_resources(doc_structure.get_section('resources'))
- self._document_examples(doc_structure.get_section('examples'))
- return doc_structure.flush_structure()
-
- def client_api(self, section):
- examples = None
- try:
- examples = self.get_examples(self._service_name)
- except DataNotFoundError:
- pass
-
- Boto3ClientDocumenter(self._client, examples).document_client(section)
-
- def _document_service_resource(self, section):
- ServiceResourceDocumenter(
- self._service_resource, self._session).document_resource(
- section)
-
- def _document_resources(self, section):
- temp_identifier_value = 'foo'
- loader = self._session.get_component('data_loader')
- json_resource_model = loader.load_service_model(
- self._service_name, 'resources-1')
- service_model = self._service_resource.meta.client.meta.service_model
- for resource_name in json_resource_model['resources']:
- resource_model = json_resource_model['resources'][resource_name]
- resource_cls = self._boto3_session.resource_factory.\
- load_from_definition(
- resource_name=resource_name,
- single_resource_json_definition=resource_model,
- service_context=ServiceContext(
- service_name=self._service_name,
- resource_json_definitions=json_resource_model[
- 'resources'],
- service_model=service_model,
- service_waiter_model=None
- )
- )
- identifiers = resource_cls.meta.resource_model.identifiers
- args = []
- for _ in identifiers:
- args.append(temp_identifier_value)
- resource = resource_cls(*args, client=self._client)
- ResourceDocumenter(
- resource, self._session).document_resource(
- section.add_new_section(resource.meta.resource_model.name))
-
- def _get_example_file(self):
- return os.path.realpath(
- os.path.join(self.EXAMPLE_PATH,
- self._service_name + '.rst'))
-
- def _document_examples(self, section):
- examples_file = self._get_example_file()
- if os.path.isfile(examples_file):
- section.style.h2('Examples')
- section.style.new_line()
- section.write(".. contents::\n :local:\n :depth: 1")
- section.style.new_line()
- section.style.new_line()
- with open(examples_file, 'r') as f:
- section.write(f.read())
+ self._boto3_session = session
+ self._client = self._boto3_session.client(service_name)
+ self._service_resource = None
+ if self._service_name in self._boto3_session.get_available_resources():
+ self._service_resource = self._boto3_session.resource(service_name)
+ self.sections = [
+ 'title',
+ 'table-of-contents',
+ 'client',
+ 'paginators',
+ 'waiters',
+ 'service-resource',
+ 'resources',
+ 'examples'
+ ]
+
+ def document_service(self):
+ """Documents an entire service.
+
+        :returns: The reStructuredText of the documented service.
+ """
+ doc_structure = DocumentStructure(
+ self._service_name, section_names=self.sections,
+ target='html')
+ self.title(doc_structure.get_section('title'))
+ self.table_of_contents(doc_structure.get_section('table-of-contents'))
+
+ self.client_api(doc_structure.get_section('client'))
+ self.paginator_api(doc_structure.get_section('paginators'))
+ self.waiter_api(doc_structure.get_section('waiters'))
+ if self._service_resource:
+ self._document_service_resource(
+ doc_structure.get_section('service-resource'))
+ self._document_resources(doc_structure.get_section('resources'))
+ self._document_examples(doc_structure.get_section('examples'))
+ return doc_structure.flush_structure()
+
+ def client_api(self, section):
+ examples = None
+ try:
+ examples = self.get_examples(self._service_name)
+ except DataNotFoundError:
+ pass
+
+ Boto3ClientDocumenter(self._client, examples).document_client(section)
+
+ def _document_service_resource(self, section):
+ ServiceResourceDocumenter(
+ self._service_resource, self._session).document_resource(
+ section)
+
+ def _document_resources(self, section):
+ temp_identifier_value = 'foo'
+ loader = self._session.get_component('data_loader')
+ json_resource_model = loader.load_service_model(
+ self._service_name, 'resources-1')
+ service_model = self._service_resource.meta.client.meta.service_model
+ for resource_name in json_resource_model['resources']:
+ resource_model = json_resource_model['resources'][resource_name]
+ resource_cls = self._boto3_session.resource_factory.\
+ load_from_definition(
+ resource_name=resource_name,
+ single_resource_json_definition=resource_model,
+ service_context=ServiceContext(
+ service_name=self._service_name,
+ resource_json_definitions=json_resource_model[
+ 'resources'],
+ service_model=service_model,
+ service_waiter_model=None
+ )
+ )
+ identifiers = resource_cls.meta.resource_model.identifiers
+ args = []
+ for _ in identifiers:
+ args.append(temp_identifier_value)
+ resource = resource_cls(*args, client=self._client)
+ ResourceDocumenter(
+ resource, self._session).document_resource(
+ section.add_new_section(resource.meta.resource_model.name))
+
+ def _get_example_file(self):
+ return os.path.realpath(
+ os.path.join(self.EXAMPLE_PATH,
+ self._service_name + '.rst'))
+
+ def _document_examples(self, section):
+ examples_file = self._get_example_file()
+ if os.path.isfile(examples_file):
+ section.style.h2('Examples')
+ section.style.new_line()
+ section.write(".. contents::\n :local:\n :depth: 1")
+ section.style.new_line()
+ section.style.new_line()
+ with open(examples_file, 'r') as f:
+ section.write(f.read())
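
A sketch of driving the documenter above end to end, assuming importable boto3 sources and configured credentials; the output filename is arbitrary:

import boto3
from boto3.docs.service import ServiceDocumenter

session = boto3.Session(region_name='us-east-1')
rst = ServiceDocumenter('s3', session).document_service()
with open('s3.rst', 'wb') as f:  # flush_structure() yields encoded bytes
    f.write(rst)
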
diff --git a/contrib/python/boto3/boto3/docs/subresource.py b/contrib/python/boto3/boto3/docs/subresource.py
index 029ce85624..7241606713 100644
--- a/contrib/python/boto3/boto3/docs/subresource.py
+++ b/contrib/python/boto3/boto3/docs/subresource.py
@@ -1,112 +1,112 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore import xform_name
-from botocore.utils import get_service_module_name
-
-from boto3.docs.base import BaseDocumenter
-from boto3.docs.utils import get_identifier_args_for_signature
-from boto3.docs.utils import get_identifier_values_for_example
-from boto3.docs.utils import get_identifier_description
-from boto3.docs.utils import add_resource_type_overview
-
-
-class SubResourceDocumenter(BaseDocumenter):
- def document_sub_resources(self, section):
- add_resource_type_overview(
- section=section,
- resource_type='Sub-resources',
- description=(
- 'Sub-resources are methods that create a new instance of a'
- ' child resource. This resource\'s identifiers get passed'
- ' along to the child.'),
- intro_link='subresources_intro')
- sub_resources = sorted(
- self._resource.meta.resource_model.subresources,
- key=lambda sub_resource: sub_resource.name
- )
- sub_resources_list = []
- self.member_map['sub-resources'] = sub_resources_list
- for sub_resource in sub_resources:
- sub_resource_section = section.add_new_section(sub_resource.name)
- sub_resources_list.append(sub_resource.name)
- document_sub_resource(
- section=sub_resource_section,
- resource_name=self._resource_name,
- sub_resource_model=sub_resource,
- service_model=self._service_model
- )
-
-
-def document_sub_resource(section, resource_name, sub_resource_model,
- service_model, include_signature=True):
-    """Documents a sub-resource
-
- :param section: The section to write to
-
- :param resource_name: The name of the resource
-
- :param sub_resource_model: The model of the subresource
-
- :param service_model: The model of the service
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- identifiers_needed = []
- for identifier in sub_resource_model.resource.identifiers:
- if identifier.source == 'input':
- identifiers_needed.append(xform_name(identifier.target))
-
- if include_signature:
- signature_args = get_identifier_args_for_signature(identifiers_needed)
- section.style.start_sphinx_py_method(
- sub_resource_model.name, signature_args)
-
- method_intro_section = section.add_new_section(
- 'method-intro')
- description = 'Creates a %s resource.' % sub_resource_model.resource.type
- method_intro_section.include_doc_string(description)
- example_section = section.add_new_section('example')
- example_values = get_identifier_values_for_example(identifiers_needed)
- example_resource_name = xform_name(resource_name)
- if service_model.service_name == resource_name:
- example_resource_name = resource_name
- example = '%s = %s.%s(%s)' % (
- xform_name(sub_resource_model.resource.type),
- example_resource_name,
- sub_resource_model.name, example_values
- )
- example_section.style.start_codeblock()
- example_section.write(example)
- example_section.style.end_codeblock()
-
- param_section = section.add_new_section('params')
- for identifier in identifiers_needed:
- description = get_identifier_description(
- sub_resource_model.name, identifier)
- param_section.write(':type %s: string' % identifier)
- param_section.style.new_line()
- param_section.write(':param %s: %s' % (
- identifier, description))
- param_section.style.new_line()
-
- return_section = section.add_new_section('return')
- return_section.style.new_line()
- return_section.write(
- ':rtype: :py:class:`%s.%s`' % (
- get_service_module_name(service_model),
- sub_resource_model.resource.type))
- return_section.style.new_line()
- return_section.write(
- ':returns: A %s resource' % sub_resource_model.resource.type)
- return_section.style.new_line()
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore import xform_name
+from botocore.utils import get_service_module_name
+
+from boto3.docs.base import BaseDocumenter
+from boto3.docs.utils import get_identifier_args_for_signature
+from boto3.docs.utils import get_identifier_values_for_example
+from boto3.docs.utils import get_identifier_description
+from boto3.docs.utils import add_resource_type_overview
+
+
+class SubResourceDocumenter(BaseDocumenter):
+ def document_sub_resources(self, section):
+ add_resource_type_overview(
+ section=section,
+ resource_type='Sub-resources',
+ description=(
+ 'Sub-resources are methods that create a new instance of a'
+ ' child resource. This resource\'s identifiers get passed'
+ ' along to the child.'),
+ intro_link='subresources_intro')
+ sub_resources = sorted(
+ self._resource.meta.resource_model.subresources,
+ key=lambda sub_resource: sub_resource.name
+ )
+ sub_resources_list = []
+ self.member_map['sub-resources'] = sub_resources_list
+ for sub_resource in sub_resources:
+ sub_resource_section = section.add_new_section(sub_resource.name)
+ sub_resources_list.append(sub_resource.name)
+ document_sub_resource(
+ section=sub_resource_section,
+ resource_name=self._resource_name,
+ sub_resource_model=sub_resource,
+ service_model=self._service_model
+ )
+
+
+def document_sub_resource(section, resource_name, sub_resource_model,
+ service_model, include_signature=True):
+    """Documents a sub-resource
+
+ :param section: The section to write to
+
+ :param resource_name: The name of the resource
+
+ :param sub_resource_model: The model of the subresource
+
+ :param service_model: The model of the service
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ identifiers_needed = []
+ for identifier in sub_resource_model.resource.identifiers:
+ if identifier.source == 'input':
+ identifiers_needed.append(xform_name(identifier.target))
+
+ if include_signature:
+ signature_args = get_identifier_args_for_signature(identifiers_needed)
+ section.style.start_sphinx_py_method(
+ sub_resource_model.name, signature_args)
+
+ method_intro_section = section.add_new_section(
+ 'method-intro')
+ description = 'Creates a %s resource.' % sub_resource_model.resource.type
+ method_intro_section.include_doc_string(description)
+ example_section = section.add_new_section('example')
+ example_values = get_identifier_values_for_example(identifiers_needed)
+ example_resource_name = xform_name(resource_name)
+ if service_model.service_name == resource_name:
+ example_resource_name = resource_name
+ example = '%s = %s.%s(%s)' % (
+ xform_name(sub_resource_model.resource.type),
+ example_resource_name,
+ sub_resource_model.name, example_values
+ )
+ example_section.style.start_codeblock()
+ example_section.write(example)
+ example_section.style.end_codeblock()
+
+ param_section = section.add_new_section('params')
+ for identifier in identifiers_needed:
+ description = get_identifier_description(
+ sub_resource_model.name, identifier)
+ param_section.write(':type %s: string' % identifier)
+ param_section.style.new_line()
+ param_section.write(':param %s: %s' % (
+ identifier, description))
+ param_section.style.new_line()
+
+ return_section = section.add_new_section('return')
+ return_section.style.new_line()
+ return_section.write(
+ ':rtype: :py:class:`%s.%s`' % (
+ get_service_module_name(service_model),
+ sub_resource_model.resource.type))
+ return_section.style.new_line()
+ return_section.write(
+ ':returns: A %s resource' % sub_resource_model.resource.type)
+ return_section.style.new_line()
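
The identifier pass-through described in the Sub-resources overview above, shown concretely for S3 with hypothetical names:

import boto3

s3 = boto3.resource('s3')
bucket = s3.Bucket('example-bucket')
obj = bucket.Object('report.csv')  # sub-resource: the bucket's identifier
                                   # is passed along to the child
print(obj.bucket_name)             # 'example-bucket'
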
diff --git a/contrib/python/boto3/boto3/docs/utils.py b/contrib/python/boto3/boto3/docs/utils.py
index 5590c5bcd5..8c96a121d7 100644
--- a/contrib/python/boto3/boto3/docs/utils.py
+++ b/contrib/python/boto3/boto3/docs/utils.py
@@ -1,142 +1,142 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import inspect
-
-import jmespath
-
-from botocore.compat import six
-
-
-def get_resource_ignore_params(params):
- """Helper method to determine which parameters to ignore for actions
-
-    :returns: A list of the parameter names that do not need to be
- included in a resource's method call for documentation purposes.
- """
- ignore_params = []
- for param in params:
- result = jmespath.compile(param.target)
- current = result.parsed
- # Use JMESPath to find the left most element in the target expression
- # which will be the parameter to ignore in the action call.
- while current['children']:
- current = current['children'][0]
- # Make sure the parameter we are about to ignore is a field.
- # If it is not, we should ignore the result to avoid false positives.
- if current['type'] == 'field':
- ignore_params.append(current['value'])
- return ignore_params
-
-
-def is_resource_action(action_handle):
- if six.PY3:
- return inspect.isfunction(action_handle)
- else:
- return inspect.ismethod(action_handle)
-
-
-def get_resource_public_actions(resource_class):
- resource_class_members = inspect.getmembers(resource_class)
- resource_methods = {}
- for name, member in resource_class_members:
- if not name.startswith('_'):
- if not name[0].isupper():
- if not name.startswith('wait_until'):
- if is_resource_action(member):
- resource_methods[name] = member
- return resource_methods
-
-
-def get_identifier_values_for_example(identifier_names):
- example_values = ['\'%s\'' % identifier for identifier in identifier_names]
- return ','.join(example_values)
-
-
-def get_identifier_args_for_signature(identifier_names):
- return ','.join(identifier_names)
-
-
-def get_identifier_description(resource_name, identifier_name):
- return "The %s's %s identifier. This **must** be set." % (
- resource_name, identifier_name)
-
-
-def add_resource_type_overview(section, resource_type, description,
- intro_link=None):
- section.style.new_line()
- section.write('.. rst-class:: admonition-title')
- section.style.new_line()
- section.style.new_line()
- section.write(resource_type)
- section.style.new_line()
- section.style.new_line()
- section.write(description)
- section.style.new_line()
- if intro_link is not None:
- section.write('For more information about %s refer to the '
- ':ref:`Resources Introduction Guide<%s>`.' % (
- resource_type.lower(), intro_link))
- section.style.new_line()
-
-
-class DocumentModifiedShape(object):
- def __init__(self, shape_name, new_type, new_description,
- new_example_value):
- self._shape_name = shape_name
- self._new_type = new_type
- self._new_description = new_description
- self._new_example_value = new_example_value
-
- def replace_documentation_for_matching_shape(self, event_name, section,
- **kwargs):
- if self._shape_name == section.context.get('shape'):
- self._replace_documentation(event_name, section)
- for section_name in section.available_sections:
- sub_section = section.get_section(section_name)
- if self._shape_name == sub_section.context.get('shape'):
- self._replace_documentation(event_name, sub_section)
- else:
- self.replace_documentation_for_matching_shape(
- event_name, sub_section)
-
- def _replace_documentation(self, event_name, section):
- if event_name.startswith('docs.request-example') or \
- event_name.startswith('docs.response-example'):
- section.remove_all_sections()
- section.clear_text()
- section.write(self._new_example_value)
-
- if event_name.startswith('docs.request-params') or \
- event_name.startswith('docs.response-params'):
- for section_name in section.available_sections:
- # Delete any extra members as a new shape is being
- # used.
- if section_name not in ['param-name', 'param-documentation',
- 'end-structure', 'param-type',
- 'end-param']:
- section.delete_section(section_name)
-
- # Update the documentation
- description_section = section.get_section('param-documentation')
- description_section.clear_text()
- description_section.write(self._new_description)
-
- # Update the param type
- type_section = section.get_section('param-type')
- if type_section.getvalue().decode('utf-8').startswith(':type'):
- type_section.clear_text()
- type_section.write(':type %s: %s' % (
- section.name, self._new_type))
- else:
- type_section.clear_text()
- type_section.style.italics('(%s) -- ' % self._new_type)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import inspect
+
+import jmespath
+
+from botocore.compat import six
+
+
+def get_resource_ignore_params(params):
+ """Helper method to determine which parameters to ignore for actions
+
+    :returns: A list of the parameter names that do not need to be
+ included in a resource's method call for documentation purposes.
+ """
+ ignore_params = []
+ for param in params:
+ result = jmespath.compile(param.target)
+ current = result.parsed
+ # Use JMESPath to find the left most element in the target expression
+ # which will be the parameter to ignore in the action call.
+ while current['children']:
+ current = current['children'][0]
+ # Make sure the parameter we are about to ignore is a field.
+ # If it is not, we should ignore the result to avoid false positives.
+ if current['type'] == 'field':
+ ignore_params.append(current['value'])
+ return ignore_params
+
+
+def is_resource_action(action_handle):
+ if six.PY3:
+ return inspect.isfunction(action_handle)
+ else:
+ return inspect.ismethod(action_handle)
+
+
+def get_resource_public_actions(resource_class):
+ resource_class_members = inspect.getmembers(resource_class)
+ resource_methods = {}
+ for name, member in resource_class_members:
+ if not name.startswith('_'):
+ if not name[0].isupper():
+ if not name.startswith('wait_until'):
+ if is_resource_action(member):
+ resource_methods[name] = member
+ return resource_methods
+
+
+def get_identifier_values_for_example(identifier_names):
+ example_values = ['\'%s\'' % identifier for identifier in identifier_names]
+ return ','.join(example_values)
+
+
+def get_identifier_args_for_signature(identifier_names):
+ return ','.join(identifier_names)
+
+
+def get_identifier_description(resource_name, identifier_name):
+ return "The %s's %s identifier. This **must** be set." % (
+ resource_name, identifier_name)
+
+
+def add_resource_type_overview(section, resource_type, description,
+ intro_link=None):
+ section.style.new_line()
+ section.write('.. rst-class:: admonition-title')
+ section.style.new_line()
+ section.style.new_line()
+ section.write(resource_type)
+ section.style.new_line()
+ section.style.new_line()
+ section.write(description)
+ section.style.new_line()
+ if intro_link is not None:
+ section.write('For more information about %s refer to the '
+ ':ref:`Resources Introduction Guide<%s>`.' % (
+ resource_type.lower(), intro_link))
+ section.style.new_line()
+
+
+class DocumentModifiedShape(object):
+ def __init__(self, shape_name, new_type, new_description,
+ new_example_value):
+ self._shape_name = shape_name
+ self._new_type = new_type
+ self._new_description = new_description
+ self._new_example_value = new_example_value
+
+ def replace_documentation_for_matching_shape(self, event_name, section,
+ **kwargs):
+ if self._shape_name == section.context.get('shape'):
+ self._replace_documentation(event_name, section)
+ for section_name in section.available_sections:
+ sub_section = section.get_section(section_name)
+ if self._shape_name == sub_section.context.get('shape'):
+ self._replace_documentation(event_name, sub_section)
+ else:
+ self.replace_documentation_for_matching_shape(
+ event_name, sub_section)
+
+ def _replace_documentation(self, event_name, section):
+ if event_name.startswith('docs.request-example') or \
+ event_name.startswith('docs.response-example'):
+ section.remove_all_sections()
+ section.clear_text()
+ section.write(self._new_example_value)
+
+ if event_name.startswith('docs.request-params') or \
+ event_name.startswith('docs.response-params'):
+ for section_name in section.available_sections:
+ # Delete any extra members as a new shape is being
+ # used.
+ if section_name not in ['param-name', 'param-documentation',
+ 'end-structure', 'param-type',
+ 'end-param']:
+ section.delete_section(section_name)
+
+ # Update the documentation
+ description_section = section.get_section('param-documentation')
+ description_section.clear_text()
+ description_section.write(self._new_description)
+
+ # Update the param type
+ type_section = section.get_section('param-type')
+ if type_section.getvalue().decode('utf-8').startswith(':type'):
+ type_section.clear_text()
+ type_section.write(':type %s: %s' % (
+ section.name, self._new_type))
+ else:
+ type_section.clear_text()
+ type_section.style.italics('(%s) -- ' % self._new_type)
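
A worked illustration of the leftmost-field walk in get_resource_ignore_params above; Param is a hypothetical stand-in for the request-parameter model:

from collections import namedtuple

import jmespath

Param = namedtuple('Param', ['target'])

def ignore_params(params):
    ignored = []
    for param in params:
        current = jmespath.compile(param.target).parsed
        # Descend to the leftmost node of the target expression.
        while current['children']:
            current = current['children'][0]
        if current['type'] == 'field':
            ignored.append(current['value'])
    return ignored

# The resource fills in 'Bucket' and 'Delete' itself, so docs omit them.
print(ignore_params([Param('Bucket'), Param('Delete.Objects[].Key')]))
# ['Bucket', 'Delete']
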
diff --git a/contrib/python/boto3/boto3/docs/waiter.py b/contrib/python/boto3/boto3/docs/waiter.py
index 3ee95d4e73..da7e776475 100644
--- a/contrib/python/boto3/boto3/docs/waiter.py
+++ b/contrib/python/boto3/boto3/docs/waiter.py
@@ -1,91 +1,91 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore import xform_name
-from botocore.utils import get_service_module_name
-from botocore.docs.method import document_model_driven_method
-
-from boto3.docs.base import BaseDocumenter
-from boto3.docs.utils import get_resource_ignore_params
-from boto3.docs.utils import add_resource_type_overview
-
-
-class WaiterResourceDocumenter(BaseDocumenter):
- def __init__(self, resource, service_waiter_model):
- super(WaiterResourceDocumenter, self).__init__(resource)
- self._service_waiter_model = service_waiter_model
-
- def document_resource_waiters(self, section):
- waiters = self._resource.meta.resource_model.waiters
- add_resource_type_overview(
- section=section,
- resource_type='Waiters',
- description=(
- 'Waiters provide an interface to wait for a resource'
- ' to reach a specific state.'),
- intro_link='waiters_intro')
- waiter_list = []
- self.member_map['waiters'] = waiter_list
- for waiter in waiters:
- waiter_section = section.add_new_section(waiter.name)
- waiter_list.append(waiter.name)
- document_resource_waiter(
- section=waiter_section,
- resource_name=self._resource_name,
- event_emitter=self._resource.meta.client.meta.events,
- service_model=self._service_model,
- resource_waiter_model=waiter,
- service_waiter_model=self._service_waiter_model
- )
-
-
-def document_resource_waiter(section, resource_name, event_emitter,
- service_model, resource_waiter_model,
- service_waiter_model, include_signature=True):
- waiter_model = service_waiter_model.get_waiter(
- resource_waiter_model.waiter_name)
- operation_model = service_model.operation_model(
- waiter_model.operation)
-
- ignore_params = get_resource_ignore_params(resource_waiter_model.params)
- service_module_name = get_service_module_name(service_model)
- description = (
- 'Waits until this %s is %s. This method calls '
- ':py:meth:`%s.Waiter.%s.wait` which polls '
- ':py:meth:`%s.Client.%s` every %s seconds until '
- 'a successful state is reached. An error is returned '
- 'after %s failed checks.' % (
- resource_name, ' '.join(resource_waiter_model.name.split('_')[2:]),
- service_module_name,
- xform_name(resource_waiter_model.waiter_name),
- service_module_name,
- xform_name(waiter_model.operation),
- waiter_model.delay, waiter_model.max_attempts))
- example_prefix = '%s.%s' % (
- xform_name(resource_name), resource_waiter_model.name)
- document_model_driven_method(
- section=section, method_name=resource_waiter_model.name,
- operation_model=operation_model,
- event_emitter=event_emitter,
- example_prefix=example_prefix,
- method_description=description,
- exclude_input=ignore_params,
- include_signature=include_signature
- )
- if 'return' in section.available_sections:
- # Waiters do not return anything so we should remove
- # any sections that may document the underlying return
- # value of the client method.
- return_section = section.get_section('return')
- return_section.clear_text()
- return_section.remove_all_sections()
- return_section.write(':returns: None')
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore import xform_name
+from botocore.utils import get_service_module_name
+from botocore.docs.method import document_model_driven_method
+
+from boto3.docs.base import BaseDocumenter
+from boto3.docs.utils import get_resource_ignore_params
+from boto3.docs.utils import add_resource_type_overview
+
+
+class WaiterResourceDocumenter(BaseDocumenter):
+ def __init__(self, resource, service_waiter_model):
+ super(WaiterResourceDocumenter, self).__init__(resource)
+ self._service_waiter_model = service_waiter_model
+
+ def document_resource_waiters(self, section):
+ waiters = self._resource.meta.resource_model.waiters
+ add_resource_type_overview(
+ section=section,
+ resource_type='Waiters',
+ description=(
+ 'Waiters provide an interface to wait for a resource'
+ ' to reach a specific state.'),
+ intro_link='waiters_intro')
+ waiter_list = []
+ self.member_map['waiters'] = waiter_list
+ for waiter in waiters:
+ waiter_section = section.add_new_section(waiter.name)
+ waiter_list.append(waiter.name)
+ document_resource_waiter(
+ section=waiter_section,
+ resource_name=self._resource_name,
+ event_emitter=self._resource.meta.client.meta.events,
+ service_model=self._service_model,
+ resource_waiter_model=waiter,
+ service_waiter_model=self._service_waiter_model
+ )
+
+
+def document_resource_waiter(section, resource_name, event_emitter,
+ service_model, resource_waiter_model,
+ service_waiter_model, include_signature=True):
+ waiter_model = service_waiter_model.get_waiter(
+ resource_waiter_model.waiter_name)
+ operation_model = service_model.operation_model(
+ waiter_model.operation)
+
+ ignore_params = get_resource_ignore_params(resource_waiter_model.params)
+ service_module_name = get_service_module_name(service_model)
+ description = (
+ 'Waits until this %s is %s. This method calls '
+ ':py:meth:`%s.Waiter.%s.wait` which polls '
+ ':py:meth:`%s.Client.%s` every %s seconds until '
+ 'a successful state is reached. An error is returned '
+ 'after %s failed checks.' % (
+ resource_name, ' '.join(resource_waiter_model.name.split('_')[2:]),
+ service_module_name,
+ xform_name(resource_waiter_model.waiter_name),
+ service_module_name,
+ xform_name(waiter_model.operation),
+ waiter_model.delay, waiter_model.max_attempts))
+ example_prefix = '%s.%s' % (
+ xform_name(resource_name), resource_waiter_model.name)
+ document_model_driven_method(
+ section=section, method_name=resource_waiter_model.name,
+ operation_model=operation_model,
+ event_emitter=event_emitter,
+ example_prefix=example_prefix,
+ method_description=description,
+ exclude_input=ignore_params,
+ include_signature=include_signature
+ )
+ if 'return' in section.available_sections:
+ # Waiters do not return anything so we should remove
+ # any sections that may document the underlying return
+ # value of the client method.
+ return_section = section.get_section('return')
+ return_section.clear_text()
+ return_section.remove_all_sections()
+ return_section.write(':returns: None')
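For context, the resource waiters this module documents are invoked like the following sketch; the service, resource, and bucket name are illustrative:

    import boto3

    s3 = boto3.resource('s3')
    bucket = s3.Bucket('example-bucket')  # hypothetical bucket name
    # Polls the underlying S3.Client.head_bucket operation on the waiter's
    # delay/max_attempts schedule until the bucket exists.
    bucket.wait_until_exists()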
diff --git a/contrib/python/boto3/boto3/dynamodb/__init__.py b/contrib/python/boto3/boto3/dynamodb/__init__.py
index 58f30dc875..c89416d7a5 100644
--- a/contrib/python/boto3/boto3/dynamodb/__init__.py
+++ b/contrib/python/boto3/boto3/dynamodb/__init__.py
@@ -1,12 +1,12 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
diff --git a/contrib/python/boto3/boto3/dynamodb/conditions.py b/contrib/python/boto3/boto3/dynamodb/conditions.py
index d027ce4d63..98a13c35d2 100644
--- a/contrib/python/boto3/boto3/dynamodb/conditions.py
+++ b/contrib/python/boto3/boto3/dynamodb/conditions.py
@@ -1,428 +1,428 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from collections import namedtuple
-import re
-
-from boto3.exceptions import DynamoDBOperationNotSupportedError
-from boto3.exceptions import DynamoDBNeedsConditionError
-from boto3.exceptions import DynamoDBNeedsKeyConditionError
-
-
-ATTR_NAME_REGEX = re.compile(r'[^.\[\]]+(?![^\[]*\])')
-
-
-class ConditionBase(object):
-
- expression_format = ''
- expression_operator = ''
- has_grouped_values = False
-
- def __init__(self, *values):
- self._values = values
-
- def __and__(self, other):
- if not isinstance(other, ConditionBase):
- raise DynamoDBOperationNotSupportedError('AND', other)
- return And(self, other)
-
- def __or__(self, other):
- if not isinstance(other, ConditionBase):
- raise DynamoDBOperationNotSupportedError('OR', other)
- return Or(self, other)
-
- def __invert__(self):
- return Not(self)
-
- def get_expression(self):
- return {'format': self.expression_format,
- 'operator': self.expression_operator,
- 'values': self._values}
-
- def __eq__(self, other):
- if isinstance(other, type(self)):
- if self._values == other._values:
- return True
- return False
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
-
-class AttributeBase(object):
- def __init__(self, name):
- self.name = name
-
- def __and__(self, value):
- raise DynamoDBOperationNotSupportedError('AND', self)
-
- def __or__(self, value):
- raise DynamoDBOperationNotSupportedError('OR', self)
-
- def __invert__(self):
- raise DynamoDBOperationNotSupportedError('NOT', self)
-
- def eq(self, value):
- """Creates a condition where the attribute is equal to the value.
-
- :param value: The value that the attribute is equal to.
- """
- return Equals(self, value)
-
- def lt(self, value):
- """Creates a condition where the attribute is less than the value.
-
- :param value: The value that the attribute is less than.
- """
- return LessThan(self, value)
-
- def lte(self, value):
- """Creates a condition where the attribute is less than or equal to the
- value.
-
- :param value: The value that the attribute is less than or equal to.
- """
- return LessThanEquals(self, value)
-
- def gt(self, value):
- """Creates a condition where the attribute is greater than the value.
-
- :param value: The value that the attribute is greater than.
- """
- return GreaterThan(self, value)
-
- def gte(self, value):
- """Creates a condition where the attribute is greater than or equal to
- the value.
-
- :param value: The value that the attribute is greater than or equal to.
- """
- return GreaterThanEquals(self, value)
-
- def begins_with(self, value):
- """Creates a condition where the attribute begins with the value.
-
- :param value: The value that the attribute begins with.
- """
- return BeginsWith(self, value)
-
- def between(self, low_value, high_value):
- """Creates a condition where the attribute is greater than or equal
- to the low value and less than or equal to the high value.
-
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from collections import namedtuple
+import re
+
+from boto3.exceptions import DynamoDBOperationNotSupportedError
+from boto3.exceptions import DynamoDBNeedsConditionError
+from boto3.exceptions import DynamoDBNeedsKeyConditionError
+
+
+ATTR_NAME_REGEX = re.compile(r'[^.\[\]]+(?![^\[]*\])')
+
+
+class ConditionBase(object):
+
+ expression_format = ''
+ expression_operator = ''
+ has_grouped_values = False
+
+ def __init__(self, *values):
+ self._values = values
+
+ def __and__(self, other):
+ if not isinstance(other, ConditionBase):
+ raise DynamoDBOperationNotSupportedError('AND', other)
+ return And(self, other)
+
+ def __or__(self, other):
+ if not isinstance(other, ConditionBase):
+ raise DynamoDBOperationNotSupportedError('OR', other)
+ return Or(self, other)
+
+ def __invert__(self):
+ return Not(self)
+
+ def get_expression(self):
+ return {'format': self.expression_format,
+ 'operator': self.expression_operator,
+ 'values': self._values}
+
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ if self._values == other._values:
+ return True
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class AttributeBase(object):
+ def __init__(self, name):
+ self.name = name
+
+ def __and__(self, value):
+ raise DynamoDBOperationNotSupportedError('AND', self)
+
+ def __or__(self, value):
+ raise DynamoDBOperationNotSupportedError('OR', self)
+
+ def __invert__(self):
+ raise DynamoDBOperationNotSupportedError('NOT', self)
+
+ def eq(self, value):
+ """Creates a condition where the attribute is equal to the value.
+
+ :param value: The value that the attribute is equal to.
+ """
+ return Equals(self, value)
+
+ def lt(self, value):
+ """Creates a condition where the attribute is less than the value.
+
+ :param value: The value that the attribute is less than.
+ """
+ return LessThan(self, value)
+
+ def lte(self, value):
+ """Creates a condition where the attribute is less than or equal to the
+ value.
+
+ :param value: The value that the attribute is less than or equal to.
+ """
+ return LessThanEquals(self, value)
+
+ def gt(self, value):
+ """Creates a condition where the attribute is greater than the value.
+
+ :param value: The value that the attribute is greater than.
+ """
+ return GreaterThan(self, value)
+
+ def gte(self, value):
+ """Creates a condition where the attribute is greater than or equal to
+ the value.
+
+ :param value: The value that the attribute is greater than or equal to.
+ """
+ return GreaterThanEquals(self, value)
+
+ def begins_with(self, value):
+ """Creates a condition where the attribute begins with the value.
+
+ :param value: The value that the attribute begins with.
+ """
+ return BeginsWith(self, value)
+
+ def between(self, low_value, high_value):
+ """Creates a condition where the attribute is greater than or equal
+ to the low value and less than or equal to the high value.
+
:param low_value: The value that the attribute is greater than or equal to.
:param high_value: The value that the attribute is less than or equal to.
- """
- return Between(self, low_value, high_value)
-
+ """
+ return Between(self, low_value, high_value)
+
def __eq__(self, other):
return isinstance(other, type(self)) and self.name == other.name
-
+
def __ne__(self, other):
return not self.__eq__(other)
-class ConditionAttributeBase(ConditionBase, AttributeBase):
- """This base class is for conditions that can have attribute methods.
-
- One example is the Size condition. To complete a condition, you need
- to apply another AttributeBase method like eq().
- """
- def __init__(self, *values):
- ConditionBase.__init__(self, *values)
- # This assumes the first value to the condition is the attribute,
- # which can be used to generate its attribute base.
- AttributeBase.__init__(self, values[0].name)
-
+class ConditionAttributeBase(ConditionBase, AttributeBase):
+ """This base class is for conditions that can have attribute methods.
+
+ One example is the Size condition. To complete a condition, you need
+ to apply another AttributeBase method like eq().
+ """
+ def __init__(self, *values):
+ ConditionBase.__init__(self, *values)
+ # This assumes the first value to the condition is the attribute,
+ # which can be used to generate its attribute base.
+ AttributeBase.__init__(self, values[0].name)
+
def __eq__(self, other):
return ConditionBase.__eq__(self, other) and \
AttributeBase.__eq__(self, other)
-
+
def __ne__(self, other):
return not self.__eq__(other)
-class ComparisonCondition(ConditionBase):
- expression_format = '{0} {operator} {1}'
-
-
-class Equals(ComparisonCondition):
- expression_operator = '='
-
-
-class NotEquals(ComparisonCondition):
- expression_operator = '<>'
-
-
-class LessThan(ComparisonCondition):
- expression_operator = '<'
-
-
-class LessThanEquals(ComparisonCondition):
- expression_operator = '<='
-
-
-class GreaterThan(ComparisonCondition):
- expression_operator = '>'
-
-
-class GreaterThanEquals(ComparisonCondition):
- expression_operator = '>='
-
-
-class In(ComparisonCondition):
- expression_operator = 'IN'
- has_grouped_values = True
-
-
-class Between(ConditionBase):
- expression_operator = 'BETWEEN'
- expression_format = '{0} {operator} {1} AND {2}'
-
-
-class BeginsWith(ConditionBase):
- expression_operator = 'begins_with'
- expression_format = '{operator}({0}, {1})'
-
-
-class Contains(ConditionBase):
- expression_operator = 'contains'
- expression_format = '{operator}({0}, {1})'
-
-
-class Size(ConditionAttributeBase):
- expression_operator = 'size'
- expression_format = '{operator}({0})'
-
-
-class AttributeType(ConditionBase):
- expression_operator = 'attribute_type'
- expression_format = '{operator}({0}, {1})'
-
-
-class AttributeExists(ConditionBase):
- expression_operator = 'attribute_exists'
- expression_format = '{operator}({0})'
-
-
-class AttributeNotExists(ConditionBase):
- expression_operator = 'attribute_not_exists'
- expression_format = '{operator}({0})'
-
-
-class And(ConditionBase):
- expression_operator = 'AND'
- expression_format = '({0} {operator} {1})'
-
-
-class Or(ConditionBase):
- expression_operator = 'OR'
- expression_format = '({0} {operator} {1})'
-
-
-class Not(ConditionBase):
- expression_operator = 'NOT'
- expression_format = '({operator} {0})'
-
-
-class Key(AttributeBase):
- pass
-
-
-class Attr(AttributeBase):
- """Represents an DynamoDB item's attribute."""
- def ne(self, value):
- """Creates a condition where the attribute is not equal to the value
-
- :param value: The value that the attribute is not equal to.
- """
- return NotEquals(self, value)
-
- def is_in(self, value):
- """Creates a condition where the attribute is in the value,
-
- :type value: list
- :param value: The value that the attribute is in.
- """
- return In(self, value)
-
- def exists(self):
- """Creates a condition where the attribute exists."""
- return AttributeExists(self)
-
- def not_exists(self):
- """Creates a condition where the attribute does not exist."""
- return AttributeNotExists(self)
-
- def contains(self, value):
- """Creates a condition where the attribute contains the value.
-
- :param value: The value the attribute contains.
- """
- return Contains(self, value)
-
- def size(self):
- """Creates a condition for the attribute size.
-
- Note that another AttributeBase method must be called on the returned
- size condition for it to be a valid DynamoDB condition.
- """
- return Size(self)
-
- def attribute_type(self, value):
- """Creates a condition for the attribute type.
-
- :param value: The type of the attribute.
- """
- return AttributeType(self, value)
-
-
-BuiltConditionExpression = namedtuple(
- 'BuiltConditionExpression',
- ['condition_expression', 'attribute_name_placeholders',
- 'attribute_value_placeholders']
-)
-
-
-class ConditionExpressionBuilder(object):
- """This class is used to build condition expressions with placeholders"""
- def __init__(self):
- self._name_count = 0
- self._value_count = 0
- self._name_placeholder = 'n'
- self._value_placeholder = 'v'
-
- def _get_name_placeholder(self):
- return '#' + self._name_placeholder + str(self._name_count)
-
- def _get_value_placeholder(self):
- return ':' + self._value_placeholder + str(self._value_count)
-
- def reset(self):
- """Resets the placeholder name and values"""
- self._name_count = 0
- self._value_count = 0
-
- def build_expression(self, condition, is_key_condition=False):
- """Builds the condition expression and the dictionary of placeholders.
-
- :type condition: ConditionBase
- :param condition: A condition to be built into a condition expression
- string with any necessary placeholders.
-
- :type is_key_condition: Boolean
- :param is_key_condition: True if the expression is for a
- KeyConditionExpression. False otherwise.
-
- :rtype: (string, dict, dict)
- :returns: Will return a string representing the condition with
- placeholders inserted where necessary, a dictionary of
- placeholders for attribute names, and a dictionary of
- placeholders for attribute values. Here is a sample return value:
-
- ('#n0 = :v0', {'#n0': 'myattribute'}, {':v0': 'myvalue'})
- """
- if not isinstance(condition, ConditionBase):
- raise DynamoDBNeedsConditionError(condition)
- attribute_name_placeholders = {}
- attribute_value_placeholders = {}
- condition_expression = self._build_expression(
- condition, attribute_name_placeholders,
- attribute_value_placeholders, is_key_condition=is_key_condition)
- return BuiltConditionExpression(
- condition_expression=condition_expression,
- attribute_name_placeholders=attribute_name_placeholders,
- attribute_value_placeholders=attribute_value_placeholders
- )
-
- def _build_expression(self, condition, attribute_name_placeholders,
- attribute_value_placeholders, is_key_condition):
- expression_dict = condition.get_expression()
- replaced_values = []
- for value in expression_dict['values']:
- # Build the necessary placeholders for that value.
- # Placeholders are built for both attribute names and values.
- replaced_value = self._build_expression_component(
- value, attribute_name_placeholders,
- attribute_value_placeholders, condition.has_grouped_values,
- is_key_condition)
- replaced_values.append(replaced_value)
- # Fill out the expression using the operator and the
- # values that have been replaced with placeholders.
- return expression_dict['format'].format(
- *replaced_values, operator=expression_dict['operator'])
-
- def _build_expression_component(self, value, attribute_name_placeholders,
- attribute_value_placeholders,
- has_grouped_values, is_key_condition):
- # Continue to recurse if the value is a ConditionBase in order
- # to extract out all parts of the expression.
- if isinstance(value, ConditionBase):
- return self._build_expression(
- value, attribute_name_placeholders,
- attribute_value_placeholders, is_key_condition)
- # If it is not a ConditionBase, we can recurse no further.
- # So we check if it is an attribute and add placeholders for
- # its name.
- elif isinstance(value, AttributeBase):
- if is_key_condition and not isinstance(value, Key):
- raise DynamoDBNeedsKeyConditionError(
- 'Attribute object %s is of type %s. '
- 'KeyConditionExpression only supports Attribute objects '
- 'of type Key' % (value.name, type(value)))
- return self._build_name_placeholder(
- value, attribute_name_placeholders)
- # If it is anything else, we treat it as a value and thus placeholders
- # are needed for the value.
- else:
- return self._build_value_placeholder(
- value, attribute_value_placeholders, has_grouped_values)
-
- def _build_name_placeholder(self, value, attribute_name_placeholders):
- attribute_name = value.name
- # Figure out which parts of the attribute name need replacement.
- attribute_name_parts = ATTR_NAME_REGEX.findall(attribute_name)
-
- # Add a temporary placeholder for each of these parts.
- placeholder_format = ATTR_NAME_REGEX.sub('%s', attribute_name)
- str_format_args = []
- for part in attribute_name_parts:
- name_placeholder = self._get_name_placeholder()
- self._name_count += 1
- str_format_args.append(name_placeholder)
- # Add the placeholder and value to dictionary of name placeholders.
- attribute_name_placeholders[name_placeholder] = part
- # Replace the temporary placeholders with the designated placeholders.
- return placeholder_format % tuple(str_format_args)
-
- def _build_value_placeholder(self, value, attribute_value_placeholders,
- has_grouped_values=False):
- # If the values are grouped, we need to add a placeholder for
- # each element inside of the actual value.
- if has_grouped_values:
- placeholder_list = []
- for v in value:
- value_placeholder = self._get_value_placeholder()
- self._value_count += 1
- placeholder_list.append(value_placeholder)
- attribute_value_placeholders[value_placeholder] = v
- # Assuming the values are grouped by parentheses.
- # IN is currently the only operator that uses this, so it may
- # need to be changed in the future.
- return '(' + ', '.join(placeholder_list) + ')'
- # Otherwise, treat the value as a single value that needs only
- # one placeholder.
- else:
- value_placeholder = self._get_value_placeholder()
- self._value_count += 1
- attribute_value_placeholders[value_placeholder] = value
- return value_placeholder
+class ComparisonCondition(ConditionBase):
+ expression_format = '{0} {operator} {1}'
+
+
+class Equals(ComparisonCondition):
+ expression_operator = '='
+
+
+class NotEquals(ComparisonCondition):
+ expression_operator = '<>'
+
+
+class LessThan(ComparisonCondition):
+ expression_operator = '<'
+
+
+class LessThanEquals(ComparisonCondition):
+ expression_operator = '<='
+
+
+class GreaterThan(ComparisonCondition):
+ expression_operator = '>'
+
+
+class GreaterThanEquals(ComparisonCondition):
+ expression_operator = '>='
+
+
+class In(ComparisonCondition):
+ expression_operator = 'IN'
+ has_grouped_values = True
+
+
+class Between(ConditionBase):
+ expression_operator = 'BETWEEN'
+ expression_format = '{0} {operator} {1} AND {2}'
+
+
+class BeginsWith(ConditionBase):
+ expression_operator = 'begins_with'
+ expression_format = '{operator}({0}, {1})'
+
+
+class Contains(ConditionBase):
+ expression_operator = 'contains'
+ expression_format = '{operator}({0}, {1})'
+
+
+class Size(ConditionAttributeBase):
+ expression_operator = 'size'
+ expression_format = '{operator}({0})'
+
+
+class AttributeType(ConditionBase):
+ expression_operator = 'attribute_type'
+ expression_format = '{operator}({0}, {1})'
+
+
+class AttributeExists(ConditionBase):
+ expression_operator = 'attribute_exists'
+ expression_format = '{operator}({0})'
+
+
+class AttributeNotExists(ConditionBase):
+ expression_operator = 'attribute_not_exists'
+ expression_format = '{operator}({0})'
+
+
+class And(ConditionBase):
+ expression_operator = 'AND'
+ expression_format = '({0} {operator} {1})'
+
+
+class Or(ConditionBase):
+ expression_operator = 'OR'
+ expression_format = '({0} {operator} {1})'
+
+
+class Not(ConditionBase):
+ expression_operator = 'NOT'
+ expression_format = '({operator} {0})'
+
+
+class Key(AttributeBase):
+ pass
+
+
+class Attr(AttributeBase):
+ """Represents an DynamoDB item's attribute."""
+ def ne(self, value):
+ """Creates a condition where the attribute is not equal to the value
+
+ :param value: The value that the attribute is not equal to.
+ """
+ return NotEquals(self, value)
+
+ def is_in(self, value):
+ """Creates a condition where the attribute is in the value,
+
+ :type value: list
+ :param value: The value that the attribute is in.
+ """
+ return In(self, value)
+
+ def exists(self):
+ """Creates a condition where the attribute exists."""
+ return AttributeExists(self)
+
+ def not_exists(self):
+ """Creates a condition where the attribute does not exist."""
+ return AttributeNotExists(self)
+
+ def contains(self, value):
+ """Creates a condition where the attribute contains the value.
+
+ :param value: The value the attribute contains.
+ """
+ return Contains(self, value)
+
+ def size(self):
+ """Creates a condition for the attribute size.
+
+ Note that another AttributeBase method must be called on the returned
+ size condition for it to be a valid DynamoDB condition.
+ """
+ return Size(self)
+
+ def attribute_type(self, value):
+ """Creates a condition for the attribute type.
+
+ :param value: The type of the attribute.
+ """
+ return AttributeType(self, value)
+
+
+BuiltConditionExpression = namedtuple(
+ 'BuiltConditionExpression',
+ ['condition_expression', 'attribute_name_placeholders',
+ 'attribute_value_placeholders']
+)
+
+
+class ConditionExpressionBuilder(object):
+ """This class is used to build condition expressions with placeholders"""
+ def __init__(self):
+ self._name_count = 0
+ self._value_count = 0
+ self._name_placeholder = 'n'
+ self._value_placeholder = 'v'
+
+ def _get_name_placeholder(self):
+ return '#' + self._name_placeholder + str(self._name_count)
+
+ def _get_value_placeholder(self):
+ return ':' + self._value_placeholder + str(self._value_count)
+
+ def reset(self):
+ """Resets the placeholder name and values"""
+ self._name_count = 0
+ self._value_count = 0
+
+ def build_expression(self, condition, is_key_condition=False):
+ """Builds the condition expression and the dictionary of placeholders.
+
+ :type condition: ConditionBase
+ :param condition: A condition to be built into a condition expression
+ string with any necessary placeholders.
+
+ :type is_key_condition: Boolean
+ :param is_key_condition: True if the expression is for a
+ KeyConditionExpression. False otherwise.
+
+ :rtype: (string, dict, dict)
+ :returns: Will return a string representing the condition with
+ placeholders inserted where necessary, a dictionary of
+ placeholders for attribute names, and a dictionary of
+ placeholders for attribute values. Here is a sample return value:
+
+ ('#n0 = :v0', {'#n0': 'myattribute'}, {':v0': 'myvalue'})
+ """
+ if not isinstance(condition, ConditionBase):
+ raise DynamoDBNeedsConditionError(condition)
+ attribute_name_placeholders = {}
+ attribute_value_placeholders = {}
+ condition_expression = self._build_expression(
+ condition, attribute_name_placeholders,
+ attribute_value_placeholders, is_key_condition=is_key_condition)
+ return BuiltConditionExpression(
+ condition_expression=condition_expression,
+ attribute_name_placeholders=attribute_name_placeholders,
+ attribute_value_placeholders=attribute_value_placeholders
+ )
+
+ def _build_expression(self, condition, attribute_name_placeholders,
+ attribute_value_placeholders, is_key_condition):
+ expression_dict = condition.get_expression()
+ replaced_values = []
+ for value in expression_dict['values']:
+ # Build the necessary placeholders for that value.
+ # Placeholders are built for both attribute names and values.
+ replaced_value = self._build_expression_component(
+ value, attribute_name_placeholders,
+ attribute_value_placeholders, condition.has_grouped_values,
+ is_key_condition)
+ replaced_values.append(replaced_value)
+ # Fill out the expression using the operator and the
+ # values that have been replaced with placeholders.
+ return expression_dict['format'].format(
+ *replaced_values, operator=expression_dict['operator'])
+
+ def _build_expression_component(self, value, attribute_name_placeholders,
+ attribute_value_placeholders,
+ has_grouped_values, is_key_condition):
+ # Continue to recurse if the value is a ConditionBase in order
+ # to extract out all parts of the expression.
+ if isinstance(value, ConditionBase):
+ return self._build_expression(
+ value, attribute_name_placeholders,
+ attribute_value_placeholders, is_key_condition)
+ # If it is not a ConditionBase, we can recurse no further.
+ # So we check if it is an attribute and add placeholders for
+ # its name.
+ elif isinstance(value, AttributeBase):
+ if is_key_condition and not isinstance(value, Key):
+ raise DynamoDBNeedsKeyConditionError(
+ 'Attribute object %s is of type %s. '
+ 'KeyConditionExpression only supports Attribute objects '
+ 'of type Key' % (value.name, type(value)))
+ return self._build_name_placeholder(
+ value, attribute_name_placeholders)
+ # If it is anything else, we treat it as a value and thus placeholders
+ # are needed for the value.
+ else:
+ return self._build_value_placeholder(
+ value, attribute_value_placeholders, has_grouped_values)
+
+ def _build_name_placeholder(self, value, attribute_name_placeholders):
+ attribute_name = value.name
+ # Figure out which parts of the attribute name need replacement.
+ attribute_name_parts = ATTR_NAME_REGEX.findall(attribute_name)
+
+ # Add a temporary placeholder for each of these parts.
+ placeholder_format = ATTR_NAME_REGEX.sub('%s', attribute_name)
+ str_format_args = []
+ for part in attribute_name_parts:
+ name_placeholder = self._get_name_placeholder()
+ self._name_count += 1
+ str_format_args.append(name_placeholder)
+ # Add the placeholder and value to dictionary of name placeholders.
+ attribute_name_placeholders[name_placeholder] = part
+ # Replace the temporary placeholders with the designated placeholders.
+ return placeholder_format % tuple(str_format_args)
+
+ def _build_value_placeholder(self, value, attribute_value_placeholders,
+ has_grouped_values=False):
+ # If the values are grouped, we need to add a placeholder for
+ # each element inside of the actual value.
+ if has_grouped_values:
+ placeholder_list = []
+ for v in value:
+ value_placeholder = self._get_value_placeholder()
+ self._value_count += 1
+ placeholder_list.append(value_placeholder)
+ attribute_value_placeholders[value_placeholder] = v
+ # Assuming the values are grouped by parentheses.
+ # IN is currently the only operator that uses this, so it may
+ # need to be changed in the future.
+ return '(' + ', '.join(placeholder_list) + ')'
+ # Otherwise, treat the value as a single value that needs only
+ # one placeholder.
+ else:
+ value_placeholder = self._get_value_placeholder()
+ self._value_count += 1
+ attribute_value_placeholders[value_placeholder] = value
+ return value_placeholder
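A minimal sketch of how these condition classes compose and how ConditionExpressionBuilder renders them with placeholders; the attribute names and values are illustrative:

    from boto3.dynamodb.conditions import Attr, ConditionExpressionBuilder

    cond = Attr('username').eq('johndoe') & Attr('age').gte(18)
    built = ConditionExpressionBuilder().build_expression(cond)
    # built.condition_expression         -> '(#n0 = :v0 AND #n1 >= :v1)'
    # built.attribute_name_placeholders  -> {'#n0': 'username', '#n1': 'age'}
    # built.attribute_value_placeholders -> {':v0': 'johndoe', ':v1': 18}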
diff --git a/contrib/python/boto3/boto3/dynamodb/table.py b/contrib/python/boto3/boto3/dynamodb/table.py
index 6b2b3fbaaf..e9fcf10c9d 100644
--- a/contrib/python/boto3/boto3/dynamodb/table.py
+++ b/contrib/python/boto3/boto3/dynamodb/table.py
@@ -1,156 +1,156 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import logging
-
-
-logger = logging.getLogger(__name__)
-
-
-def register_table_methods(base_classes, **kwargs):
- base_classes.insert(0, TableResource)
-
-
-# This class can be used to add any additional methods we want
-# onto a table resource. Ideally, to avoid creating a new
-# base class for every method, we can just update this
-# class instead. Just be sure to move the bulk of the
-# actual method implementation to another class.
-class TableResource(object):
- def __init__(self, *args, **kwargs):
- super(TableResource, self).__init__(*args, **kwargs)
-
- def batch_writer(self, overwrite_by_pkeys=None):
- """Create a batch writer object.
-
- This method creates a context manager for writing
- objects to Amazon DynamoDB in batch.
-
- The batch writer will automatically handle buffering and sending items
- in batches. In addition, the batch writer will also automatically
- handle any unprocessed items and resend them as needed. All you need
- to do is call ``put_item`` for any items you want to add, and
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+def register_table_methods(base_classes, **kwargs):
+ base_classes.insert(0, TableResource)
+
+
+# This class can be used to add any additional methods we want
+# onto a table resource. Ideally, to avoid creating a new
+# base class for every method, we can just update this
+# class instead. Just be sure to move the bulk of the
+# actual method implementation to another class.
+class TableResource(object):
+ def __init__(self, *args, **kwargs):
+ super(TableResource, self).__init__(*args, **kwargs)
+
+ def batch_writer(self, overwrite_by_pkeys=None):
+ """Create a batch writer object.
+
+ This method creates a context manager for writing
+ objects to Amazon DynamoDB in batch.
+
+ The batch writer will automatically handle buffering and sending items
+ in batches. In addition, the batch writer will also automatically
+ handle any unprocessed items and resend them as needed. All you need
+ to do is call ``put_item`` for any items you want to add, and
``delete_item`` for any items you want to delete.
-
- Example usage::
-
- with table.batch_writer() as batch:
+
+ Example usage::
+
+ with table.batch_writer() as batch:
for _ in range(1000000):
- batch.put_item(Item={'HashKey': '...',
- 'Otherstuff': '...'})
- # You can also delete_items in a batch.
- batch.delete_item(Key={'HashKey': 'SomeHashKey'})
-
- :type overwrite_by_pkeys: list(string)
- :param overwrite_by_pkeys: De-duplicate request items in the buffer
- if they match a new request item on the specified primary keys, e.g.
- ``["partition_key1", "sort_key2", "sort_key3"]``
-
- """
- return BatchWriter(self.name, self.meta.client,
- overwrite_by_pkeys=overwrite_by_pkeys)
-
-
-class BatchWriter(object):
- """Automatically handle batch writes to DynamoDB for a single table."""
- def __init__(self, table_name, client, flush_amount=25,
- overwrite_by_pkeys=None):
- """
-
- :type table_name: str
- :param table_name: The name of the table. The class handles
- batch writes to a single table.
-
- :type client: ``botocore.client.Client``
- :param client: A botocore client. Note this client
- **must** have the dynamodb customizations applied
- to it for transforming AttributeValues into the
- wire protocol. What this means in practice is that
- you need to use a client that comes from a DynamoDB
- resource if you're going to instantiate this class
- directly, i.e.
- ``boto3.resource('dynamodb').Table('foo').meta.client``.
-
- :type flush_amount: int
- :param flush_amount: The number of items to keep in
- a local buffer before sending a batch_write_item
- request to DynamoDB.
-
- :type overwrite_by_pkeys: list(string)
- :param overwrite_by_pkeys: De-duplicate request items in the buffer
- if they match a new request item on the specified primary keys, e.g.
- ``["partition_key1", "sort_key2", "sort_key3"]``
-
- """
- self._table_name = table_name
- self._client = client
- self._items_buffer = []
- self._flush_amount = flush_amount
- self._overwrite_by_pkeys = overwrite_by_pkeys
-
- def put_item(self, Item):
- self._add_request_and_process({'PutRequest': {'Item': Item}})
-
- def delete_item(self, Key):
- self._add_request_and_process({'DeleteRequest': {'Key': Key}})
-
- def _add_request_and_process(self, request):
- if self._overwrite_by_pkeys:
- self._remove_dup_pkeys_request_if_any(request)
- self._items_buffer.append(request)
- self._flush_if_needed()
-
- def _remove_dup_pkeys_request_if_any(self, request):
- pkey_values_new = self._extract_pkey_values(request)
- for item in self._items_buffer:
- if self._extract_pkey_values(item) == pkey_values_new:
- self._items_buffer.remove(item)
- logger.debug("With overwrite_by_pkeys enabled, skipping "
- "request:%s", item)
-
- def _extract_pkey_values(self, request):
- if request.get('PutRequest'):
- return [request['PutRequest']['Item'][key]
- for key in self._overwrite_by_pkeys]
- elif request.get('DeleteRequest'):
- return [request['DeleteRequest']['Key'][key]
- for key in self._overwrite_by_pkeys]
- return None
-
- def _flush_if_needed(self):
- if len(self._items_buffer) >= self._flush_amount:
- self._flush()
-
- def _flush(self):
- items_to_send = self._items_buffer[:self._flush_amount]
- self._items_buffer = self._items_buffer[self._flush_amount:]
- response = self._client.batch_write_item(
- RequestItems={self._table_name: items_to_send})
- unprocessed_items = response['UnprocessedItems']
-
- if unprocessed_items and unprocessed_items[self._table_name]:
- # Any unprocessed_items are immediately added to the
- # next batch we send.
- self._items_buffer.extend(unprocessed_items[self._table_name])
- else:
- self._items_buffer = []
- logger.debug("Batch write sent %s, unprocessed: %s",
- len(items_to_send), len(self._items_buffer))
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, tb):
- # When we exit, we need to keep flushing whatever's left
- # until there's nothing left in our items buffer.
- while self._items_buffer:
- self._flush()
+ batch.put_item(Item={'HashKey': '...',
+ 'Otherstuff': '...'})
+ # You can also delete_items in a batch.
+ batch.delete_item(Key={'HashKey': 'SomeHashKey'})
+
+ :type overwrite_by_pkeys: list(string)
+ :param overwrite_by_pkeys: De-duplicate request items in the buffer
+ if they match a new request item on the specified primary keys, e.g.
+ ``["partition_key1", "sort_key2", "sort_key3"]``
+
+ """
+ return BatchWriter(self.name, self.meta.client,
+ overwrite_by_pkeys=overwrite_by_pkeys)
+
+
+class BatchWriter(object):
+ """Automatically handle batch writes to DynamoDB for a single table."""
+ def __init__(self, table_name, client, flush_amount=25,
+ overwrite_by_pkeys=None):
+ """
+
+ :type table_name: str
+ :param table_name: The name of the table. The class handles
+ batch writes to a single table.
+
+ :type client: ``botocore.client.Client``
+ :param client: A botocore client. Note this client
+ **must** have the dynamodb customizations applied
+ to it for transforming AttributeValues into the
+ wire protocol. What this means in practice is that
+ you need to use a client that comes from a DynamoDB
+ resource if you're going to instantiate this class
+ directly, i.e.
+ ``boto3.resource('dynamodb').Table('foo').meta.client``.
+
+ :type flush_amount: int
+ :param flush_amount: The number of items to keep in
+ a local buffer before sending a batch_write_item
+ request to DynamoDB.
+
+ :type overwrite_by_pkeys: list(string)
+ :param overwrite_by_pkeys: De-duplicate request items in the buffer
+ if they match a new request item on the specified primary keys, e.g.
+ ``["partition_key1", "sort_key2", "sort_key3"]``
+
+ """
+ self._table_name = table_name
+ self._client = client
+ self._items_buffer = []
+ self._flush_amount = flush_amount
+ self._overwrite_by_pkeys = overwrite_by_pkeys
+
+ def put_item(self, Item):
+ self._add_request_and_process({'PutRequest': {'Item': Item}})
+
+ def delete_item(self, Key):
+ self._add_request_and_process({'DeleteRequest': {'Key': Key}})
+
+ def _add_request_and_process(self, request):
+ if self._overwrite_by_pkeys:
+ self._remove_dup_pkeys_request_if_any(request)
+ self._items_buffer.append(request)
+ self._flush_if_needed()
+
+ def _remove_dup_pkeys_request_if_any(self, request):
+ pkey_values_new = self._extract_pkey_values(request)
+ for item in self._items_buffer:
+ if self._extract_pkey_values(item) == pkey_values_new:
+ self._items_buffer.remove(item)
+ logger.debug("With overwrite_by_pkeys enabled, skipping "
+ "request:%s", item)
+
+ def _extract_pkey_values(self, request):
+ if request.get('PutRequest'):
+ return [request['PutRequest']['Item'][key]
+ for key in self._overwrite_by_pkeys]
+ elif request.get('DeleteRequest'):
+ return [request['DeleteRequest']['Key'][key]
+ for key in self._overwrite_by_pkeys]
+ return None
+
+ def _flush_if_needed(self):
+ if len(self._items_buffer) >= self._flush_amount:
+ self._flush()
+
+ def _flush(self):
+ items_to_send = self._items_buffer[:self._flush_amount]
+ self._items_buffer = self._items_buffer[self._flush_amount:]
+ response = self._client.batch_write_item(
+ RequestItems={self._table_name: items_to_send})
+ unprocessed_items = response['UnprocessedItems']
+
+ if unprocessed_items and unprocessed_items[self._table_name]:
+ # Any unprocessed_items are immediately added to the
+ # next batch we send.
+ self._items_buffer.extend(unprocessed_items[self._table_name])
+ else:
+ self._items_buffer = []
+ logger.debug("Batch write sent %s, unprocessed: %s",
+ len(items_to_send), len(self._items_buffer))
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # When we exit, we need to keep flushing whatever's left
+ # until there's nothing left in our items buffer.
+ while self._items_buffer:
+ self._flush()
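A short usage sketch for the batch writer above; the table name, key attribute, and items are hypothetical:

    import boto3

    table = boto3.resource('dynamodb').Table('example-table')
    # Requests are buffered locally and sent via batch_write_item once
    # flush_amount (25 by default) items accumulate; any UnprocessedItems
    # returned by DynamoDB are re-queued for the next flush.
    with table.batch_writer(overwrite_by_pkeys=['pk']) as batch:
        batch.put_item(Item={'pk': 'user#1', 'name': 'Alice'})
        batch.delete_item(Key={'pk': 'user#2'})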
diff --git a/contrib/python/boto3/boto3/dynamodb/transform.py b/contrib/python/boto3/boto3/dynamodb/transform.py
index 29fd1135e1..e38f75b8fd 100644
--- a/contrib/python/boto3/boto3/dynamodb/transform.py
+++ b/contrib/python/boto3/boto3/dynamodb/transform.py
@@ -1,300 +1,300 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import copy
-
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy
+
from boto3.compat import collections_abc
-from boto3.dynamodb.types import TypeSerializer, TypeDeserializer
-from boto3.dynamodb.conditions import ConditionBase
-from boto3.dynamodb.conditions import ConditionExpressionBuilder
-from boto3.docs.utils import DocumentModifiedShape
-
-
-def register_high_level_interface(base_classes, **kwargs):
- base_classes.insert(0, DynamoDBHighLevelResource)
-
-
-def copy_dynamodb_params(params, **kwargs):
- return copy.deepcopy(params)
-
-
-class DynamoDBHighLevelResource(object):
- def __init__(self, *args, **kwargs):
- super(DynamoDBHighLevelResource, self).__init__(*args, **kwargs)
-
- # Apply the handler that creates a copy of the user-provided DynamoDB
- # item so that it can be modified.
- self.meta.client.meta.events.register(
- 'provide-client-params.dynamodb',
- copy_dynamodb_params,
- unique_id='dynamodb-create-params-copy'
- )
-
- self._injector = TransformationInjector()
- # Apply the handler that generates condition expressions including
- # placeholders.
- self.meta.client.meta.events.register(
- 'before-parameter-build.dynamodb',
- self._injector.inject_condition_expressions,
- unique_id='dynamodb-condition-expression')
-
- # Apply the handler that serializes the request from python
- # types to dynamodb types.
- self.meta.client.meta.events.register(
- 'before-parameter-build.dynamodb',
- self._injector.inject_attribute_value_input,
- unique_id='dynamodb-attr-value-input')
-
- # Apply the handler that deserializes the response from dynamodb
- # types to python types.
- self.meta.client.meta.events.register(
- 'after-call.dynamodb',
- self._injector.inject_attribute_value_output,
- unique_id='dynamodb-attr-value-output')
-
- # Apply the documentation customizations to account for
- # the transformations.
- attr_value_shape_docs = DocumentModifiedShape(
- 'AttributeValue',
- new_type='valid DynamoDB type',
- new_description=(
- '- The value of the attribute. The valid value types are '
- 'listed in the '
- ':ref:`DynamoDB Reference Guide<ref_valid_dynamodb_types>`.'
- ),
- new_example_value=(
- '\'string\'|123|Binary(b\'bytes\')|True|None|set([\'string\'])'
- '|set([123])|set([Binary(b\'bytes\')])|[]|{}')
- )
-
- key_expression_shape_docs = DocumentModifiedShape(
- 'KeyExpression',
- new_type=(
- 'condition from :py:class:`boto3.dynamodb.conditions.Key` '
- 'method'
- ),
- new_description=(
- 'The condition(s) a key(s) must meet. Valid conditions are '
- 'listed in the '
- ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
- ),
- new_example_value='Key(\'mykey\').eq(\'myvalue\')'
- )
-
- con_expression_shape_docs = DocumentModifiedShape(
- 'ConditionExpression',
- new_type=(
- 'condition from :py:class:`boto3.dynamodb.conditions.Attr` '
- 'method'
- ),
- new_description=(
- 'The condition(s) an attribute(s) must meet. Valid conditions '
- 'are listed in the '
- ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
- ),
- new_example_value='Attr(\'myattribute\').eq(\'myvalue\')'
- )
-
- self.meta.client.meta.events.register(
- 'docs.*.dynamodb.*.complete-section',
- attr_value_shape_docs.replace_documentation_for_matching_shape,
- unique_id='dynamodb-attr-value-docs')
-
- self.meta.client.meta.events.register(
- 'docs.*.dynamodb.*.complete-section',
- key_expression_shape_docs.replace_documentation_for_matching_shape,
- unique_id='dynamodb-key-expression-docs')
-
- self.meta.client.meta.events.register(
- 'docs.*.dynamodb.*.complete-section',
- con_expression_shape_docs.replace_documentation_for_matching_shape,
- unique_id='dynamodb-cond-expression-docs')
-
-
-class TransformationInjector(object):
- """Injects the transformations into the user provided parameters."""
- def __init__(self, transformer=None, condition_builder=None,
- serializer=None, deserializer=None):
- self._transformer = transformer
- if transformer is None:
- self._transformer = ParameterTransformer()
-
- self._condition_builder = condition_builder
- if condition_builder is None:
- self._condition_builder = ConditionExpressionBuilder()
-
- self._serializer = serializer
- if serializer is None:
- self._serializer = TypeSerializer()
-
- self._deserializer = deserializer
- if deserializer is None:
- self._deserializer = TypeDeserializer()
-
- def inject_condition_expressions(self, params, model, **kwargs):
- """Injects the condition expression transformation into the parameters
-
- This injection includes transformations for ConditionExpression shapes
- and KeyExpression shapes. It also handles any placeholder names and
- values that are generated when transforming the condition expressions.
- """
- self._condition_builder.reset()
- generated_names = {}
- generated_values = {}
-
- # Create and apply the Condition Expression transformation.
- transformation = ConditionExpressionTransformation(
- self._condition_builder,
- placeholder_names=generated_names,
- placeholder_values=generated_values,
- is_key_condition=False
- )
- self._transformer.transform(
- params, model.input_shape, transformation,
- 'ConditionExpression')
-
- # Create and apply the Key Condition Expression transformation.
- transformation = ConditionExpressionTransformation(
- self._condition_builder,
- placeholder_names=generated_names,
- placeholder_values=generated_values,
- is_key_condition=True
- )
- self._transformer.transform(
- params, model.input_shape, transformation,
- 'KeyExpression')
-
- expr_attr_names_input = 'ExpressionAttributeNames'
- expr_attr_values_input = 'ExpressionAttributeValues'
-
- # Now that all of the condition expression transformations are done,
- # update the placeholder dictionaries in the request.
- if expr_attr_names_input in params:
- params[expr_attr_names_input].update(generated_names)
- else:
- if generated_names:
- params[expr_attr_names_input] = generated_names
-
- if expr_attr_values_input in params:
- params[expr_attr_values_input].update(generated_values)
- else:
- if generated_values:
- params[expr_attr_values_input] = generated_values
-
- def inject_attribute_value_input(self, params, model, **kwargs):
- """Injects DynamoDB serialization into parameter input"""
- self._transformer.transform(
- params, model.input_shape, self._serializer.serialize,
- 'AttributeValue')
-
- def inject_attribute_value_output(self, parsed, model, **kwargs):
- """Injects DynamoDB deserialization into responses"""
+from boto3.dynamodb.types import TypeSerializer, TypeDeserializer
+from boto3.dynamodb.conditions import ConditionBase
+from boto3.dynamodb.conditions import ConditionExpressionBuilder
+from boto3.docs.utils import DocumentModifiedShape
+
+
+def register_high_level_interface(base_classes, **kwargs):
+ base_classes.insert(0, DynamoDBHighLevelResource)
+
+
+def copy_dynamodb_params(params, **kwargs):
+ return copy.deepcopy(params)
+
+
+class DynamoDBHighLevelResource(object):
+ def __init__(self, *args, **kwargs):
+ super(DynamoDBHighLevelResource, self).__init__(*args, **kwargs)
+
+ # Apply the handler that creates a copy of the user-provided DynamoDB
+ # item so that it can be modified.
+ self.meta.client.meta.events.register(
+ 'provide-client-params.dynamodb',
+ copy_dynamodb_params,
+ unique_id='dynamodb-create-params-copy'
+ )
+
+ self._injector = TransformationInjector()
+ # Apply the handler that generates condition expressions including
+ # placeholders.
+ self.meta.client.meta.events.register(
+ 'before-parameter-build.dynamodb',
+ self._injector.inject_condition_expressions,
+ unique_id='dynamodb-condition-expression')
+
+ # Apply the handler that serializes the request from python
+ # types to dynamodb types.
+ self.meta.client.meta.events.register(
+ 'before-parameter-build.dynamodb',
+ self._injector.inject_attribute_value_input,
+ unique_id='dynamodb-attr-value-input')
+
+ # Apply the handler that deserializes the response from dynamodb
+ # types to python types.
+ self.meta.client.meta.events.register(
+ 'after-call.dynamodb',
+ self._injector.inject_attribute_value_output,
+ unique_id='dynamodb-attr-value-output')
+
+ # Apply the documentation customizations to account for
+ # the transformations.
+ attr_value_shape_docs = DocumentModifiedShape(
+ 'AttributeValue',
+ new_type='valid DynamoDB type',
+ new_description=(
+ '- The value of the attribute. The valid value types are '
+ 'listed in the '
+ ':ref:`DynamoDB Reference Guide<ref_valid_dynamodb_types>`.'
+ ),
+ new_example_value=(
+ '\'string\'|123|Binary(b\'bytes\')|True|None|set([\'string\'])'
+ '|set([123])|set([Binary(b\'bytes\')])|[]|{}')
+ )
+
+ key_expression_shape_docs = DocumentModifiedShape(
+ 'KeyExpression',
+ new_type=(
+ 'condition from :py:class:`boto3.dynamodb.conditions.Key` '
+ 'method'
+ ),
+ new_description=(
+ 'The condition(s) a key(s) must meet. Valid conditions are '
+ 'listed in the '
+ ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
+ ),
+ new_example_value='Key(\'mykey\').eq(\'myvalue\')'
+ )
+
+ con_expression_shape_docs = DocumentModifiedShape(
+ 'ConditionExpression',
+ new_type=(
+ 'condition from :py:class:`boto3.dynamodb.conditions.Attr` '
+ 'method'
+ ),
+ new_description=(
+ 'The condition(s) an attribute(s) must meet. Valid conditions '
+ 'are listed in the '
+ ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
+ ),
+ new_example_value='Attr(\'myattribute\').eq(\'myvalue\')'
+ )
+
+ self.meta.client.meta.events.register(
+ 'docs.*.dynamodb.*.complete-section',
+ attr_value_shape_docs.replace_documentation_for_matching_shape,
+ unique_id='dynamodb-attr-value-docs')
+
+ self.meta.client.meta.events.register(
+ 'docs.*.dynamodb.*.complete-section',
+ key_expression_shape_docs.replace_documentation_for_matching_shape,
+ unique_id='dynamodb-key-expression-docs')
+
+ self.meta.client.meta.events.register(
+ 'docs.*.dynamodb.*.complete-section',
+ con_expression_shape_docs.replace_documentation_for_matching_shape,
+ unique_id='dynamodb-cond-expression-docs')
+
+
+class TransformationInjector(object):
+ """Injects the transformations into the user provided parameters."""
+ def __init__(self, transformer=None, condition_builder=None,
+ serializer=None, deserializer=None):
+ self._transformer = transformer
+ if transformer is None:
+ self._transformer = ParameterTransformer()
+
+ self._condition_builder = condition_builder
+ if condition_builder is None:
+ self._condition_builder = ConditionExpressionBuilder()
+
+ self._serializer = serializer
+ if serializer is None:
+ self._serializer = TypeSerializer()
+
+ self._deserializer = deserializer
+ if deserializer is None:
+ self._deserializer = TypeDeserializer()
+
+ def inject_condition_expressions(self, params, model, **kwargs):
+ """Injects the condition expression transformation into the parameters
+
+ This injection includes transformations for ConditionExpression shapes
+ and KeyExpression shapes. It also handles any placeholder names and
+ values that are generated when transforming the condition expressions.
+ """
+ self._condition_builder.reset()
+ generated_names = {}
+ generated_values = {}
+
+ # Create and apply the Condition Expression transformation.
+ transformation = ConditionExpressionTransformation(
+ self._condition_builder,
+ placeholder_names=generated_names,
+ placeholder_values=generated_values,
+ is_key_condition=False
+ )
+ self._transformer.transform(
+ params, model.input_shape, transformation,
+ 'ConditionExpression')
+
+ # Create and apply the Key Condition Expression transformation.
+ transformation = ConditionExpressionTransformation(
+ self._condition_builder,
+ placeholder_names=generated_names,
+ placeholder_values=generated_values,
+ is_key_condition=True
+ )
+ self._transformer.transform(
+ params, model.input_shape, transformation,
+ 'KeyExpression')
+
+ expr_attr_names_input = 'ExpressionAttributeNames'
+ expr_attr_values_input = 'ExpressionAttributeValues'
+
+        # Now that all of the condition expression transformations are done,
+ # update the placeholder dictionaries in the request.
+ if expr_attr_names_input in params:
+ params[expr_attr_names_input].update(generated_names)
+ else:
+ if generated_names:
+ params[expr_attr_names_input] = generated_names
+
+ if expr_attr_values_input in params:
+ params[expr_attr_values_input].update(generated_values)
+ else:
+ if generated_values:
+ params[expr_attr_values_input] = generated_values
+
+ def inject_attribute_value_input(self, params, model, **kwargs):
+ """Injects DynamoDB serialization into parameter input"""
+ self._transformer.transform(
+ params, model.input_shape, self._serializer.serialize,
+ 'AttributeValue')
+
+ def inject_attribute_value_output(self, parsed, model, **kwargs):
+ """Injects DynamoDB deserialization into responses"""
if model.output_shape is not None:
self._transformer.transform(
parsed, model.output_shape, self._deserializer.deserialize,
'AttributeValue'
)
-
-
-class ConditionExpressionTransformation(object):
- """Provides a transformation for condition expressions
-
- The ``ParameterTransformer`` class can call this class directly
- to transform the condition expressions in the parameters provided.
- """
- def __init__(self, condition_builder, placeholder_names,
- placeholder_values, is_key_condition=False):
- self._condition_builder = condition_builder
- self._placeholder_names = placeholder_names
- self._placeholder_values = placeholder_values
- self._is_key_condition = is_key_condition
-
- def __call__(self, value):
- if isinstance(value, ConditionBase):
- # Create a conditional expression string with placeholders
- # for the provided condition.
- built_expression = self._condition_builder.build_expression(
- value, is_key_condition=self._is_key_condition)
-
- self._placeholder_names.update(
- built_expression.attribute_name_placeholders)
- self._placeholder_values.update(
- built_expression.attribute_value_placeholders)
-
- return built_expression.condition_expression
-        # Use the user-provided value if it is not a ConditionBase object.
- return value
-
-
-class ParameterTransformer(object):
- """Transforms the input to and output from botocore based on shape"""
-
- def transform(self, params, model, transformation, target_shape):
- """Transforms the dynamodb input to or output from botocore
-
- It applies a specified transformation whenever a specific shape name
- is encountered while traversing the parameters in the dictionary.
-
- :param params: The parameters structure to transform.
- :param model: The operation model.
- :param transformation: The function to apply the parameter
- :param target_shape: The name of the shape to apply the
- transformation to
- """
- self._transform_parameters(
- model, params, transformation, target_shape)
-
- def _transform_parameters(self, model, params, transformation,
- target_shape):
- type_name = model.type_name
- if type_name in ['structure', 'map', 'list']:
- getattr(self, '_transform_%s' % type_name)(
- model, params, transformation, target_shape)
-
- def _transform_structure(self, model, params, transformation,
- target_shape):
+
+
+class ConditionExpressionTransformation(object):
+ """Provides a transformation for condition expressions
+
+ The ``ParameterTransformer`` class can call this class directly
+ to transform the condition expressions in the parameters provided.
+ """
+ def __init__(self, condition_builder, placeholder_names,
+ placeholder_values, is_key_condition=False):
+ self._condition_builder = condition_builder
+ self._placeholder_names = placeholder_names
+ self._placeholder_values = placeholder_values
+ self._is_key_condition = is_key_condition
+
+ def __call__(self, value):
+ if isinstance(value, ConditionBase):
+ # Create a conditional expression string with placeholders
+ # for the provided condition.
+ built_expression = self._condition_builder.build_expression(
+ value, is_key_condition=self._is_key_condition)
+
+ self._placeholder_names.update(
+ built_expression.attribute_name_placeholders)
+ self._placeholder_values.update(
+ built_expression.attribute_value_placeholders)
+
+ return built_expression.condition_expression
+        # Use the user-provided value if it is not a ConditionBase object.
+ return value
+
+
+class ParameterTransformer(object):
+ """Transforms the input to and output from botocore based on shape"""
+
+ def transform(self, params, model, transformation, target_shape):
+ """Transforms the dynamodb input to or output from botocore
+
+ It applies a specified transformation whenever a specific shape name
+ is encountered while traversing the parameters in the dictionary.
+
+ :param params: The parameters structure to transform.
+ :param model: The operation model.
+ :param transformation: The function to apply the parameter
+ :param target_shape: The name of the shape to apply the
+ transformation to
+ """
+ self._transform_parameters(
+ model, params, transformation, target_shape)
+
+ def _transform_parameters(self, model, params, transformation,
+ target_shape):
+ type_name = model.type_name
+ if type_name in ['structure', 'map', 'list']:
+ getattr(self, '_transform_%s' % type_name)(
+ model, params, transformation, target_shape)
+
+ def _transform_structure(self, model, params, transformation,
+ target_shape):
if not isinstance(params, collections_abc.Mapping):
- return
- for param in params:
- if param in model.members:
- member_model = model.members[param]
- member_shape = member_model.name
- if member_shape == target_shape:
- params[param] = transformation(params[param])
- else:
- self._transform_parameters(
- member_model, params[param], transformation,
- target_shape)
-
- def _transform_map(self, model, params, transformation, target_shape):
+ return
+ for param in params:
+ if param in model.members:
+ member_model = model.members[param]
+ member_shape = member_model.name
+ if member_shape == target_shape:
+ params[param] = transformation(params[param])
+ else:
+ self._transform_parameters(
+ member_model, params[param], transformation,
+ target_shape)
+
+ def _transform_map(self, model, params, transformation, target_shape):
if not isinstance(params, collections_abc.Mapping):
- return
- value_model = model.value
- value_shape = value_model.name
- for key, value in params.items():
- if value_shape == target_shape:
- params[key] = transformation(value)
- else:
- self._transform_parameters(
- value_model, params[key], transformation, target_shape)
-
- def _transform_list(self, model, params, transformation, target_shape):
+ return
+ value_model = model.value
+ value_shape = value_model.name
+ for key, value in params.items():
+ if value_shape == target_shape:
+ params[key] = transformation(value)
+ else:
+ self._transform_parameters(
+ value_model, params[key], transformation, target_shape)
+
+ def _transform_list(self, model, params, transformation, target_shape):
if not isinstance(params, collections_abc.MutableSequence):
- return
- member_model = model.member
- member_shape = member_model.name
- for i, item in enumerate(params):
- if member_shape == target_shape:
- params[i] = transformation(item)
- else:
- self._transform_parameters(
- member_model, params[i], transformation, target_shape)
+ return
+ member_model = model.member
+ member_shape = member_model.name
+ for i, item in enumerate(params):
+ if member_shape == target_shape:
+ params[i] = transformation(item)
+ else:
+ self._transform_parameters(
+ member_model, params[i], transformation, target_shape)
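
For a sense of what these handlers add up to in practice, here is a minimal
sketch (the table name and attributes are hypothetical): the high-level
resource accepts ConditionBase objects and plain Python values, and the
injectors above rewrite them into expression strings, placeholder maps, and
wire-format attribute values before the request leaves the client.

    import boto3
    from boto3.dynamodb.conditions import Key, Attr

    table = boto3.resource('dynamodb').Table('mytable')  # hypothetical table
    # Key(...).eq(...) is a ConditionBase; inject_condition_expressions turns
    # it into a placeholder expression and fills ExpressionAttributeNames and
    # ExpressionAttributeValues, while inject_attribute_value_input serializes
    # plain Python values into DynamoDB AttributeValue dicts.
    table.query(
        KeyConditionExpression=Key('pk').eq('user#1'),
        FilterExpression=Attr('status').eq('active'),
    )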
diff --git a/contrib/python/boto3/boto3/dynamodb/types.py b/contrib/python/boto3/boto3/dynamodb/types.py
index ae33e0c15c..fb069e3a6d 100644
--- a/contrib/python/boto3/boto3/dynamodb/types.py
+++ b/contrib/python/boto3/boto3/dynamodb/types.py
@@ -1,301 +1,301 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from decimal import Decimal, Context, Clamped
-from decimal import Overflow, Inexact, Underflow, Rounded
-
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from decimal import Decimal, Context, Clamped
+from decimal import Overflow, Inexact, Underflow, Rounded
+
from boto3.compat import collections_abc
-from botocore.compat import six
-
-
-STRING = 'S'
-NUMBER = 'N'
-BINARY = 'B'
-STRING_SET = 'SS'
-NUMBER_SET = 'NS'
-BINARY_SET = 'BS'
-NULL = 'NULL'
-BOOLEAN = 'BOOL'
-MAP = 'M'
-LIST = 'L'
-
-
-DYNAMODB_CONTEXT = Context(
- Emin=-128, Emax=126, prec=38,
- traps=[Clamped, Overflow, Inexact, Rounded, Underflow])
-
-
-BINARY_TYPES = (bytearray, six.binary_type)
-
-
-class Binary(object):
- """A class for representing Binary in dynamodb
-
- Especially for Python 2, use this class to explicitly specify
-    binary data for an item in DynamoDB. It is essentially a wrapper around
- binary. Unicode and Python 3 string types are not allowed.
- """
- def __init__(self, value):
- if not isinstance(value, BINARY_TYPES):
- raise TypeError('Value must be of the following types: %s.' %
- ', '.join([str(t) for t in BINARY_TYPES]))
- self.value = value
-
- def __eq__(self, other):
- if isinstance(other, Binary):
- return self.value == other.value
- return self.value == other
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __repr__(self):
- return 'Binary(%r)' % self.value
-
- def __str__(self):
- return self.value
-
+from botocore.compat import six
+
+
+STRING = 'S'
+NUMBER = 'N'
+BINARY = 'B'
+STRING_SET = 'SS'
+NUMBER_SET = 'NS'
+BINARY_SET = 'BS'
+NULL = 'NULL'
+BOOLEAN = 'BOOL'
+MAP = 'M'
+LIST = 'L'
+
+
+DYNAMODB_CONTEXT = Context(
+ Emin=-128, Emax=126, prec=38,
+ traps=[Clamped, Overflow, Inexact, Rounded, Underflow])
+
+
+BINARY_TYPES = (bytearray, six.binary_type)
+
+
+class Binary(object):
+ """A class for representing Binary in dynamodb
+
+ Especially for Python 2, use this class to explicitly specify
+    binary data for an item in DynamoDB. It is essentially a wrapper around
+ binary. Unicode and Python 3 string types are not allowed.
+ """
+ def __init__(self, value):
+ if not isinstance(value, BINARY_TYPES):
+ raise TypeError('Value must be of the following types: %s.' %
+ ', '.join([str(t) for t in BINARY_TYPES]))
+ self.value = value
+
+ def __eq__(self, other):
+ if isinstance(other, Binary):
+ return self.value == other.value
+ return self.value == other
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return 'Binary(%r)' % self.value
+
+ def __str__(self):
+ return self.value
+
def __bytes__(self):
return self.value
- def __hash__(self):
- return hash(self.value)
-
-
-class TypeSerializer(object):
- """This class serializes Python data types to DynamoDB types."""
- def serialize(self, value):
- """The method to serialize the Python data types.
-
- :param value: A python value to be serialized to DynamoDB. Here are
- the various conversions:
-
- Python DynamoDB
- ------ --------
- None {'NULL': True}
- True/False {'BOOL': True/False}
- int/Decimal {'N': str(value)}
- string {'S': string}
- Binary/bytearray/bytes (py3 only) {'B': bytes}
- set([int/Decimal]) {'NS': [str(value)]}
-        set([string])                       {'SS': [string]}
- set([Binary/bytearray/bytes]) {'BS': [bytes]}
- list {'L': list}
- dict {'M': dict}
-
- For types that involve numbers, it is recommended that ``Decimal``
- objects are used to be able to round-trip the Python type.
- For types that involve binary, it is recommended that ``Binary``
- objects are used to be able to round-trip the Python type.
-
- :rtype: dict
-        :returns: A dictionary that represents a DynamoDB data type. These
- dictionaries can be directly passed to botocore methods.
- """
- dynamodb_type = self._get_dynamodb_type(value)
- serializer = getattr(self, '_serialize_%s' % dynamodb_type.lower())
- return {dynamodb_type: serializer(value)}
-
- def _get_dynamodb_type(self, value):
- dynamodb_type = None
-
- if self._is_null(value):
- dynamodb_type = NULL
-
- elif self._is_boolean(value):
- dynamodb_type = BOOLEAN
-
- elif self._is_number(value):
- dynamodb_type = NUMBER
-
- elif self._is_string(value):
- dynamodb_type = STRING
-
- elif self._is_binary(value):
- dynamodb_type = BINARY
-
- elif self._is_type_set(value, self._is_number):
- dynamodb_type = NUMBER_SET
-
- elif self._is_type_set(value, self._is_string):
- dynamodb_type = STRING_SET
-
- elif self._is_type_set(value, self._is_binary):
- dynamodb_type = BINARY_SET
-
- elif self._is_map(value):
- dynamodb_type = MAP
-
+ def __hash__(self):
+ return hash(self.value)
+
+
+class TypeSerializer(object):
+ """This class serializes Python data types to DynamoDB types."""
+ def serialize(self, value):
+ """The method to serialize the Python data types.
+
+ :param value: A python value to be serialized to DynamoDB. Here are
+ the various conversions:
+
+ Python DynamoDB
+ ------ --------
+ None {'NULL': True}
+ True/False {'BOOL': True/False}
+ int/Decimal {'N': str(value)}
+ string {'S': string}
+ Binary/bytearray/bytes (py3 only) {'B': bytes}
+ set([int/Decimal]) {'NS': [str(value)]}
+        set([string])                       {'SS': [string]}
+ set([Binary/bytearray/bytes]) {'BS': [bytes]}
+ list {'L': list}
+ dict {'M': dict}
+
+ For types that involve numbers, it is recommended that ``Decimal``
+ objects are used to be able to round-trip the Python type.
+ For types that involve binary, it is recommended that ``Binary``
+ objects are used to be able to round-trip the Python type.
+
+ :rtype: dict
+        :returns: A dictionary that represents a DynamoDB data type. These
+ dictionaries can be directly passed to botocore methods.
+ """
+ dynamodb_type = self._get_dynamodb_type(value)
+ serializer = getattr(self, '_serialize_%s' % dynamodb_type.lower())
+ return {dynamodb_type: serializer(value)}
+
+ def _get_dynamodb_type(self, value):
+ dynamodb_type = None
+
+ if self._is_null(value):
+ dynamodb_type = NULL
+
+ elif self._is_boolean(value):
+ dynamodb_type = BOOLEAN
+
+ elif self._is_number(value):
+ dynamodb_type = NUMBER
+
+ elif self._is_string(value):
+ dynamodb_type = STRING
+
+ elif self._is_binary(value):
+ dynamodb_type = BINARY
+
+ elif self._is_type_set(value, self._is_number):
+ dynamodb_type = NUMBER_SET
+
+ elif self._is_type_set(value, self._is_string):
+ dynamodb_type = STRING_SET
+
+ elif self._is_type_set(value, self._is_binary):
+ dynamodb_type = BINARY_SET
+
+ elif self._is_map(value):
+ dynamodb_type = MAP
+
elif self._is_listlike(value):
- dynamodb_type = LIST
-
- else:
- msg = 'Unsupported type "%s" for value "%s"' % (type(value), value)
- raise TypeError(msg)
-
- return dynamodb_type
-
- def _is_null(self, value):
- if value is None:
- return True
- return False
-
- def _is_boolean(self, value):
- if isinstance(value, bool):
- return True
- return False
-
- def _is_number(self, value):
- if isinstance(value, (six.integer_types, Decimal)):
- return True
- elif isinstance(value, float):
- raise TypeError(
- 'Float types are not supported. Use Decimal types instead.')
- return False
-
- def _is_string(self, value):
- if isinstance(value, six.string_types):
- return True
- return False
-
- def _is_binary(self, value):
- if isinstance(value, Binary):
- return True
- elif isinstance(value, bytearray):
- return True
- elif six.PY3 and isinstance(value, six.binary_type):
- return True
- return False
-
- def _is_set(self, value):
+ dynamodb_type = LIST
+
+ else:
+ msg = 'Unsupported type "%s" for value "%s"' % (type(value), value)
+ raise TypeError(msg)
+
+ return dynamodb_type
+
+ def _is_null(self, value):
+ if value is None:
+ return True
+ return False
+
+ def _is_boolean(self, value):
+ if isinstance(value, bool):
+ return True
+ return False
+
+ def _is_number(self, value):
+ if isinstance(value, (six.integer_types, Decimal)):
+ return True
+ elif isinstance(value, float):
+ raise TypeError(
+ 'Float types are not supported. Use Decimal types instead.')
+ return False
+
+ def _is_string(self, value):
+ if isinstance(value, six.string_types):
+ return True
+ return False
+
+ def _is_binary(self, value):
+ if isinstance(value, Binary):
+ return True
+ elif isinstance(value, bytearray):
+ return True
+ elif six.PY3 and isinstance(value, six.binary_type):
+ return True
+ return False
+
+ def _is_set(self, value):
if isinstance(value, collections_abc.Set):
- return True
- return False
-
- def _is_type_set(self, value, type_validator):
- if self._is_set(value):
- if False not in map(type_validator, value):
- return True
- return False
-
- def _is_map(self, value):
+ return True
+ return False
+
+ def _is_type_set(self, value, type_validator):
+ if self._is_set(value):
+ if False not in map(type_validator, value):
+ return True
+ return False
+
+ def _is_map(self, value):
if isinstance(value, collections_abc.Mapping):
- return True
- return False
-
+ return True
+ return False
+
def _is_listlike(self, value):
if isinstance(value, (list, tuple)):
- return True
- return False
-
- def _serialize_null(self, value):
- return True
-
- def _serialize_bool(self, value):
- return value
-
- def _serialize_n(self, value):
- number = str(DYNAMODB_CONTEXT.create_decimal(value))
- if number in ['Infinity', 'NaN']:
- raise TypeError('Infinity and NaN not supported')
- return number
-
- def _serialize_s(self, value):
- return value
-
- def _serialize_b(self, value):
- if isinstance(value, Binary):
- value = value.value
- return value
-
- def _serialize_ss(self, value):
- return [self._serialize_s(s) for s in value]
-
- def _serialize_ns(self, value):
- return [self._serialize_n(n) for n in value]
-
- def _serialize_bs(self, value):
- return [self._serialize_b(b) for b in value]
-
- def _serialize_l(self, value):
- return [self.serialize(v) for v in value]
-
- def _serialize_m(self, value):
- return dict([(k, self.serialize(v)) for k, v in value.items()])
-
-
-class TypeDeserializer(object):
- """This class deserializes DynamoDB types to Python types."""
- def deserialize(self, value):
- """The method to deserialize the DynamoDB data types.
-
- :param value: A DynamoDB value to be deserialized to a pythonic value.
- Here are the various conversions:
-
- DynamoDB Python
- -------- ------
- {'NULL': True} None
- {'BOOL': True/False} True/False
- {'N': str(value)} Decimal(str(value))
- {'S': string} string
- {'B': bytes} Binary(bytes)
- {'NS': [str(value)]} set([Decimal(str(value))])
- {'SS': [string]} set([string])
- {'BS': [bytes]} set([bytes])
- {'L': list} list
- {'M': dict} dict
-
- :returns: The pythonic value of the DynamoDB type.
- """
-
- if not value:
- raise TypeError('Value must be a nonempty dictionary whose key '
- 'is a valid dynamodb type.')
- dynamodb_type = list(value.keys())[0]
- try:
- deserializer = getattr(
- self, '_deserialize_%s' % dynamodb_type.lower())
- except AttributeError:
- raise TypeError(
- 'Dynamodb type %s is not supported' % dynamodb_type)
- return deserializer(value[dynamodb_type])
-
- def _deserialize_null(self, value):
- return None
-
- def _deserialize_bool(self, value):
- return value
-
- def _deserialize_n(self, value):
- return DYNAMODB_CONTEXT.create_decimal(value)
-
- def _deserialize_s(self, value):
- return value
-
- def _deserialize_b(self, value):
- return Binary(value)
-
- def _deserialize_ns(self, value):
- return set(map(self._deserialize_n, value))
-
- def _deserialize_ss(self, value):
- return set(map(self._deserialize_s, value))
-
- def _deserialize_bs(self, value):
- return set(map(self._deserialize_b, value))
-
- def _deserialize_l(self, value):
- return [self.deserialize(v) for v in value]
-
- def _deserialize_m(self, value):
- return dict([(k, self.deserialize(v)) for k, v in value.items()])
+ return True
+ return False
+
+ def _serialize_null(self, value):
+ return True
+
+ def _serialize_bool(self, value):
+ return value
+
+ def _serialize_n(self, value):
+ number = str(DYNAMODB_CONTEXT.create_decimal(value))
+ if number in ['Infinity', 'NaN']:
+ raise TypeError('Infinity and NaN not supported')
+ return number
+
+ def _serialize_s(self, value):
+ return value
+
+ def _serialize_b(self, value):
+ if isinstance(value, Binary):
+ value = value.value
+ return value
+
+ def _serialize_ss(self, value):
+ return [self._serialize_s(s) for s in value]
+
+ def _serialize_ns(self, value):
+ return [self._serialize_n(n) for n in value]
+
+ def _serialize_bs(self, value):
+ return [self._serialize_b(b) for b in value]
+
+ def _serialize_l(self, value):
+ return [self.serialize(v) for v in value]
+
+ def _serialize_m(self, value):
+ return dict([(k, self.serialize(v)) for k, v in value.items()])
+
+
+class TypeDeserializer(object):
+ """This class deserializes DynamoDB types to Python types."""
+ def deserialize(self, value):
+ """The method to deserialize the DynamoDB data types.
+
+ :param value: A DynamoDB value to be deserialized to a pythonic value.
+ Here are the various conversions:
+
+ DynamoDB Python
+ -------- ------
+ {'NULL': True} None
+ {'BOOL': True/False} True/False
+ {'N': str(value)} Decimal(str(value))
+ {'S': string} string
+ {'B': bytes} Binary(bytes)
+ {'NS': [str(value)]} set([Decimal(str(value))])
+ {'SS': [string]} set([string])
+ {'BS': [bytes]} set([bytes])
+ {'L': list} list
+ {'M': dict} dict
+
+ :returns: The pythonic value of the DynamoDB type.
+ """
+
+ if not value:
+ raise TypeError('Value must be a nonempty dictionary whose key '
+ 'is a valid dynamodb type.')
+ dynamodb_type = list(value.keys())[0]
+ try:
+ deserializer = getattr(
+ self, '_deserialize_%s' % dynamodb_type.lower())
+ except AttributeError:
+ raise TypeError(
+ 'Dynamodb type %s is not supported' % dynamodb_type)
+ return deserializer(value[dynamodb_type])
+
+ def _deserialize_null(self, value):
+ return None
+
+ def _deserialize_bool(self, value):
+ return value
+
+ def _deserialize_n(self, value):
+ return DYNAMODB_CONTEXT.create_decimal(value)
+
+ def _deserialize_s(self, value):
+ return value
+
+ def _deserialize_b(self, value):
+ return Binary(value)
+
+ def _deserialize_ns(self, value):
+ return set(map(self._deserialize_n, value))
+
+ def _deserialize_ss(self, value):
+ return set(map(self._deserialize_s, value))
+
+ def _deserialize_bs(self, value):
+ return set(map(self._deserialize_b, value))
+
+ def _deserialize_l(self, value):
+ return [self.deserialize(v) for v in value]
+
+ def _deserialize_m(self, value):
+ return dict([(k, self.deserialize(v)) for k, v in value.items()])
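
As a quick illustration of the conversion tables above (a sketch; the item
contents are arbitrary), serializing and then deserializing round-trips the
Python types, with numbers coming back as Decimal:

    from decimal import Decimal
    from boto3.dynamodb.types import TypeSerializer, TypeDeserializer

    serializer = TypeSerializer()
    deserializer = TypeDeserializer()

    wire = serializer.serialize({'id': 'item-1', 'price': Decimal('9.99')})
    # wire == {'M': {'id': {'S': 'item-1'}, 'price': {'N': '9.99'}}}
    roundtrip = deserializer.deserialize(wire)
    assert roundtrip == {'id': 'item-1', 'price': Decimal('9.99')}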
diff --git a/contrib/python/boto3/boto3/ec2/__init__.py b/contrib/python/boto3/boto3/ec2/__init__.py
index 58f30dc875..c89416d7a5 100644
--- a/contrib/python/boto3/boto3/ec2/__init__.py
+++ b/contrib/python/boto3/boto3/ec2/__init__.py
@@ -1,12 +1,12 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
diff --git a/contrib/python/boto3/boto3/ec2/createtags.py b/contrib/python/boto3/boto3/ec2/createtags.py
index 34de31f90d..14e0971bb2 100644
--- a/contrib/python/boto3/boto3/ec2/createtags.py
+++ b/contrib/python/boto3/boto3/ec2/createtags.py
@@ -1,40 +1,40 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-
-def inject_create_tags(event_name, class_attributes, **kwargs):
- """This injects a custom create_tags method onto the ec2 service resource
-
-    This is needed because the resource model cannot express that a single
-    call may apply a set of tags to multiple EC2 resources, creating one
-    tag resource per (resource, tag) pair.
- """
- class_attributes['create_tags'] = create_tags
-
-
-def create_tags(self, **kwargs):
- # Call the client method
- self.meta.client.create_tags(**kwargs)
- resources = kwargs.get('Resources', [])
- tags = kwargs.get('Tags', [])
- tag_resources = []
-
- # Generate all of the tag resources that just were created with the
- # preceding client call.
- for resource in resources:
- for tag in tags:
- # Add each tag from the tag set for each resource to the list
- # that is returned by the method.
- tag_resource = self.Tag(resource, tag['Key'], tag['Value'])
- tag_resources.append(tag_resource)
- return tag_resources
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+
+def inject_create_tags(event_name, class_attributes, **kwargs):
+ """This injects a custom create_tags method onto the ec2 service resource
+
+    This is needed because the resource model cannot express that a single
+    call may apply a set of tags to multiple EC2 resources, creating one
+    tag resource per (resource, tag) pair.
+ """
+ class_attributes['create_tags'] = create_tags
+
+
+def create_tags(self, **kwargs):
+ # Call the client method
+ self.meta.client.create_tags(**kwargs)
+ resources = kwargs.get('Resources', [])
+ tags = kwargs.get('Tags', [])
+ tag_resources = []
+
+ # Generate all of the tag resources that just were created with the
+ # preceding client call.
+ for resource in resources:
+ for tag in tags:
+ # Add each tag from the tag set for each resource to the list
+ # that is returned by the method.
+ tag_resource = self.Tag(resource, tag['Key'], tag['Value'])
+ tag_resources.append(tag_resource)
+ return tag_resources
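
A short usage sketch (the instance ID and tag are placeholders): the injected
method makes a single CreateTags call and returns one Tag resource per
(resource, tag) pair.

    import boto3

    ec2 = boto3.resource('ec2')
    tags = ec2.create_tags(
        Resources=['i-0123456789abcdef0'],      # placeholder instance ID
        Tags=[{'Key': 'env', 'Value': 'dev'}])
    # tags holds one ec2.Tag resource for the single (resource, tag) pair.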
diff --git a/contrib/python/boto3/boto3/ec2/deletetags.py b/contrib/python/boto3/boto3/ec2/deletetags.py
index 91c2939773..0600b7c5a7 100644
--- a/contrib/python/boto3/boto3/ec2/deletetags.py
+++ b/contrib/python/boto3/boto3/ec2/deletetags.py
@@ -1,34 +1,34 @@
-# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from boto3.resources.action import CustomModeledAction
-
-
-def inject_delete_tags(event_emitter, **kwargs):
- action_model = {
- 'request': {
- 'operation': 'DeleteTags',
- 'params': [{
- 'target': 'Resources[0]',
- 'source': 'identifier',
- 'name': 'Id'
- }]
- }
- }
- action = CustomModeledAction(
- 'delete_tags', action_model, delete_tags, event_emitter)
- action.inject(**kwargs)
-
-
-def delete_tags(self, **kwargs):
- kwargs['Resources'] = [self.id]
- return self.meta.client.delete_tags(**kwargs)
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from boto3.resources.action import CustomModeledAction
+
+
+def inject_delete_tags(event_emitter, **kwargs):
+ action_model = {
+ 'request': {
+ 'operation': 'DeleteTags',
+ 'params': [{
+ 'target': 'Resources[0]',
+ 'source': 'identifier',
+ 'name': 'Id'
+ }]
+ }
+ }
+ action = CustomModeledAction(
+ 'delete_tags', action_model, delete_tags, event_emitter)
+ action.inject(**kwargs)
+
+
+def delete_tags(self, **kwargs):
+ kwargs['Resources'] = [self.id]
+ return self.meta.client.delete_tags(**kwargs)
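
The counterpart above hangs off an individual resource, so it fills in the
Resources parameter from the resource's own identifier (again a sketch with
a placeholder ID):

    import boto3

    instance = boto3.resource('ec2').Instance('i-0123456789abcdef0')
    # delete_tags injects Resources=[instance.id] before calling the client.
    instance.delete_tags(Tags=[{'Key': 'env'}])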
diff --git a/contrib/python/boto3/boto3/exceptions.py b/contrib/python/boto3/boto3/exceptions.py
index 6aaf1cd9af..1cd7c08ad8 100644
--- a/contrib/python/boto3/boto3/exceptions.py
+++ b/contrib/python/boto3/boto3/exceptions.py
@@ -1,112 +1,112 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-# All exceptions in this module should subclass from Boto3Error.
-import botocore.exceptions
-
-
-# All exceptions should subclass from Boto3Error in this module.
-class Boto3Error(Exception):
- """Base class for all Boto3 errors."""
-
-
-class ResourceLoadException(Boto3Error):
- pass
-
-
-# NOTE: This doesn't appear to be used anywhere.
-# It's probably safe to remove this.
-class NoVersionFound(Boto3Error):
- pass
-
-
-# We're subclassing from botocore.exceptions.DataNotFoundError
-# to keep backwards compatibility with anyone that was catching
-# this low level Botocore error before this exception was
-# introduced in boto3.
-# Same thing for ResourceNotExistsError below.
-class UnknownAPIVersionError(Boto3Error,
- botocore.exceptions.DataNotFoundError):
- def __init__(self, service_name, bad_api_version,
- available_api_versions):
- msg = (
- "The '%s' resource does not an API version of: %s\n"
- "Valid API versions are: %s"
- % (service_name, bad_api_version, available_api_versions)
- )
- # Not using super because we don't want the DataNotFoundError
- # to be called, it has a different __init__ signature.
- Boto3Error.__init__(self, msg)
-
-
-class ResourceNotExistsError(Boto3Error,
- botocore.exceptions.DataNotFoundError):
- """Raised when you attempt to create a resource that does not exist."""
- def __init__(self, service_name, available_services, has_low_level_client):
- msg = (
- "The '%s' resource does not exist.\n"
- "The available resources are:\n"
- " - %s\n" % (service_name, '\n - '.join(available_services))
- )
- if has_low_level_client:
- msg += (
- "\nConsider using a boto3.client('%s') instead "
- "of a resource for '%s'" % (service_name, service_name))
- # Not using super because we don't want the DataNotFoundError
- # to be called, it has a different __init__ signature.
- Boto3Error.__init__(self, msg)
-
-
-class RetriesExceededError(Boto3Error):
- def __init__(self, last_exception, msg='Max Retries Exceeded'):
- super(RetriesExceededError, self).__init__(msg)
- self.last_exception = last_exception
-
-
-class S3TransferFailedError(Boto3Error):
- pass
-
-
-class S3UploadFailedError(Boto3Error):
- pass
-
-
-class DynamoDBOperationNotSupportedError(Boto3Error):
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+# All exceptions in this module should subclass from Boto3Error.
+import botocore.exceptions
+
+
+# All exceptions should subclass from Boto3Error in this module.
+class Boto3Error(Exception):
+ """Base class for all Boto3 errors."""
+
+
+class ResourceLoadException(Boto3Error):
+ pass
+
+
+# NOTE: This doesn't appear to be used anywhere.
+# It's probably safe to remove this.
+class NoVersionFound(Boto3Error):
+ pass
+
+
+# We're subclassing from botocore.exceptions.DataNotFoundError
+# to keep backwards compatibility with anyone that was catching
+# this low level Botocore error before this exception was
+# introduced in boto3.
+# Same thing for ResourceNotExistsError below.
+class UnknownAPIVersionError(Boto3Error,
+ botocore.exceptions.DataNotFoundError):
+ def __init__(self, service_name, bad_api_version,
+ available_api_versions):
+ msg = (
+ "The '%s' resource does not an API version of: %s\n"
+ "Valid API versions are: %s"
+ % (service_name, bad_api_version, available_api_versions)
+ )
+ # Not using super because we don't want the DataNotFoundError
+ # to be called, it has a different __init__ signature.
+ Boto3Error.__init__(self, msg)
+
+
+class ResourceNotExistsError(Boto3Error,
+ botocore.exceptions.DataNotFoundError):
+ """Raised when you attempt to create a resource that does not exist."""
+ def __init__(self, service_name, available_services, has_low_level_client):
+ msg = (
+ "The '%s' resource does not exist.\n"
+ "The available resources are:\n"
+ " - %s\n" % (service_name, '\n - '.join(available_services))
+ )
+ if has_low_level_client:
+ msg += (
+ "\nConsider using a boto3.client('%s') instead "
+ "of a resource for '%s'" % (service_name, service_name))
+ # Not using super because we don't want the DataNotFoundError
+ # to be called, it has a different __init__ signature.
+ Boto3Error.__init__(self, msg)
+
+
+class RetriesExceededError(Boto3Error):
+ def __init__(self, last_exception, msg='Max Retries Exceeded'):
+ super(RetriesExceededError, self).__init__(msg)
+ self.last_exception = last_exception
+
+
+class S3TransferFailedError(Boto3Error):
+ pass
+
+
+class S3UploadFailedError(Boto3Error):
+ pass
+
+
+class DynamoDBOperationNotSupportedError(Boto3Error):
"""Raised for operations that are not supported for an operand."""
- def __init__(self, operation, value):
- msg = (
- '%s operation cannot be applied to value %s of type %s directly. '
-            'Must use AttributeBase object methods (e.g. Attr().eq()) to '
- 'generate ConditionBase instances first.' %
- (operation, value, type(value)))
- Exception.__init__(self, msg)
-
-# FIXME: Backward compatibility
-DynanmoDBOperationNotSupportedError = DynamoDBOperationNotSupportedError
-
-
-class DynamoDBNeedsConditionError(Boto3Error):
- """Raised when input is not a condition"""
- def __init__(self, value):
- msg = (
- 'Expecting a ConditionBase object. Got %s of type %s. '
-            'Use AttributeBase object methods (e.g. Attr().eq()) to '
- 'generate ConditionBase instances.' % (value, type(value)))
- Exception.__init__(self, msg)
-
-
-class DynamoDBNeedsKeyConditionError(Boto3Error):
- pass
+ def __init__(self, operation, value):
+ msg = (
+ '%s operation cannot be applied to value %s of type %s directly. '
+            'Must use AttributeBase object methods (e.g. Attr().eq()) to '
+ 'generate ConditionBase instances first.' %
+ (operation, value, type(value)))
+ Exception.__init__(self, msg)
+
+# FIXME: Backward compatibility
+DynanmoDBOperationNotSupportedError = DynamoDBOperationNotSupportedError
+
+
+class DynamoDBNeedsConditionError(Boto3Error):
+ """Raised when input is not a condition"""
+ def __init__(self, value):
+ msg = (
+ 'Expecting a ConditionBase object. Got %s of type %s. '
+            'Use AttributeBase object methods (e.g. Attr().eq()) to '
+ 'generate ConditionBase instances.' % (value, type(value)))
+ Exception.__init__(self, msg)
+
+
+class DynamoDBNeedsKeyConditionError(Boto3Error):
+ pass
class PythonDeprecationWarning(Warning):
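
To see DynamoDBOperationNotSupportedError in action (a sketch), combine a
condition with a plain value; the operand check raises before any request
is built:

    from boto3.dynamodb.conditions import Attr
    from boto3.exceptions import DynamoDBOperationNotSupportedError

    try:
        Attr('status').eq('active') & 'pending'  # right operand is a str
    except DynamoDBOperationNotSupportedError as exc:
        print(exc)  # AND operation cannot be applied to value pending ...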
diff --git a/contrib/python/boto3/boto3/resources/action.py b/contrib/python/boto3/boto3/resources/action.py
index e1ff0a83a0..b643426a4c 100644
--- a/contrib/python/boto3/boto3/resources/action.py
+++ b/contrib/python/boto3/boto3/resources/action.py
@@ -1,244 +1,244 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import logging
-
-from botocore import xform_name
-
-from .params import create_request_parameters
-from .response import RawHandler, ResourceHandler
-from .model import Action
-
-from boto3.docs.docstring import ActionDocstring
-from boto3.utils import inject_attribute
-
-
-logger = logging.getLogger(__name__)
-
-
-class ServiceAction(object):
- """
- A class representing a callable action on a resource, for example
- ``sqs.get_queue_by_name(...)`` or ``s3.Bucket('foo').delete()``.
- The action may construct parameters from existing resource identifiers
- and may return either a raw response or a new resource instance.
-
-    :type action_model: :py:class:`~boto3.resources.model.Action`
- :param action_model: The action model.
-
- :type factory: ResourceFactory
- :param factory: The factory that created the resource class to which
- this action is attached.
-
- :type service_context: :py:class:`~boto3.utils.ServiceContext`
- :param service_context: Context about the AWS service
- """
- def __init__(self, action_model, factory=None, service_context=None):
- self._action_model = action_model
-
- # In the simplest case we just return the response, but if a
- # resource is defined, then we must create these before returning.
- resource_response_model = action_model.resource
- if resource_response_model:
- self._response_handler = ResourceHandler(
- search_path=resource_response_model.path,
- factory=factory, resource_model=resource_response_model,
- service_context=service_context,
- operation_name=action_model.request.operation
- )
- else:
- self._response_handler = RawHandler(action_model.path)
-
- def __call__(self, parent, *args, **kwargs):
- """
- Perform the action's request operation after building operation
- parameters and build any defined resources from the response.
-
- :type parent: :py:class:`~boto3.resources.base.ServiceResource`
- :param parent: The resource instance to which this action is attached.
- :rtype: dict or ServiceResource or list(ServiceResource)
- :return: The response, either as a raw dict or resource instance(s).
- """
- operation_name = xform_name(self._action_model.request.operation)
-
- # First, build predefined params and then update with the
- # user-supplied kwargs, which allows overriding the pre-built
- # params if needed.
- params = create_request_parameters(parent, self._action_model.request)
- params.update(kwargs)
-
- logger.debug('Calling %s:%s with %r', parent.meta.service_name,
- operation_name, params)
-
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import logging
+
+from botocore import xform_name
+
+from .params import create_request_parameters
+from .response import RawHandler, ResourceHandler
+from .model import Action
+
+from boto3.docs.docstring import ActionDocstring
+from boto3.utils import inject_attribute
+
+
+logger = logging.getLogger(__name__)
+
+
+class ServiceAction(object):
+ """
+ A class representing a callable action on a resource, for example
+ ``sqs.get_queue_by_name(...)`` or ``s3.Bucket('foo').delete()``.
+ The action may construct parameters from existing resource identifiers
+ and may return either a raw response or a new resource instance.
+
+    :type action_model: :py:class:`~boto3.resources.model.Action`
+ :param action_model: The action model.
+
+ :type factory: ResourceFactory
+ :param factory: The factory that created the resource class to which
+ this action is attached.
+
+ :type service_context: :py:class:`~boto3.utils.ServiceContext`
+ :param service_context: Context about the AWS service
+ """
+ def __init__(self, action_model, factory=None, service_context=None):
+ self._action_model = action_model
+
+ # In the simplest case we just return the response, but if a
+ # resource is defined, then we must create these before returning.
+ resource_response_model = action_model.resource
+ if resource_response_model:
+ self._response_handler = ResourceHandler(
+ search_path=resource_response_model.path,
+ factory=factory, resource_model=resource_response_model,
+ service_context=service_context,
+ operation_name=action_model.request.operation
+ )
+ else:
+ self._response_handler = RawHandler(action_model.path)
+
+ def __call__(self, parent, *args, **kwargs):
+ """
+ Perform the action's request operation after building operation
+ parameters and build any defined resources from the response.
+
+ :type parent: :py:class:`~boto3.resources.base.ServiceResource`
+ :param parent: The resource instance to which this action is attached.
+ :rtype: dict or ServiceResource or list(ServiceResource)
+ :return: The response, either as a raw dict or resource instance(s).
+ """
+ operation_name = xform_name(self._action_model.request.operation)
+
+ # First, build predefined params and then update with the
+ # user-supplied kwargs, which allows overriding the pre-built
+ # params if needed.
+ params = create_request_parameters(parent, self._action_model.request)
+ params.update(kwargs)
+
+ logger.debug('Calling %s:%s with %r', parent.meta.service_name,
+ operation_name, params)
+
response = getattr(parent.meta.client, operation_name)(*args, **params)
-
- logger.debug('Response: %r', response)
-
- return self._response_handler(parent, params, response)
-
-
-class BatchAction(ServiceAction):
- """
- An action which operates on a batch of items in a collection, typically
- a single page of results from the collection's underlying service
- operation call. For example, this allows you to delete up to 999
- S3 objects in a single operation rather than calling ``.delete()`` on
- each one individually.
-
-    :type action_model: :py:class:`~boto3.resources.model.Action`
- :param action_model: The action model.
-
- :type factory: ResourceFactory
- :param factory: The factory that created the resource class to which
- this action is attached.
-
- :type service_context: :py:class:`~boto3.utils.ServiceContext`
- :param service_context: Context about the AWS service
- """
- def __call__(self, parent, *args, **kwargs):
- """
- Perform the batch action's operation on every page of results
- from the collection.
-
- :type parent:
- :py:class:`~boto3.resources.collection.ResourceCollection`
- :param parent: The collection iterator to which this action
- is attached.
- :rtype: list(dict)
- :return: A list of low-level response dicts from each call.
- """
- service_name = None
- client = None
- responses = []
- operation_name = xform_name(self._action_model.request.operation)
-
- # Unlike the simple action above, a batch action must operate
- # on batches (or pages) of items. So we get each page, construct
- # the necessary parameters and call the batch operation.
- for page in parent.pages():
- params = {}
- for index, resource in enumerate(page):
- # There is no public interface to get a service name
- # or low-level client from a collection, so we get
- # these from the first resource in the collection.
- if service_name is None:
- service_name = resource.meta.service_name
- if client is None:
- client = resource.meta.client
-
- create_request_parameters(
- resource, self._action_model.request,
- params=params, index=index)
-
- if not params:
- # There are no items, no need to make a call.
- break
-
- params.update(kwargs)
-
- logger.debug('Calling %s:%s with %r',
- service_name, operation_name, params)
-
+
+ logger.debug('Response: %r', response)
+
+ return self._response_handler(parent, params, response)
+
+
+class BatchAction(ServiceAction):
+ """
+ An action which operates on a batch of items in a collection, typically
+ a single page of results from the collection's underlying service
+ operation call. For example, this allows you to delete up to 999
+ S3 objects in a single operation rather than calling ``.delete()`` on
+ each one individually.
+
+    :type action_model: :py:class:`~boto3.resources.model.Action`
+ :param action_model: The action model.
+
+ :type factory: ResourceFactory
+ :param factory: The factory that created the resource class to which
+ this action is attached.
+
+ :type service_context: :py:class:`~boto3.utils.ServiceContext`
+ :param service_context: Context about the AWS service
+ """
+ def __call__(self, parent, *args, **kwargs):
+ """
+ Perform the batch action's operation on every page of results
+ from the collection.
+
+ :type parent:
+ :py:class:`~boto3.resources.collection.ResourceCollection`
+ :param parent: The collection iterator to which this action
+ is attached.
+ :rtype: list(dict)
+ :return: A list of low-level response dicts from each call.
+ """
+ service_name = None
+ client = None
+ responses = []
+ operation_name = xform_name(self._action_model.request.operation)
+
+ # Unlike the simple action above, a batch action must operate
+ # on batches (or pages) of items. So we get each page, construct
+ # the necessary parameters and call the batch operation.
+ for page in parent.pages():
+ params = {}
+ for index, resource in enumerate(page):
+ # There is no public interface to get a service name
+ # or low-level client from a collection, so we get
+ # these from the first resource in the collection.
+ if service_name is None:
+ service_name = resource.meta.service_name
+ if client is None:
+ client = resource.meta.client
+
+ create_request_parameters(
+ resource, self._action_model.request,
+ params=params, index=index)
+
+ if not params:
+ # There are no items, no need to make a call.
+ break
+
+ params.update(kwargs)
+
+ logger.debug('Calling %s:%s with %r',
+ service_name, operation_name, params)
+
response = getattr(client, operation_name)(*args, **params)
-
- logger.debug('Response: %r', response)
-
- responses.append(
- self._response_handler(parent, params, response))
-
- return responses
-
-
-class WaiterAction(object):
- """
- A class representing a callable waiter action on a resource, for example
- ``s3.Bucket('foo').wait_until_bucket_exists()``.
- The waiter action may construct parameters from existing resource
- identifiers.
-
-    :type waiter_model: :py:class:`~boto3.resources.model.Waiter`
- :param waiter_model: The action waiter.
- :type waiter_resource_name: string
- :param waiter_resource_name: The name of the waiter action for the
- resource. It usually begins with a
- ``wait_until_``
- """
- def __init__(self, waiter_model, waiter_resource_name):
- self._waiter_model = waiter_model
- self._waiter_resource_name = waiter_resource_name
-
- def __call__(self, parent, *args, **kwargs):
- """
- Perform the wait operation after building operation
- parameters.
-
- :type parent: :py:class:`~boto3.resources.base.ServiceResource`
- :param parent: The resource instance to which this action is attached.
- """
- client_waiter_name = xform_name(self._waiter_model.waiter_name)
-
- # First, build predefined params and then update with the
- # user-supplied kwargs, which allows overriding the pre-built
- # params if needed.
- params = create_request_parameters(parent, self._waiter_model)
- params.update(kwargs)
-
- logger.debug('Calling %s:%s with %r',
- parent.meta.service_name,
- self._waiter_resource_name, params)
-
- client = parent.meta.client
- waiter = client.get_waiter(client_waiter_name)
- response = waiter.wait(**params)
-
- logger.debug('Response: %r', response)
-
-
-class CustomModeledAction(object):
- """A custom, modeled action to inject into a resource."""
- def __init__(self, action_name, action_model,
- function, event_emitter):
- """
- :type action_name: str
- :param action_name: The name of the action to inject, e.g.
- 'delete_tags'
-
- :type action_model: dict
- :param action_model: A JSON definition of the action, as if it were
- part of the resource model.
-
- :type function: function
- :param function: The function to perform when the action is called.
- The first argument should be 'self', which will be the resource
- the function is to be called on.
-
- :type event_emitter: :py:class:`botocore.hooks.BaseEventHooks`
- :param event_emitter: The session event emitter.
- """
- self.name = action_name
- self.model = action_model
- self.function = function
- self.emitter = event_emitter
-
- def inject(self, class_attributes, service_context, event_name, **kwargs):
- resource_name = event_name.rsplit(".")[-1]
- action = Action(self.name, self.model, {})
- self.function.__name__ = self.name
- self.function.__doc__ = ActionDocstring(
- resource_name=resource_name,
- event_emitter=self.emitter,
- action_model=action,
- service_model=service_context.service_model,
- include_signature=False
- )
- inject_attribute(class_attributes, self.name, self.function)
+
+ logger.debug('Response: %r', response)
+
+ responses.append(
+ self._response_handler(parent, params, response))
+
+ return responses
+
+
+class WaiterAction(object):
+ """
+ A class representing a callable waiter action on a resource, for example
+ ``s3.Bucket('foo').wait_until_bucket_exists()``.
+ The waiter action may construct parameters from existing resource
+ identifiers.
+
+    :type waiter_model: :py:class:`~boto3.resources.model.Waiter`
+ :param waiter_model: The action waiter.
+ :type waiter_resource_name: string
+ :param waiter_resource_name: The name of the waiter action for the
+ resource. It usually begins with a
+ ``wait_until_``
+ """
+ def __init__(self, waiter_model, waiter_resource_name):
+ self._waiter_model = waiter_model
+ self._waiter_resource_name = waiter_resource_name
+
+ def __call__(self, parent, *args, **kwargs):
+ """
+ Perform the wait operation after building operation
+ parameters.
+
+ :type parent: :py:class:`~boto3.resources.base.ServiceResource`
+ :param parent: The resource instance to which this action is attached.
+ """
+ client_waiter_name = xform_name(self._waiter_model.waiter_name)
+
+ # First, build predefined params and then update with the
+ # user-supplied kwargs, which allows overriding the pre-built
+ # params if needed.
+ params = create_request_parameters(parent, self._waiter_model)
+ params.update(kwargs)
+
+ logger.debug('Calling %s:%s with %r',
+ parent.meta.service_name,
+ self._waiter_resource_name, params)
+
+ client = parent.meta.client
+ waiter = client.get_waiter(client_waiter_name)
+ response = waiter.wait(**params)
+
+ logger.debug('Response: %r', response)
+
+
+class CustomModeledAction(object):
+ """A custom, modeled action to inject into a resource."""
+ def __init__(self, action_name, action_model,
+ function, event_emitter):
+ """
+ :type action_name: str
+ :param action_name: The name of the action to inject, e.g.
+ 'delete_tags'
+
+ :type action_model: dict
+ :param action_model: A JSON definition of the action, as if it were
+ part of the resource model.
+
+ :type function: function
+ :param function: The function to perform when the action is called.
+ The first argument should be 'self', which will be the resource
+ the function is to be called on.
+
+ :type event_emitter: :py:class:`botocore.hooks.BaseEventHooks`
+ :param event_emitter: The session event emitter.
+ """
+ self.name = action_name
+ self.model = action_model
+ self.function = function
+ self.emitter = event_emitter
+
+ def inject(self, class_attributes, service_context, event_name, **kwargs):
+ resource_name = event_name.rsplit(".")[-1]
+ action = Action(self.name, self.model, {})
+ self.function.__name__ = self.name
+ self.function.__doc__ = ActionDocstring(
+ resource_name=resource_name,
+ event_emitter=self.emitter,
+ action_model=action,
+ service_model=service_context.service_model,
+ include_signature=False
+ )
+ inject_attribute(class_attributes, self.name, self.function)
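The ``WaiterAction`` class above is what backs the generated ``wait_until_*`` methods on resource instances. A minimal doctest-style sketch of how it surfaces to users, assuming configured AWS credentials and a placeholder bucket name:

    >>> import boto3
    >>> s3 = boto3.resource('s3')
    >>> bucket = s3.Bucket('my-bucket')  # 'my-bucket' is a placeholder
    >>> # Identifiers feed create_request_parameters(), so the bucket name is
    >>> # forwarded to the underlying client waiter automatically.
    >>> bucket.wait_until_exists()

Internally, ``xform_name`` converts the modeled waiter name to the client waiter name, so the call above is roughly equivalent to ``s3.meta.client.get_waiter('bucket_exists').wait(Bucket='my-bucket')``.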
diff --git a/contrib/python/boto3/boto3/resources/base.py b/contrib/python/boto3/boto3/resources/base.py
index 80b330b812..4e60c5a701 100644
--- a/contrib/python/boto3/boto3/resources/base.py
+++ b/contrib/python/boto3/boto3/resources/base.py
@@ -1,148 +1,148 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import logging
-
-import boto3
-
-
-logger = logging.getLogger(__name__)
-
-
-class ResourceMeta(object):
- """
- An object containing metadata about a resource.
- """
- def __init__(self, service_name, identifiers=None, client=None,
- data=None, resource_model=None):
- #: (``string``) The service name, e.g. 's3'
- self.service_name = service_name
-
- if identifiers is None:
- identifiers = []
- #: (``list``) List of identifier names
- self.identifiers = identifiers
-
- #: (:py:class:`~botocore.client.BaseClient`) Low-level Botocore client
- self.client = client
- #: (``dict``) Loaded resource data attributes
- self.data = data
-
- # The resource model for that resource
- self.resource_model = resource_model
-
- def __repr__(self):
- return 'ResourceMeta(\'{0}\', identifiers={1})'.format(
- self.service_name, self.identifiers)
-
- def __eq__(self, other):
- # Two metas are equal if their components are all equal
- if other.__class__.__name__ != self.__class__.__name__:
- return False
-
- return self.__dict__ == other.__dict__
-
- def copy(self):
- """
- Create a copy of this metadata object.
- """
- params = self.__dict__.copy()
- service_name = params.pop('service_name')
- return ResourceMeta(service_name, **params)
-
-
-class ServiceResource(object):
- """
- A base class for resources.
-
- :type client: botocore.client
- :param client: A low-level Botocore client instance
- """
-
- meta = None
- """
- Stores metadata about this resource instance, such as the
- ``service_name``, the low-level ``client`` and any cached ``data``
- from when the instance was hydrated. For example::
-
- # Get a low-level client from a resource instance
- client = resource.meta.client
- response = client.operation(Param='foo')
-
- # Print the resource instance's service short name
- print(resource.meta.service_name)
-
- See :py:class:`ResourceMeta` for more information.
- """
-
- def __init__(self, *args, **kwargs):
- # Always work on a copy of meta, otherwise we would affect other
- # instances of the same subclass.
- self.meta = self.meta.copy()
-
- # Create a default client if none was passed
- if kwargs.get('client') is not None:
- self.meta.client = kwargs.get('client')
- else:
- self.meta.client = boto3.client(self.meta.service_name)
-
- # Allow setting identifiers as positional arguments in the order
- # in which they were defined in the ResourceJSON.
- for i, value in enumerate(args):
- setattr(self, '_' + self.meta.identifiers[i], value)
-
- # Allow setting identifiers via keyword arguments. Here we need
- # extra logic to ignore other keyword arguments like ``client``.
- for name, value in kwargs.items():
- if name == 'client':
- continue
-
- if name not in self.meta.identifiers:
- raise ValueError('Unknown keyword argument: {0}'.format(name))
-
- setattr(self, '_' + name, value)
-
- # Validate that all identifiers have been set.
- for identifier in self.meta.identifiers:
- if getattr(self, identifier) is None:
- raise ValueError(
- 'Required parameter {0} not set'.format(identifier))
-
- def __repr__(self):
- identifiers = []
- for identifier in self.meta.identifiers:
- identifiers.append('{0}={1}'.format(
- identifier, repr(getattr(self, identifier))))
- return "{0}({1})".format(
- self.__class__.__name__,
- ', '.join(identifiers),
- )
-
- def __eq__(self, other):
- # Should be instances of the same resource class
- if other.__class__.__name__ != self.__class__.__name__:
- return False
-
- # Each of the identifiers should have the same value in both
- # instances, e.g. two buckets need the same name to be equal.
- for identifier in self.meta.identifiers:
- if getattr(self, identifier) != getattr(other, identifier):
- return False
-
- return True
-
- def __hash__(self):
- identifiers = []
- for identifier in self.meta.identifiers:
- identifiers.append(getattr(self, identifier))
- return hash((self.__class__.__name__, tuple(identifiers)))
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import logging
+
+import boto3
+
+
+logger = logging.getLogger(__name__)
+
+
+class ResourceMeta(object):
+ """
+ An object containing metadata about a resource.
+ """
+ def __init__(self, service_name, identifiers=None, client=None,
+ data=None, resource_model=None):
+ #: (``string``) The service name, e.g. 's3'
+ self.service_name = service_name
+
+ if identifiers is None:
+ identifiers = []
+ #: (``list``) List of identifier names
+ self.identifiers = identifiers
+
+ #: (:py:class:`~botocore.client.BaseClient`) Low-level Botocore client
+ self.client = client
+ #: (``dict``) Loaded resource data attributes
+ self.data = data
+
+ # The resource model for that resource
+ self.resource_model = resource_model
+
+ def __repr__(self):
+ return 'ResourceMeta(\'{0}\', identifiers={1})'.format(
+ self.service_name, self.identifiers)
+
+ def __eq__(self, other):
+ # Two metas are equal if their components are all equal
+ if other.__class__.__name__ != self.__class__.__name__:
+ return False
+
+ return self.__dict__ == other.__dict__
+
+ def copy(self):
+ """
+ Create a copy of this metadata object.
+ """
+ params = self.__dict__.copy()
+ service_name = params.pop('service_name')
+ return ResourceMeta(service_name, **params)
+
+
+class ServiceResource(object):
+ """
+ A base class for resources.
+
+ :type client: botocore.client
+ :param client: A low-level Botocore client instance
+ """
+
+ meta = None
+ """
+ Stores metadata about this resource instance, such as the
+ ``service_name``, the low-level ``client`` and any cached ``data``
+ from when the instance was hydrated. For example::
+
+ # Get a low-level client from a resource instance
+ client = resource.meta.client
+ response = client.operation(Param='foo')
+
+ # Print the resource instance's service short name
+ print(resource.meta.service_name)
+
+ See :py:class:`ResourceMeta` for more information.
+ """
+
+ def __init__(self, *args, **kwargs):
+ # Always work on a copy of meta, otherwise we would affect other
+ # instances of the same subclass.
+ self.meta = self.meta.copy()
+
+ # Create a default client if none was passed
+ if kwargs.get('client') is not None:
+ self.meta.client = kwargs.get('client')
+ else:
+ self.meta.client = boto3.client(self.meta.service_name)
+
+ # Allow setting identifiers as positional arguments in the order
+ # in which they were defined in the ResourceJSON.
+ for i, value in enumerate(args):
+ setattr(self, '_' + self.meta.identifiers[i], value)
+
+ # Allow setting identifiers via keyword arguments. Here we need
+ # extra logic to ignore other keyword arguments like ``client``.
+ for name, value in kwargs.items():
+ if name == 'client':
+ continue
+
+ if name not in self.meta.identifiers:
+ raise ValueError('Unknown keyword argument: {0}'.format(name))
+
+ setattr(self, '_' + name, value)
+
+ # Validate that all identifiers have been set.
+ for identifier in self.meta.identifiers:
+ if getattr(self, identifier) is None:
+ raise ValueError(
+ 'Required parameter {0} not set'.format(identifier))
+
+ def __repr__(self):
+ identifiers = []
+ for identifier in self.meta.identifiers:
+ identifiers.append('{0}={1}'.format(
+ identifier, repr(getattr(self, identifier))))
+ return "{0}({1})".format(
+ self.__class__.__name__,
+ ', '.join(identifiers),
+ )
+
+ def __eq__(self, other):
+ # Should be instances of the same resource class
+ if other.__class__.__name__ != self.__class__.__name__:
+ return False
+
+ # Each of the identifiers should have the same value in both
+ # instances, e.g. two buckets need the same name to be equal.
+ for identifier in self.meta.identifiers:
+ if getattr(self, identifier) != getattr(other, identifier):
+ return False
+
+ return True
+
+ def __hash__(self):
+ identifiers = []
+ for identifier in self.meta.identifiers:
+ identifiers.append(getattr(self, identifier))
+ return hash((self.__class__.__name__, tuple(identifiers)))
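Because ``__eq__`` and ``__hash__`` above consider only the class name and identifier values, two resource instances with the same identifiers compare equal and collapse in sets. A short sketch (constructing a resource makes no service calls; the names are placeholders and credentials are assumed to be configured):

    >>> import boto3
    >>> s3 = boto3.resource('s3')
    >>> a = s3.Bucket('my-bucket')       # identifier set positionally
    >>> b = s3.Bucket(name='my-bucket')  # or by keyword; 'name' is Bucket's identifier
    >>> a == b
    True
    >>> len({a, b})                      # hashable, so usable in sets and as dict keys
    1
    >>> a.meta.service_name
    's3'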
diff --git a/contrib/python/boto3/boto3/resources/collection.py b/contrib/python/boto3/boto3/resources/collection.py
index 087aaeb033..083f9cdda4 100644
--- a/contrib/python/boto3/boto3/resources/collection.py
+++ b/contrib/python/boto3/boto3/resources/collection.py
@@ -1,526 +1,526 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import copy
-import logging
-
-from botocore import xform_name
-from botocore.utils import merge_dicts
-
-from .action import BatchAction
-from .params import create_request_parameters
-from .response import ResourceHandler
-from ..docs import docstring
-
-
-logger = logging.getLogger(__name__)
-
-
-class ResourceCollection(object):
- """
- Represents a collection of resources, which can be iterated through,
- optionally with filtering. Collections automatically handle pagination
- for you.
-
- See :ref:`guide_collections` for a high-level overview of collections,
- including when remote service requests are performed.
-
- :type model: :py:class:`~boto3.resources.model.Collection`
- :param model: Collection model
- :type parent: :py:class:`~boto3.resources.base.ServiceResource`
- :param parent: The collection's parent resource
- :type handler: :py:class:`~boto3.resources.response.ResourceHandler`
- :param handler: The resource response handler used to create resource
- instances
- """
- def __init__(self, model, parent, handler, **kwargs):
- self._model = model
- self._parent = parent
- self._py_operation_name = xform_name(
- model.request.operation)
- self._handler = handler
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import copy
+import logging
+
+from botocore import xform_name
+from botocore.utils import merge_dicts
+
+from .action import BatchAction
+from .params import create_request_parameters
+from .response import ResourceHandler
+from ..docs import docstring
+
+
+logger = logging.getLogger(__name__)
+
+
+class ResourceCollection(object):
+ """
+ Represents a collection of resources, which can be iterated through,
+ optionally with filtering. Collections automatically handle pagination
+ for you.
+
+ See :ref:`guide_collections` for a high-level overview of collections,
+ including when remote service requests are performed.
+
+ :type model: :py:class:`~boto3.resources.model.Collection`
+ :param model: Collection model
+ :type parent: :py:class:`~boto3.resources.base.ServiceResource`
+ :param parent: The collection's parent resource
+ :type handler: :py:class:`~boto3.resources.response.ResourceHandler`
+ :param handler: The resource response handler used to create resource
+ instances
+ """
+ def __init__(self, model, parent, handler, **kwargs):
+ self._model = model
+ self._parent = parent
+ self._py_operation_name = xform_name(
+ model.request.operation)
+ self._handler = handler
self._params = copy.deepcopy(kwargs)
-
- def __repr__(self):
- return '{0}({1}, {2})'.format(
- self.__class__.__name__,
- self._parent,
- '{0}.{1}'.format(
- self._parent.meta.service_name,
- self._model.resource.type
- )
- )
-
- def __iter__(self):
- """
- A generator which yields resource instances after doing the
- appropriate service operation calls and handling any pagination
- on your behalf.
-
- Page size, item limit, and filter parameters are applied
- if they have previously been set.
-
- >>> bucket = s3.Bucket('boto3')
- >>> for obj in bucket.objects.all():
- ... print(obj.key)
- 'key1'
- 'key2'
-
- """
- limit = self._params.get('limit', None)
-
- count = 0
- for page in self.pages():
- for item in page:
- yield item
-
- # If the limit is set and has been reached, then
- # we stop processing items here.
- count += 1
- if limit is not None and count >= limit:
- return
-
- def _clone(self, **kwargs):
- """
- Create a clone of this collection. This is used by the methods
- below to provide a chainable interface that returns copies
- rather than the original. This allows things like:
-
- >>> base = collection.filter(Param1=1)
- >>> query1 = base.filter(Param2=2)
- >>> query2 = base.filter(Param3=3)
- >>> query1.params
- {'Param1': 1, 'Param2': 2}
- >>> query2.params
- {'Param1': 1, 'Param3': 3}
-
- :rtype: :py:class:`ResourceCollection`
- :return: A clone of this resource collection
- """
- params = copy.deepcopy(self._params)
- merge_dicts(params, kwargs, append_lists=True)
- clone = self.__class__(self._model, self._parent,
- self._handler, **params)
- return clone
-
- def pages(self):
- """
- A generator which yields pages of resource instances after
- doing the appropriate service operation calls and handling
- any pagination on your behalf. Non-paginated calls will
- return a single page of items.
-
- Page size, item limit, and filter parameters are applied
- if they have previously been set.
-
- >>> bucket = s3.Bucket('boto3')
- >>> for page in bucket.objects.pages():
- ... for obj in page:
- ... print(obj.key)
- 'key1'
- 'key2'
-
- :rtype: list(:py:class:`~boto3.resources.base.ServiceResource`)
- :return: List of resource instances
- """
- client = self._parent.meta.client
- cleaned_params = self._params.copy()
- limit = cleaned_params.pop('limit', None)
- page_size = cleaned_params.pop('page_size', None)
- params = create_request_parameters(
- self._parent, self._model.request)
- merge_dicts(params, cleaned_params, append_lists=True)
-
- # Is this a paginated operation? If so, we need to get an
- # iterator for the various pages. If not, then we simply
- # call the operation and return the result as a single
- # page in a list. For non-paginated results, we just ignore
- # the page size parameter.
- if client.can_paginate(self._py_operation_name):
- logger.debug('Calling paginated %s:%s with %r',
- self._parent.meta.service_name,
- self._py_operation_name, params)
- paginator = client.get_paginator(self._py_operation_name)
- pages = paginator.paginate(
- PaginationConfig={
- 'MaxItems': limit, 'PageSize': page_size}, **params)
- else:
- logger.debug('Calling %s:%s with %r',
- self._parent.meta.service_name,
- self._py_operation_name, params)
- pages = [getattr(client, self._py_operation_name)(**params)]
-
- # Now that we have a page iterator or single page of results
- # we start processing and yielding individual items.
- count = 0
- for page in pages:
- page_items = []
- for item in self._handler(self._parent, params, page):
- page_items.append(item)
-
- # If the limit is set and has been reached, then
- # we stop processing items here.
- count += 1
- if limit is not None and count >= limit:
- break
-
- yield page_items
-
-            # Stop reading pages if we've reached our limit
- if limit is not None and count >= limit:
- break
-
- def all(self):
- """
- Get all items from the collection, optionally with a custom
- page size and item count limit.
-
- This method returns an iterable generator which yields
- individual resource instances. Example use::
-
- # Iterate through items
- >>> for queue in sqs.queues.all():
- ... print(queue.url)
- 'https://url1'
- 'https://url2'
-
- # Convert to list
- >>> queues = list(sqs.queues.all())
- >>> len(queues)
- 2
- """
- return self._clone()
-
- def filter(self, **kwargs):
- """
- Get items from the collection, passing keyword arguments along
- as parameters to the underlying service operation, which are
- typically used to filter the results.
-
- This method returns an iterable generator which yields
- individual resource instances. Example use::
-
- # Iterate through items
- >>> for queue in sqs.queues.filter(Param='foo'):
- ... print(queue.url)
- 'https://url1'
- 'https://url2'
-
- # Convert to list
- >>> queues = list(sqs.queues.filter(Param='foo'))
- >>> len(queues)
- 2
-
- :rtype: :py:class:`ResourceCollection`
- """
- return self._clone(**kwargs)
-
- def limit(self, count):
- """
- Return at most this many resources.
-
- >>> for bucket in s3.buckets.limit(5):
- ... print(bucket.name)
- 'bucket1'
- 'bucket2'
- 'bucket3'
- 'bucket4'
- 'bucket5'
-
- :type count: int
- :param count: Return no more than this many items
- :rtype: :py:class:`ResourceCollection`
- """
- return self._clone(limit=count)
-
- def page_size(self, count):
- """
- Fetch at most this many resources per service request.
-
- >>> for obj in s3.Bucket('boto3').objects.page_size(100):
- ... print(obj.key)
-
- :type count: int
- :param count: Fetch this many items per request
- :rtype: :py:class:`ResourceCollection`
- """
- return self._clone(page_size=count)
-
-
-class CollectionManager(object):
- """
- A collection manager provides access to resource collection instances,
- which can be iterated and filtered. The manager exposes some
- convenience functions that are also found on resource collections,
- such as :py:meth:`~ResourceCollection.all` and
- :py:meth:`~ResourceCollection.filter`.
-
- Get all items::
-
- >>> for bucket in s3.buckets.all():
- ... print(bucket.name)
-
- Get only some items via filtering::
-
- >>> for queue in sqs.queues.filter(QueueNamePrefix='AWS'):
- ... print(queue.url)
-
-    Get whole pages of items::
-
- >>> for page in s3.Bucket('boto3').objects.pages():
- ... for obj in page:
- ... print(obj.key)
-
- A collection manager is not iterable. You **must** call one of the
- methods that return a :py:class:`ResourceCollection` before trying
- to iterate, slice, or convert to a list.
-
- See the :ref:`guide_collections` guide for a high-level overview
- of collections, including when remote service requests are performed.
-
- :type collection_model: :py:class:`~boto3.resources.model.Collection`
-    :param collection_model: Collection model
-
- :type parent: :py:class:`~boto3.resources.base.ServiceResource`
- :param parent: The collection's parent resource
-
- :type factory: :py:class:`~boto3.resources.factory.ResourceFactory`
- :param factory: The resource factory to create new resources
-
- :type service_context: :py:class:`~boto3.utils.ServiceContext`
- :param service_context: Context about the AWS service
- """
- # The class to use when creating an iterator
- _collection_cls = ResourceCollection
-
- def __init__(self, collection_model, parent, factory, service_context):
- self._model = collection_model
- operation_name = self._model.request.operation
- self._parent = parent
-
- search_path = collection_model.resource.path
- self._handler = ResourceHandler(
- search_path=search_path, factory=factory,
- resource_model=collection_model.resource,
- service_context=service_context,
- operation_name=operation_name
- )
-
- def __repr__(self):
- return '{0}({1}, {2})'.format(
- self.__class__.__name__,
- self._parent,
- '{0}.{1}'.format(
- self._parent.meta.service_name,
- self._model.resource.type
- )
- )
-
- def iterator(self, **kwargs):
- """
- Get a resource collection iterator from this manager.
-
- :rtype: :py:class:`ResourceCollection`
- :return: An iterable representing the collection of resources
- """
- return self._collection_cls(self._model, self._parent,
- self._handler, **kwargs)
-
- # Set up some methods to proxy ResourceCollection methods
- def all(self):
- return self.iterator()
- all.__doc__ = ResourceCollection.all.__doc__
-
- def filter(self, **kwargs):
- return self.iterator(**kwargs)
- filter.__doc__ = ResourceCollection.filter.__doc__
-
- def limit(self, count):
- return self.iterator(limit=count)
- limit.__doc__ = ResourceCollection.limit.__doc__
-
- def page_size(self, count):
- return self.iterator(page_size=count)
- page_size.__doc__ = ResourceCollection.page_size.__doc__
-
- def pages(self):
- return self.iterator().pages()
- pages.__doc__ = ResourceCollection.pages.__doc__
-
-
-class CollectionFactory(object):
- """
- A factory to create new
- :py:class:`CollectionManager` and :py:class:`ResourceCollection`
- subclasses from a :py:class:`~boto3.resources.model.Collection`
- model. These subclasses include methods to perform batch operations.
- """
- def load_from_definition(self, resource_name, collection_model,
- service_context, event_emitter):
- """
- Loads a collection from a model, creating a new
- :py:class:`CollectionManager` subclass
- with the correct properties and methods, named based on the service
- and resource name, e.g. ec2.InstanceCollectionManager. It also
- creates a new :py:class:`ResourceCollection` subclass which is used
- by the new manager class.
-
- :type resource_name: string
- :param resource_name: Name of the resource to look up. For services,
- this should match the ``service_name``.
-
- :type service_context: :py:class:`~boto3.utils.ServiceContext`
- :param service_context: Context about the AWS service
-
-    :type event_emitter: :py:class:`~botocore.hooks.HierarchicalEmitter`
- :param event_emitter: An event emitter
-
- :rtype: Subclass of :py:class:`CollectionManager`
- :return: The collection class.
- """
- attrs = {}
- collection_name = collection_model.name
-
- # Create the batch actions for a collection
- self._load_batch_actions(
- attrs, resource_name, collection_model,
- service_context.service_model, event_emitter)
- # Add the documentation to the collection class's methods
- self._load_documented_collection_methods(
- attrs=attrs, resource_name=resource_name,
- collection_model=collection_model,
- service_model=service_context.service_model,
- event_emitter=event_emitter,
- base_class=ResourceCollection)
-
- if service_context.service_name == resource_name:
- cls_name = '{0}.{1}Collection'.format(
- service_context.service_name, collection_name)
- else:
- cls_name = '{0}.{1}.{2}Collection'.format(
- service_context.service_name, resource_name, collection_name)
-
- collection_cls = type(str(cls_name), (ResourceCollection,),
- attrs)
-
- # Add the documentation to the collection manager's methods
- self._load_documented_collection_methods(
- attrs=attrs, resource_name=resource_name,
- collection_model=collection_model,
- service_model=service_context.service_model,
- event_emitter=event_emitter,
- base_class=CollectionManager)
- attrs['_collection_cls'] = collection_cls
- cls_name += 'Manager'
-
- return type(str(cls_name), (CollectionManager,), attrs)
-
- def _load_batch_actions(self, attrs, resource_name, collection_model,
- service_model, event_emitter):
- """
- Batch actions on the collection become methods on both
- the collection manager and iterators.
- """
- for action_model in collection_model.batch_actions:
- snake_cased = xform_name(action_model.name)
- attrs[snake_cased] = self._create_batch_action(
- resource_name, snake_cased, action_model, collection_model,
- service_model, event_emitter)
-
- def _load_documented_collection_methods(
- factory_self, attrs, resource_name, collection_model,
- service_model, event_emitter, base_class):
- # The base class already has these methods defined. However
-        # the docstrings are generic and not specific to a particular service
- # or resource. So we override these methods by proxying to the
- # base class's builtin method and adding a docstring
- # that pertains to the resource.
-
- # A collection's all() method.
- def all(self):
- return base_class.all(self)
-
- all.__doc__ = docstring.CollectionMethodDocstring(
- resource_name=resource_name,
- action_name='all',
- event_emitter=event_emitter,
- collection_model=collection_model,
- service_model=service_model,
- include_signature=False
- )
- attrs['all'] = all
-
- # The collection's filter() method.
- def filter(self, **kwargs):
- return base_class.filter(self, **kwargs)
-
- filter.__doc__ = docstring.CollectionMethodDocstring(
- resource_name=resource_name,
- action_name='filter',
- event_emitter=event_emitter,
- collection_model=collection_model,
- service_model=service_model,
- include_signature=False
- )
- attrs['filter'] = filter
-
- # The collection's limit method.
- def limit(self, count):
- return base_class.limit(self, count)
-
- limit.__doc__ = docstring.CollectionMethodDocstring(
- resource_name=resource_name,
- action_name='limit',
- event_emitter=event_emitter,
- collection_model=collection_model,
- service_model=service_model,
- include_signature=False
- )
- attrs['limit'] = limit
-
- # The collection's page_size method.
- def page_size(self, count):
- return base_class.page_size(self, count)
-
- page_size.__doc__ = docstring.CollectionMethodDocstring(
- resource_name=resource_name,
- action_name='page_size',
- event_emitter=event_emitter,
- collection_model=collection_model,
- service_model=service_model,
- include_signature=False
- )
- attrs['page_size'] = page_size
-
- def _create_batch_action(factory_self, resource_name, snake_cased,
- action_model, collection_model, service_model,
- event_emitter):
- """
- Creates a new method which makes a batch operation request
- to the underlying service API.
- """
- action = BatchAction(action_model)
-
- def batch_action(self, *args, **kwargs):
- return action(self, *args, **kwargs)
-
- batch_action.__name__ = str(snake_cased)
- batch_action.__doc__ = docstring.BatchActionDocstring(
- resource_name=resource_name,
- event_emitter=event_emitter,
- batch_action_model=action_model,
- service_model=service_model,
- collection_model=collection_model,
- include_signature=False
- )
- return batch_action
+
+ def __repr__(self):
+ return '{0}({1}, {2})'.format(
+ self.__class__.__name__,
+ self._parent,
+ '{0}.{1}'.format(
+ self._parent.meta.service_name,
+ self._model.resource.type
+ )
+ )
+
+ def __iter__(self):
+ """
+ A generator which yields resource instances after doing the
+ appropriate service operation calls and handling any pagination
+ on your behalf.
+
+ Page size, item limit, and filter parameters are applied
+ if they have previously been set.
+
+ >>> bucket = s3.Bucket('boto3')
+ >>> for obj in bucket.objects.all():
+ ... print(obj.key)
+ 'key1'
+ 'key2'
+
+ """
+ limit = self._params.get('limit', None)
+
+ count = 0
+ for page in self.pages():
+ for item in page:
+ yield item
+
+ # If the limit is set and has been reached, then
+ # we stop processing items here.
+ count += 1
+ if limit is not None and count >= limit:
+ return
+
+ def _clone(self, **kwargs):
+ """
+ Create a clone of this collection. This is used by the methods
+ below to provide a chainable interface that returns copies
+ rather than the original. This allows things like:
+
+ >>> base = collection.filter(Param1=1)
+ >>> query1 = base.filter(Param2=2)
+ >>> query2 = base.filter(Param3=3)
+ >>> query1.params
+ {'Param1': 1, 'Param2': 2}
+ >>> query2.params
+ {'Param1': 1, 'Param3': 3}
+
+ :rtype: :py:class:`ResourceCollection`
+ :return: A clone of this resource collection
+ """
+ params = copy.deepcopy(self._params)
+ merge_dicts(params, kwargs, append_lists=True)
+ clone = self.__class__(self._model, self._parent,
+ self._handler, **params)
+ return clone
+
+ def pages(self):
+ """
+ A generator which yields pages of resource instances after
+ doing the appropriate service operation calls and handling
+ any pagination on your behalf. Non-paginated calls will
+ return a single page of items.
+
+ Page size, item limit, and filter parameters are applied
+ if they have previously been set.
+
+ >>> bucket = s3.Bucket('boto3')
+ >>> for page in bucket.objects.pages():
+ ... for obj in page:
+ ... print(obj.key)
+ 'key1'
+ 'key2'
+
+ :rtype: list(:py:class:`~boto3.resources.base.ServiceResource`)
+ :return: List of resource instances
+ """
+ client = self._parent.meta.client
+ cleaned_params = self._params.copy()
+ limit = cleaned_params.pop('limit', None)
+ page_size = cleaned_params.pop('page_size', None)
+ params = create_request_parameters(
+ self._parent, self._model.request)
+ merge_dicts(params, cleaned_params, append_lists=True)
+
+ # Is this a paginated operation? If so, we need to get an
+ # iterator for the various pages. If not, then we simply
+ # call the operation and return the result as a single
+ # page in a list. For non-paginated results, we just ignore
+ # the page size parameter.
+ if client.can_paginate(self._py_operation_name):
+ logger.debug('Calling paginated %s:%s with %r',
+ self._parent.meta.service_name,
+ self._py_operation_name, params)
+ paginator = client.get_paginator(self._py_operation_name)
+ pages = paginator.paginate(
+ PaginationConfig={
+ 'MaxItems': limit, 'PageSize': page_size}, **params)
+ else:
+ logger.debug('Calling %s:%s with %r',
+ self._parent.meta.service_name,
+ self._py_operation_name, params)
+ pages = [getattr(client, self._py_operation_name)(**params)]
+
+ # Now that we have a page iterator or single page of results
+ # we start processing and yielding individual items.
+ count = 0
+ for page in pages:
+ page_items = []
+ for item in self._handler(self._parent, params, page):
+ page_items.append(item)
+
+ # If the limit is set and has been reached, then
+ # we stop processing items here.
+ count += 1
+ if limit is not None and count >= limit:
+ break
+
+ yield page_items
+
+            # Stop reading pages if we've reached our limit
+ if limit is not None and count >= limit:
+ break
+
+ def all(self):
+ """
+ Get all items from the collection, optionally with a custom
+ page size and item count limit.
+
+ This method returns an iterable generator which yields
+ individual resource instances. Example use::
+
+ # Iterate through items
+ >>> for queue in sqs.queues.all():
+ ... print(queue.url)
+ 'https://url1'
+ 'https://url2'
+
+ # Convert to list
+ >>> queues = list(sqs.queues.all())
+ >>> len(queues)
+ 2
+ """
+ return self._clone()
+
+ def filter(self, **kwargs):
+ """
+ Get items from the collection, passing keyword arguments along
+ as parameters to the underlying service operation, which are
+ typically used to filter the results.
+
+ This method returns an iterable generator which yields
+ individual resource instances. Example use::
+
+ # Iterate through items
+ >>> for queue in sqs.queues.filter(Param='foo'):
+ ... print(queue.url)
+ 'https://url1'
+ 'https://url2'
+
+ # Convert to list
+ >>> queues = list(sqs.queues.filter(Param='foo'))
+ >>> len(queues)
+ 2
+
+ :rtype: :py:class:`ResourceCollection`
+ """
+ return self._clone(**kwargs)
+
+ def limit(self, count):
+ """
+ Return at most this many resources.
+
+ >>> for bucket in s3.buckets.limit(5):
+ ... print(bucket.name)
+ 'bucket1'
+ 'bucket2'
+ 'bucket3'
+ 'bucket4'
+ 'bucket5'
+
+ :type count: int
+ :param count: Return no more than this many items
+ :rtype: :py:class:`ResourceCollection`
+ """
+ return self._clone(limit=count)
+
+ def page_size(self, count):
+ """
+ Fetch at most this many resources per service request.
+
+ >>> for obj in s3.Bucket('boto3').objects.page_size(100):
+ ... print(obj.key)
+
+ :type count: int
+ :param count: Fetch this many items per request
+ :rtype: :py:class:`ResourceCollection`
+ """
+ return self._clone(page_size=count)
+
+
+class CollectionManager(object):
+ """
+ A collection manager provides access to resource collection instances,
+ which can be iterated and filtered. The manager exposes some
+ convenience functions that are also found on resource collections,
+ such as :py:meth:`~ResourceCollection.all` and
+ :py:meth:`~ResourceCollection.filter`.
+
+ Get all items::
+
+ >>> for bucket in s3.buckets.all():
+ ... print(bucket.name)
+
+ Get only some items via filtering::
+
+ >>> for queue in sqs.queues.filter(QueueNamePrefix='AWS'):
+ ... print(queue.url)
+
+    Get whole pages of items::
+
+ >>> for page in s3.Bucket('boto3').objects.pages():
+ ... for obj in page:
+ ... print(obj.key)
+
+ A collection manager is not iterable. You **must** call one of the
+ methods that return a :py:class:`ResourceCollection` before trying
+ to iterate, slice, or convert to a list.
+
+ See the :ref:`guide_collections` guide for a high-level overview
+ of collections, including when remote service requests are performed.
+
+ :type collection_model: :py:class:`~boto3.resources.model.Collection`
+    :param collection_model: Collection model
+
+ :type parent: :py:class:`~boto3.resources.base.ServiceResource`
+ :param parent: The collection's parent resource
+
+ :type factory: :py:class:`~boto3.resources.factory.ResourceFactory`
+ :param factory: The resource factory to create new resources
+
+ :type service_context: :py:class:`~boto3.utils.ServiceContext`
+ :param service_context: Context about the AWS service
+ """
+ # The class to use when creating an iterator
+ _collection_cls = ResourceCollection
+
+ def __init__(self, collection_model, parent, factory, service_context):
+ self._model = collection_model
+ operation_name = self._model.request.operation
+ self._parent = parent
+
+ search_path = collection_model.resource.path
+ self._handler = ResourceHandler(
+ search_path=search_path, factory=factory,
+ resource_model=collection_model.resource,
+ service_context=service_context,
+ operation_name=operation_name
+ )
+
+ def __repr__(self):
+ return '{0}({1}, {2})'.format(
+ self.__class__.__name__,
+ self._parent,
+ '{0}.{1}'.format(
+ self._parent.meta.service_name,
+ self._model.resource.type
+ )
+ )
+
+ def iterator(self, **kwargs):
+ """
+ Get a resource collection iterator from this manager.
+
+ :rtype: :py:class:`ResourceCollection`
+ :return: An iterable representing the collection of resources
+ """
+ return self._collection_cls(self._model, self._parent,
+ self._handler, **kwargs)
+
+ # Set up some methods to proxy ResourceCollection methods
+ def all(self):
+ return self.iterator()
+ all.__doc__ = ResourceCollection.all.__doc__
+
+ def filter(self, **kwargs):
+ return self.iterator(**kwargs)
+ filter.__doc__ = ResourceCollection.filter.__doc__
+
+ def limit(self, count):
+ return self.iterator(limit=count)
+ limit.__doc__ = ResourceCollection.limit.__doc__
+
+ def page_size(self, count):
+ return self.iterator(page_size=count)
+ page_size.__doc__ = ResourceCollection.page_size.__doc__
+
+ def pages(self):
+ return self.iterator().pages()
+ pages.__doc__ = ResourceCollection.pages.__doc__
+
+
+class CollectionFactory(object):
+ """
+ A factory to create new
+ :py:class:`CollectionManager` and :py:class:`ResourceCollection`
+ subclasses from a :py:class:`~boto3.resources.model.Collection`
+ model. These subclasses include methods to perform batch operations.
+ """
+ def load_from_definition(self, resource_name, collection_model,
+ service_context, event_emitter):
+ """
+ Loads a collection from a model, creating a new
+ :py:class:`CollectionManager` subclass
+ with the correct properties and methods, named based on the service
+ and resource name, e.g. ec2.InstanceCollectionManager. It also
+ creates a new :py:class:`ResourceCollection` subclass which is used
+ by the new manager class.
+
+ :type resource_name: string
+ :param resource_name: Name of the resource to look up. For services,
+ this should match the ``service_name``.
+
+ :type service_context: :py:class:`~boto3.utils.ServiceContext`
+ :param service_context: Context about the AWS service
+
+    :type event_emitter: :py:class:`~botocore.hooks.HierarchicalEmitter`
+ :param event_emitter: An event emitter
+
+ :rtype: Subclass of :py:class:`CollectionManager`
+ :return: The collection class.
+ """
+ attrs = {}
+ collection_name = collection_model.name
+
+ # Create the batch actions for a collection
+ self._load_batch_actions(
+ attrs, resource_name, collection_model,
+ service_context.service_model, event_emitter)
+ # Add the documentation to the collection class's methods
+ self._load_documented_collection_methods(
+ attrs=attrs, resource_name=resource_name,
+ collection_model=collection_model,
+ service_model=service_context.service_model,
+ event_emitter=event_emitter,
+ base_class=ResourceCollection)
+
+ if service_context.service_name == resource_name:
+ cls_name = '{0}.{1}Collection'.format(
+ service_context.service_name, collection_name)
+ else:
+ cls_name = '{0}.{1}.{2}Collection'.format(
+ service_context.service_name, resource_name, collection_name)
+
+ collection_cls = type(str(cls_name), (ResourceCollection,),
+ attrs)
+
+ # Add the documentation to the collection manager's methods
+ self._load_documented_collection_methods(
+ attrs=attrs, resource_name=resource_name,
+ collection_model=collection_model,
+ service_model=service_context.service_model,
+ event_emitter=event_emitter,
+ base_class=CollectionManager)
+ attrs['_collection_cls'] = collection_cls
+ cls_name += 'Manager'
+
+ return type(str(cls_name), (CollectionManager,), attrs)
+
+ def _load_batch_actions(self, attrs, resource_name, collection_model,
+ service_model, event_emitter):
+ """
+ Batch actions on the collection become methods on both
+ the collection manager and iterators.
+ """
+ for action_model in collection_model.batch_actions:
+ snake_cased = xform_name(action_model.name)
+ attrs[snake_cased] = self._create_batch_action(
+ resource_name, snake_cased, action_model, collection_model,
+ service_model, event_emitter)
+
+ def _load_documented_collection_methods(
+ factory_self, attrs, resource_name, collection_model,
+ service_model, event_emitter, base_class):
+ # The base class already has these methods defined. However
+        # the docstrings are generic and not specific to a particular service
+ # or resource. So we override these methods by proxying to the
+ # base class's builtin method and adding a docstring
+ # that pertains to the resource.
+
+ # A collection's all() method.
+ def all(self):
+ return base_class.all(self)
+
+ all.__doc__ = docstring.CollectionMethodDocstring(
+ resource_name=resource_name,
+ action_name='all',
+ event_emitter=event_emitter,
+ collection_model=collection_model,
+ service_model=service_model,
+ include_signature=False
+ )
+ attrs['all'] = all
+
+ # The collection's filter() method.
+ def filter(self, **kwargs):
+ return base_class.filter(self, **kwargs)
+
+ filter.__doc__ = docstring.CollectionMethodDocstring(
+ resource_name=resource_name,
+ action_name='filter',
+ event_emitter=event_emitter,
+ collection_model=collection_model,
+ service_model=service_model,
+ include_signature=False
+ )
+ attrs['filter'] = filter
+
+ # The collection's limit method.
+ def limit(self, count):
+ return base_class.limit(self, count)
+
+ limit.__doc__ = docstring.CollectionMethodDocstring(
+ resource_name=resource_name,
+ action_name='limit',
+ event_emitter=event_emitter,
+ collection_model=collection_model,
+ service_model=service_model,
+ include_signature=False
+ )
+ attrs['limit'] = limit
+
+ # The collection's page_size method.
+ def page_size(self, count):
+ return base_class.page_size(self, count)
+
+ page_size.__doc__ = docstring.CollectionMethodDocstring(
+ resource_name=resource_name,
+ action_name='page_size',
+ event_emitter=event_emitter,
+ collection_model=collection_model,
+ service_model=service_model,
+ include_signature=False
+ )
+ attrs['page_size'] = page_size
+
+ def _create_batch_action(factory_self, resource_name, snake_cased,
+ action_model, collection_model, service_model,
+ event_emitter):
+ """
+ Creates a new method which makes a batch operation request
+ to the underlying service API.
+ """
+ action = BatchAction(action_model)
+
+ def batch_action(self, *args, **kwargs):
+ return action(self, *args, **kwargs)
+
+ batch_action.__name__ = str(snake_cased)
+ batch_action.__doc__ = docstring.BatchActionDocstring(
+ resource_name=resource_name,
+ event_emitter=event_emitter,
+ batch_action_model=action_model,
+ service_model=service_model,
+ collection_model=collection_model,
+ include_signature=False
+ )
+ return batch_action
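Since ``filter``, ``limit``, and ``page_size`` above each return a clone, a collection can be narrowed incrementally without side effects, and no request is sent until iteration begins. A doctest-style sketch with placeholder names, assuming configured credentials:

    >>> import boto3
    >>> s3 = boto3.resource('s3')
    >>> objects = s3.Bucket('my-bucket').objects  # CollectionManager: not itself iterable
    >>> logs = objects.filter(Prefix='logs/')     # ResourceCollection clone; no request yet
    >>> for obj in logs.limit(10).page_size(5):   # pagination requests happen here
    ...     print(obj.key)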
diff --git a/contrib/python/boto3/boto3/resources/factory.py b/contrib/python/boto3/boto3/resources/factory.py
index f1147c5f4c..ccd55bfeff 100644
--- a/contrib/python/boto3/boto3/resources/factory.py
+++ b/contrib/python/boto3/boto3/resources/factory.py
@@ -1,539 +1,539 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import logging
-from functools import partial
-
-from .action import ServiceAction
-from .action import WaiterAction
-from .base import ResourceMeta, ServiceResource
-from .collection import CollectionFactory
-from .model import ResourceModel
-from .response import build_identifiers, ResourceHandler
-from ..exceptions import ResourceLoadException
-from ..docs import docstring
-
-
-logger = logging.getLogger(__name__)
-
-
-class ResourceFactory(object):
- """
- A factory to create new :py:class:`~boto3.resources.base.ServiceResource`
- classes from a :py:class:`~boto3.resources.model.ResourceModel`. There are
- two types of lookups that can be done: one on the service itself (e.g. an
- SQS resource) and another on models contained within the service (e.g. an
- SQS Queue resource).
- """
- def __init__(self, emitter):
- self._collection_factory = CollectionFactory()
- self._emitter = emitter
-
- def load_from_definition(self, resource_name,
- single_resource_json_definition, service_context):
- """
- Loads a resource from a model, creating a new
- :py:class:`~boto3.resources.base.ServiceResource` subclass
- with the correct properties and methods, named based on the service
- and resource name, e.g. EC2.Instance.
-
- :type resource_name: string
- :param resource_name: Name of the resource to look up. For services,
- this should match the ``service_name``.
-
- :type single_resource_json_definition: dict
- :param single_resource_json_definition:
- The loaded json of a single service resource or resource
- definition.
-
- :type service_context: :py:class:`~boto3.utils.ServiceContext`
- :param service_context: Context about the AWS service
-
- :rtype: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
- :return: The service or resource class.
- """
- logger.debug('Loading %s:%s', service_context.service_name,
- resource_name)
-
- # Using the loaded JSON create a ResourceModel object.
- resource_model = ResourceModel(
- resource_name, single_resource_json_definition,
- service_context.resource_json_definitions
- )
-
- # Do some renaming of the shape if there was a naming collision
- # that needed to be accounted for.
- shape = None
- if resource_model.shape:
- shape = service_context.service_model.shape_for(
- resource_model.shape)
- resource_model.load_rename_map(shape)
-
- # Set some basic info
- meta = ResourceMeta(
- service_context.service_name, resource_model=resource_model)
- attrs = {
- 'meta': meta,
- }
-
- # Create and load all of attributes of the resource class based
- # on the models.
-
- # Identifiers
- self._load_identifiers(
- attrs=attrs, meta=meta, resource_name=resource_name,
- resource_model=resource_model
- )
-
- # Load/Reload actions
- self._load_actions(
- attrs=attrs, resource_name=resource_name,
- resource_model=resource_model, service_context=service_context
- )
-
- # Attributes that get auto-loaded
- self._load_attributes(
- attrs=attrs, meta=meta, resource_name=resource_name,
- resource_model=resource_model,
- service_context=service_context)
-
- # Collections and their corresponding methods
- self._load_collections(
- attrs=attrs, resource_model=resource_model,
- service_context=service_context)
-
- # References and Subresources
- self._load_has_relations(
- attrs=attrs, resource_name=resource_name,
- resource_model=resource_model, service_context=service_context
- )
-
- # Waiter resource actions
- self._load_waiters(
- attrs=attrs, resource_name=resource_name,
- resource_model=resource_model, service_context=service_context
- )
-
- # Create the name based on the requested service and resource
- cls_name = resource_name
- if service_context.service_name == resource_name:
- cls_name = 'ServiceResource'
- cls_name = service_context.service_name + '.' + cls_name
-
- base_classes = [ServiceResource]
- if self._emitter is not None:
- self._emitter.emit(
- 'creating-resource-class.%s' % cls_name,
- class_attributes=attrs, base_classes=base_classes,
- service_context=service_context)
- return type(str(cls_name), tuple(base_classes), attrs)
-
- def _load_identifiers(self, attrs, meta, resource_model, resource_name):
- """
- Populate required identifiers. These are arguments without which
- the resource cannot be used. Identifiers become arguments for
- operations on the resource.
- """
- for identifier in resource_model.identifiers:
- meta.identifiers.append(identifier.name)
- attrs[identifier.name] = self._create_identifier(
- identifier, resource_name)
-
- def _load_actions(self, attrs, resource_name, resource_model,
- service_context):
- """
- Actions on the resource become methods, with the ``load`` method
-        being a special case which sets internal data for attributes, and
-        ``reload`` being an alias for ``load``.
- """
- if resource_model.load:
- attrs['load'] = self._create_action(
- action_model=resource_model.load, resource_name=resource_name,
- service_context=service_context, is_load=True)
- attrs['reload'] = attrs['load']
-
- for action in resource_model.actions:
- attrs[action.name] = self._create_action(
- action_model=action, resource_name=resource_name,
- service_context=service_context)
-
- def _load_attributes(self, attrs, meta, resource_name, resource_model,
- service_context):
- """
- Load resource attributes based on the resource shape. The shape
- name is referenced in the resource JSON, but the shape itself
- is defined in the Botocore service JSON, hence the need for
- access to the ``service_model``.
- """
- if not resource_model.shape:
- return
-
- shape = service_context.service_model.shape_for(
- resource_model.shape)
-
- identifiers = dict(
- (i.member_name, i)
- for i in resource_model.identifiers if i.member_name)
- attributes = resource_model.get_attributes(shape)
- for name, (orig_name, member) in attributes.items():
- if name in identifiers:
- prop = self._create_identifier_alias(
- resource_name=resource_name,
- identifier=identifiers[name],
- member_model=member,
- service_context=service_context
- )
- else:
- prop = self._create_autoload_property(
- resource_name=resource_name,
- name=orig_name, snake_cased=name,
- member_model=member,
- service_context=service_context
- )
- attrs[name] = prop
-
- def _load_collections(self, attrs, resource_model, service_context):
- """
- Load resource collections from the model. Each collection becomes
- a :py:class:`~boto3.resources.collection.CollectionManager` instance
- on the resource instance, which allows you to iterate and filter
- through the collection's items.
- """
- for collection_model in resource_model.collections:
- attrs[collection_model.name] = self._create_collection(
- resource_name=resource_model.name,
- collection_model=collection_model,
- service_context=service_context
- )
-
- def _load_has_relations(self, attrs, resource_name, resource_model,
- service_context):
- """
- Load related resources, which are defined via a ``has``
- relationship but conceptually come in two forms:
-
- 1. A reference, which is a related resource instance and can be
- ``None``, such as an EC2 instance's ``vpc``.
- 2. A subresource, which is a resource constructor that will always
- return a resource instance which shares identifiers/data with
- this resource, such as ``s3.Bucket('name').Object('key')``.
- """
- for reference in resource_model.references:
- # This is a dangling reference, i.e. we have all
- # the data we need to create the resource, so
- # this instance becomes an attribute on the class.
- attrs[reference.name] = self._create_reference(
- reference_model=reference,
- resource_name=resource_name,
- service_context=service_context
- )
-
- for subresource in resource_model.subresources:
- # This is a sub-resource class you can create
- # by passing in an identifier, e.g. s3.Bucket(name).
- attrs[subresource.name] = self._create_class_partial(
- subresource_model=subresource,
- resource_name=resource_name,
- service_context=service_context
- )
-
- self._create_available_subresources_command(
- attrs, resource_model.subresources)
-
- def _create_available_subresources_command(self, attrs, subresources):
- _subresources = [subresource.name for subresource in subresources]
- _subresources = sorted(_subresources)
-
- def get_available_subresources(factory_self):
- """
- Returns a list of all the available sub-resources for this
- Resource.
-
- :returns: A list containing the name of each sub-resource for this
- resource
- :rtype: list of str
- """
- return _subresources
-
- attrs['get_available_subresources'] = get_available_subresources
-
- def _load_waiters(self, attrs, resource_name, resource_model,
- service_context):
- """
- Load resource waiters from the model. Each waiter allows you to
- wait until a resource reaches a specific state by polling the state
- of the resource.
- """
- for waiter in resource_model.waiters:
- attrs[waiter.name] = self._create_waiter(
- resource_waiter_model=waiter,
- resource_name=resource_name,
- service_context=service_context
- )
-
- def _create_identifier(factory_self, identifier, resource_name):
- """
- Creates a read-only property for identifier attributes.
- """
- def get_identifier(self):
- # The default value is set to ``None`` instead of
- # raising an AttributeError because when resources are
- # instantiated a check is made such that none of the
- # identifiers have a value ``None``. If any are ``None``,
- # a more informative user error than a generic AttributeError
- # is raised.
- return getattr(self, '_' + identifier.name, None)
-
- get_identifier.__name__ = str(identifier.name)
- get_identifier.__doc__ = docstring.IdentifierDocstring(
- resource_name=resource_name,
- identifier_model=identifier,
- include_signature=False
- )
-
- return property(get_identifier)
-
- def _create_identifier_alias(factory_self, resource_name, identifier,
- member_model, service_context):
- """
- Creates a read-only property that aliases an identifier.
- """
- def get_identifier(self):
- return getattr(self, '_' + identifier.name, None)
-
- get_identifier.__name__ = str(identifier.member_name)
- get_identifier.__doc__ = docstring.AttributeDocstring(
- service_name=service_context.service_name,
- resource_name=resource_name,
- attr_name=identifier.member_name,
- event_emitter=factory_self._emitter,
- attr_model=member_model,
- include_signature=False
- )
-
- return property(get_identifier)
-
- def _create_autoload_property(factory_self, resource_name, name,
- snake_cased, member_model, service_context):
- """
- Creates a new property on the resource to lazy-load its value
- via the resource's ``load`` method (if it exists).
- """
- # The property loader will check to see if this resource has already
- # been loaded and return the cached value if possible. If not, then
- # it first checks to see if it CAN be loaded (raise if not), then
- # calls the load before returning the value.
- def property_loader(self):
- if self.meta.data is None:
- if hasattr(self, 'load'):
- self.load()
- else:
- raise ResourceLoadException(
- '{0} has no load method'.format(
- self.__class__.__name__))
-
- return self.meta.data.get(name)
-
- property_loader.__name__ = str(snake_cased)
- property_loader.__doc__ = docstring.AttributeDocstring(
- service_name=service_context.service_name,
- resource_name=resource_name,
- attr_name=snake_cased,
- event_emitter=factory_self._emitter,
- attr_model=member_model,
- include_signature=False
- )
-
- return property(property_loader)
-
- def _create_waiter(factory_self, resource_waiter_model, resource_name,
- service_context):
- """
- Creates a new wait method for each resource where both a waiter and
- resource model is defined.
- """
- waiter = WaiterAction(resource_waiter_model,
- waiter_resource_name=resource_waiter_model.name)
-
- def do_waiter(self, *args, **kwargs):
- waiter(self, *args, **kwargs)
-
- do_waiter.__name__ = str(resource_waiter_model.name)
- do_waiter.__doc__ = docstring.ResourceWaiterDocstring(
- resource_name=resource_name,
- event_emitter=factory_self._emitter,
- service_model=service_context.service_model,
- resource_waiter_model=resource_waiter_model,
- service_waiter_model=service_context.service_waiter_model,
- include_signature=False
- )
- return do_waiter
-
- def _create_collection(factory_self, resource_name, collection_model,
- service_context):
- """
- Creates a new property on the resource to lazy-load a collection.
- """
- cls = factory_self._collection_factory.load_from_definition(
- resource_name=resource_name, collection_model=collection_model,
- service_context=service_context,
- event_emitter=factory_self._emitter)
-
- def get_collection(self):
- return cls(
- collection_model=collection_model, parent=self,
- factory=factory_self, service_context=service_context)
-
- get_collection.__name__ = str(collection_model.name)
- get_collection.__doc__ = docstring.CollectionDocstring(
- collection_model=collection_model, include_signature=False)
- return property(get_collection)
-
- def _create_reference(factory_self, reference_model, resource_name,
- service_context):
- """
- Creates a new property on the resource to lazy-load a reference.
- """
- # References are essentially an action with no request
- # or response, so we can re-use the response handlers to
- # build up resources from identifiers and data members.
- handler = ResourceHandler(
- search_path=reference_model.resource.path, factory=factory_self,
- resource_model=reference_model.resource,
- service_context=service_context
- )
-
- # Are there any identifiers that need access to data members?
- # This is important when building the resource below since
- # it requires the data to be loaded.
- needs_data = any(i.source == 'data' for i in
- reference_model.resource.identifiers)
-
- def get_reference(self):
- # We need to lazy-evaluate the reference to handle circular
- # references between resources. We do this by loading the class
- # when first accessed.
- # This is using a *response handler* so we need to make sure
- # our data is loaded (if possible) and pass that data into
- # the handler as if it were a response. This allows references
- # to have their data loaded properly.
- if needs_data and self.meta.data is None and hasattr(self, 'load'):
- self.load()
- return handler(self, {}, self.meta.data)
-
- get_reference.__name__ = str(reference_model.name)
- get_reference.__doc__ = docstring.ReferenceDocstring(
- reference_model=reference_model,
- include_signature=False
- )
- return property(get_reference)
-
- def _create_class_partial(factory_self, subresource_model, resource_name,
- service_context):
- """
- Creates a new method which acts as a functools.partial, passing
- along the instance's low-level `client` to the new resource
- class' constructor.
- """
- name = subresource_model.resource.type
-
- def create_resource(self, *args, **kwargs):
- # We need a new method here because we want access to the
- # instance's client.
- positional_args = []
-
- # We lazy-load the class to handle circular references.
- json_def = service_context.resource_json_definitions.get(name, {})
- resource_cls = factory_self.load_from_definition(
- resource_name=name,
- single_resource_json_definition=json_def,
- service_context=service_context
- )
-
-        # Assumes that identifiers are in order, which lets you do
-        # e.g. ``sqs.Queue('foo').Message('bar')`` to create a new message
-        # linked with the ``foo`` queue and carrying the ``bar`` receipt
-        # handle. If we accepted kwargs here, then adding positional
-        # arguments in the future would break existing callers.
- identifiers = subresource_model.resource.identifiers
- if identifiers is not None:
- for identifier, value in build_identifiers(identifiers, self):
- positional_args.append(value)
-
- return partial(resource_cls, *positional_args,
- client=self.meta.client)(*args, **kwargs)
-
- create_resource.__name__ = str(name)
- create_resource.__doc__ = docstring.SubResourceDocstring(
- resource_name=resource_name,
- sub_resource_model=subresource_model,
- service_model=service_context.service_model,
- include_signature=False
- )
- return create_resource
-
- def _create_action(factory_self, action_model, resource_name,
- service_context, is_load=False):
- """
- Creates a new method which makes a request to the underlying
- AWS service.
- """
-        # Create the action in this closure but before the ``do_action``
- # method below is invoked, which allows instances of the resource
- # to share the ServiceAction instance.
- action = ServiceAction(
- action_model, factory=factory_self,
- service_context=service_context
- )
-
- # A resource's ``load`` method is special because it sets
- # values on the resource instead of returning the response.
- if is_load:
- # We need a new method here because we want access to the
- # instance via ``self``.
- def do_action(self, *args, **kwargs):
- response = action(self, *args, **kwargs)
- self.meta.data = response
-            # Create the docstring for the load/reload methods.
- lazy_docstring = docstring.LoadReloadDocstring(
- action_name=action_model.name,
- resource_name=resource_name,
- event_emitter=factory_self._emitter,
- load_model=action_model,
- service_model=service_context.service_model,
- include_signature=False
- )
- else:
- # We need a new method here because we want access to the
- # instance via ``self``.
- def do_action(self, *args, **kwargs):
- response = action(self, *args, **kwargs)
-
- if hasattr(self, 'load'):
- # Clear cached data. It will be reloaded the next
- # time that an attribute is accessed.
- # TODO: Make this configurable in the future?
- self.meta.data = None
-
- return response
- lazy_docstring = docstring.ActionDocstring(
- resource_name=resource_name,
- event_emitter=factory_self._emitter,
- action_model=action_model,
- service_model=service_context.service_model,
- include_signature=False
- )
-
- do_action.__name__ = str(action_model.name)
- do_action.__doc__ = lazy_docstring
- return do_action
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import logging
+from functools import partial
+
+from .action import ServiceAction
+from .action import WaiterAction
+from .base import ResourceMeta, ServiceResource
+from .collection import CollectionFactory
+from .model import ResourceModel
+from .response import build_identifiers, ResourceHandler
+from ..exceptions import ResourceLoadException
+from ..docs import docstring
+
+
+logger = logging.getLogger(__name__)
+
+
+class ResourceFactory(object):
+ """
+ A factory to create new :py:class:`~boto3.resources.base.ServiceResource`
+ classes from a :py:class:`~boto3.resources.model.ResourceModel`. There are
+ two types of lookups that can be done: one on the service itself (e.g. an
+ SQS resource) and another on models contained within the service (e.g. an
+ SQS Queue resource).
+ """
+ def __init__(self, emitter):
+ self._collection_factory = CollectionFactory()
+ self._emitter = emitter
+
+ def load_from_definition(self, resource_name,
+ single_resource_json_definition, service_context):
+ """
+ Loads a resource from a model, creating a new
+ :py:class:`~boto3.resources.base.ServiceResource` subclass
+ with the correct properties and methods, named based on the service
+ and resource name, e.g. EC2.Instance.
+
+ :type resource_name: string
+ :param resource_name: Name of the resource to look up. For services,
+ this should match the ``service_name``.
+
+ :type single_resource_json_definition: dict
+ :param single_resource_json_definition:
+ The loaded json of a single service resource or resource
+ definition.
+
+ :type service_context: :py:class:`~boto3.utils.ServiceContext`
+ :param service_context: Context about the AWS service
+
+ :rtype: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
+ :return: The service or resource class.
+ """
+ logger.debug('Loading %s:%s', service_context.service_name,
+ resource_name)
+
+ # Using the loaded JSON create a ResourceModel object.
+ resource_model = ResourceModel(
+ resource_name, single_resource_json_definition,
+ service_context.resource_json_definitions
+ )
+
+ # Do some renaming of the shape if there was a naming collision
+ # that needed to be accounted for.
+ shape = None
+ if resource_model.shape:
+ shape = service_context.service_model.shape_for(
+ resource_model.shape)
+ resource_model.load_rename_map(shape)
+
+ # Set some basic info
+ meta = ResourceMeta(
+ service_context.service_name, resource_model=resource_model)
+ attrs = {
+ 'meta': meta,
+ }
+
+        # Create and load all of the attributes of the resource class based
+ # on the models.
+
+ # Identifiers
+ self._load_identifiers(
+ attrs=attrs, meta=meta, resource_name=resource_name,
+ resource_model=resource_model
+ )
+
+ # Load/Reload actions
+ self._load_actions(
+ attrs=attrs, resource_name=resource_name,
+ resource_model=resource_model, service_context=service_context
+ )
+
+ # Attributes that get auto-loaded
+ self._load_attributes(
+ attrs=attrs, meta=meta, resource_name=resource_name,
+ resource_model=resource_model,
+ service_context=service_context)
+
+ # Collections and their corresponding methods
+ self._load_collections(
+ attrs=attrs, resource_model=resource_model,
+ service_context=service_context)
+
+ # References and Subresources
+ self._load_has_relations(
+ attrs=attrs, resource_name=resource_name,
+ resource_model=resource_model, service_context=service_context
+ )
+
+ # Waiter resource actions
+ self._load_waiters(
+ attrs=attrs, resource_name=resource_name,
+ resource_model=resource_model, service_context=service_context
+ )
+
+ # Create the name based on the requested service and resource
+ cls_name = resource_name
+ if service_context.service_name == resource_name:
+ cls_name = 'ServiceResource'
+ cls_name = service_context.service_name + '.' + cls_name
+
+ base_classes = [ServiceResource]
+ if self._emitter is not None:
+ self._emitter.emit(
+ 'creating-resource-class.%s' % cls_name,
+ class_attributes=attrs, base_classes=base_classes,
+ service_context=service_context)
+ return type(str(cls_name), tuple(base_classes), attrs)
+
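The final ``type(str(cls_name), tuple(base_classes), attrs)`` call is ordinary dynamic class creation. A minimal, self-contained sketch of the same pattern, with made-up names rather than the real factory plumbing:

    # Assemble a class at runtime from a dict of attributes, the same
    # way load_from_definition builds ``attrs`` and then calls type().
    def make_size_property():
        def get_size(self):
            return self._data.get('size')
        get_size.__name__ = 'size'
        get_size.__doc__ = 'Illustrative lazy attribute.'
        return property(get_size)

    class Base(object):
        def __init__(self, data):
            self._data = data

    attrs = {'size': make_size_property()}
    Example = type(str('service.Example'), (Base,), attrs)

    print(Example({'size': 42}).size)  # 42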
+ def _load_identifiers(self, attrs, meta, resource_model, resource_name):
+ """
+ Populate required identifiers. These are arguments without which
+ the resource cannot be used. Identifiers become arguments for
+ operations on the resource.
+ """
+ for identifier in resource_model.identifiers:
+ meta.identifiers.append(identifier.name)
+ attrs[identifier.name] = self._create_identifier(
+ identifier, resource_name)
+
+ def _load_actions(self, attrs, resource_name, resource_model,
+ service_context):
+ """
+ Actions on the resource become methods, with the ``load`` method
+ being a special case which sets internal data for attributes, and
+ ``reload`` is an alias for ``load``.
+ """
+ if resource_model.load:
+ attrs['load'] = self._create_action(
+ action_model=resource_model.load, resource_name=resource_name,
+ service_context=service_context, is_load=True)
+ attrs['reload'] = attrs['load']
+
+ for action in resource_model.actions:
+ attrs[action.name] = self._create_action(
+ action_model=action, resource_name=resource_name,
+ service_context=service_context)
+
+ def _load_attributes(self, attrs, meta, resource_name, resource_model,
+ service_context):
+ """
+ Load resource attributes based on the resource shape. The shape
+ name is referenced in the resource JSON, but the shape itself
+ is defined in the Botocore service JSON, hence the need for
+ access to the ``service_model``.
+ """
+ if not resource_model.shape:
+ return
+
+ shape = service_context.service_model.shape_for(
+ resource_model.shape)
+
+ identifiers = dict(
+ (i.member_name, i)
+ for i in resource_model.identifiers if i.member_name)
+ attributes = resource_model.get_attributes(shape)
+ for name, (orig_name, member) in attributes.items():
+ if name in identifiers:
+ prop = self._create_identifier_alias(
+ resource_name=resource_name,
+ identifier=identifiers[name],
+ member_model=member,
+ service_context=service_context
+ )
+ else:
+ prop = self._create_autoload_property(
+ resource_name=resource_name,
+ name=orig_name, snake_cased=name,
+ member_model=member,
+ service_context=service_context
+ )
+ attrs[name] = prop
+
+ def _load_collections(self, attrs, resource_model, service_context):
+ """
+ Load resource collections from the model. Each collection becomes
+ a :py:class:`~boto3.resources.collection.CollectionManager` instance
+ on the resource instance, which allows you to iterate and filter
+ through the collection's items.
+ """
+ for collection_model in resource_model.collections:
+ attrs[collection_model.name] = self._create_collection(
+ resource_name=resource_model.name,
+ collection_model=collection_model,
+ service_context=service_context
+ )
+
+ def _load_has_relations(self, attrs, resource_name, resource_model,
+ service_context):
+ """
+ Load related resources, which are defined via a ``has``
+ relationship but conceptually come in two forms:
+
+ 1. A reference, which is a related resource instance and can be
+ ``None``, such as an EC2 instance's ``vpc``.
+ 2. A subresource, which is a resource constructor that will always
+ return a resource instance which shares identifiers/data with
+ this resource, such as ``s3.Bucket('name').Object('key')``.
+ """
+ for reference in resource_model.references:
+ # This is a dangling reference, i.e. we have all
+ # the data we need to create the resource, so
+ # this instance becomes an attribute on the class.
+ attrs[reference.name] = self._create_reference(
+ reference_model=reference,
+ resource_name=resource_name,
+ service_context=service_context
+ )
+
+ for subresource in resource_model.subresources:
+ # This is a sub-resource class you can create
+ # by passing in an identifier, e.g. s3.Bucket(name).
+ attrs[subresource.name] = self._create_class_partial(
+ subresource_model=subresource,
+ resource_name=resource_name,
+ service_context=service_context
+ )
+
+ self._create_available_subresources_command(
+ attrs, resource_model.subresources)
+
+ def _create_available_subresources_command(self, attrs, subresources):
+ _subresources = [subresource.name for subresource in subresources]
+ _subresources = sorted(_subresources)
+
+        def get_available_subresources(self):
+ """
+ Returns a list of all the available sub-resources for this
+ Resource.
+
+ :returns: A list containing the name of each sub-resource for this
+ resource
+ :rtype: list of str
+ """
+ return _subresources
+
+ attrs['get_available_subresources'] = get_available_subresources
+
+ def _load_waiters(self, attrs, resource_name, resource_model,
+ service_context):
+ """
+ Load resource waiters from the model. Each waiter allows you to
+ wait until a resource reaches a specific state by polling the state
+ of the resource.
+ """
+ for waiter in resource_model.waiters:
+ attrs[waiter.name] = self._create_waiter(
+ resource_waiter_model=waiter,
+ resource_name=resource_name,
+ service_context=service_context
+ )
+
+ def _create_identifier(factory_self, identifier, resource_name):
+ """
+ Creates a read-only property for identifier attributes.
+ """
+ def get_identifier(self):
+ # The default value is set to ``None`` instead of
+ # raising an AttributeError because when resources are
+ # instantiated a check is made such that none of the
+ # identifiers have a value ``None``. If any are ``None``,
+ # a more informative user error than a generic AttributeError
+ # is raised.
+ return getattr(self, '_' + identifier.name, None)
+
+ get_identifier.__name__ = str(identifier.name)
+ get_identifier.__doc__ = docstring.IdentifierDocstring(
+ resource_name=resource_name,
+ identifier_model=identifier,
+ include_signature=False
+ )
+
+ return property(get_identifier)
+
+ def _create_identifier_alias(factory_self, resource_name, identifier,
+ member_model, service_context):
+ """
+ Creates a read-only property that aliases an identifier.
+ """
+ def get_identifier(self):
+ return getattr(self, '_' + identifier.name, None)
+
+ get_identifier.__name__ = str(identifier.member_name)
+ get_identifier.__doc__ = docstring.AttributeDocstring(
+ service_name=service_context.service_name,
+ resource_name=resource_name,
+ attr_name=identifier.member_name,
+ event_emitter=factory_self._emitter,
+ attr_model=member_model,
+ include_signature=False
+ )
+
+ return property(get_identifier)
+
+ def _create_autoload_property(factory_self, resource_name, name,
+ snake_cased, member_model, service_context):
+ """
+ Creates a new property on the resource to lazy-load its value
+ via the resource's ``load`` method (if it exists).
+ """
+ # The property loader will check to see if this resource has already
+ # been loaded and return the cached value if possible. If not, then
+ # it first checks to see if it CAN be loaded (raise if not), then
+ # calls the load before returning the value.
+ def property_loader(self):
+ if self.meta.data is None:
+ if hasattr(self, 'load'):
+ self.load()
+ else:
+ raise ResourceLoadException(
+ '{0} has no load method'.format(
+ self.__class__.__name__))
+
+ return self.meta.data.get(name)
+
+ property_loader.__name__ = str(snake_cased)
+ property_loader.__doc__ = docstring.AttributeDocstring(
+ service_name=service_context.service_name,
+ resource_name=resource_name,
+ attr_name=snake_cased,
+ event_emitter=factory_self._emitter,
+ attr_model=member_model,
+ include_signature=False
+ )
+
+ return property(property_loader)
+
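The load-on-first-access behavior of ``property_loader`` boils down to a cache check plus an optional ``load()``. A stripped-down sketch of the same idea; all names here are illustrative, not boto3 internals:

    class LazyRecord(object):
        """Sketch of the lazy-load pattern behind property_loader."""
        def __init__(self):
            self._data = None  # stands in for self.meta.data

        def load(self):
            # A real resource would call the service here; we fake it.
            self._data = {'size': 42}

        @property
        def size(self):
            if self._data is None:
                self.load()  # first access triggers the load
            return self._data.get('size')

    record = LazyRecord()
    print(record.size)  # 42, loaded on first access
    print(record.size)  # 42, served from the cached data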
+ def _create_waiter(factory_self, resource_waiter_model, resource_name,
+ service_context):
+ """
+        Creates a new wait method for each resource where both a waiter
+        and a resource model are defined.
+ """
+ waiter = WaiterAction(resource_waiter_model,
+ waiter_resource_name=resource_waiter_model.name)
+
+ def do_waiter(self, *args, **kwargs):
+ waiter(self, *args, **kwargs)
+
+ do_waiter.__name__ = str(resource_waiter_model.name)
+ do_waiter.__doc__ = docstring.ResourceWaiterDocstring(
+ resource_name=resource_name,
+ event_emitter=factory_self._emitter,
+ service_model=service_context.service_model,
+ resource_waiter_model=resource_waiter_model,
+ service_waiter_model=service_context.service_waiter_model,
+ include_signature=False
+ )
+ return do_waiter
+
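On a generated resource class these methods surface as ``wait_until_<state>`` calls. A hedged usage sketch; the bucket name is illustrative and the call performs real polling against AWS:

    import boto3

    s3 = boto3.resource('s3')
    bucket = s3.Bucket('example-bucket')  # illustrative name
    # Attached by _create_waiter from the resource's waiter model;
    # polls until the bucket exists, raising WaiterError on timeout.
    bucket.wait_until_exists()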
+ def _create_collection(factory_self, resource_name, collection_model,
+ service_context):
+ """
+ Creates a new property on the resource to lazy-load a collection.
+ """
+ cls = factory_self._collection_factory.load_from_definition(
+ resource_name=resource_name, collection_model=collection_model,
+ service_context=service_context,
+ event_emitter=factory_self._emitter)
+
+ def get_collection(self):
+ return cls(
+ collection_model=collection_model, parent=self,
+ factory=factory_self, service_context=service_context)
+
+ get_collection.__name__ = str(collection_model.name)
+ get_collection.__doc__ = docstring.CollectionDocstring(
+ collection_model=collection_model, include_signature=False)
+ return property(get_collection)
+
+ def _create_reference(factory_self, reference_model, resource_name,
+ service_context):
+ """
+ Creates a new property on the resource to lazy-load a reference.
+ """
+        # References are essentially actions with no request
+ # or response, so we can re-use the response handlers to
+ # build up resources from identifiers and data members.
+ handler = ResourceHandler(
+ search_path=reference_model.resource.path, factory=factory_self,
+ resource_model=reference_model.resource,
+ service_context=service_context
+ )
+
+ # Are there any identifiers that need access to data members?
+ # This is important when building the resource below since
+ # it requires the data to be loaded.
+ needs_data = any(i.source == 'data' for i in
+ reference_model.resource.identifiers)
+
+ def get_reference(self):
+ # We need to lazy-evaluate the reference to handle circular
+ # references between resources. We do this by loading the class
+ # when first accessed.
+ # This is using a *response handler* so we need to make sure
+ # our data is loaded (if possible) and pass that data into
+ # the handler as if it were a response. This allows references
+ # to have their data loaded properly.
+ if needs_data and self.meta.data is None and hasattr(self, 'load'):
+ self.load()
+ return handler(self, {}, self.meta.data)
+
+ get_reference.__name__ = str(reference_model.name)
+ get_reference.__doc__ = docstring.ReferenceDocstring(
+ reference_model=reference_model,
+ include_signature=False
+ )
+ return property(get_reference)
+
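As a concrete, illustrative use of such a reference property: on an EC2 instance, ``vpc`` is resolved from the instance's data, so reading it may trigger a ``load()`` first:

    import boto3

    ec2 = boto3.resource('ec2')
    instance = ec2.Instance('i-0123456789abcdef0')  # illustrative id
    # ``vpc`` is a reference; because its identifier comes from the
    # instance data, access may call instance.load() behind the scenes.
    print(instance.vpc)  # an ec2.Vpc resource, or None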
+ def _create_class_partial(factory_self, subresource_model, resource_name,
+ service_context):
+ """
+ Creates a new method which acts as a functools.partial, passing
+ along the instance's low-level `client` to the new resource
+ class' constructor.
+ """
+ name = subresource_model.resource.type
+
+ def create_resource(self, *args, **kwargs):
+ # We need a new method here because we want access to the
+ # instance's client.
+ positional_args = []
+
+ # We lazy-load the class to handle circular references.
+ json_def = service_context.resource_json_definitions.get(name, {})
+ resource_cls = factory_self.load_from_definition(
+ resource_name=name,
+ single_resource_json_definition=json_def,
+ service_context=service_context
+ )
+
+        # Assumes that identifiers are in order, which lets you do
+        # e.g. ``sqs.Queue('foo').Message('bar')`` to create a new message
+        # linked with the ``foo`` queue and carrying the ``bar`` receipt
+        # handle. If we accepted kwargs here, then adding positional
+        # arguments in the future would break existing callers.
+ identifiers = subresource_model.resource.identifiers
+ if identifiers is not None:
+ for identifier, value in build_identifiers(identifiers, self):
+ positional_args.append(value)
+
+ return partial(resource_cls, *positional_args,
+ client=self.meta.client)(*args, **kwargs)
+
+ create_resource.__name__ = str(name)
+ create_resource.__doc__ = docstring.SubResourceDocstring(
+ resource_name=resource_name,
+ sub_resource_model=subresource_model,
+ service_model=service_context.service_model,
+ include_signature=False
+ )
+ return create_resource
+
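From the user's perspective the generated method behaves like a constructor bound to the parent's identifiers. A usage sketch with illustrative values:

    import boto3

    sqs = boto3.resource('sqs')
    queue = sqs.Queue('https://queue.amazonaws.com/123/example')  # illustrative URL
    # Message is the sub-resource method created above: the queue
    # supplies its own identifier (the URL) positionally, the caller
    # supplies the remaining one (the receipt handle).
    message = queue.Message('example-receipt-handle')  # illustrative handle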
+ def _create_action(factory_self, action_model, resource_name,
+ service_context, is_load=False):
+ """
+ Creates a new method which makes a request to the underlying
+ AWS service.
+ """
+        # Create the action in this closure but before the ``do_action``
+ # method below is invoked, which allows instances of the resource
+ # to share the ServiceAction instance.
+ action = ServiceAction(
+ action_model, factory=factory_self,
+ service_context=service_context
+ )
+
+ # A resource's ``load`` method is special because it sets
+ # values on the resource instead of returning the response.
+ if is_load:
+ # We need a new method here because we want access to the
+ # instance via ``self``.
+ def do_action(self, *args, **kwargs):
+ response = action(self, *args, **kwargs)
+ self.meta.data = response
+            # Create the docstring for the load/reload methods.
+ lazy_docstring = docstring.LoadReloadDocstring(
+ action_name=action_model.name,
+ resource_name=resource_name,
+ event_emitter=factory_self._emitter,
+ load_model=action_model,
+ service_model=service_context.service_model,
+ include_signature=False
+ )
+ else:
+ # We need a new method here because we want access to the
+ # instance via ``self``.
+ def do_action(self, *args, **kwargs):
+ response = action(self, *args, **kwargs)
+
+ if hasattr(self, 'load'):
+ # Clear cached data. It will be reloaded the next
+ # time that an attribute is accessed.
+ # TODO: Make this configurable in the future?
+ self.meta.data = None
+
+ return response
+ lazy_docstring = docstring.ActionDocstring(
+ resource_name=resource_name,
+ event_emitter=factory_self._emitter,
+ action_model=action_model,
+ service_model=service_context.service_model,
+ include_signature=False
+ )
+
+ do_action.__name__ = str(action_model.name)
+ do_action.__doc__ = lazy_docstring
+ return do_action
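A short usage sketch of a generated action, with illustrative values; note the cache invalidation described above:

    import boto3

    ec2 = boto3.resource('ec2')
    instance = ec2.Instance('i-0123456789abcdef0')  # illustrative id
    # ``stop`` was built by _create_action and calls StopInstances.
    # Since Instance defines ``load``, meta.data is cleared afterwards,
    # so the next attribute access reloads fresh data.
    response = instance.stop()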
diff --git a/contrib/python/boto3/boto3/resources/model.py b/contrib/python/boto3/boto3/resources/model.py
index e88d11b5a8..5b65b0e2eb 100644
--- a/contrib/python/boto3/boto3/resources/model.py
+++ b/contrib/python/boto3/boto3/resources/model.py
@@ -1,622 +1,622 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""
-The models defined in this file represent the resource JSON description
-format and provide a layer of abstraction from the raw JSON. The advantages
-of this are:
-
-* Pythonic interface (e.g. ``action.request.operation``)
-* Consumers need not change for minor JSON changes (e.g. renamed field)
-
-These models are used both by the resource factory to generate resource
-classes as well as by the documentation generator.
-"""
-
-import logging
-
-from botocore import xform_name
-
-
-logger = logging.getLogger(__name__)
-
-
-class Identifier(object):
- """
- A resource identifier, given by its name.
-
- :type name: string
- :param name: The name of the identifier
- """
- def __init__(self, name, member_name=None):
- #: (``string``) The name of the identifier
- self.name = name
- self.member_name = member_name
-
-
-class Action(object):
- """
- A service operation action.
-
- :type name: string
- :param name: The name of the action
- :type definition: dict
- :param definition: The JSON definition
- :type resource_defs: dict
- :param resource_defs: All resources defined in the service
- """
- def __init__(self, name, definition, resource_defs):
- self._definition = definition
-
- #: (``string``) The name of the action
- self.name = name
- #: (:py:class:`Request`) This action's request or ``None``
- self.request = None
- if 'request' in definition:
- self.request = Request(definition.get('request', {}))
- #: (:py:class:`ResponseResource`) This action's resource or ``None``
- self.resource = None
- if 'resource' in definition:
- self.resource = ResponseResource(definition.get('resource', {}),
- resource_defs)
- #: (``string``) The JMESPath search path or ``None``
- self.path = definition.get('path')
-
-
-class DefinitionWithParams(object):
- """
- An item which has parameters exposed via the ``params`` property.
- A request has an operation and parameters, while a waiter has
- a name, a low-level waiter name and parameters.
-
- :type definition: dict
- :param definition: The JSON definition
- """
- def __init__(self, definition):
- self._definition = definition
-
- @property
- def params(self):
- """
- Get a list of auto-filled parameters for this request.
-
- :type: list(:py:class:`Parameter`)
- """
- params = []
-
- for item in self._definition.get('params', []):
- params.append(Parameter(**item))
-
- return params
-
-
-class Parameter(object):
- """
- An auto-filled parameter which has a source and target. For example,
- the ``QueueUrl`` may be auto-filled from a resource's ``url`` identifier
- when making calls to ``queue.receive_messages``.
-
- :type target: string
- :param target: The destination parameter name, e.g. ``QueueUrl``
-    :type source: string
-    :param source: Where the source is defined, e.g. ``identifier``
-    :type name: string
-    :param name: The source name, e.g. ``Url``
- """
- def __init__(self, target, source, name=None, path=None, value=None,
- **kwargs):
- #: (``string``) The destination parameter name
- self.target = target
- #: (``string``) Where the source is defined
- self.source = source
- #: (``string``) The name of the source, if given
- self.name = name
- #: (``string``) The JMESPath query of the source
- self.path = path
- #: (``string|int|float|bool``) The source constant value
- self.value = value
-
- # Complain if we encounter any unknown values.
- if kwargs:
- logger.warning('Unknown parameter options found: %s', kwargs)
-
-
-class Request(DefinitionWithParams):
- """
- A service operation action request.
-
- :type definition: dict
- :param definition: The JSON definition
- """
- def __init__(self, definition):
- super(Request, self).__init__(definition)
-
- #: (``string``) The name of the low-level service operation
- self.operation = definition.get('operation')
-
-
-class Waiter(DefinitionWithParams):
- """
- An event waiter specification.
-
- :type name: string
- :param name: Name of the waiter
- :type definition: dict
- :param definition: The JSON definition
- """
- PREFIX = 'WaitUntil'
-
- def __init__(self, name, definition):
- super(Waiter, self).__init__(definition)
-
- #: (``string``) The name of this waiter
- self.name = name
-
- #: (``string``) The name of the underlying event waiter
- self.waiter_name = definition.get('waiterName')
-
-
-class ResponseResource(object):
- """
- A resource response to create after performing an action.
-
- :type definition: dict
- :param definition: The JSON definition
- :type resource_defs: dict
- :param resource_defs: All resources defined in the service
- """
- def __init__(self, definition, resource_defs):
- self._definition = definition
- self._resource_defs = resource_defs
-
- #: (``string``) The name of the response resource type
- self.type = definition.get('type')
-
- #: (``string``) The JMESPath search query or ``None``
- self.path = definition.get('path')
-
- @property
- def identifiers(self):
- """
- A list of resource identifiers.
-
- :type: list(:py:class:`Identifier`)
- """
- identifiers = []
-
- for item in self._definition.get('identifiers', []):
- identifiers.append(
- Parameter(**item))
-
- return identifiers
-
- @property
- def model(self):
- """
- Get the resource model for the response resource.
-
- :type: :py:class:`ResourceModel`
- """
- return ResourceModel(self.type, self._resource_defs[self.type],
- self._resource_defs)
-
-
-class Collection(Action):
- """
- A group of resources. See :py:class:`Action`.
-
- :type name: string
- :param name: The name of the collection
- :type definition: dict
- :param definition: The JSON definition
- :type resource_defs: dict
- :param resource_defs: All resources defined in the service
- """
- @property
- def batch_actions(self):
- """
- Get a list of batch actions supported by the resource type
- contained in this action. This is a shortcut for accessing
- the same information through the resource model.
-
- :rtype: list(:py:class:`Action`)
- """
- return self.resource.model.batch_actions
-
-
-class ResourceModel(object):
- """
- A model representing a resource, defined via a JSON description
- format. A resource has identifiers, attributes, actions,
- sub-resources, references and collections. For more information
- on resources, see :ref:`guide_resources`.
-
- :type name: string
- :param name: The name of this resource, e.g. ``sqs`` or ``Queue``
- :type definition: dict
- :param definition: The JSON definition
- :type resource_defs: dict
- :param resource_defs: All resources defined in the service
- """
- def __init__(self, name, definition, resource_defs):
- self._definition = definition
- self._resource_defs = resource_defs
- self._renamed = {}
-
- #: (``string``) The name of this resource
- self.name = name
- #: (``string``) The service shape name for this resource or ``None``
- self.shape = definition.get('shape')
-
- def load_rename_map(self, shape=None):
- """
- Load a name translation map given a shape. This will set
- up renamed values for any collisions, e.g. if the shape,
-        an action, and a subresource are all named ``foo``
- then the resource will have an action ``foo``, a subresource
- named ``Foo`` and a property named ``foo_attribute``.
- This is the order of precedence, from most important to
- least important:
-
- * Load action (resource.load)
- * Identifiers
- * Actions
- * Subresources
- * References
- * Collections
- * Waiters
- * Attributes (shape members)
-
- Batch actions are only exposed on collections, so do not
- get modified here. Subresources use upper camel casing, so
- are unlikely to collide with anything but other subresources.
-
- Creates a structure like this::
-
- renames = {
- ('action', 'id'): 'id_action',
- ('collection', 'id'): 'id_collection',
- ('attribute', 'id'): 'id_attribute'
- }
-
- # Get the final name for an action named 'id'
- name = renames.get(('action', 'id'), 'id')
-
- :type shape: botocore.model.Shape
- :param shape: The underlying shape for this resource.
- """
- # Meta is a reserved name for resources
- names = set(['meta'])
- self._renamed = {}
-
- if self._definition.get('load'):
- names.add('load')
-
- for item in self._definition.get('identifiers', []):
- self._load_name_with_category(names, item['name'], 'identifier')
-
- for name in self._definition.get('actions', {}):
- self._load_name_with_category(names, name, 'action')
-
- for name, ref in self._get_has_definition().items():
-            # Subresources require no data members, typically just
-            # identifiers and user input.
- data_required = False
- for identifier in ref['resource']['identifiers']:
- if identifier['source'] == 'data':
- data_required = True
- break
-
- if not data_required:
- self._load_name_with_category(names, name, 'subresource',
- snake_case=False)
- else:
- self._load_name_with_category(names, name, 'reference')
-
- for name in self._definition.get('hasMany', {}):
- self._load_name_with_category(names, name, 'collection')
-
- for name in self._definition.get('waiters', {}):
- self._load_name_with_category(names, Waiter.PREFIX + name,
- 'waiter')
-
- if shape is not None:
- for name in shape.members.keys():
- self._load_name_with_category(names, name, 'attribute')
-
- def _load_name_with_category(self, names, name, category,
- snake_case=True):
- """
- Load a name with a given category, possibly renaming it
- if that name is already in use. The name will be stored
- in ``names`` and possibly be set up in ``self._renamed``.
-
- :type names: set
- :param names: Existing names (Python attributes, properties, or
- methods) on the resource.
- :type name: string
- :param name: The original name of the value.
- :type category: string
- :param category: The value type, such as 'identifier' or 'action'
- :type snake_case: bool
- :param snake_case: True (default) if the name should be snake cased.
- """
- if snake_case:
- name = xform_name(name)
-
- if name in names:
-            logger.debug('Renaming %s %s %s', self.name, category, name)
- self._renamed[(category, name)] = name + '_' + category
- name += '_' + category
-
- if name in names:
- # This isn't good, let's raise instead of trying to keep
- # renaming this value.
- raise ValueError('Problem renaming {0} {1} to {2}!'.format(
- self.name, category, name))
-
- names.add(name)
-
- def _get_name(self, category, name, snake_case=True):
- """
- Get a possibly renamed value given a category and name. This
- uses the rename map set up in ``load_rename_map``, so that
- method must be called once first.
-
- :type category: string
- :param category: The value type, such as 'identifier' or 'action'
- :type name: string
- :param name: The original name of the value
- :type snake_case: bool
- :param snake_case: True (default) if the name should be snake cased.
- :rtype: string
- :return: Either the renamed value if it is set, otherwise the
- original name.
- """
- if snake_case:
- name = xform_name(name)
-
- return self._renamed.get((category, name), name)
-
- def get_attributes(self, shape):
- """
- Get a dictionary of attribute names to original name and shape
- models that represent the attributes of this resource. Looks
- like the following:
-
- {
- 'some_name': ('SomeName', <Shape...>)
- }
-
- :type shape: botocore.model.Shape
- :param shape: The underlying shape for this resource.
- :rtype: dict
- :return: Mapping of resource attributes.
- """
- attributes = {}
- identifier_names = [i.name for i in self.identifiers]
-
- for name, member in shape.members.items():
- snake_cased = xform_name(name)
- if snake_cased in identifier_names:
- # Skip identifiers, these are set through other means
- continue
- snake_cased = self._get_name('attribute', snake_cased,
- snake_case=False)
- attributes[snake_cased] = (name, member)
-
- return attributes
-
- @property
- def identifiers(self):
- """
- Get a list of resource identifiers.
-
- :type: list(:py:class:`Identifier`)
- """
- identifiers = []
-
- for item in self._definition.get('identifiers', []):
- name = self._get_name('identifier', item['name'])
- member_name = item.get('memberName', None)
- if member_name:
- member_name = self._get_name('attribute', member_name)
- identifiers.append(Identifier(name, member_name))
-
- return identifiers
-
- @property
- def load(self):
- """
- Get the load action for this resource, if it is defined.
-
- :type: :py:class:`Action` or ``None``
- """
- action = self._definition.get('load')
-
- if action is not None:
- action = Action('load', action, self._resource_defs)
-
- return action
-
- @property
- def actions(self):
- """
- Get a list of actions for this resource.
-
- :type: list(:py:class:`Action`)
- """
- actions = []
-
- for name, item in self._definition.get('actions', {}).items():
- name = self._get_name('action', name)
- actions.append(Action(name, item, self._resource_defs))
-
- return actions
-
- @property
- def batch_actions(self):
- """
- Get a list of batch actions for this resource.
-
- :type: list(:py:class:`Action`)
- """
- actions = []
-
- for name, item in self._definition.get('batchActions', {}).items():
- name = self._get_name('batch_action', name)
- actions.append(Action(name, item, self._resource_defs))
-
- return actions
-
- def _get_has_definition(self):
- """
- Get a ``has`` relationship definition from a model, where the
-        service resource model is treated specially in that it contains
- a relationship to every resource defined for the service. This
- allows things like ``s3.Object('bucket-name', 'key')`` to
- work even though the JSON doesn't define it explicitly.
-
- :rtype: dict
- :return: Mapping of names to subresource and reference
- definitions.
- """
- if self.name not in self._resource_defs:
- # This is the service resource, so let us expose all of
- # the defined resources as subresources.
- definition = {}
-
- for name, resource_def in self._resource_defs.items():
- # It's possible for the service to have renamed a
- # resource or to have defined multiple names that
- # point to the same resource type, so we need to
- # take that into account.
- found = False
- has_items = self._definition.get('has', {}).items()
- for has_name, has_def in has_items:
- if has_def.get('resource', {}).get('type') == name:
- definition[has_name] = has_def
- found = True
-
- if not found:
- # Create a relationship definition and attach it
- # to the model, such that all identifiers must be
- # supplied by the user. It will look something like:
- #
- # {
- # 'resource': {
- # 'type': 'ResourceName',
- # 'identifiers': [
- # {'target': 'Name1', 'source': 'input'},
- # {'target': 'Name2', 'source': 'input'},
- # ...
- # ]
- # }
- # }
- #
- fake_has = {
- 'resource': {
- 'type': name,
- 'identifiers': []
- }
- }
-
- for identifier in resource_def.get('identifiers', []):
- fake_has['resource']['identifiers'].append({
- 'target': identifier['name'], 'source': 'input'
- })
-
- definition[name] = fake_has
- else:
- definition = self._definition.get('has', {})
-
- return definition
-
- def _get_related_resources(self, subresources):
- """
- Get a list of sub-resources or references.
-
- :type subresources: bool
- :param subresources: ``True`` to get sub-resources, ``False`` to
- get references.
- :rtype: list(:py:class:`ResponseResource`)
- """
- resources = []
-
- for name, definition in self._get_has_definition().items():
- if subresources:
- name = self._get_name('subresource', name, snake_case=False)
- else:
- name = self._get_name('reference', name)
- action = Action(name, definition, self._resource_defs)
-
- data_required = False
- for identifier in action.resource.identifiers:
- if identifier.source == 'data':
- data_required = True
- break
-
- if subresources and not data_required:
- resources.append(action)
- elif not subresources and data_required:
- resources.append(action)
-
- return resources
-
- @property
- def subresources(self):
- """
- Get a list of sub-resources.
-
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+"""
+The models defined in this file represent the resource JSON description
+format and provide a layer of abstraction from the raw JSON. The advantages
+of this are:
+
+* Pythonic interface (e.g. ``action.request.operation``)
+* Consumers need not change for minor JSON changes (e.g. renamed field)
+
+These models are used both by the resource factory to generate resource
+classes as well as by the documentation generator.
+"""
+
+import logging
+
+from botocore import xform_name
+
+
+logger = logging.getLogger(__name__)
+
+
+class Identifier(object):
+ """
+ A resource identifier, given by its name.
+
+ :type name: string
+ :param name: The name of the identifier
+ """
+ def __init__(self, name, member_name=None):
+ #: (``string``) The name of the identifier
+ self.name = name
+ self.member_name = member_name
+
+
+class Action(object):
+ """
+ A service operation action.
+
+ :type name: string
+ :param name: The name of the action
+ :type definition: dict
+ :param definition: The JSON definition
+ :type resource_defs: dict
+ :param resource_defs: All resources defined in the service
+ """
+ def __init__(self, name, definition, resource_defs):
+ self._definition = definition
+
+ #: (``string``) The name of the action
+ self.name = name
+ #: (:py:class:`Request`) This action's request or ``None``
+ self.request = None
+ if 'request' in definition:
+ self.request = Request(definition.get('request', {}))
+ #: (:py:class:`ResponseResource`) This action's resource or ``None``
+ self.resource = None
+ if 'resource' in definition:
+ self.resource = ResponseResource(definition.get('resource', {}),
+ resource_defs)
+ #: (``string``) The JMESPath search path or ``None``
+ self.path = definition.get('path')
+
+
+class DefinitionWithParams(object):
+ """
+ An item which has parameters exposed via the ``params`` property.
+ A request has an operation and parameters, while a waiter has
+ a name, a low-level waiter name and parameters.
+
+ :type definition: dict
+ :param definition: The JSON definition
+ """
+ def __init__(self, definition):
+ self._definition = definition
+
+ @property
+ def params(self):
+ """
+ Get a list of auto-filled parameters for this request.
+
+ :type: list(:py:class:`Parameter`)
+ """
+ params = []
+
+ for item in self._definition.get('params', []):
+ params.append(Parameter(**item))
+
+ return params
+
+
+class Parameter(object):
+ """
+ An auto-filled parameter which has a source and target. For example,
+ the ``QueueUrl`` may be auto-filled from a resource's ``url`` identifier
+ when making calls to ``queue.receive_messages``.
+
+ :type target: string
+ :param target: The destination parameter name, e.g. ``QueueUrl``
+    :type source: string
+    :param source: Where the source is defined, e.g. ``identifier``
+    :type name: string
+    :param name: The source name, e.g. ``Url``
+ """
+ def __init__(self, target, source, name=None, path=None, value=None,
+ **kwargs):
+ #: (``string``) The destination parameter name
+ self.target = target
+ #: (``string``) Where the source is defined
+ self.source = source
+ #: (``string``) The name of the source, if given
+ self.name = name
+ #: (``string``) The JMESPath query of the source
+ self.path = path
+ #: (``string|int|float|bool``) The source constant value
+ self.value = value
+
+ # Complain if we encounter any unknown values.
+ if kwargs:
+ logger.warning('Unknown parameter options found: %s', kwargs)
+
+
+class Request(DefinitionWithParams):
+ """
+ A service operation action request.
+
+ :type definition: dict
+ :param definition: The JSON definition
+ """
+ def __init__(self, definition):
+ super(Request, self).__init__(definition)
+
+ #: (``string``) The name of the low-level service operation
+ self.operation = definition.get('operation')
+
+
+class Waiter(DefinitionWithParams):
+ """
+ An event waiter specification.
+
+ :type name: string
+ :param name: Name of the waiter
+ :type definition: dict
+ :param definition: The JSON definition
+ """
+ PREFIX = 'WaitUntil'
+
+ def __init__(self, name, definition):
+ super(Waiter, self).__init__(definition)
+
+ #: (``string``) The name of this waiter
+ self.name = name
+
+ #: (``string``) The name of the underlying event waiter
+ self.waiter_name = definition.get('waiterName')
+
+
+class ResponseResource(object):
+ """
+ A resource response to create after performing an action.
+
+ :type definition: dict
+ :param definition: The JSON definition
+ :type resource_defs: dict
+ :param resource_defs: All resources defined in the service
+ """
+ def __init__(self, definition, resource_defs):
+ self._definition = definition
+ self._resource_defs = resource_defs
+
+ #: (``string``) The name of the response resource type
+ self.type = definition.get('type')
+
+ #: (``string``) The JMESPath search query or ``None``
+ self.path = definition.get('path')
+
+ @property
+ def identifiers(self):
+ """
+ A list of resource identifiers.
+
+ :type: list(:py:class:`Identifier`)
+ """
+ identifiers = []
+
+ for item in self._definition.get('identifiers', []):
+ identifiers.append(
+ Parameter(**item))
+
+ return identifiers
+
+ @property
+ def model(self):
+ """
+ Get the resource model for the response resource.
+
+ :type: :py:class:`ResourceModel`
+ """
+ return ResourceModel(self.type, self._resource_defs[self.type],
+ self._resource_defs)
+
+
+class Collection(Action):
+ """
+ A group of resources. See :py:class:`Action`.
+
+ :type name: string
+ :param name: The name of the collection
+ :type definition: dict
+ :param definition: The JSON definition
+ :type resource_defs: dict
+ :param resource_defs: All resources defined in the service
+ """
+ @property
+ def batch_actions(self):
+ """
+ Get a list of batch actions supported by the resource type
+ contained in this action. This is a shortcut for accessing
+ the same information through the resource model.
+
+ :rtype: list(:py:class:`Action`)
+ """
+ return self.resource.model.batch_actions
+
+
+class ResourceModel(object):
+ """
+ A model representing a resource, defined via a JSON description
+ format. A resource has identifiers, attributes, actions,
+ sub-resources, references and collections. For more information
+ on resources, see :ref:`guide_resources`.
+
+ :type name: string
+ :param name: The name of this resource, e.g. ``sqs`` or ``Queue``
+ :type definition: dict
+ :param definition: The JSON definition
+ :type resource_defs: dict
+ :param resource_defs: All resources defined in the service
+ """
+ def __init__(self, name, definition, resource_defs):
+ self._definition = definition
+ self._resource_defs = resource_defs
+ self._renamed = {}
+
+ #: (``string``) The name of this resource
+ self.name = name
+ #: (``string``) The service shape name for this resource or ``None``
+ self.shape = definition.get('shape')
+
+ def load_rename_map(self, shape=None):
+ """
+ Load a name translation map given a shape. This will set
+ up renamed values for any collisions, e.g. if the shape,
+        an action, and a subresource are all named ``foo``
+ then the resource will have an action ``foo``, a subresource
+ named ``Foo`` and a property named ``foo_attribute``.
+ This is the order of precedence, from most important to
+ least important:
+
+ * Load action (resource.load)
+ * Identifiers
+ * Actions
+ * Subresources
+ * References
+ * Collections
+ * Waiters
+ * Attributes (shape members)
+
+ Batch actions are only exposed on collections, so do not
+ get modified here. Subresources use upper camel casing, so
+ are unlikely to collide with anything but other subresources.
+
+ Creates a structure like this::
+
+ renames = {
+ ('action', 'id'): 'id_action',
+ ('collection', 'id'): 'id_collection',
+ ('attribute', 'id'): 'id_attribute'
+ }
+
+ # Get the final name for an action named 'id'
+ name = renames.get(('action', 'id'), 'id')
+
+ :type shape: botocore.model.Shape
+ :param shape: The underlying shape for this resource.
+ """
+ # Meta is a reserved name for resources
+ names = set(['meta'])
+ self._renamed = {}
+
+ if self._definition.get('load'):
+ names.add('load')
+
+ for item in self._definition.get('identifiers', []):
+ self._load_name_with_category(names, item['name'], 'identifier')
+
+ for name in self._definition.get('actions', {}):
+ self._load_name_with_category(names, name, 'action')
+
+ for name, ref in self._get_has_definition().items():
+            # Subresources require no data members, typically just
+            # identifiers and user input.
+ data_required = False
+ for identifier in ref['resource']['identifiers']:
+ if identifier['source'] == 'data':
+ data_required = True
+ break
+
+ if not data_required:
+ self._load_name_with_category(names, name, 'subresource',
+ snake_case=False)
+ else:
+ self._load_name_with_category(names, name, 'reference')
+
+ for name in self._definition.get('hasMany', {}):
+ self._load_name_with_category(names, name, 'collection')
+
+ for name in self._definition.get('waiters', {}):
+ self._load_name_with_category(names, Waiter.PREFIX + name,
+ 'waiter')
+
+ if shape is not None:
+ for name in shape.members.keys():
+ self._load_name_with_category(names, name, 'attribute')
+
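The collision handling can be reduced to a few lines. A self-contained sketch of the renaming idea, not the actual implementation:

    def build_rename_map(pairs):
        # ``pairs`` is a list of (category, name), already in
        # precedence order; later entries lose and get suffixed.
        taken = set(['meta'])
        renamed = {}
        for category, name in pairs:
            if name in taken:
                renamed[(category, name)] = name + '_' + category
                name = name + '_' + category
            taken.add(name)
        return renamed

    renames = build_rename_map(
        [('action', 'id'), ('collection', 'id'), ('attribute', 'id')])
    print(renames.get(('collection', 'id')))    # id_collection
    print(renames.get(('action', 'id'), 'id'))  # id (kept, highest precedence)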
+ def _load_name_with_category(self, names, name, category,
+ snake_case=True):
+ """
+ Load a name with a given category, possibly renaming it
+ if that name is already in use. The name will be stored
+ in ``names`` and possibly be set up in ``self._renamed``.
+
+ :type names: set
+ :param names: Existing names (Python attributes, properties, or
+ methods) on the resource.
+ :type name: string
+ :param name: The original name of the value.
+ :type category: string
+ :param category: The value type, such as 'identifier' or 'action'
+ :type snake_case: bool
+ :param snake_case: True (default) if the name should be snake cased.
+ """
+ if snake_case:
+ name = xform_name(name)
+
+ if name in names:
+            logger.debug('Renaming %s %s %s', self.name, category, name)
+ self._renamed[(category, name)] = name + '_' + category
+ name += '_' + category
+
+ if name in names:
+ # This isn't good, let's raise instead of trying to keep
+ # renaming this value.
+ raise ValueError('Problem renaming {0} {1} to {2}!'.format(
+ self.name, category, name))
+
+ names.add(name)
+
+ def _get_name(self, category, name, snake_case=True):
+ """
+ Get a possibly renamed value given a category and name. This
+ uses the rename map set up in ``load_rename_map``, so that
+ method must be called once first.
+
+ :type category: string
+ :param category: The value type, such as 'identifier' or 'action'
+ :type name: string
+ :param name: The original name of the value
+ :type snake_case: bool
+ :param snake_case: True (default) if the name should be snake cased.
+ :rtype: string
+ :return: Either the renamed value if it is set, otherwise the
+ original name.
+ """
+ if snake_case:
+ name = xform_name(name)
+
+ return self._renamed.get((category, name), name)
+
+ def get_attributes(self, shape):
+ """
+ Get a dictionary of attribute names to original name and shape
+ models that represent the attributes of this resource. Looks
+ like the following:
+
+ {
+ 'some_name': ('SomeName', <Shape...>)
+ }
+
+ :type shape: botocore.model.Shape
+ :param shape: The underlying shape for this resource.
+ :rtype: dict
+ :return: Mapping of resource attributes.
+ """
+ attributes = {}
+ identifier_names = [i.name for i in self.identifiers]
+
+ for name, member in shape.members.items():
+ snake_cased = xform_name(name)
+ if snake_cased in identifier_names:
+ # Skip identifiers, these are set through other means
+ continue
+ snake_cased = self._get_name('attribute', snake_cased,
+ snake_case=False)
+ attributes[snake_cased] = (name, member)
+
+ return attributes
+
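The snake-cased keys come from botocore's ``xform_name`` transform, e.g.:

    from botocore import xform_name

    print(xform_name('QueueUrl'))                     # queue_url
    print(xform_name('ApproximateNumberOfMessages'))  # approximate_number_of_messages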
+ @property
+ def identifiers(self):
+ """
+ Get a list of resource identifiers.
+
+ :type: list(:py:class:`Identifier`)
+ """
+ identifiers = []
+
+ for item in self._definition.get('identifiers', []):
+ name = self._get_name('identifier', item['name'])
+ member_name = item.get('memberName', None)
+ if member_name:
+ member_name = self._get_name('attribute', member_name)
+ identifiers.append(Identifier(name, member_name))
+
+ return identifiers
+
+ @property
+ def load(self):
+ """
+ Get the load action for this resource, if it is defined.
+
+ :type: :py:class:`Action` or ``None``
+ """
+ action = self._definition.get('load')
+
+ if action is not None:
+ action = Action('load', action, self._resource_defs)
+
+ return action
+
+ @property
+ def actions(self):
+ """
+ Get a list of actions for this resource.
+
+ :type: list(:py:class:`Action`)
+ """
+ actions = []
+
+ for name, item in self._definition.get('actions', {}).items():
+ name = self._get_name('action', name)
+ actions.append(Action(name, item, self._resource_defs))
+
+ return actions
+
+ @property
+ def batch_actions(self):
+ """
+ Get a list of batch actions for this resource.
+
+ :type: list(:py:class:`Action`)
+ """
+ actions = []
+
+ for name, item in self._definition.get('batchActions', {}).items():
+ name = self._get_name('batch_action', name)
+ actions.append(Action(name, item, self._resource_defs))
+
+ return actions
+
+ def _get_has_definition(self):
+ """
+ Get a ``has`` relationship definition from a model, where the
+        service resource model is treated specially in that it contains
+ a relationship to every resource defined for the service. This
+ allows things like ``s3.Object('bucket-name', 'key')`` to
+ work even though the JSON doesn't define it explicitly.
+
+ :rtype: dict
+ :return: Mapping of names to subresource and reference
+ definitions.
+ """
+ if self.name not in self._resource_defs:
+ # This is the service resource, so let us expose all of
+ # the defined resources as subresources.
+ definition = {}
+
+ for name, resource_def in self._resource_defs.items():
+ # It's possible for the service to have renamed a
+ # resource or to have defined multiple names that
+ # point to the same resource type, so we need to
+ # take that into account.
+ found = False
+ has_items = self._definition.get('has', {}).items()
+ for has_name, has_def in has_items:
+ if has_def.get('resource', {}).get('type') == name:
+ definition[has_name] = has_def
+ found = True
+
+ if not found:
+ # Create a relationship definition and attach it
+ # to the model, such that all identifiers must be
+ # supplied by the user. It will look something like:
+ #
+ # {
+ # 'resource': {
+ # 'type': 'ResourceName',
+ # 'identifiers': [
+ # {'target': 'Name1', 'source': 'input'},
+ # {'target': 'Name2', 'source': 'input'},
+ # ...
+ # ]
+ # }
+ # }
+ #
+ fake_has = {
+ 'resource': {
+ 'type': name,
+ 'identifiers': []
+ }
+ }
+
+ for identifier in resource_def.get('identifiers', []):
+ fake_has['resource']['identifiers'].append({
+ 'target': identifier['name'], 'source': 'input'
+ })
+
+ definition[name] = fake_has
+ else:
+ definition = self._definition.get('has', {})
+
+ return definition
+
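For a hypothetical service resource with a single ``Queue`` type, the synthesized definition would look roughly like this:

    # Illustrative: what _get_has_definition fabricates for a service
    # resource when the JSON has no explicit ``has`` entry.
    resource_def = {'identifiers': [{'name': 'Url'}]}
    fake_has = {
        'resource': {
            'type': 'Queue',
            'identifiers': [
                {'target': i['name'], 'source': 'input'}
                for i in resource_def.get('identifiers', [])
            ],
        }
    }
    print(fake_has['resource']['identifiers'])
    # [{'target': 'Url', 'source': 'input'}]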
+ def _get_related_resources(self, subresources):
+ """
+ Get a list of sub-resources or references.
+
+ :type subresources: bool
+ :param subresources: ``True`` to get sub-resources, ``False`` to
+ get references.
+ :rtype: list(:py:class:`ResponseResource`)
+ """
+ resources = []
+
+ for name, definition in self._get_has_definition().items():
+ if subresources:
+ name = self._get_name('subresource', name, snake_case=False)
+ else:
+ name = self._get_name('reference', name)
+ action = Action(name, definition, self._resource_defs)
+
+ data_required = False
+ for identifier in action.resource.identifiers:
+ if identifier.source == 'data':
+ data_required = True
+ break
+
+ if subresources and not data_required:
+ resources.append(action)
+ elif not subresources and data_required:
+ resources.append(action)
+
+ return resources
+
+ @property
+ def subresources(self):
+ """
+ Get a list of sub-resources.
+
:type: list(:py:class:`ResponseResource`)
- """
- return self._get_related_resources(True)
-
- @property
- def references(self):
- """
- Get a list of reference resources.
-
- :type: list(:py:class:`ResponseResource`)
- """
- return self._get_related_resources(False)
-
- @property
- def collections(self):
- """
- Get a list of collections for this resource.
-
- :type: list(:py:class:`Collection`)
- """
- collections = []
-
- for name, item in self._definition.get('hasMany', {}).items():
- name = self._get_name('collection', name)
- collections.append(Collection(name, item, self._resource_defs))
-
- return collections
-
- @property
- def waiters(self):
- """
- Get a list of waiters for this resource.
-
- :type: list(:py:class:`Waiter`)
- """
- waiters = []
-
- for name, item in self._definition.get('waiters', {}).items():
- name = self._get_name('waiter', Waiter.PREFIX + name)
- waiters.append(Waiter(name, item))
-
- return waiters
+ """
+ return self._get_related_resources(True)
+
+ @property
+ def references(self):
+ """
+ Get a list of reference resources.
+
+ :type: list(:py:class:`ResponseResource`)
+ """
+ return self._get_related_resources(False)
+
+ @property
+ def collections(self):
+ """
+ Get a list of collections for this resource.
+
+ :type: list(:py:class:`Collection`)
+ """
+ collections = []
+
+ for name, item in self._definition.get('hasMany', {}).items():
+ name = self._get_name('collection', name)
+ collections.append(Collection(name, item, self._resource_defs))
+
+ return collections
+
+ @property
+ def waiters(self):
+ """
+ Get a list of waiters for this resource.
+
+ :type: list(:py:class:`Waiter`)
+ """
+ waiters = []
+
+ for name, item in self._definition.get('waiters', {}).items():
+ name = self._get_name('waiter', Waiter.PREFIX + name)
+ waiters.append(Waiter(name, item))
+
+ return waiters
diff --git a/contrib/python/boto3/boto3/resources/params.py b/contrib/python/boto3/boto3/resources/params.py
index 7b7c73be6b..148d69849c 100644
--- a/contrib/python/boto3/boto3/resources/params.py
+++ b/contrib/python/boto3/boto3/resources/params.py
@@ -1,168 +1,168 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import re
-
-import jmespath
-from botocore import xform_name
-
-from ..exceptions import ResourceLoadException
-
-
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import re
+
+import jmespath
+from botocore import xform_name
+
+from ..exceptions import ResourceLoadException
+
+
INDEX_RE = re.compile(r'\[(.*)\]$')
-
-
-def get_data_member(parent, path):
- """
- Get a data member from a parent using a JMESPath search query,
- loading the parent if required. If the parent cannot be loaded
- and no data is present then an exception is raised.
-
- :type parent: ServiceResource
- :param parent: The resource instance which contains the data we
- are interested in.
- :type path: string
- :param path: The JMESPath expression to query
- :raises ResourceLoadException: When no data is present and the
- resource cannot be loaded.
- :returns: The queried data or ``None``.
- """
- # Ensure the parent has its data loaded, if possible.
- if parent.meta.data is None:
- if hasattr(parent, 'load'):
- parent.load()
- else:
- raise ResourceLoadException(
- '{0} has no load method!'.format(parent.__class__.__name__))
-
- return jmespath.search(path, parent.meta.data)
-
-
-def create_request_parameters(parent, request_model, params=None, index=None):
- """
- Handle request parameters that can be filled in from identifiers,
- resource data members or constants.
-
- By passing ``params``, you can invoke this method multiple times and
- build up a parameter dict over time, which is particularly useful
- for reverse JMESPath expressions that append to lists.
-
- :type parent: ServiceResource
- :param parent: The resource instance to which this action is attached.
- :type request_model: :py:class:`~boto3.resources.model.Request`
- :param request_model: The action request model.
- :type params: dict
- :param params: If set, then add to this existing dict. It is both
- edited in-place and returned.
- :type index: int
- :param index: The position of an item within a list
- :rtype: dict
- :return: Pre-filled parameters to be sent to the request operation.
- """
- if params is None:
- params = {}
-
- for param in request_model.params:
- source = param.source
- target = param.target
-
- if source == 'identifier':
- # Resource identifier, e.g. queue.url
- value = getattr(parent, xform_name(param.name))
- elif source == 'data':
- # If this is a data member then it may incur a load
- # action before returning the value.
- value = get_data_member(parent, param.path)
- elif source in ['string', 'integer', 'boolean']:
- # These are hard-coded values in the definition
- value = param.value
- elif source == 'input':
- # This is provided by the user, so ignore it here
- continue
- else:
- raise NotImplementedError(
- 'Unsupported source type: {0}'.format(source))
-
- build_param_structure(params, target, value, index)
-
- return params
-
-
-def build_param_structure(params, target, value, index=None):
- """
- This method provides a basic reverse JMESPath implementation that
- lets you go from a JMESPath-like string to a possibly deeply nested
- object. The ``params`` are mutated in-place, so subsequent calls
- can modify the same element by its index.
-
- >>> build_param_structure(params, 'test[0]', 1)
- >>> print(params)
- {'test': [1]}
-
- >>> build_param_structure(params, 'foo.bar[0].baz', 'hello world')
- >>> print(params)
- {'test': [1], 'foo': {'bar': [{'baz': 'hello world'}]}}
-
- """
- pos = params
- parts = target.split('.')
-
- # First, split into parts like 'foo', 'bar[0]', 'baz' and process
- # each piece. It can either be a list or a dict, depending on if
- # an index like `[0]` is present. We detect this via a regular
- # expression, and keep track of where we are in params via the
- # pos variable, walking down to the last item. Once there, we
- # set the value.
- for i, part in enumerate(parts):
- # Is it indexing an array?
- result = INDEX_RE.search(part)
- if result:
- if result.group(1):
- if result.group(1) == '*':
- part = part[:-3]
- else:
- # We have an explicit index
- index = int(result.group(1))
- part = part[:-len(str(index) + '[]')]
- else:
- # Index will be set after we know the proper part
- # name and that it's a list instance.
- index = None
- part = part[:-2]
-
- if part not in pos or not isinstance(pos[part], list):
- pos[part] = []
-
- # This means we should append, e.g. 'foo[]'
- if index is None:
- index = len(pos[part])
-
- while len(pos[part]) <= index:
- # Assume it's a dict until we set the final value below
- pos[part].append({})
-
- # Last item? Set the value, otherwise set the new position
- if i == len(parts) - 1:
- pos[part][index] = value
- else:
- # The new pos is the *item* in the array, not the array!
- pos = pos[part][index]
- else:
- if part not in pos:
- pos[part] = {}
-
- # Last item? Set the value, otherwise set the new position
- if i == len(parts) - 1:
- pos[part] = value
- else:
- pos = pos[part]
+
+
+def get_data_member(parent, path):
+ """
+ Get a data member from a parent using a JMESPath search query,
+ loading the parent if required. If the parent cannot be loaded
+ and no data is present then an exception is raised.
+
+ :type parent: ServiceResource
+ :param parent: The resource instance which contains the data we
+ are interested in.
+ :type path: string
+ :param path: The JMESPath expression to query
+ :raises ResourceLoadException: When no data is present and the
+ resource cannot be loaded.
+ :returns: The queried data or ``None``.
+ """
+ # Ensure the parent has its data loaded, if possible.
+ if parent.meta.data is None:
+ if hasattr(parent, 'load'):
+ parent.load()
+ else:
+ raise ResourceLoadException(
+ '{0} has no load method!'.format(parent.__class__.__name__))
+
+ return jmespath.search(path, parent.meta.data)
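# Editor's sketch (not diff content): the lazy load above, exercised with
# a minimal hypothetical stand-in for a resource; jmespath is already
# imported at the top of this module.
class _FakeMeta(object):
    data = None

class _FakeParent(object):
    meta = _FakeMeta()

    def load(self):
        self.meta.data = {'QueueUrl': 'https://queue.example'}

print(get_data_member(_FakeParent(), 'QueueUrl'))
# -> 'https://queue.example' (load() ran because meta.data was None)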
+
+
+def create_request_parameters(parent, request_model, params=None, index=None):
+ """
+ Handle request parameters that can be filled in from identifiers,
+ resource data members or constants.
+
+ By passing ``params``, you can invoke this method multiple times and
+ build up a parameter dict over time, which is particularly useful
+ for reverse JMESPath expressions that append to lists.
+
+ :type parent: ServiceResource
+ :param parent: The resource instance to which this action is attached.
+ :type request_model: :py:class:`~boto3.resources.model.Request`
+ :param request_model: The action request model.
+ :type params: dict
+ :param params: If set, then add to this existing dict. It is both
+ edited in-place and returned.
+ :type index: int
+ :param index: The position of an item within a list
+ :rtype: dict
+ :return: Pre-filled parameters to be sent to the request operation.
+ """
+ if params is None:
+ params = {}
+
+ for param in request_model.params:
+ source = param.source
+ target = param.target
+
+ if source == 'identifier':
+ # Resource identifier, e.g. queue.url
+ value = getattr(parent, xform_name(param.name))
+ elif source == 'data':
+ # If this is a data member then it may incur a load
+ # action before returning the value.
+ value = get_data_member(parent, param.path)
+ elif source in ['string', 'integer', 'boolean']:
+ # These are hard-coded values in the definition
+ value = param.value
+ elif source == 'input':
+ # This is provided by the user, so ignore it here
+ continue
+ else:
+ raise NotImplementedError(
+ 'Unsupported source type: {0}'.format(source))
+
+ build_param_structure(params, target, value, index)
+
+ return params
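# Editor's sketch (not diff content): the source dispatch above with
# hypothetical stand-ins for the request model. Only the 'string' and
# 'input' branches are exercised, so any object can act as the parent.
class _FakeParam(object):
    def __init__(self, source, target, value=None):
        self.source, self.target, self.value = source, target, value
        self.name = self.path = None

class _FakeRequest(object):
    params = [_FakeParam('string', 'QueueName', value='my-queue'),
              _FakeParam('input', 'Ignored')]

print(create_request_parameters(object(), _FakeRequest()))
# -> {'QueueName': 'my-queue'}  ('input' params are left to the caller)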
+
+
+def build_param_structure(params, target, value, index=None):
+ """
+ This method provides a basic reverse JMESPath implementation that
+ lets you go from a JMESPath-like string to a possibly deeply nested
+ object. The ``params`` are mutated in-place, so subsequent calls
+ can modify the same element by its index.
+
+ >>> build_param_structure(params, 'test[0]', 1)
+ >>> print(params)
+ {'test': [1]}
+
+ >>> build_param_structure(params, 'foo.bar[0].baz', 'hello world')
+ >>> print(params)
+ {'test': [1], 'foo': {'bar': [{'baz': 'hello world'}]}}
+
+ """
+ pos = params
+ parts = target.split('.')
+
+ # First, split into parts like 'foo', 'bar[0]', 'baz' and process
+ # each piece. It can either be a list or a dict, depending on if
+ # an index like `[0]` is present. We detect this via a regular
+ # expression, and keep track of where we are in params via the
+ # pos variable, walking down to the last item. Once there, we
+ # set the value.
+ for i, part in enumerate(parts):
+ # Is it indexing an array?
+ result = INDEX_RE.search(part)
+ if result:
+ if result.group(1):
+ if result.group(1) == '*':
+ part = part[:-3]
+ else:
+ # We have an explicit index
+ index = int(result.group(1))
+ part = part[:-len(str(index) + '[]')]
+ else:
+ # Index will be set after we know the proper part
+ # name and that it's a list instance.
+ index = None
+ part = part[:-2]
+
+ if part not in pos or not isinstance(pos[part], list):
+ pos[part] = []
+
+ # This means we should append, e.g. 'foo[]'
+ if index is None:
+ index = len(pos[part])
+
+ while len(pos[part]) <= index:
+ # Assume it's a dict until we set the final value below
+ pos[part].append({})
+
+ # Last item? Set the value, otherwise set the new position
+ if i == len(parts) - 1:
+ pos[part][index] = value
+ else:
+ # The new pos is the *item* in the array, not the array!
+ pos = pos[part][index]
+ else:
+ if part not in pos:
+ pos[part] = {}
+
+ # Last item? Set the value, otherwise set the new position
+ if i == len(parts) - 1:
+ pos[part] = value
+ else:
+ pos = pos[part]
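Editor's sketch: a runnable illustration of build_param_structure,
matching the (corrected) doctest above; the values are invented.

    params = {}
    build_param_structure(params, 'test[0]', 1)
    build_param_structure(params, 'foo.bar[0].baz', 'hello world')
    build_param_structure(params, 'tags[]', {'Key': 'env'})  # append form
    assert params == {
        'test': [1],
        'foo': {'bar': [{'baz': 'hello world'}]},
        'tags': [{'Key': 'env'}],
    }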
diff --git a/contrib/python/boto3/boto3/resources/response.py b/contrib/python/boto3/boto3/resources/response.py
index 8eee70dcdb..57803614d3 100644
--- a/contrib/python/boto3/boto3/resources/response.py
+++ b/contrib/python/boto3/boto3/resources/response.py
@@ -1,300 +1,300 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import jmespath
-from botocore import xform_name
-
-from .params import get_data_member
-
-
-def all_not_none(iterable):
- """
- Return True if all elements of the iterable are not None (or if the
- iterable is empty). This is like the built-in ``all``, except it checks
- against None, so 0 and False are allowable values.
- """
- for element in iterable:
- if element is None:
- return False
- return True
-
-
-def build_identifiers(identifiers, parent, params=None, raw_response=None):
- """
- Builds a mapping of identifier names to values based on the
- identifier source location, type, and target. Identifier
- values may be scalars or lists depending on the source type
- and location.
-
- :type identifiers: list
- :param identifiers: List of :py:class:`~boto3.resources.model.Parameter`
- definitions
- :type parent: ServiceResource
- :param parent: The resource instance to which this action is attached.
- :type params: dict
- :param params: Request parameters sent to the service.
- :type raw_response: dict
- :param raw_response: Low-level operation response.
- :rtype: list
- :return: An ordered list of ``(name, value)`` identifier tuples.
- """
- results = []
-
- for identifier in identifiers:
- source = identifier.source
- target = identifier.target
-
- if source == 'response':
- value = jmespath.search(identifier.path, raw_response)
- elif source == 'requestParameter':
- value = jmespath.search(identifier.path, params)
- elif source == 'identifier':
- value = getattr(parent, xform_name(identifier.name))
- elif source == 'data':
- # If this is a data member then it may incur a load
- # action before returning the value.
- value = get_data_member(parent, identifier.path)
- elif source == 'input':
- # This value is set by the user, so ignore it here
- continue
- else:
- raise NotImplementedError(
- 'Unsupported source type: {0}'.format(source))
-
- results.append((xform_name(target), value))
-
- return results
-
-
-def build_empty_response(search_path, operation_name, service_model):
- """
- Creates an appropriate empty response for the type that is expected,
- based on the service model's shape type. For example, a value that
- is normally a list would then return an empty list. A structure would
- return an empty dict, and a number would return None.
-
- :type search_path: string
- :param search_path: JMESPath expression to search in the response
- :type operation_name: string
- :param operation_name: Name of the underlying service operation.
- :type service_model: :py:class:`botocore.model.ServiceModel`
- :param service_model: The Botocore service model
- :rtype: dict, list, or None
- :return: An appropriate empty value
- """
- response = None
-
- operation_model = service_model.operation_model(operation_name)
- shape = operation_model.output_shape
-
- if search_path:
- # Walk the search path and find the final shape. For example, given
- # a path of ``foo.bar[0].baz``, we first find the shape for ``foo``,
- # then the shape for ``bar`` (ignoring the indexing), and finally
- # the shape for ``baz``.
- for item in search_path.split('.'):
- item = item.strip('[0123456789]$')
-
- if shape.type_name == 'structure':
- shape = shape.members[item]
- elif shape.type_name == 'list':
- shape = shape.member
- else:
- raise NotImplementedError(
- 'Search path hits shape type {0} from {1}'.format(
- shape.type_name, item))
-
- # Anything not handled here is set to None
- if shape.type_name == 'structure':
- response = {}
- elif shape.type_name == 'list':
- response = []
- elif shape.type_name == 'map':
- response = {}
-
- return response
-
-
-class RawHandler(object):
- """
- A raw action response handler. This passes the response dictionary
- through, optionally after performing a JMESPath search if one
- has been defined for the action.
-
- :type search_path: string
- :param search_path: JMESPath expression to search in the response
- :rtype: dict
- :return: Service response
- """
- def __init__(self, search_path):
- self.search_path = search_path
-
- def __call__(self, parent, params, response):
- """
- :type parent: ServiceResource
- :param parent: The resource instance to which this action is attached.
- :type params: dict
- :param params: Request parameters sent to the service.
- :type response: dict
- :param response: Low-level operation response.
- """
- # TODO: Remove the '$' check after JMESPath supports it
- if self.search_path and self.search_path != '$':
- response = jmespath.search(self.search_path, response)
-
- return response
-
-
-class ResourceHandler(object):
- """
- Creates a new resource or list of new resources from the low-level
- response based on the given response resource definition.
-
- :type search_path: string
- :param search_path: JMESPath expression to search in the response
-
- :type factory: ResourceFactory
- :param factory: The factory that created the resource class to which
- this action is attached.
-
- :type resource_model: :py:class:`~boto3.resources.model.ResponseResource`
- :param resource_model: Response resource model.
-
- :type service_context: :py:class:`~boto3.utils.ServiceContext`
- :param service_context: Context about the AWS service
-
- :type operation_name: string
- :param operation_name: Name of the underlying service operation, if it
- exists.
-
- :rtype: ServiceResource or list
- :return: New resource instance(s).
- """
- def __init__(self, search_path, factory, resource_model,
- service_context, operation_name=None):
- self.search_path = search_path
- self.factory = factory
- self.resource_model = resource_model
- self.operation_name = operation_name
- self.service_context = service_context
-
- def __call__(self, parent, params, response):
- """
- :type parent: ServiceResource
- :param parent: The resource instance to which this action is attached.
- :type params: dict
- :param params: Request parameters sent to the service.
- :type response: dict
- :param response: Low-level operation response.
- """
- resource_name = self.resource_model.type
- json_definition = self.service_context.resource_json_definitions.get(
- resource_name)
-
- # Load the new resource class that will result from this action.
- resource_cls = self.factory.load_from_definition(
- resource_name=resource_name,
- single_resource_json_definition=json_definition,
- service_context=self.service_context
- )
- raw_response = response
- search_response = None
-
- # Anytime a path is defined, it means the response contains the
- # resource's attributes, so resource_data gets set here. It
- # eventually ends up in resource.meta.data, which is where
- # the attribute properties look for data.
- if self.search_path:
- search_response = jmespath.search(self.search_path, raw_response)
-
- # First, we parse all the identifiers, then create the individual
- # response resources using them. Any identifiers that are lists
- # will have one item consumed from the front of the list for each
- # resource that is instantiated. Items which are not a list will
- # be set as the same value on each new resource instance.
- identifiers = dict(build_identifiers(
- self.resource_model.identifiers, parent, params,
- raw_response))
-
- # If any of the identifiers is a list, then the response is plural
- plural = [v for v in identifiers.values() if isinstance(v, list)]
-
- if plural:
- response = []
-
- # The number of items in an identifier that is a list will
- # determine how many resource instances to create.
- for i in range(len(plural[0])):
- # Response item data is *only* available if a search path
- # was given. This prevents accidentally loading unrelated
- # data that may be in the response.
- response_item = None
- if search_response:
- response_item = search_response[i]
- response.append(
- self.handle_response_item(resource_cls, parent,
- identifiers, response_item))
- elif all_not_none(identifiers.values()):
- # All identifiers must always exist, otherwise the resource
- # cannot be instantiated.
- response = self.handle_response_item(
- resource_cls, parent, identifiers, search_response)
- else:
- # The response should be empty, but that may mean an
- # empty dict, list, or None based on whether we make
- # a remote service call and what shape it is expected
- # to return.
- response = None
- if self.operation_name is not None:
- # A remote service call was made, so try and determine
- # its shape.
- response = build_empty_response(
- self.search_path, self.operation_name,
- self.service_context.service_model)
-
- return response
-
- def handle_response_item(self, resource_cls, parent, identifiers,
- resource_data):
- """
- Handles the creation of a single response item by setting
- parameters and creating the appropriate resource instance.
-
- :type resource_cls: ServiceResource subclass
- :param resource_cls: The resource class to instantiate.
- :type parent: ServiceResource
- :param parent: The resource instance to which this action is attached.
- :type identifiers: dict
- :param identifiers: Map of identifier names to value or values.
- :type resource_data: dict or None
- :param resource_data: Data for resource attributes.
- :rtype: ServiceResource
- :return: New resource instance.
- """
- kwargs = {
- 'client': parent.meta.client,
- }
-
- for name, value in identifiers.items():
- # If value is a list, then consume the next item
- if isinstance(value, list):
- value = value.pop(0)
-
- kwargs[name] = value
-
- resource = resource_cls(**kwargs)
-
- if resource_data is not None:
- resource.meta.data = resource_data
-
- return resource
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import jmespath
+from botocore import xform_name
+
+from .params import get_data_member
+
+
+def all_not_none(iterable):
+ """
+ Return True if all elements of the iterable are not None (or if the
+ iterable is empty). This is like the built-in ``all``, except it checks
+ against None, so 0 and False are allowable values.
+ """
+ for element in iterable:
+ if element is None:
+ return False
+ return True
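# Editor's note: falsy-but-present values pass the check above, e.g.:
assert all_not_none([0, False, ''])
assert all_not_none([])
assert not all_not_none([1, None, 2])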
+
+
+def build_identifiers(identifiers, parent, params=None, raw_response=None):
+ """
+ Builds a mapping of identifier names to values based on the
+ identifier source location, type, and target. Identifier
+ values may be scalars or lists depending on the source type
+ and location.
+
+ :type identifiers: list
+ :param identifiers: List of :py:class:`~boto3.resources.model.Parameter`
+ definitions
+ :type parent: ServiceResource
+ :param parent: The resource instance to which this action is attached.
+ :type params: dict
+ :param params: Request parameters sent to the service.
+ :type raw_response: dict
+ :param raw_response: Low-level operation response.
+ :rtype: list
+ :return: An ordered list of ``(name, value)`` identifier tuples.
+ """
+ results = []
+
+ for identifier in identifiers:
+ source = identifier.source
+ target = identifier.target
+
+ if source == 'response':
+ value = jmespath.search(identifier.path, raw_response)
+ elif source == 'requestParameter':
+ value = jmespath.search(identifier.path, params)
+ elif source == 'identifier':
+ value = getattr(parent, xform_name(identifier.name))
+ elif source == 'data':
+ # If this is a data member then it may incur a load
+ # action before returning the value.
+ value = get_data_member(parent, identifier.path)
+ elif source == 'input':
+ # This value is set by the user, so ignore it here
+ continue
+ else:
+ raise NotImplementedError(
+ 'Unsupported source type: {0}'.format(source))
+
+ results.append((xform_name(target), value))
+
+ return results
+
+
+def build_empty_response(search_path, operation_name, service_model):
+ """
+ Creates an appropriate empty response for the type that is expected,
+ based on the service model's shape type. For example, a value that
+ is normally a list would then return an empty list. A structure would
+ return an empty dict, and a number would return None.
+
+ :type search_path: string
+ :param search_path: JMESPath expression to search in the response
+ :type operation_name: string
+ :param operation_name: Name of the underlying service operation.
+ :type service_model: :py:class:`botocore.model.ServiceModel`
+ :param service_model: The Botocore service model
+ :rtype: dict, list, or None
+ :return: An appropriate empty value
+ """
+ response = None
+
+ operation_model = service_model.operation_model(operation_name)
+ shape = operation_model.output_shape
+
+ if search_path:
+ # Walk the search path and find the final shape. For example, given
+ # a path of ``foo.bar[0].baz``, we first find the shape for ``foo``,
+ # then the shape for ``bar`` (ignoring the indexing), and finally
+ # the shape for ``baz``.
+ for item in search_path.split('.'):
+ item = item.strip('[0123456789]$')
+
+ if shape.type_name == 'structure':
+ shape = shape.members[item]
+ elif shape.type_name == 'list':
+ shape = shape.member
+ else:
+ raise NotImplementedError(
+ 'Search path hits shape type {0} from {1}'.format(
+ shape.type_name, item))
+
+ # Anything not handled here is set to None
+ if shape.type_name == 'structure':
+ response = {}
+ elif shape.type_name == 'list':
+ response = []
+ elif shape.type_name == 'map':
+ response = {}
+
+ return response
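# Editor's note: the strip() call above removes list indexing and a
# trailing '$' from each path segment before the shape lookup, e.g.:
for segment in 'foo.bar[0].baz'.split('.'):
    print(segment.strip('[0123456789]$'))  # foo, then bar, then baz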
+
+
+class RawHandler(object):
+ """
+ A raw action response handler. This passes the response dictionary
+ through, optionally after performing a JMESPath search if one
+ has been defined for the action.
+
+ :type search_path: string
+ :param search_path: JMESPath expression to search in the response
+ :rtype: dict
+ :return: Service response
+ """
+ def __init__(self, search_path):
+ self.search_path = search_path
+
+ def __call__(self, parent, params, response):
+ """
+ :type parent: ServiceResource
+ :param parent: The resource instance to which this action is attached.
+ :type params: dict
+ :param params: Request parameters sent to the service.
+ :type response: dict
+ :param response: Low-level operation response.
+ """
+ # TODO: Remove the '$' check after JMESPath supports it
+ if self.search_path and self.search_path != '$':
+ response = jmespath.search(self.search_path, response)
+
+ return response
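# Editor's sketch (invented data): RawHandler simply narrows the response
# with the configured JMESPath expression.
handler = RawHandler('Buckets[0].Name')
print(handler(None, {}, {'Buckets': [{'Name': 'b1'}, {'Name': 'b2'}]}))
# -> 'b1'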
+
+
+class ResourceHandler(object):
+ """
+ Creates a new resource or list of new resources from the low-level
+ response based on the given response resource definition.
+
+ :type search_path: string
+ :param search_path: JMESPath expression to search in the response
+
+ :type factory: ResourceFactory
+ :param factory: The factory that created the resource class to which
+ this action is attached.
+
+ :type resource_model: :py:class:`~boto3.resources.model.ResponseResource`
+ :param resource_model: Response resource model.
+
+ :type service_context: :py:class:`~boto3.utils.ServiceContext`
+ :param service_context: Context about the AWS service
+
+ :type operation_name: string
+ :param operation_name: Name of the underlying service operation, if it
+ exists.
+
+ :rtype: ServiceResource or list
+ :return: New resource instance(s).
+ """
+ def __init__(self, search_path, factory, resource_model,
+ service_context, operation_name=None):
+ self.search_path = search_path
+ self.factory = factory
+ self.resource_model = resource_model
+ self.operation_name = operation_name
+ self.service_context = service_context
+
+ def __call__(self, parent, params, response):
+ """
+ :type parent: ServiceResource
+ :param parent: The resource instance to which this action is attached.
+ :type params: dict
+ :param params: Request parameters sent to the service.
+ :type response: dict
+ :param response: Low-level operation response.
+ """
+ resource_name = self.resource_model.type
+ json_definition = self.service_context.resource_json_definitions.get(
+ resource_name)
+
+ # Load the new resource class that will result from this action.
+ resource_cls = self.factory.load_from_definition(
+ resource_name=resource_name,
+ single_resource_json_definition=json_definition,
+ service_context=self.service_context
+ )
+ raw_response = response
+ search_response = None
+
+ # Anytime a path is defined, it means the response contains the
+ # resource's attributes, so resource_data gets set here. It
+ # eventually ends up in resource.meta.data, which is where
+ # the attribute properties look for data.
+ if self.search_path:
+ search_response = jmespath.search(self.search_path, raw_response)
+
+ # First, we parse all the identifiers, then create the individual
+ # response resources using them. Any identifiers that are lists
+ # will have one item consumed from the front of the list for each
+ # resource that is instantiated. Items which are not a list will
+ # be set as the same value on each new resource instance.
+ identifiers = dict(build_identifiers(
+ self.resource_model.identifiers, parent, params,
+ raw_response))
+
+ # If any of the identifiers is a list, then the response is plural
+ plural = [v for v in identifiers.values() if isinstance(v, list)]
+
+ if plural:
+ response = []
+
+ # The number of items in an identifier that is a list will
+ # determine how many resource instances to create.
+ for i in range(len(plural[0])):
+ # Response item data is *only* available if a search path
+ # was given. This prevents accidentally loading unrelated
+ # data that may be in the response.
+ response_item = None
+ if search_response:
+ response_item = search_response[i]
+ response.append(
+ self.handle_response_item(resource_cls, parent,
+ identifiers, response_item))
+ elif all_not_none(identifiers.values()):
+ # All identifiers must always exist, otherwise the resource
+ # cannot be instantiated.
+ response = self.handle_response_item(
+ resource_cls, parent, identifiers, search_response)
+ else:
+ # The response should be empty, but that may mean an
+ # empty dict, list, or None based on whether we make
+ # a remote service call and what shape it is expected
+ # to return.
+ response = None
+ if self.operation_name is not None:
+ # A remote service call was made, so try and determine
+ # its shape.
+ response = build_empty_response(
+ self.search_path, self.operation_name,
+ self.service_context.service_model)
+
+ return response
+
+ def handle_response_item(self, resource_cls, parent, identifiers,
+ resource_data):
+ """
+ Handles the creation of a single response item by setting
+ parameters and creating the appropriate resource instance.
+
+ :type resource_cls: ServiceResource subclass
+ :param resource_cls: The resource class to instantiate.
+ :type parent: ServiceResource
+ :param parent: The resource instance to which this action is attached.
+ :type identifiers: dict
+ :param identifiers: Map of identifier names to value or values.
+ :type resource_data: dict or None
+ :param resource_data: Data for resource attributes.
+ :rtype: ServiceResource
+ :return: New resource instance.
+ """
+ kwargs = {
+ 'client': parent.meta.client,
+ }
+
+ for name, value in identifiers.items():
+ # If value is a list, then consume the next item
+ if isinstance(value, list):
+ value = value.pop(0)
+
+ kwargs[name] = value
+
+ resource = resource_cls(**kwargs)
+
+ if resource_data is not None:
+ resource.meta.data = resource_data
+
+ return resource
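Editor's sketch: the plural-identifier consumption implemented by
__call__ and handle_response_item above, reduced to plain dict and list
operations (no boto3 classes; the data is invented).

    identifiers = {'bucket_name': 'mybucket', 'key': ['a.txt', 'b.txt']}
    plural = [v for v in identifiers.values() if isinstance(v, list)]

    instances = []
    for _ in range(len(plural[0])):
        kwargs = {}
        for name, value in identifiers.items():
            # Lists are consumed one item per instance; scalars are
            # shared by every instance.
            kwargs[name] = value.pop(0) if isinstance(value, list) else value
        instances.append(kwargs)

    print(instances)
    # -> [{'bucket_name': 'mybucket', 'key': 'a.txt'},
    #     {'bucket_name': 'mybucket', 'key': 'b.txt'}]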
diff --git a/contrib/python/boto3/boto3/s3/__init__.py b/contrib/python/boto3/boto3/s3/__init__.py
index 58f30dc875..c89416d7a5 100644
--- a/contrib/python/boto3/boto3/s3/__init__.py
+++ b/contrib/python/boto3/boto3/s3/__init__.py
@@ -1,12 +1,12 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
diff --git a/contrib/python/boto3/boto3/s3/inject.py b/contrib/python/boto3/boto3/s3/inject.py
index 5ba97f8704..0029c0cacb 100644
--- a/contrib/python/boto3/boto3/s3/inject.py
+++ b/contrib/python/boto3/boto3/s3/inject.py
@@ -1,108 +1,108 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.exceptions import ClientError
-
-from boto3.s3.transfer import create_transfer_manager
-from boto3.s3.transfer import TransferConfig, S3Transfer
-from boto3.s3.transfer import ProgressCallbackInvoker
-from boto3 import utils
-
-
-def inject_s3_transfer_methods(class_attributes, **kwargs):
- utils.inject_attribute(class_attributes, 'upload_file', upload_file)
- utils.inject_attribute(class_attributes, 'download_file', download_file)
- utils.inject_attribute(class_attributes, 'copy', copy)
- utils.inject_attribute(class_attributes, 'upload_fileobj', upload_fileobj)
- utils.inject_attribute(
- class_attributes, 'download_fileobj', download_fileobj)
-
-
-def inject_bucket_methods(class_attributes, **kwargs):
- utils.inject_attribute(class_attributes, 'load', bucket_load)
- utils.inject_attribute(class_attributes, 'upload_file', bucket_upload_file)
- utils.inject_attribute(
- class_attributes, 'download_file', bucket_download_file)
- utils.inject_attribute(class_attributes, 'copy', bucket_copy)
- utils.inject_attribute(
- class_attributes, 'upload_fileobj', bucket_upload_fileobj)
- utils.inject_attribute(
- class_attributes, 'download_fileobj', bucket_download_fileobj)
-
-
-def inject_object_methods(class_attributes, **kwargs):
- utils.inject_attribute(class_attributes, 'upload_file', object_upload_file)
- utils.inject_attribute(
- class_attributes, 'download_file', object_download_file)
- utils.inject_attribute(class_attributes, 'copy', object_copy)
- utils.inject_attribute(
- class_attributes, 'upload_fileobj', object_upload_fileobj)
- utils.inject_attribute(
- class_attributes, 'download_fileobj', object_download_fileobj)
-
-
-def inject_object_summary_methods(class_attributes, **kwargs):
- utils.inject_attribute(class_attributes, 'load', object_summary_load)
-
-
-def bucket_load(self, *args, **kwargs):
- """
- Calls s3.Client.list_buckets() to update the attributes of the Bucket
- resource.
- """
- # The docstring above is phrased this way to match what the autogenerated
- # docs produce.
-
- # We can't actually get the bucket's attributes from a HeadBucket,
- # so we need to use a ListBuckets and search for our bucket.
- # However, we may fail if we lack permissions to ListBuckets
- # or the bucket is in another account, in which case creation_date
- # will be None.
- self.meta.data = {}
- try:
- response = self.meta.client.list_buckets()
- for bucket_data in response['Buckets']:
- if bucket_data['Name'] == self.name:
- self.meta.data = bucket_data
- break
- except ClientError as e:
- if e.response.get('Error', {}).get('Code') != 'AccessDenied':
- raise
-
-def object_summary_load(self, *args, **kwargs):
- """
- Calls s3.Client.head_object to update the attributes of the ObjectSummary
- resource.
- """
- response = self.meta.client.head_object(
- Bucket=self.bucket_name, Key=self.key)
- if 'ContentLength' in response:
- response['Size'] = response.pop('ContentLength')
- self.meta.data = response
-
-
-def upload_file(self, Filename, Bucket, Key, ExtraArgs=None,
- Callback=None, Config=None):
- """Upload a file to an S3 object.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- s3.meta.client.upload_file('/tmp/hello.txt', 'mybucket', 'hello.txt')
-
- Similar behavior to S3Transfer's upload_file() method,
- except that parameters are capitalized. Detailed examples can be found at
- :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.exceptions import ClientError
+
+from boto3.s3.transfer import create_transfer_manager
+from boto3.s3.transfer import TransferConfig, S3Transfer
+from boto3.s3.transfer import ProgressCallbackInvoker
+from boto3 import utils
+
+
+def inject_s3_transfer_methods(class_attributes, **kwargs):
+ utils.inject_attribute(class_attributes, 'upload_file', upload_file)
+ utils.inject_attribute(class_attributes, 'download_file', download_file)
+ utils.inject_attribute(class_attributes, 'copy', copy)
+ utils.inject_attribute(class_attributes, 'upload_fileobj', upload_fileobj)
+ utils.inject_attribute(
+ class_attributes, 'download_fileobj', download_fileobj)
+
+
+def inject_bucket_methods(class_attributes, **kwargs):
+ utils.inject_attribute(class_attributes, 'load', bucket_load)
+ utils.inject_attribute(class_attributes, 'upload_file', bucket_upload_file)
+ utils.inject_attribute(
+ class_attributes, 'download_file', bucket_download_file)
+ utils.inject_attribute(class_attributes, 'copy', bucket_copy)
+ utils.inject_attribute(
+ class_attributes, 'upload_fileobj', bucket_upload_fileobj)
+ utils.inject_attribute(
+ class_attributes, 'download_fileobj', bucket_download_fileobj)
+
+
+def inject_object_methods(class_attributes, **kwargs):
+ utils.inject_attribute(class_attributes, 'upload_file', object_upload_file)
+ utils.inject_attribute(
+ class_attributes, 'download_file', object_download_file)
+ utils.inject_attribute(class_attributes, 'copy', object_copy)
+ utils.inject_attribute(
+ class_attributes, 'upload_fileobj', object_upload_fileobj)
+ utils.inject_attribute(
+ class_attributes, 'download_fileobj', object_download_fileobj)
+
+
+def inject_object_summary_methods(class_attributes, **kwargs):
+ utils.inject_attribute(class_attributes, 'load', object_summary_load)
+
+
+def bucket_load(self, *args, **kwargs):
+ """
+ Calls s3.Client.list_buckets() to update the attributes of the Bucket
+ resource.
+ """
+ # The docstring above is phrased this way to match what the autogenerated
+ # docs produce.
+
+ # We can't actually get the bucket's attributes from a HeadBucket,
+ # so we need to use a ListBuckets and search for our bucket.
+ # However, we may fail if we lack permissions to ListBuckets
+ # or the bucket is in another account, in which case creation_date
+ # will be None.
+ self.meta.data = {}
+ try:
+ response = self.meta.client.list_buckets()
+ for bucket_data in response['Buckets']:
+ if bucket_data['Name'] == self.name:
+ self.meta.data = bucket_data
+ break
+ except ClientError as e:
+ if e.response.get('Error', {}).get('Code') != 'AccessDenied':
+ raise
+
+def object_summary_load(self, *args, **kwargs):
+ """
+ Calls s3.Client.head_object to update the attributes of the ObjectSummary
+ resource.
+ """
+ response = self.meta.client.head_object(
+ Bucket=self.bucket_name, Key=self.key)
+ if 'ContentLength' in response:
+ response['Size'] = response.pop('ContentLength')
+ self.meta.data = response
+
+
+def upload_file(self, Filename, Bucket, Key, ExtraArgs=None,
+ Callback=None, Config=None):
+ """Upload a file to an S3 object.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ s3.meta.client.upload_file('/tmp/hello.txt', 'mybucket', 'hello.txt')
+
+ Similar behavior to S3Transfer's upload_file() method,
+ except that parameters are capitalized. Detailed examples can be found at
+ :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
:type Filename: str
:param Filename: The path to the file to upload.
@@ -124,26 +124,26 @@ def upload_file(self, Filename, Bucket, Key, ExtraArgs=None,
:type Config: boto3.s3.transfer.TransferConfig
:param Config: The transfer configuration to be used when performing the
transfer.
- """
- with S3Transfer(self, Config) as transfer:
- return transfer.upload_file(
- filename=Filename, bucket=Bucket, key=Key,
- extra_args=ExtraArgs, callback=Callback)
-
-
-def download_file(self, Bucket, Key, Filename, ExtraArgs=None,
- Callback=None, Config=None):
- """Download an S3 object to a file.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt')
-
- Similar behavior to S3Transfer's download_file() method,
- except that parameters are capitalized. Detailed examples can be found at
- :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+ """
+ with S3Transfer(self, Config) as transfer:
+ return transfer.upload_file(
+ filename=Filename, bucket=Bucket, key=Key,
+ extra_args=ExtraArgs, callback=Callback)
+
+
+def download_file(self, Bucket, Key, Filename, ExtraArgs=None,
+ Callback=None, Config=None):
+ """Download an S3 object to a file.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt')
+
+ Similar behavior to S3Transfer's download_file() method,
+ except that parameters are capitalized. Detailed examples can be found at
+ :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
:type Bucket: str
:param Bucket: The name of the bucket to download from.
@@ -165,26 +165,26 @@ def download_file(self, Bucket, Key, Filename, ExtraArgs=None,
:type Config: boto3.s3.transfer.TransferConfig
:param Config: The transfer configuration to be used when performing the
transfer.
- """
- with S3Transfer(self, Config) as transfer:
- return transfer.download_file(
- bucket=Bucket, key=Key, filename=Filename,
- extra_args=ExtraArgs, callback=Callback)
-
-
-def bucket_upload_file(self, Filename, Key,
- ExtraArgs=None, Callback=None, Config=None):
- """Upload a file to an S3 object.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- s3.Bucket('mybucket').upload_file('/tmp/hello.txt', 'hello.txt')
-
- Similar behavior to S3Transfer's upload_file() method,
- except that parameters are capitalized. Detailed examples can be found at
- :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+ """
+ with S3Transfer(self, Config) as transfer:
+ return transfer.download_file(
+ bucket=Bucket, key=Key, filename=Filename,
+ extra_args=ExtraArgs, callback=Callback)
+
+
+def bucket_upload_file(self, Filename, Key,
+ ExtraArgs=None, Callback=None, Config=None):
+ """Upload a file to an S3 object.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ s3.Bucket('mybucket').upload_file('/tmp/hello.txt', 'hello.txt')
+
+ Similar behavior to S3Transfer's upload_file() method,
+ except that parameters are capitalized. Detailed examples can be found at
+ :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
:type Filename: str
:param Filename: The path to the file to upload.
@@ -203,25 +203,25 @@ def bucket_upload_file(self, Filename, Key,
:type Config: boto3.s3.transfer.TransferConfig
:param Config: The transfer configuration to be used when performing the
transfer.
- """
- return self.meta.client.upload_file(
- Filename=Filename, Bucket=self.name, Key=Key,
- ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
-
-
-def bucket_download_file(self, Key, Filename,
- ExtraArgs=None, Callback=None, Config=None):
- """Download an S3 object to a file.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- s3.Bucket('mybucket').download_file('hello.txt', '/tmp/hello.txt')
-
- Similar behavior to S3Transfer's download_file() method,
- except that parameters are capitalized. Detailed examples can be found at
- :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+ """
+ return self.meta.client.upload_file(
+ Filename=Filename, Bucket=self.name, Key=Key,
+ ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
+
+
+def bucket_download_file(self, Key, Filename,
+ ExtraArgs=None, Callback=None, Config=None):
+ """Download an S3 object to a file.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ s3.Bucket('mybucket').download_file('hello.txt', '/tmp/hello.txt')
+
+ Similar behavior to S3Transfer's download_file() method,
+ except that parameters are capitalized. Detailed examples can be found at
+ :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
:type Key: str
:param Key: The name of the key to download from.
@@ -240,25 +240,25 @@ def bucket_download_file(self, Key, Filename,
:type Config: boto3.s3.transfer.TransferConfig
:param Config: The transfer configuration to be used when performing the
transfer.
- """
- return self.meta.client.download_file(
- Bucket=self.name, Key=Key, Filename=Filename,
- ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
-
-
-def object_upload_file(self, Filename,
- ExtraArgs=None, Callback=None, Config=None):
- """Upload a file to an S3 object.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- s3.Object('mybucket', 'hello.txt').upload_file('/tmp/hello.txt')
-
- Similar behavior to S3Transfer's upload_file() method,
- except that parameters are capitalized. Detailed examples can be found at
- :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+ """
+ return self.meta.client.download_file(
+ Bucket=self.name, Key=Key, Filename=Filename,
+ ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
+
+
+def object_upload_file(self, Filename,
+ ExtraArgs=None, Callback=None, Config=None):
+ """Upload a file to an S3 object.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ s3.Object('mybucket', 'hello.txt').upload_file('/tmp/hello.txt')
+
+ Similar behavior to S3Transfer's upload_file() method,
+ except that parameters are capitalized. Detailed examples can be found at
+ :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
:type Filename: str
:param Filename: The path to the file to upload.
@@ -274,25 +274,25 @@ def object_upload_file(self, Filename,
:type Config: boto3.s3.transfer.TransferConfig
:param Config: The transfer configuration to be used when performing the
transfer.
- """
- return self.meta.client.upload_file(
- Filename=Filename, Bucket=self.bucket_name, Key=self.key,
- ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
-
-
-def object_download_file(self, Filename,
- ExtraArgs=None, Callback=None, Config=None):
- """Download an S3 object to a file.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- s3.Object('mybucket', 'hello.txt').download_file('/tmp/hello.txt')
-
- Similar behavior to S3Transfer's download_file() method,
- except that parameters are capitalized. Detailed examples can be found at
- :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+ """
+ return self.meta.client.upload_file(
+ Filename=Filename, Bucket=self.bucket_name, Key=self.key,
+ ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
+
+
+def object_download_file(self, Filename,
+ ExtraArgs=None, Callback=None, Config=None):
+ """Download an S3 object to a file.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ s3.Object('mybucket', 'hello.txt').download_file('/tmp/hello.txt')
+
+ Similar behavior to S3Transfer's download_file() method,
+ except that parameters are capitalized. Detailed examples can be found at
+ :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
:type Filename: str
:param Filename: The path to the file to download to.
@@ -308,454 +308,454 @@ def object_download_file(self, Filename,
:type Config: boto3.s3.transfer.TransferConfig
:param Config: The transfer configuration to be used when performing the
transfer.
- """
- return self.meta.client.download_file(
- Bucket=self.bucket_name, Key=self.key, Filename=Filename,
- ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
-
-
-def copy(self, CopySource, Bucket, Key, ExtraArgs=None, Callback=None,
- SourceClient=None, Config=None):
- """Copy an object from one S3 location to another.
-
- This is a managed transfer which will perform a multipart copy in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- copy_source = {
- 'Bucket': 'mybucket',
- 'Key': 'mykey'
- }
- s3.meta.client.copy(copy_source, 'otherbucket', 'otherkey')
-
- :type CopySource: dict
- :param CopySource: The name of the source bucket, key name of the
- source object, and optional version ID of the source object. The
- dictionary format is:
- ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
- that the ``VersionId`` key is optional and may be omitted.
-
- :type Bucket: str
- :param Bucket: The name of the bucket to copy to
-
- :type Key: str
- :param Key: The name of the key to copy to
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation
-
+ """
+ return self.meta.client.download_file(
+ Bucket=self.bucket_name, Key=self.key, Filename=Filename,
+ ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
+
+
+def copy(self, CopySource, Bucket, Key, ExtraArgs=None, Callback=None,
+ SourceClient=None, Config=None):
+ """Copy an object from one S3 location to another.
+
+ This is a managed transfer which will perform a multipart copy in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ copy_source = {
+ 'Bucket': 'mybucket',
+ 'Key': 'mykey'
+ }
+ s3.meta.client.copy(copy_source, 'otherbucket', 'otherkey')
+
+ :type CopySource: dict
+ :param CopySource: The name of the source bucket, key name of the
+ source object, and optional version ID of the source object. The
+ dictionary format is:
+ ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
+ that the ``VersionId`` key is optional and may be omitted.
+
+ :type Bucket: str
+ :param Bucket: The name of the bucket to copy to
+
+ :type Key: str
+ :param Key: The name of the key to copy to
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation
+
:type Callback: function
- :param Callback: A method that takes the number of bytes transferred
- and is called periodically during the copy.
-
- :type SourceClient: botocore or boto3 Client
- :param SourceClient: The client to be used for operations that
- may happen at the source object. For example, this client is
- used for the head_object that determines the size of the copy.
- If no client is provided, the current client is used as the client
- for the source object.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- copy.
- """
- subscribers = None
- if Callback is not None:
- subscribers = [ProgressCallbackInvoker(Callback)]
-
- config = Config
- if config is None:
- config = TransferConfig()
-
- with create_transfer_manager(self, config) as manager:
- future = manager.copy(
- copy_source=CopySource, bucket=Bucket, key=Key,
- extra_args=ExtraArgs, subscribers=subscribers,
- source_client=SourceClient)
- return future.result()
-
-
-def bucket_copy(self, CopySource, Key, ExtraArgs=None, Callback=None,
- SourceClient=None, Config=None):
- """Copy an object from one S3 location to an object in this bucket.
-
- This is a managed transfer which will perform a multipart copy in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- copy_source = {
- 'Bucket': 'mybucket',
- 'Key': 'mykey'
- }
- bucket = s3.Bucket('otherbucket')
- bucket.copy(copy_source, 'otherkey')
-
- :type CopySource: dict
- :param CopySource: The name of the source bucket, key name of the
- source object, and optional version ID of the source object. The
- dictionary format is:
- ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
- that the ``VersionId`` key is optional and may be omitted.
-
- :type Key: str
- :param Key: The name of the key to copy to
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation
-
+ :param Callback: A method that takes the number of bytes transferred
+ and is called periodically during the copy.
+
+ :type SourceClient: botocore or boto3 Client
+ :param SourceClient: The client to be used for operations that
+ may happen at the source object. For example, this client is
+ used for the head_object that determines the size of the copy.
+ If no client is provided, the current client is used as the client
+ for the source object.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ copy.
+ """
+ subscribers = None
+ if Callback is not None:
+ subscribers = [ProgressCallbackInvoker(Callback)]
+
+ config = Config
+ if config is None:
+ config = TransferConfig()
+
+ with create_transfer_manager(self, config) as manager:
+ future = manager.copy(
+ copy_source=CopySource, bucket=Bucket, key=Key,
+ extra_args=ExtraArgs, subscribers=subscribers,
+ source_client=SourceClient)
+ return future.result()
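# Editor's sketch: calling the managed copy above with a progress
# callback; bucket and key names are placeholders.
import boto3

def on_progress(bytes_transferred):
    # Invoked with the incremental byte count for each transferred chunk.
    print('transferred another {0} bytes'.format(bytes_transferred))

s3 = boto3.client('s3')
s3.copy({'Bucket': 'mybucket', 'Key': 'mykey'},
        'otherbucket', 'otherkey', Callback=on_progress)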
+
+
+def bucket_copy(self, CopySource, Key, ExtraArgs=None, Callback=None,
+ SourceClient=None, Config=None):
+ """Copy an object from one S3 location to an object in this bucket.
+
+ This is a managed transfer which will perform a multipart copy in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ copy_source = {
+ 'Bucket': 'mybucket',
+ 'Key': 'mykey'
+ }
+ bucket = s3.Bucket('otherbucket')
+ bucket.copy(copy_source, 'otherkey')
+
+ :type CopySource: dict
+ :param CopySource: The name of the source bucket, key name of the
+ source object, and optional version ID of the source object. The
+ dictionary format is:
+ ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
+ that the ``VersionId`` key is optional and may be omitted.
+
+ :type Key: str
+ :param Key: The name of the key to copy to
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation
+
:type Callback: function
- :param Callback: A method that takes the number of bytes transferred
- and is called periodically during the copy.
-
- :type SourceClient: botocore or boto3 Client
- :param SourceClient: The client to be used for operation that
- may happen at the source object. For example, this client is
- used for the head_object that determines the size of the copy.
- If no client is provided, the current client is used as the client
- for the source object.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- copy.
- """
- return self.meta.client.copy(
- CopySource=CopySource, Bucket=self.name, Key=Key, ExtraArgs=ExtraArgs,
- Callback=Callback, SourceClient=SourceClient, Config=Config)
-
-
-def object_copy(self, CopySource, ExtraArgs=None, Callback=None,
- SourceClient=None, Config=None):
- """Copy an object from one S3 location to this object.
-
- This is a managed transfer which will perform a multipart copy in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- copy_source = {
- 'Bucket': 'mybucket',
- 'Key': 'mykey'
- }
- bucket = s3.Bucket('otherbucket')
- obj = bucket.Object('otherkey')
- obj.copy(copy_source)
-
- :type CopySource: dict
- :param CopySource: The name of the source bucket, key name of the
- source object, and optional version ID of the source object. The
- dictionary format is:
- ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
- that the ``VersionId`` key is optional and may be omitted.
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation
-
+ :param Callback: A method that takes the number of bytes transferred
+ and is called periodically during the copy.
+
+ :type SourceClient: botocore or boto3 Client
+ :param SourceClient: The client to be used for operations that
+ may happen at the source object. For example, this client is
+ used for the head_object that determines the size of the copy.
+ If no client is provided, the current client is used as the client
+ for the source object.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ copy.
+ """
+ return self.meta.client.copy(
+ CopySource=CopySource, Bucket=self.name, Key=Key, ExtraArgs=ExtraArgs,
+ Callback=Callback, SourceClient=SourceClient, Config=Config)
+
+
+def object_copy(self, CopySource, ExtraArgs=None, Callback=None,
+ SourceClient=None, Config=None):
+ """Copy an object from one S3 location to this object.
+
+ This is a managed transfer which will perform a multipart copy in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ copy_source = {
+ 'Bucket': 'mybucket',
+ 'Key': 'mykey'
+ }
+ bucket = s3.Bucket('otherbucket')
+ obj = bucket.Object('otherkey')
+ obj.copy(copy_source)
+
+ :type CopySource: dict
+ :param CopySource: The name of the source bucket, key name of the
+ source object, and optional version ID of the source object. The
+ dictionary format is:
+ ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
+ that the ``VersionId`` key is optional and may be omitted.
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation
+
:type Callback: function
- :param Callback: A method which takes a number of bytes transferred to
- be periodically called during the copy.
-
- :type SourceClient: botocore or boto3 Client
- :param SourceClient: The client to be used for operation that
- may happen at the source object. For example, this client is
- used for the head_object that determines the size of the copy.
- If no client is provided, the current client is used as the client
- for the source object.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- copy.
- """
- return self.meta.client.copy(
- CopySource=CopySource, Bucket=self.bucket_name, Key=self.key,
- ExtraArgs=ExtraArgs, Callback=Callback, SourceClient=SourceClient,
- Config=Config)
-
-
-def upload_fileobj(self, Fileobj, Bucket, Key, ExtraArgs=None,
- Callback=None, Config=None):
- """Upload a file-like object to S3.
-
- The file-like object must be in binary mode.
-
- This is a managed transfer which will perform a multipart upload in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.client('s3')
-
- with open('filename', 'rb') as data:
- s3.upload_fileobj(data, 'mybucket', 'mykey')
-
- :type Fileobj: a file-like object
- :param Fileobj: A file-like object to upload. At a minimum, it must
- implement the `read` method, and must return bytes.
-
- :type Bucket: str
- :param Bucket: The name of the bucket to upload to.
-
- :type Key: str
- :param Key: The name of the key to upload to.
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation.
-
+    :param Callback: A method which takes the number of bytes transferred,
+        to be called periodically during the copy.
+
+    :type SourceClient: botocore or boto3 Client
+    :param SourceClient: The client to be used for operations that
+        may happen at the source object. For example, this client is
+        used for the ``head_object`` call that determines the size of the
+        copy. If no client is provided, the current client is used for
+        the source object.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ copy.
+ """
+ return self.meta.client.copy(
+ CopySource=CopySource, Bucket=self.bucket_name, Key=self.key,
+ ExtraArgs=ExtraArgs, Callback=Callback, SourceClient=SourceClient,
+ Config=Config)
+
+
+def upload_fileobj(self, Fileobj, Bucket, Key, ExtraArgs=None,
+ Callback=None, Config=None):
+ """Upload a file-like object to S3.
+
+ The file-like object must be in binary mode.
+
+ This is a managed transfer which will perform a multipart upload in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.client('s3')
+
+ with open('filename', 'rb') as data:
+ s3.upload_fileobj(data, 'mybucket', 'mykey')
+
+ :type Fileobj: a file-like object
+ :param Fileobj: A file-like object to upload. At a minimum, it must
+ implement the `read` method, and must return bytes.
+
+ :type Bucket: str
+ :param Bucket: The name of the bucket to upload to.
+
+ :type Key: str
+ :param Key: The name of the key to upload to.
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation.
+
:type Callback: function
- :param Callback: A method which takes a number of bytes transferred to
- be periodically called during the upload.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- upload.
- """
- if not hasattr(Fileobj, 'read'):
- raise ValueError('Fileobj must implement read')
-
- subscribers = None
- if Callback is not None:
- subscribers = [ProgressCallbackInvoker(Callback)]
-
- config = Config
- if config is None:
- config = TransferConfig()
-
- with create_transfer_manager(self, config) as manager:
- future = manager.upload(
- fileobj=Fileobj, bucket=Bucket, key=Key,
- extra_args=ExtraArgs, subscribers=subscribers)
- return future.result()
-
-
-def bucket_upload_fileobj(self, Fileobj, Key, ExtraArgs=None,
- Callback=None, Config=None):
- """Upload a file-like object to this bucket.
-
- The file-like object must be in binary mode.
-
- This is a managed transfer which will perform a multipart upload in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- bucket = s3.Bucket('mybucket')
-
- with open('filename', 'rb') as data:
- bucket.upload_fileobj(data, 'mykey')
-
- :type Fileobj: a file-like object
- :param Fileobj: A file-like object to upload. At a minimum, it must
- implement the `read` method, and must return bytes.
-
- :type Key: str
- :param Key: The name of the key to upload to.
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation.
-
+    :param Callback: A method which takes the number of bytes transferred,
+        to be called periodically during the upload.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ upload.
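+
+    For example, a sketch of uploading an in-memory buffer with a
+    progress callback (bucket and key names are placeholders)::
+
+        import io
+        s3 = boto3.client('s3')
+        buf = io.BytesIO(b'my raw bytes')
+        s3.upload_fileobj(buf, 'mybucket', 'mykey',
+                          Callback=lambda n: print('%d more bytes' % n))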
+ """
+ if not hasattr(Fileobj, 'read'):
+ raise ValueError('Fileobj must implement read')
+
+ subscribers = None
+ if Callback is not None:
+ subscribers = [ProgressCallbackInvoker(Callback)]
+
+ config = Config
+ if config is None:
+ config = TransferConfig()
+
+ with create_transfer_manager(self, config) as manager:
+ future = manager.upload(
+ fileobj=Fileobj, bucket=Bucket, key=Key,
+ extra_args=ExtraArgs, subscribers=subscribers)
+ return future.result()
+
+
+def bucket_upload_fileobj(self, Fileobj, Key, ExtraArgs=None,
+ Callback=None, Config=None):
+ """Upload a file-like object to this bucket.
+
+ The file-like object must be in binary mode.
+
+ This is a managed transfer which will perform a multipart upload in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ bucket = s3.Bucket('mybucket')
+
+ with open('filename', 'rb') as data:
+ bucket.upload_fileobj(data, 'mykey')
+
+ :type Fileobj: a file-like object
+ :param Fileobj: A file-like object to upload. At a minimum, it must
+ implement the `read` method, and must return bytes.
+
+ :type Key: str
+ :param Key: The name of the key to upload to.
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation.
+
:type Callback: function
- :param Callback: A method which takes a number of bytes transferred to
- be periodically called during the upload.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- upload.
- """
- return self.meta.client.upload_fileobj(
- Fileobj=Fileobj, Bucket=self.name, Key=Key, ExtraArgs=ExtraArgs,
- Callback=Callback, Config=Config)
-
-
-def object_upload_fileobj(self, Fileobj, ExtraArgs=None, Callback=None,
- Config=None):
- """Upload a file-like object to this object.
-
- The file-like object must be in binary mode.
-
- This is a managed transfer which will perform a multipart upload in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- bucket = s3.Bucket('mybucket')
- obj = bucket.Object('mykey')
-
- with open('filename', 'rb') as data:
- obj.upload_fileobj(data)
-
- :type Fileobj: a file-like object
- :param Fileobj: A file-like object to upload. At a minimum, it must
- implement the `read` method, and must return bytes.
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation.
-
+    :param Callback: A method which takes the number of bytes transferred,
+        to be called periodically during the upload.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ upload.
+ """
+ return self.meta.client.upload_fileobj(
+ Fileobj=Fileobj, Bucket=self.name, Key=Key, ExtraArgs=ExtraArgs,
+ Callback=Callback, Config=Config)
+
+
+def object_upload_fileobj(self, Fileobj, ExtraArgs=None, Callback=None,
+ Config=None):
+ """Upload a file-like object to this object.
+
+ The file-like object must be in binary mode.
+
+ This is a managed transfer which will perform a multipart upload in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ bucket = s3.Bucket('mybucket')
+ obj = bucket.Object('mykey')
+
+ with open('filename', 'rb') as data:
+ obj.upload_fileobj(data)
+
+ :type Fileobj: a file-like object
+ :param Fileobj: A file-like object to upload. At a minimum, it must
+ implement the `read` method, and must return bytes.
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation.
+
:type Callback: function
- :param Callback: A method which takes a number of bytes transferred to
- be periodically called during the upload.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- upload.
- """
- return self.meta.client.upload_fileobj(
- Fileobj=Fileobj, Bucket=self.bucket_name, Key=self.key,
- ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
-
-
-def download_fileobj(self, Bucket, Key, Fileobj, ExtraArgs=None,
- Callback=None, Config=None):
- """Download an object from S3 to a file-like object.
-
- The file-like object must be in binary mode.
-
- This is a managed transfer which will perform a multipart download in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.client('s3')
-
- with open('filename', 'wb') as data:
- s3.download_fileobj('mybucket', 'mykey', data)
-
- :type Bucket: str
- :param Bucket: The name of the bucket to download from.
-
- :type Key: str
- :param Key: The name of the key to download from.
-
+    :param Callback: A method which takes the number of bytes transferred,
+        to be called periodically during the upload.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ upload.
+ """
+ return self.meta.client.upload_fileobj(
+ Fileobj=Fileobj, Bucket=self.bucket_name, Key=self.key,
+ ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
+
+
+def download_fileobj(self, Bucket, Key, Fileobj, ExtraArgs=None,
+ Callback=None, Config=None):
+ """Download an object from S3 to a file-like object.
+
+ The file-like object must be in binary mode.
+
+ This is a managed transfer which will perform a multipart download in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.client('s3')
+
+ with open('filename', 'wb') as data:
+ s3.download_fileobj('mybucket', 'mykey', data)
+
+ :type Bucket: str
+ :param Bucket: The name of the bucket to download from.
+
+ :type Key: str
+ :param Key: The name of the key to download from.
+
:type Fileobj: a file-like object
:param Fileobj: A file-like object to download into. At a minimum, it must
implement the `write` method and must accept bytes.
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation.
-
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation.
+
:type Callback: function
- :param Callback: A method which takes a number of bytes transferred to
- be periodically called during the download.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- download.
- """
- if not hasattr(Fileobj, 'write'):
- raise ValueError('Fileobj must implement write')
-
- subscribers = None
- if Callback is not None:
- subscribers = [ProgressCallbackInvoker(Callback)]
-
- config = Config
- if config is None:
- config = TransferConfig()
-
- with create_transfer_manager(self, config) as manager:
- future = manager.download(
- bucket=Bucket, key=Key, fileobj=Fileobj,
- extra_args=ExtraArgs, subscribers=subscribers)
- return future.result()
-
-
-def bucket_download_fileobj(self, Key, Fileobj, ExtraArgs=None,
- Callback=None, Config=None):
- """Download an object from this bucket to a file-like-object.
-
- The file-like object must be in binary mode.
-
- This is a managed transfer which will perform a multipart download in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- bucket = s3.Bucket('mybucket')
-
- with open('filename', 'wb') as data:
- bucket.download_fileobj('mykey', data)
-
- :type Fileobj: a file-like object
- :param Fileobj: A file-like object to download into. At a minimum, it must
- implement the `write` method and must accept bytes.
-
- :type Key: str
- :param Key: The name of the key to download from.
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation.
-
+    :param Callback: A method which takes the number of bytes transferred,
+        to be called periodically during the download.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ download.
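+
+    For example, a sketch of downloading into an in-memory buffer
+    (bucket and key names are placeholders)::
+
+        import io
+        s3 = boto3.client('s3')
+        buf = io.BytesIO()
+        s3.download_fileobj('mybucket', 'mykey', buf)
+        buf.seek(0)  # rewind before reading the downloaded bytes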
+ """
+ if not hasattr(Fileobj, 'write'):
+ raise ValueError('Fileobj must implement write')
+
+ subscribers = None
+ if Callback is not None:
+ subscribers = [ProgressCallbackInvoker(Callback)]
+
+ config = Config
+ if config is None:
+ config = TransferConfig()
+
+ with create_transfer_manager(self, config) as manager:
+ future = manager.download(
+ bucket=Bucket, key=Key, fileobj=Fileobj,
+ extra_args=ExtraArgs, subscribers=subscribers)
+ return future.result()
+
+
+def bucket_download_fileobj(self, Key, Fileobj, ExtraArgs=None,
+ Callback=None, Config=None):
+ """Download an object from this bucket to a file-like-object.
+
+ The file-like object must be in binary mode.
+
+ This is a managed transfer which will perform a multipart download in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ bucket = s3.Bucket('mybucket')
+
+ with open('filename', 'wb') as data:
+ bucket.download_fileobj('mykey', data)
+
+ :type Fileobj: a file-like object
+ :param Fileobj: A file-like object to download into. At a minimum, it must
+ implement the `write` method and must accept bytes.
+
+ :type Key: str
+ :param Key: The name of the key to download from.
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation.
+
:type Callback: function
- :param Callback: A method which takes a number of bytes transferred to
- be periodically called during the download.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- download.
- """
- return self.meta.client.download_fileobj(
- Bucket=self.name, Key=Key, Fileobj=Fileobj, ExtraArgs=ExtraArgs,
- Callback=Callback, Config=Config)
-
-
-def object_download_fileobj(self, Fileobj, ExtraArgs=None, Callback=None,
- Config=None):
- """Download this object from S3 to a file-like object.
-
- The file-like object must be in binary mode.
-
- This is a managed transfer which will perform a multipart download in
- multiple threads if necessary.
-
- Usage::
-
- import boto3
- s3 = boto3.resource('s3')
- bucket = s3.Bucket('mybucket')
- obj = bucket.Object('mykey')
-
- with open('filename', 'wb') as data:
- obj.download_fileobj(data)
-
- :type Fileobj: a file-like object
- :param Fileobj: A file-like object to download into. At a minimum, it must
- implement the `write` method and must accept bytes.
-
- :type ExtraArgs: dict
- :param ExtraArgs: Extra arguments that may be passed to the
- client operation.
-
+    :param Callback: A method which takes the number of bytes transferred,
+        to be called periodically during the download.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ download.
+ """
+ return self.meta.client.download_fileobj(
+ Bucket=self.name, Key=Key, Fileobj=Fileobj, ExtraArgs=ExtraArgs,
+ Callback=Callback, Config=Config)
+
+
+def object_download_fileobj(self, Fileobj, ExtraArgs=None, Callback=None,
+ Config=None):
+ """Download this object from S3 to a file-like object.
+
+ The file-like object must be in binary mode.
+
+ This is a managed transfer which will perform a multipart download in
+ multiple threads if necessary.
+
+ Usage::
+
+ import boto3
+ s3 = boto3.resource('s3')
+ bucket = s3.Bucket('mybucket')
+ obj = bucket.Object('mykey')
+
+ with open('filename', 'wb') as data:
+ obj.download_fileobj(data)
+
+ :type Fileobj: a file-like object
+ :param Fileobj: A file-like object to download into. At a minimum, it must
+ implement the `write` method and must accept bytes.
+
+ :type ExtraArgs: dict
+ :param ExtraArgs: Extra arguments that may be passed to the
+ client operation.
+
:type Callback: function
- :param Callback: A method which takes a number of bytes transferred to
- be periodically called during the download.
-
- :type Config: boto3.s3.transfer.TransferConfig
- :param Config: The transfer configuration to be used when performing the
- download.
- """
- return self.meta.client.download_fileobj(
- Bucket=self.bucket_name, Key=self.key, Fileobj=Fileobj,
- ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
-
+    :param Callback: A method which takes the number of bytes transferred,
+        to be called periodically during the download.
+
+ :type Config: boto3.s3.transfer.TransferConfig
+ :param Config: The transfer configuration to be used when performing the
+ download.
+ """
+ return self.meta.client.download_fileobj(
+ Bucket=self.bucket_name, Key=self.key, Fileobj=Fileobj,
+ ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
+
diff --git a/contrib/python/boto3/boto3/s3/transfer.py b/contrib/python/boto3/boto3/s3/transfer.py
index 9d11801915..0d2d2fece4 100644
--- a/contrib/python/boto3/boto3/s3/transfer.py
+++ b/contrib/python/boto3/boto3/s3/transfer.py
@@ -1,338 +1,338 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""Abstractions over S3's upload/download operations.
-
-This module provides high level abstractions for efficient
-uploads/downloads. It handles several things for the user:
-
-* Automatically switching to multipart transfers when
- a file is over a specific size threshold
-* Uploading/downloading a file in parallel
-* Progress callbacks to monitor transfers
-* Retries. While botocore handles retries for streaming uploads,
- it is not possible for it to handle retries for streaming
- downloads. This module handles retries for both cases so
- you don't need to implement any retry logic yourself.
-
-This module has a reasonable set of defaults. It also allows you
-to configure many aspects of the transfer process including:
-
-* Multipart threshold size
-* Max parallel downloads
-* Socket timeouts
-* Retry amounts
-
-There is no support for s3->s3 multipart copies at this
-time.
-
-
-.. _ref_s3transfer_usage:
-
-Usage
-=====
-
-The simplest way to use this module is:
-
-.. code-block:: python
-
- client = boto3.client('s3', 'us-west-2')
- transfer = S3Transfer(client)
- # Upload /tmp/myfile to s3://bucket/key
- transfer.upload_file('/tmp/myfile', 'bucket', 'key')
-
- # Download s3://bucket/key to /tmp/myfile
- transfer.download_file('bucket', 'key', '/tmp/myfile')
-
-The ``upload_file`` and ``download_file`` methods also accept
-``**kwargs``, which will be forwarded through to the corresponding
-client operation. Here are a few examples using ``upload_file``::
-
- # Making the object public
- transfer.upload_file('/tmp/myfile', 'bucket', 'key',
- extra_args={'ACL': 'public-read'})
-
- # Setting metadata
- transfer.upload_file('/tmp/myfile', 'bucket', 'key',
- extra_args={'Metadata': {'a': 'b', 'c': 'd'}})
-
- # Setting content type
- transfer.upload_file('/tmp/myfile.json', 'bucket', 'key',
- extra_args={'ContentType': "application/json"})
-
-
-The ``S3Transfer`` class also supports progress callbacks so you can
-provide transfer progress to users. Both the ``upload_file`` and
-``download_file`` methods take an optional ``callback`` parameter.
-Here's an example of how to print a simple progress percentage
-to the user:
-
-.. code-block:: python
-
- class ProgressPercentage(object):
- def __init__(self, filename):
- self._filename = filename
- self._size = float(os.path.getsize(filename))
- self._seen_so_far = 0
- self._lock = threading.Lock()
-
- def __call__(self, bytes_amount):
- # To simplify we'll assume this is hooked up
- # to a single filename.
- with self._lock:
- self._seen_so_far += bytes_amount
- percentage = (self._seen_so_far / self._size) * 100
- sys.stdout.write(
- "\r%s %s / %s (%.2f%%)" % (
- self._filename, self._seen_so_far, self._size,
- percentage))
- sys.stdout.flush()
-
-
- transfer = S3Transfer(boto3.client('s3', 'us-west-2'))
- # Upload /tmp/myfile to s3://bucket/key and print upload progress.
- transfer.upload_file('/tmp/myfile', 'bucket', 'key',
- callback=ProgressPercentage('/tmp/myfile'))
-
-
-
-You can also provide a TransferConfig object to the S3Transfer
-object that gives you more fine grained control over the
-transfer. For example:
-
-.. code-block:: python
-
- client = boto3.client('s3', 'us-west-2')
- config = TransferConfig(
- multipart_threshold=8 * 1024 * 1024,
- max_concurrency=10,
- num_download_attempts=10,
- )
- transfer = S3Transfer(client, config)
- transfer.upload_file('/tmp/foo', 'bucket', 'key')
-
-
-"""
-from botocore.exceptions import ClientError
-from botocore.compat import six
-from s3transfer.exceptions import RetriesExceededError as \
- S3TransferRetriesExceededError
-from s3transfer.manager import TransferConfig as S3TransferConfig
-from s3transfer.manager import TransferManager
-from s3transfer.futures import NonThreadedExecutor
-from s3transfer.subscribers import BaseSubscriber
-from s3transfer.utils import OSUtils
-
-from boto3.exceptions import RetriesExceededError, S3UploadFailedError
-
-
-KB = 1024
-MB = KB * KB
-
-
-def create_transfer_manager(client, config, osutil=None):
- """Creates a transfer manager based on configuration
-
- :type client: boto3.client
- :param client: The S3 client to use
-
- :type config: boto3.s3.transfer.TransferConfig
- :param config: The transfer config to use
-
- :type osutil: s3transfer.utils.OSUtils
- :param osutil: The os utility to use
-
- :rtype: s3transfer.manager.TransferManager
- :returns: A transfer manager based on parameters provided
- """
- executor_cls = None
- if not config.use_threads:
- executor_cls = NonThreadedExecutor
- return TransferManager(client, config, osutil, executor_cls)
-
-
-class TransferConfig(S3TransferConfig):
- ALIAS = {
- 'max_concurrency': 'max_request_concurrency',
- 'max_io_queue': 'max_io_queue_size'
- }
-
- def __init__(self,
- multipart_threshold=8 * MB,
- max_concurrency=10,
- multipart_chunksize=8 * MB,
- num_download_attempts=5,
- max_io_queue=100,
- io_chunksize=256 * KB,
- use_threads=True):
- """Configuration object for managed S3 transfers
-
- :param multipart_threshold: The transfer size threshold for which
- multipart uploads, downloads, and copies will automatically be
- triggered.
-
- :param max_concurrency: The maximum number of threads that will be
- making requests to perform a transfer. If ``use_threads`` is
- set to ``False``, the value provided is ignored as the transfer
- will only ever use the main thread.
-
- :param multipart_chunksize: The partition size of each part for a
- multipart transfer.
-
- :param num_download_attempts: The number of download attempts that
- will be retried upon errors with downloading an object in S3.
- Note that these retries account for errors that occur when
- streaming down the data from s3 (i.e. socket errors and read
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Abstractions over S3's upload/download operations.
+
+This module provides high-level abstractions for efficient
+uploads/downloads. It handles several things for the user:
+
+* Automatically switching to multipart transfers when
+ a file is over a specific size threshold
+* Uploading/downloading a file in parallel
+* Progress callbacks to monitor transfers
+* Retries. While botocore handles retries for streaming uploads,
+ it is not possible for it to handle retries for streaming
+ downloads. This module handles retries for both cases so
+ you don't need to implement any retry logic yourself.
+
+This module has a reasonable set of defaults. It also allows you
+to configure many aspects of the transfer process including:
+
+* Multipart threshold size
+* Max parallel downloads
+* Socket timeouts
+* Retry amounts
+
+There is no support for s3->s3 multipart copies at this
+time.
+
+
+.. _ref_s3transfer_usage:
+
+Usage
+=====
+
+The simplest way to use this module is:
+
+.. code-block:: python
+
+ client = boto3.client('s3', 'us-west-2')
+ transfer = S3Transfer(client)
+ # Upload /tmp/myfile to s3://bucket/key
+ transfer.upload_file('/tmp/myfile', 'bucket', 'key')
+
+ # Download s3://bucket/key to /tmp/myfile
+ transfer.download_file('bucket', 'key', '/tmp/myfile')
+
+The ``upload_file`` and ``download_file`` methods also accept
+``**kwargs``, which will be forwarded through to the corresponding
+client operation. Here are a few examples using ``upload_file``::
+
+ # Making the object public
+ transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+ extra_args={'ACL': 'public-read'})
+
+ # Setting metadata
+ transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+ extra_args={'Metadata': {'a': 'b', 'c': 'd'}})
+
+ # Setting content type
+ transfer.upload_file('/tmp/myfile.json', 'bucket', 'key',
+ extra_args={'ContentType': "application/json"})
+
+
+The ``S3Transfer`` class also supports progress callbacks so you can
+provide transfer progress to users. Both the ``upload_file`` and
+``download_file`` methods take an optional ``callback`` parameter.
+Here's an example of how to print a simple progress percentage
+to the user:
+
+.. code-block:: python
+
+ class ProgressPercentage(object):
+ def __init__(self, filename):
+ self._filename = filename
+ self._size = float(os.path.getsize(filename))
+ self._seen_so_far = 0
+ self._lock = threading.Lock()
+
+ def __call__(self, bytes_amount):
+ # To simplify we'll assume this is hooked up
+ # to a single filename.
+ with self._lock:
+ self._seen_so_far += bytes_amount
+ percentage = (self._seen_so_far / self._size) * 100
+ sys.stdout.write(
+ "\r%s %s / %s (%.2f%%)" % (
+ self._filename, self._seen_so_far, self._size,
+ percentage))
+ sys.stdout.flush()
+
+
+ transfer = S3Transfer(boto3.client('s3', 'us-west-2'))
+ # Upload /tmp/myfile to s3://bucket/key and print upload progress.
+ transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+ callback=ProgressPercentage('/tmp/myfile'))
+
+
+
+You can also provide a TransferConfig object to the S3Transfer
+object that gives you more fine-grained control over the
+transfer. For example:
+
+.. code-block:: python
+
+ client = boto3.client('s3', 'us-west-2')
+ config = TransferConfig(
+ multipart_threshold=8 * 1024 * 1024,
+ max_concurrency=10,
+ num_download_attempts=10,
+ )
+ transfer = S3Transfer(client, config)
+ transfer.upload_file('/tmp/foo', 'bucket', 'key')
+
+
+"""
+from botocore.exceptions import ClientError
+from botocore.compat import six
+from s3transfer.exceptions import RetriesExceededError as \
+ S3TransferRetriesExceededError
+from s3transfer.manager import TransferConfig as S3TransferConfig
+from s3transfer.manager import TransferManager
+from s3transfer.futures import NonThreadedExecutor
+from s3transfer.subscribers import BaseSubscriber
+from s3transfer.utils import OSUtils
+
+from boto3.exceptions import RetriesExceededError, S3UploadFailedError
+
+
+KB = 1024
+MB = KB * KB
+
+
+def create_transfer_manager(client, config, osutil=None):
+ """Creates a transfer manager based on configuration
+
+ :type client: boto3.client
+ :param client: The S3 client to use
+
+ :type config: boto3.s3.transfer.TransferConfig
+ :param config: The transfer config to use
+
+ :type osutil: s3transfer.utils.OSUtils
+ :param osutil: The os utility to use
+
+ :rtype: s3transfer.manager.TransferManager
+ :returns: A transfer manager based on parameters provided
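+
+    For example, a sketch assuming an existing ``client`` (file, bucket,
+    and key names are placeholders)::
+
+        config = TransferConfig(use_threads=False)
+        with create_transfer_manager(client, config) as manager:
+            future = manager.upload('/tmp/myfile', 'mybucket', 'mykey')
+            future.result()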
+ """
+ executor_cls = None
+ if not config.use_threads:
+ executor_cls = NonThreadedExecutor
+ return TransferManager(client, config, osutil, executor_cls)
+
+
+class TransferConfig(S3TransferConfig):
+ ALIAS = {
+ 'max_concurrency': 'max_request_concurrency',
+ 'max_io_queue': 'max_io_queue_size'
+ }
+
+ def __init__(self,
+ multipart_threshold=8 * MB,
+ max_concurrency=10,
+ multipart_chunksize=8 * MB,
+ num_download_attempts=5,
+ max_io_queue=100,
+ io_chunksize=256 * KB,
+ use_threads=True):
+ """Configuration object for managed S3 transfers
+
+ :param multipart_threshold: The transfer size threshold for which
+ multipart uploads, downloads, and copies will automatically be
+ triggered.
+
+ :param max_concurrency: The maximum number of threads that will be
+ making requests to perform a transfer. If ``use_threads`` is
+ set to ``False``, the value provided is ignored as the transfer
+ will only ever use the main thread.
+
+ :param multipart_chunksize: The partition size of each part for a
+ multipart transfer.
+
+        :param num_download_attempts: The number of download attempts that
+            will be made when errors occur while downloading an object from
+            S3. Note that these retries cover errors that occur when
+            streaming the data down from S3 (i.e. socket errors and read
timeouts that occur after receiving an OK response from s3).
- Other retryable exceptions such as throttling errors and 5xx
- errors are already retried by botocore (this default is 5). This
- does not take into account the number of exceptions retried by
- botocore.
-
- :param max_io_queue: The maximum amount of read parts that can be
- queued in memory to be written for a download. The size of each
- of these read parts is at most the size of ``io_chunksize``.
-
- :param io_chunksize: The max size of each chunk in the io queue.
- Currently, this is size used when ``read`` is called on the
- downloaded stream as well.
-
- :param use_threads: If True, threads will be used when performing
- S3 transfers. If False, no threads will be used in
- performing transfers: all logic will be ran in the main thread.
- """
- super(TransferConfig, self).__init__(
- multipart_threshold=multipart_threshold,
- max_request_concurrency=max_concurrency,
- multipart_chunksize=multipart_chunksize,
- num_download_attempts=num_download_attempts,
- max_io_queue_size=max_io_queue,
- io_chunksize=io_chunksize,
- )
- # Some of the argument names are not the same as the inherited
- # S3TransferConfig so we add aliases so you can still access the
- # old version of the names.
- for alias in self.ALIAS:
- setattr(self, alias, getattr(self, self.ALIAS[alias]))
- self.use_threads = use_threads
-
- def __setattr__(self, name, value):
- # If the alias name is used, make sure we set the name that it points
- # to as that is what actually is used in governing the TransferManager.
- if name in self.ALIAS:
- super(TransferConfig, self).__setattr__(self.ALIAS[name], value)
- # Always set the value of the actual name provided.
- super(TransferConfig, self).__setattr__(name, value)
-
-
-class S3Transfer(object):
- ALLOWED_DOWNLOAD_ARGS = TransferManager.ALLOWED_DOWNLOAD_ARGS
- ALLOWED_UPLOAD_ARGS = TransferManager.ALLOWED_UPLOAD_ARGS
-
- def __init__(self, client=None, config=None, osutil=None, manager=None):
- if not client and not manager:
- raise ValueError(
- 'Either a boto3.Client or s3transfer.manager.TransferManager '
- 'must be provided'
- )
- if manager and any([client, config, osutil]):
- raise ValueError(
- 'Manager cannot be provided with client, config, '
- 'nor osutil. These parameters are mutually exclusive.'
- )
- if config is None:
- config = TransferConfig()
- if osutil is None:
- osutil = OSUtils()
- if manager:
- self._manager = manager
- else:
- self._manager = create_transfer_manager(client, config, osutil)
-
- def upload_file(self, filename, bucket, key,
- callback=None, extra_args=None):
- """Upload a file to an S3 object.
-
- Variants have also been injected into S3 client, Bucket and Object.
- You don't have to use S3Transfer.upload_file() directly.
+            Other retryable exceptions, such as throttling errors and 5xx
+            errors, are already retried by botocore; this value (which
+            defaults to 5) does not include those botocore-level retries.
+
+        :param max_io_queue: The maximum number of read parts that can be
+ queued in memory to be written for a download. The size of each
+ of these read parts is at most the size of ``io_chunksize``.
+
+        :param io_chunksize: The max size of each chunk in the io queue.
+            Currently, this is also the size used when ``read`` is called
+            on the downloaded stream.
+
+ :param use_threads: If True, threads will be used when performing
+ S3 transfers. If False, no threads will be used in
+            performing transfers: all logic will be run in the main thread.
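+
+        For example, a sketch forcing a single-threaded download (bucket,
+        key, and file names are placeholders)::
+
+            config = TransferConfig(use_threads=False)
+            boto3.client('s3').download_file(
+                'mybucket', 'mykey', '/tmp/myfile', Config=config)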
+ """
+ super(TransferConfig, self).__init__(
+ multipart_threshold=multipart_threshold,
+ max_request_concurrency=max_concurrency,
+ multipart_chunksize=multipart_chunksize,
+ num_download_attempts=num_download_attempts,
+ max_io_queue_size=max_io_queue,
+ io_chunksize=io_chunksize,
+ )
+        # Some of the argument names differ from those in the inherited
+        # S3TransferConfig, so we add aliases to keep the old names
+        # accessible.
+ for alias in self.ALIAS:
+ setattr(self, alias, getattr(self, self.ALIAS[alias]))
+ self.use_threads = use_threads
+
+ def __setattr__(self, name, value):
+        # If an alias name is used, also set the name it points to, since
+        # that is the name the TransferManager actually consults.
+ if name in self.ALIAS:
+ super(TransferConfig, self).__setattr__(self.ALIAS[name], value)
+ # Always set the value of the actual name provided.
+ super(TransferConfig, self).__setattr__(name, value)
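+
+    # For example, setting the aliased name also updates the underlying
+    # name consulted by the TransferManager:
+    #
+    #     config = TransferConfig()
+    #     config.max_concurrency = 20
+    #     assert config.max_request_concurrency == 20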
+
+
+class S3Transfer(object):
+ ALLOWED_DOWNLOAD_ARGS = TransferManager.ALLOWED_DOWNLOAD_ARGS
+ ALLOWED_UPLOAD_ARGS = TransferManager.ALLOWED_UPLOAD_ARGS
+
+ def __init__(self, client=None, config=None, osutil=None, manager=None):
+ if not client and not manager:
+ raise ValueError(
+ 'Either a boto3.Client or s3transfer.manager.TransferManager '
+ 'must be provided'
+ )
+ if manager and any([client, config, osutil]):
+ raise ValueError(
+ 'Manager cannot be provided with client, config, '
+ 'nor osutil. These parameters are mutually exclusive.'
+ )
+ if config is None:
+ config = TransferConfig()
+ if osutil is None:
+ osutil = OSUtils()
+ if manager:
+ self._manager = manager
+ else:
+ self._manager = create_transfer_manager(client, config, osutil)
+
+ def upload_file(self, filename, bucket, key,
+ callback=None, extra_args=None):
+ """Upload a file to an S3 object.
+
+ Variants have also been injected into S3 client, Bucket and Object.
+        You don't have to use S3Transfer.upload_file() directly.
+
.. seealso::
:py:meth:`S3.Client.upload_file`
:py:meth:`S3.Client.upload_fileobj`
- """
- if not isinstance(filename, six.string_types):
- raise ValueError('Filename must be a string')
-
- subscribers = self._get_subscribers(callback)
- future = self._manager.upload(
- filename, bucket, key, extra_args, subscribers)
- try:
- future.result()
- # If a client error was raised, add the backwards compatibility layer
- # that raises a S3UploadFailedError. These specific errors were only
- # ever thrown for upload_parts but now can be thrown for any related
- # client error.
- except ClientError as e:
- raise S3UploadFailedError(
- "Failed to upload %s to %s: %s" % (
- filename, '/'.join([bucket, key]), e))
-
- def download_file(self, bucket, key, filename, extra_args=None,
- callback=None):
- """Download an S3 object to a file.
-
- Variants have also been injected into S3 client, Bucket and Object.
- You don't have to use S3Transfer.download_file() directly.
+ """
+ if not isinstance(filename, six.string_types):
+ raise ValueError('Filename must be a string')
+
+ subscribers = self._get_subscribers(callback)
+ future = self._manager.upload(
+ filename, bucket, key, extra_args, subscribers)
+ try:
+ future.result()
+ # If a client error was raised, add the backwards compatibility layer
+ # that raises a S3UploadFailedError. These specific errors were only
+ # ever thrown for upload_parts but now can be thrown for any related
+ # client error.
+ except ClientError as e:
+ raise S3UploadFailedError(
+ "Failed to upload %s to %s: %s" % (
+ filename, '/'.join([bucket, key]), e))
+
+ def download_file(self, bucket, key, filename, extra_args=None,
+ callback=None):
+ """Download an S3 object to a file.
+
+ Variants have also been injected into S3 client, Bucket and Object.
+        You don't have to use S3Transfer.download_file() directly.
+
.. seealso::
:py:meth:`S3.Client.download_file`
:py:meth:`S3.Client.download_fileobj`
- """
- if not isinstance(filename, six.string_types):
- raise ValueError('Filename must be a string')
-
- subscribers = self._get_subscribers(callback)
- future = self._manager.download(
- bucket, key, filename, extra_args, subscribers)
- try:
- future.result()
- # This is for backwards compatibility where when retries are
- # exceeded we need to throw the same error from boto3 instead of
- # s3transfer's built in RetriesExceededError as current users are
- # catching the boto3 one instead of the s3transfer exception to do
- # their own retries.
- except S3TransferRetriesExceededError as e:
- raise RetriesExceededError(e.last_exception)
-
- def _get_subscribers(self, callback):
- if not callback:
- return None
- return [ProgressCallbackInvoker(callback)]
-
- def __enter__(self):
- return self
-
- def __exit__(self, *args):
- self._manager.__exit__(*args)
-
-
-class ProgressCallbackInvoker(BaseSubscriber):
- """A back-compat wrapper to invoke a provided callback via a subscriber
-
- :param callback: A callable that takes a single positional argument for
- how many bytes were transferred.
- """
- def __init__(self, callback):
- self._callback = callback
-
- def on_progress(self, bytes_transferred, **kwargs):
- self._callback(bytes_transferred)
+ """
+ if not isinstance(filename, six.string_types):
+ raise ValueError('Filename must be a string')
+
+ subscribers = self._get_subscribers(callback)
+ future = self._manager.download(
+ bucket, key, filename, extra_args, subscribers)
+ try:
+ future.result()
+ # This is for backwards compatibility where when retries are
+ # exceeded we need to throw the same error from boto3 instead of
+ # s3transfer's built in RetriesExceededError as current users are
+ # catching the boto3 one instead of the s3transfer exception to do
+ # their own retries.
+ except S3TransferRetriesExceededError as e:
+ raise RetriesExceededError(e.last_exception)
+
+ def _get_subscribers(self, callback):
+ if not callback:
+ return None
+ return [ProgressCallbackInvoker(callback)]
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self._manager.__exit__(*args)
+
+
+class ProgressCallbackInvoker(BaseSubscriber):
+ """A back-compat wrapper to invoke a provided callback via a subscriber
+
+ :param callback: A callable that takes a single positional argument for
+ how many bytes were transferred.
+ """
+ def __init__(self, callback):
+ self._callback = callback
+
+ def on_progress(self, bytes_transferred, **kwargs):
+ self._callback(bytes_transferred)
diff --git a/contrib/python/boto3/boto3/session.py b/contrib/python/boto3/boto3/session.py
index fcc49901ee..3b19bcdb2e 100644
--- a/contrib/python/boto3/boto3/session.py
+++ b/contrib/python/boto3/boto3/session.py
@@ -1,453 +1,453 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import copy
-import os
-
-import botocore.session
-from botocore.client import Config
-from botocore.exceptions import DataNotFoundError, UnknownServiceError
-
-import boto3
-import boto3.utils
-from boto3.exceptions import ResourceNotExistsError, UnknownAPIVersionError
-
-from .resources.factory import ResourceFactory
-
-
-class Session(object):
- """
- A session stores configuration state and allows you to create service
- clients and resources.
-
- :type aws_access_key_id: string
- :param aws_access_key_id: AWS access key ID
- :type aws_secret_access_key: string
- :param aws_secret_access_key: AWS secret access key
- :type aws_session_token: string
- :param aws_session_token: AWS temporary session token
- :type region_name: string
- :param region_name: Default region when creating new connections
- :type botocore_session: botocore.session.Session
- :param botocore_session: Use this Botocore session instead of creating
- a new default one.
- :type profile_name: string
- :param profile_name: The name of a profile to use. If not given, then
- the default profile is used.
- """
- def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
- aws_session_token=None, region_name=None,
- botocore_session=None, profile_name=None):
- if botocore_session is not None:
- self._session = botocore_session
- else:
- # Create a new default session
- self._session = botocore.session.get_session()
-
- # Setup custom user-agent string if it isn't already customized
- if self._session.user_agent_name == 'Botocore':
- botocore_info = 'Botocore/{0}'.format(
- self._session.user_agent_version)
- if self._session.user_agent_extra:
- self._session.user_agent_extra += ' ' + botocore_info
- else:
- self._session.user_agent_extra = botocore_info
- self._session.user_agent_name = 'Boto3'
- self._session.user_agent_version = boto3.__version__
-
- if profile_name is not None:
- self._session.set_config_variable('profile', profile_name)
-
- if aws_access_key_id or aws_secret_access_key or aws_session_token:
- self._session.set_credentials(
- aws_access_key_id, aws_secret_access_key, aws_session_token)
-
- if region_name is not None:
- self._session.set_config_variable('region', region_name)
-
- self.resource_factory = ResourceFactory(
- self._session.get_component('event_emitter'))
- self._setup_loader()
- self._register_default_handlers()
-
- def __repr__(self):
- return '{0}(region_name={1})'.format(
- self.__class__.__name__,
- repr(self._session.get_config_variable('region')))
-
- @property
- def profile_name(self):
- """
- The **read-only** profile name.
- """
- return self._session.profile or 'default'
-
- @property
- def region_name(self):
- """
- The **read-only** region name.
- """
- return self._session.get_config_variable('region')
-
- @property
- def events(self):
- """
- The event emitter for a session
- """
- return self._session.get_component('event_emitter')
-
- @property
- def available_profiles(self):
- """
- The profiles available to the session credentials
- """
- return self._session.available_profiles
-
- def _setup_loader(self):
- """
- Setup loader paths so that we can load resources.
- """
- self._loader = self._session.get_component('data_loader')
- self._loader.search_paths.append(
- os.path.join(os.path.dirname(__file__), 'data'))
-
- def get_available_services(self):
- """
- Get a list of available services that can be loaded as low-level
- clients via :py:meth:`Session.client`.
-
- :rtype: list
- :return: List of service names
- """
- return self._session.get_available_services()
-
- def get_available_resources(self):
- """
- Get a list of available services that can be loaded as resource
- clients via :py:meth:`Session.resource`.
-
- :rtype: list
- :return: List of service names
- """
- return self._loader.list_available_services(type_name='resources-1')
-
- def get_available_partitions(self):
- """Lists the available partitions
-
- :rtype: list
- :return: Returns a list of partition names (e.g., ["aws", "aws-cn"])
- """
- return self._session.get_available_partitions()
-
- def get_available_regions(self, service_name, partition_name='aws',
- allow_non_regional=False):
- """Lists the region and endpoint names of a particular partition.
-
- :type service_name: string
- :param service_name: Name of a service to list endpoint for (e.g., s3).
-
- :type partition_name: string
- :param partition_name: Name of the partition to limit endpoints to.
- (e.g., aws for the public AWS endpoints, aws-cn for AWS China
- endpoints, aws-us-gov for AWS GovCloud (US) Endpoints, etc.)
-
- :type allow_non_regional: bool
- :param allow_non_regional: Set to True to include endpoints that are
- not regional endpoints (e.g., s3-external-1,
- fips-us-gov-west-1, etc).
-
- :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
- """
- return self._session.get_available_regions(
- service_name=service_name, partition_name=partition_name,
- allow_non_regional=allow_non_regional)
-
- def get_credentials(self):
- """
- Return the :class:`botocore.credential.Credential` object
- associated with this session. If the credentials have not
- yet been loaded, this will attempt to load them. If they
- have already been loaded, this will return the cached
- credentials.
- """
- return self._session.get_credentials()
-
- def client(self, service_name, region_name=None, api_version=None,
- use_ssl=True, verify=None, endpoint_url=None,
- aws_access_key_id=None, aws_secret_access_key=None,
- aws_session_token=None, config=None):
- """
- Create a low-level service client by name.
-
- :type service_name: string
- :param service_name: The name of a service, e.g. 's3' or 'ec2'. You
- can get a list of available services via
- :py:meth:`get_available_services`.
-
- :type region_name: string
- :param region_name: The name of the region associated with the client.
- A client is associated with a single region.
-
- :type api_version: string
- :param api_version: The API version to use. By default, botocore will
- use the latest API version when creating a client. You only need
- to specify this parameter if you want to use a previous API version
- of the client.
-
- :type use_ssl: boolean
- :param use_ssl: Whether or not to use SSL. By default, SSL is used.
- Note that not all services support non-ssl connections.
-
- :type verify: boolean/string
- :param verify: Whether or not to verify SSL certificates. By default
- SSL certificates are verified. You can provide the following
- values:
-
- * False - do not validate SSL certificates. SSL will still be
- used (unless use_ssl is False), but SSL certificates
- will not be verified.
- * path/to/cert/bundle.pem - A filename of the CA cert bundle to
- uses. You can specify this argument if you want to use a
- different CA cert bundle than the one used by botocore.
-
- :type endpoint_url: string
- :param endpoint_url: The complete URL to use for the constructed
- client. Normally, botocore will automatically construct the
- appropriate URL to use when communicating with a service. You
- can specify a complete URL (including the "http/https" scheme)
- to override this behavior. If this value is provided,
- then ``use_ssl`` is ignored.
-
- :type aws_access_key_id: string
- :param aws_access_key_id: The access key to use when creating
- the client. This is entirely optional, and if not provided,
- the credentials configured for the session will automatically
- be used. You only need to provide this argument if you want
- to override the credentials used for this specific client.
-
- :type aws_secret_access_key: string
- :param aws_secret_access_key: The secret key to use when creating
- the client. Same semantics as aws_access_key_id above.
-
- :type aws_session_token: string
- :param aws_session_token: The session token to use when creating
- the client. Same semantics as aws_access_key_id above.
-
- :type config: botocore.client.Config
- :param config: Advanced client configuration options. If region_name
- is specified in the client config, its value will take precedence
- over environment variables and configuration values, but not over
- a region_name value passed explicitly to the method. See
- `botocore config documentation
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import copy
+import os
+
+import botocore.session
+from botocore.client import Config
+from botocore.exceptions import DataNotFoundError, UnknownServiceError
+
+import boto3
+import boto3.utils
+from boto3.exceptions import ResourceNotExistsError, UnknownAPIVersionError
+
+from .resources.factory import ResourceFactory
+
+
+class Session(object):
+ """
+ A session stores configuration state and allows you to create service
+ clients and resources.
+
+ :type aws_access_key_id: string
+ :param aws_access_key_id: AWS access key ID
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: AWS secret access key
+ :type aws_session_token: string
+ :param aws_session_token: AWS temporary session token
+ :type region_name: string
+ :param region_name: Default region when creating new connections
+ :type botocore_session: botocore.session.Session
+ :param botocore_session: Use this Botocore session instead of creating
+ a new default one.
+ :type profile_name: string
+ :param profile_name: The name of a profile to use. If not given, then
+ the default profile is used.
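+
+    For example, a sketch of building clients from a named profile (the
+    profile name is a placeholder)::
+
+        session = boto3.session.Session(profile_name='dev')
+        s3 = session.client('s3')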
+ """
+ def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
+ aws_session_token=None, region_name=None,
+ botocore_session=None, profile_name=None):
+ if botocore_session is not None:
+ self._session = botocore_session
+ else:
+ # Create a new default session
+ self._session = botocore.session.get_session()
+
+        # Set up a custom user-agent string if it isn't already customized
+ if self._session.user_agent_name == 'Botocore':
+ botocore_info = 'Botocore/{0}'.format(
+ self._session.user_agent_version)
+ if self._session.user_agent_extra:
+ self._session.user_agent_extra += ' ' + botocore_info
+ else:
+ self._session.user_agent_extra = botocore_info
+ self._session.user_agent_name = 'Boto3'
+ self._session.user_agent_version = boto3.__version__
+
+ if profile_name is not None:
+ self._session.set_config_variable('profile', profile_name)
+
+ if aws_access_key_id or aws_secret_access_key or aws_session_token:
+ self._session.set_credentials(
+ aws_access_key_id, aws_secret_access_key, aws_session_token)
+
+ if region_name is not None:
+ self._session.set_config_variable('region', region_name)
+
+ self.resource_factory = ResourceFactory(
+ self._session.get_component('event_emitter'))
+ self._setup_loader()
+ self._register_default_handlers()
+
+ def __repr__(self):
+ return '{0}(region_name={1})'.format(
+ self.__class__.__name__,
+ repr(self._session.get_config_variable('region')))
+
+ @property
+ def profile_name(self):
+ """
+ The **read-only** profile name.
+ """
+ return self._session.profile or 'default'
+
+ @property
+ def region_name(self):
+ """
+ The **read-only** region name.
+ """
+ return self._session.get_config_variable('region')
+
+ @property
+ def events(self):
+ """
+ The event emitter for a session
+ """
+ return self._session.get_component('event_emitter')
+
+ @property
+ def available_profiles(self):
+ """
+ The profiles available to the session credentials
+ """
+ return self._session.available_profiles
+
+ def _setup_loader(self):
+ """
+        Set up loader paths so that we can load resources.
+ """
+ self._loader = self._session.get_component('data_loader')
+ self._loader.search_paths.append(
+ os.path.join(os.path.dirname(__file__), 'data'))
+
+ def get_available_services(self):
+ """
+ Get a list of available services that can be loaded as low-level
+ clients via :py:meth:`Session.client`.
+
+ :rtype: list
+ :return: List of service names
+ """
+ return self._session.get_available_services()
+
+ def get_available_resources(self):
+ """
+ Get a list of available services that can be loaded as resource
+ clients via :py:meth:`Session.resource`.
+
+ :rtype: list
+ :return: List of service names
+ """
+ return self._loader.list_available_services(type_name='resources-1')
+
+ def get_available_partitions(self):
+ """Lists the available partitions
+
+ :rtype: list
+ :return: Returns a list of partition names (e.g., ["aws", "aws-cn"])
+ """
+ return self._session.get_available_partitions()
+
+ def get_available_regions(self, service_name, partition_name='aws',
+ allow_non_regional=False):
+ """Lists the region and endpoint names of a particular partition.
+
+ :type service_name: string
+        :param service_name: Name of a service to list endpoints for (e.g., s3).
+
+ :type partition_name: string
+ :param partition_name: Name of the partition to limit endpoints to.
+ (e.g., aws for the public AWS endpoints, aws-cn for AWS China
+            endpoints, aws-us-gov for AWS GovCloud (US) endpoints, etc.)
+
+ :type allow_non_regional: bool
+ :param allow_non_regional: Set to True to include endpoints that are
+ not regional endpoints (e.g., s3-external-1,
+            fips-us-gov-west-1, etc.).
+
+ :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
+ """
+ return self._session.get_available_regions(
+ service_name=service_name, partition_name=partition_name,
+ allow_non_regional=allow_non_regional)
+
+ def get_credentials(self):
+ """
+        Return the :class:`botocore.credentials.Credentials` object
+ associated with this session. If the credentials have not
+ yet been loaded, this will attempt to load them. If they
+ have already been loaded, this will return the cached
+ credentials.
+ """
+ return self._session.get_credentials()
+
+ def client(self, service_name, region_name=None, api_version=None,
+ use_ssl=True, verify=None, endpoint_url=None,
+ aws_access_key_id=None, aws_secret_access_key=None,
+ aws_session_token=None, config=None):
+ """
+ Create a low-level service client by name.
+
+ :type service_name: string
+ :param service_name: The name of a service, e.g. 's3' or 'ec2'. You
+ can get a list of available services via
+ :py:meth:`get_available_services`.
+
+ :type region_name: string
+ :param region_name: The name of the region associated with the client.
+ A client is associated with a single region.
+
+ :type api_version: string
+ :param api_version: The API version to use. By default, botocore will
+ use the latest API version when creating a client. You only need
+ to specify this parameter if you want to use a previous API version
+ of the client.
+
+ :type use_ssl: boolean
+ :param use_ssl: Whether or not to use SSL. By default, SSL is used.
+ Note that not all services support non-ssl connections.
+
+ :type verify: boolean/string
+ :param verify: Whether or not to verify SSL certificates. By default
+ SSL certificates are verified. You can provide the following
+ values:
+
+ * False - do not validate SSL certificates. SSL will still be
+ used (unless use_ssl is False), but SSL certificates
+ will not be verified.
+ * path/to/cert/bundle.pem - A filename of the CA cert bundle to
+ use. You can specify this argument if you want to use a
+ different CA cert bundle than the one used by botocore.
+
+ :type endpoint_url: string
+ :param endpoint_url: The complete URL to use for the constructed
+ client. Normally, botocore will automatically construct the
+ appropriate URL to use when communicating with a service. You
+ can specify a complete URL (including the "http/https" scheme)
+ to override this behavior. If this value is provided,
+ then ``use_ssl`` is ignored.
+
+ :type aws_access_key_id: string
+ :param aws_access_key_id: The access key to use when creating
+ the client. This is entirely optional, and if not provided,
+ the credentials configured for the session will automatically
+ be used. You only need to provide this argument if you want
+ to override the credentials used for this specific client.
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: The secret key to use when creating
+ the client. Same semantics as aws_access_key_id above.
+
+ :type aws_session_token: string
+ :param aws_session_token: The session token to use when creating
+ the client. Same semantics as aws_access_key_id above.
+
+ :type config: botocore.client.Config
+ :param config: Advanced client configuration options. If region_name
+ is specified in the client config, its value will take precedence
+ over environment variables and configuration values, but not over
+ a region_name value passed explicitly to the method. See
+ `botocore config documentation
<https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html>`_
- for more details.
-
- :return: Service client instance
-
- """
- return self._session.create_client(
- service_name, region_name=region_name, api_version=api_version,
- use_ssl=use_ssl, verify=verify, endpoint_url=endpoint_url,
- aws_access_key_id=aws_access_key_id,
- aws_secret_access_key=aws_secret_access_key,
- aws_session_token=aws_session_token, config=config)
-
- def resource(self, service_name, region_name=None, api_version=None,
- use_ssl=True, verify=None, endpoint_url=None,
- aws_access_key_id=None, aws_secret_access_key=None,
- aws_session_token=None, config=None):
- """
- Create a resource service client by name.
-
- :type service_name: string
- :param service_name: The name of a service, e.g. 's3' or 'ec2'. You
- can get a list of available services via
- :py:meth:`get_available_resources`.
-
- :type region_name: string
- :param region_name: The name of the region associated with the client.
- A client is associated with a single region.
-
- :type api_version: string
- :param api_version: The API version to use. By default, botocore will
- use the latest API version when creating a client. You only need
- to specify this parameter if you want to use a previous API version
- of the client.
-
- :type use_ssl: boolean
- :param use_ssl: Whether or not to use SSL. By default, SSL is used.
- Note that not all services support non-ssl connections.
-
- :type verify: boolean/string
- :param verify: Whether or not to verify SSL certificates. By default
- SSL certificates are verified. You can provide the following
- values:
-
- * False - do not validate SSL certificates. SSL will still be
- used (unless use_ssl is False), but SSL certificates
- will not be verified.
- * path/to/cert/bundle.pem - A filename of the CA cert bundle to
- use. You can specify this argument if you want to use a
- different CA cert bundle than the one used by botocore.
-
- :type endpoint_url: string
- :param endpoint_url: The complete URL to use for the constructed
- client. Normally, botocore will automatically construct the
- appropriate URL to use when communicating with a service. You
- can specify a complete URL (including the "http/https" scheme)
- to override this behavior. If this value is provided,
- then ``use_ssl`` is ignored.
-
- :type aws_access_key_id: string
- :param aws_access_key_id: The access key to use when creating
- the client. This is entirely optional, and if not provided,
- the credentials configured for the session will automatically
- be used. You only need to provide this argument if you want
- to override the credentials used for this specific client.
-
- :type aws_secret_access_key: string
- :param aws_secret_access_key: The secret key to use when creating
- the client. Same semantics as aws_access_key_id above.
-
- :type aws_session_token: string
- :param aws_session_token: The session token to use when creating
- the client. Same semantics as aws_access_key_id above.
-
- :type config: botocore.client.Config
- :param config: Advanced client configuration options. If region_name
- is specified in the client config, its value will take precedence
- over environment variables and configuration values, but not over
- a region_name value passed explicitly to the method. If
- user_agent_extra is specified in the client config, it overrides
- the default user_agent_extra provided by the resource API. See
- `botocore config documentation
+ for more details.
+
+ :return: Service client instance
+
+ """
+ return self._session.create_client(
+ service_name, region_name=region_name, api_version=api_version,
+ use_ssl=use_ssl, verify=verify, endpoint_url=endpoint_url,
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ aws_session_token=aws_session_token, config=config)
+
+ def resource(self, service_name, region_name=None, api_version=None,
+ use_ssl=True, verify=None, endpoint_url=None,
+ aws_access_key_id=None, aws_secret_access_key=None,
+ aws_session_token=None, config=None):
+ """
+ Create a resource service client by name.
+
+ :type service_name: string
+ :param service_name: The name of a service, e.g. 's3' or 'ec2'. You
+ can get a list of available services via
+ :py:meth:`get_available_resources`.
+
+ :type region_name: string
+ :param region_name: The name of the region associated with the client.
+ A client is associated with a single region.
+
+ :type api_version: string
+ :param api_version: The API version to use. By default, botocore will
+ use the latest API version when creating a client. You only need
+ to specify this parameter if you want to use a previous API version
+ of the client.
+
+ :type use_ssl: boolean
+ :param use_ssl: Whether or not to use SSL. By default, SSL is used.
+ Note that not all services support non-ssl connections.
+
+ :type verify: boolean/string
+ :param verify: Whether or not to verify SSL certificates. By default
+ SSL certificates are verified. You can provide the following
+ values:
+
+ * False - do not validate SSL certificates. SSL will still be
+ used (unless use_ssl is False), but SSL certificates
+ will not be verified.
+ * path/to/cert/bundle.pem - A filename of the CA cert bundle to
+ use. You can specify this argument if you want to use a
+ different CA cert bundle than the one used by botocore.
+
+ :type endpoint_url: string
+ :param endpoint_url: The complete URL to use for the constructed
+ client. Normally, botocore will automatically construct the
+ appropriate URL to use when communicating with a service. You
+ can specify a complete URL (including the "http/https" scheme)
+ to override this behavior. If this value is provided,
+ then ``use_ssl`` is ignored.
+
+ :type aws_access_key_id: string
+ :param aws_access_key_id: The access key to use when creating
+ the client. This is entirely optional, and if not provided,
+ the credentials configured for the session will automatically
+ be used. You only need to provide this argument if you want
+ to override the credentials used for this specific client.
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: The secret key to use when creating
+ the client. Same semantics as aws_access_key_id above.
+
+ :type aws_session_token: string
+ :param aws_session_token: The session token to use when creating
+ the client. Same semantics as aws_access_key_id above.
+
+ :type config: botocore.client.Config
+ :param config: Advanced client configuration options. If region_name
+ is specified in the client config, its value will take precedence
+ over environment variables and configuration values, but not over
+ a region_name value passed explicitly to the method. If
+ user_agent_extra is specified in the client config, it overrides
+ the default user_agent_extra provided by the resource API. See
+ `botocore config documentation
<https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html>`_
- for more details.
-
- :return: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
- """
- try:
- resource_model = self._loader.load_service_model(
- service_name, 'resources-1', api_version)
- except UnknownServiceError:
- available = self.get_available_resources()
- has_low_level_client = (
- service_name in self.get_available_services())
- raise ResourceNotExistsError(service_name, available,
- has_low_level_client)
- except DataNotFoundError:
- # This is because we've provided an invalid API version.
- available_api_versions = self._loader.list_api_versions(
- service_name, 'resources-1')
- raise UnknownAPIVersionError(
- service_name, api_version, ', '.join(available_api_versions))
-
- if api_version is None:
- # Even though botocore's load_service_model() can handle
- # using the latest api_version if not provided, we need
- # to track this api_version in boto3 in order to ensure
- # we're pairing a resource model with a client model
- # of the same API version. It's possible for the latest
- # API version of a resource model in boto3 to not be
- # the same API version as a service model in botocore.
- # So we need to look up the api_version if one is not
- # provided to ensure we load the same API version of the
- # client.
- #
- # Note: This is relying on the fact that
- # loader.load_service_model(..., api_version=None)
- # and loader.determine_latest_version(..., 'resources-1')
- # both load the same api version of the file.
- api_version = self._loader.determine_latest_version(
- service_name, 'resources-1')
-
- # Creating a new resource instance requires the low-level client
- # and service model, the resource version and resource JSON data.
- # We pass these to the factory and get back a class, which is
- # instantiated on top of the low-level client.
- if config is not None:
- if config.user_agent_extra is None:
- config = copy.deepcopy(config)
- config.user_agent_extra = 'Resource'
- else:
- config = Config(user_agent_extra='Resource')
- client = self.client(
- service_name, region_name=region_name, api_version=api_version,
- use_ssl=use_ssl, verify=verify, endpoint_url=endpoint_url,
- aws_access_key_id=aws_access_key_id,
- aws_secret_access_key=aws_secret_access_key,
- aws_session_token=aws_session_token, config=config)
- service_model = client.meta.service_model
-
- # Create a ServiceContext object to serve as a reference to
- # important read-only information about the general service.
- service_context = boto3.utils.ServiceContext(
+ for more details.
+
+ :return: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
+ """
+ try:
+ resource_model = self._loader.load_service_model(
+ service_name, 'resources-1', api_version)
+ except UnknownServiceError:
+ available = self.get_available_resources()
+ has_low_level_client = (
+ service_name in self.get_available_services())
+ raise ResourceNotExistsError(service_name, available,
+ has_low_level_client)
+ except DataNotFoundError:
+ # This is because we've provided an invalid API version.
+ available_api_versions = self._loader.list_api_versions(
+ service_name, 'resources-1')
+ raise UnknownAPIVersionError(
+ service_name, api_version, ', '.join(available_api_versions))
+
+ if api_version is None:
+ # Even though botocore's load_service_model() can handle
+ # using the latest api_version if not provided, we need
+ # to track this api_version in boto3 in order to ensure
+ # we're pairing a resource model with a client model
+ # of the same API version. It's possible for the latest
+ # API version of a resource model in boto3 to not be
+ # the same API version as a service model in botocore.
+ # So we need to look up the api_version if one is not
+ # provided to ensure we load the same API version of the
+ # client.
+ #
+ # Note: This is relying on the fact that
+ # loader.load_service_model(..., api_version=None)
+ # and loader.determine_latest_version(..., 'resources-1')
+ # both load the same api version of the file.
+ api_version = self._loader.determine_latest_version(
+ service_name, 'resources-1')
+
+ # Creating a new resource instance requires the low-level client
+ # and service model, the resource version and resource JSON data.
+ # We pass these to the factory and get back a class, which is
+ # instantiated on top of the low-level client.
+ if config is not None:
+ if config.user_agent_extra is None:
+ config = copy.deepcopy(config)
+ config.user_agent_extra = 'Resource'
+ else:
+ config = Config(user_agent_extra='Resource')
+ client = self.client(
+ service_name, region_name=region_name, api_version=api_version,
+ use_ssl=use_ssl, verify=verify, endpoint_url=endpoint_url,
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ aws_session_token=aws_session_token, config=config)
+ service_model = client.meta.service_model
+
+ # Create a ServiceContext object to serve as a reference to
+ # important read-only information about the general service.
+ service_context = boto3.utils.ServiceContext(
service_name=service_name, service_model=service_model,
resource_json_definitions=resource_model['resources'],
service_waiter_model=boto3.utils.LazyLoadedWaiterModel(
self._session, service_name, api_version
)
- )
-
- # Create the service resource class.
- cls = self.resource_factory.load_from_definition(
- resource_name=service_name,
- single_resource_json_definition=resource_model['service'],
- service_context=service_context
- )
-
- return cls(client=client)
-
- def _register_default_handlers(self):
-
- # S3 customizations
- self._session.register(
- 'creating-client-class.s3',
- boto3.utils.lazy_call(
- 'boto3.s3.inject.inject_s3_transfer_methods'))
- self._session.register(
- 'creating-resource-class.s3.Bucket',
- boto3.utils.lazy_call(
- 'boto3.s3.inject.inject_bucket_methods'))
- self._session.register(
- 'creating-resource-class.s3.Object',
- boto3.utils.lazy_call(
- 'boto3.s3.inject.inject_object_methods'))
- self._session.register(
- 'creating-resource-class.s3.ObjectSummary',
- boto3.utils.lazy_call(
- 'boto3.s3.inject.inject_object_summary_methods'))
-
- # DynamoDB customizations
- self._session.register(
- 'creating-resource-class.dynamodb',
- boto3.utils.lazy_call(
- 'boto3.dynamodb.transform.register_high_level_interface'),
- unique_id='high-level-dynamodb')
- self._session.register(
- 'creating-resource-class.dynamodb.Table',
- boto3.utils.lazy_call(
- 'boto3.dynamodb.table.register_table_methods'),
- unique_id='high-level-dynamodb-table')
-
- # EC2 Customizations
- self._session.register(
- 'creating-resource-class.ec2.ServiceResource',
- boto3.utils.lazy_call(
- 'boto3.ec2.createtags.inject_create_tags'))
-
- self._session.register(
- 'creating-resource-class.ec2.Instance',
- boto3.utils.lazy_call(
- 'boto3.ec2.deletetags.inject_delete_tags',
- event_emitter=self.events))
+ )
+
+ # Create the service resource class.
+ cls = self.resource_factory.load_from_definition(
+ resource_name=service_name,
+ single_resource_json_definition=resource_model['service'],
+ service_context=service_context
+ )
+
+ return cls(client=client)
+
+ def _register_default_handlers(self):
+
+ # S3 customizations
+ self._session.register(
+ 'creating-client-class.s3',
+ boto3.utils.lazy_call(
+ 'boto3.s3.inject.inject_s3_transfer_methods'))
+ self._session.register(
+ 'creating-resource-class.s3.Bucket',
+ boto3.utils.lazy_call(
+ 'boto3.s3.inject.inject_bucket_methods'))
+ self._session.register(
+ 'creating-resource-class.s3.Object',
+ boto3.utils.lazy_call(
+ 'boto3.s3.inject.inject_object_methods'))
+ self._session.register(
+ 'creating-resource-class.s3.ObjectSummary',
+ boto3.utils.lazy_call(
+ 'boto3.s3.inject.inject_object_summary_methods'))
+
+ # DynamoDB customizations
+ self._session.register(
+ 'creating-resource-class.dynamodb',
+ boto3.utils.lazy_call(
+ 'boto3.dynamodb.transform.register_high_level_interface'),
+ unique_id='high-level-dynamodb')
+ self._session.register(
+ 'creating-resource-class.dynamodb.Table',
+ boto3.utils.lazy_call(
+ 'boto3.dynamodb.table.register_table_methods'),
+ unique_id='high-level-dynamodb-table')
+
+ # EC2 Customizations
+ self._session.register(
+ 'creating-resource-class.ec2.ServiceResource',
+ boto3.utils.lazy_call(
+ 'boto3.ec2.createtags.inject_create_tags'))
+
+ self._session.register(
+ 'creating-resource-class.ec2.Instance',
+ boto3.utils.lazy_call(
+ 'boto3.ec2.deletetags.inject_delete_tags',
+ event_emitter=self.events))
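
For orientation, a minimal usage sketch of the Session wrapper restored above; the region, profile, service, and table names are illustrative placeholders, not values taken from this change:

import boto3

session = boto3.session.Session(region_name='us-east-1')  # hypothetical region
print(session.region_name)                  # 'us-east-1'
print(session.profile_name)                 # 'default' unless a profile was configured
print(session.get_available_regions('s3'))  # e.g. ['af-south-1', 'ap-east-1', ...]

s3_client = session.client('s3')   # low-level botocore client
s3 = session.resource('s3')        # resource client; gains user_agent_extra='Resource'
table = session.resource('dynamodb').Table('my-table')  # hypothetical table; Table methods come from the handlers registered above

Per the docstrings above, an explicit region_name argument to client()/resource() takes precedence over a region_name set in a Config object, which in turn wins over environment variables and config-file values.
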
diff --git a/contrib/python/boto3/boto3/utils.py b/contrib/python/boto3/boto3/utils.py
index 5d258f90ca..826d39d67a 100644
--- a/contrib/python/boto3/boto3/utils.py
+++ b/contrib/python/boto3/boto3/utils.py
@@ -1,93 +1,93 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import sys
-from collections import namedtuple
-
-
-_ServiceContext = namedtuple(
- 'ServiceContext',
- ['service_name', 'service_model', 'service_waiter_model',
- 'resource_json_definitions']
-)
-
-
-class ServiceContext(_ServiceContext):
- """Provides important service-wide, read-only information about a service
-
- :type service_name: str
- :param service_name: The name of the service
-
- :type service_model: :py:class:`botocore.model.ServiceModel`
- :param service_model: The model of the service.
-
- :type service_waiter_model: :py:class:`botocore.waiter.WaiterModel` or
- a waiter model-like object such as
- :py:class:`boto3.utils.LazyLoadedWaiterModel`
- :param service_waiter_model: The waiter model of the service.
-
- :type resource_json_definitions: dict
- :param resource_json_definitions: The loaded json models of all resource
- shapes for a service. It is the equivalent of loading a
- ``resource-1.json`` and retrieving the value at the key "resources".
- """
- pass
-
-
-def import_module(name):
- """Import module given a name.
-
- Does not support relative imports.
-
- """
- __import__(name)
- return sys.modules[name]
-
-
-def lazy_call(full_name, **kwargs):
- parent_kwargs = kwargs
-
- def _handler(**kwargs):
- module, function_name = full_name.rsplit('.', 1)
- module = import_module(module)
- kwargs.update(parent_kwargs)
- return getattr(module, function_name)(**kwargs)
-
- return _handler
-
-
-def inject_attribute(class_attributes, name, value):
- if name in class_attributes:
- raise RuntimeError(
- 'Cannot inject class attribute "%s", attribute '
- 'already exists in class dict.' % name)
- else:
- class_attributes[name] = value
-
-
-class LazyLoadedWaiterModel(object):
- """A lazily loaded waiter model
-
- This does not load the service waiter model until an attempt is made
- to retrieve the waiter model for a specific waiter. This is helpful
- in docstring generation where we do not actually need to grab
- the waiter-2.json until it is accessed through a ``get_waiter`` call
- when the docstring is generated/accessed.
- """
- def __init__(self, bc_session, service_name, api_version):
- self._session = bc_session
- self._service_name = service_name
- self._api_version = api_version
-
- def get_waiter(self, waiter_name):
- return self._session.get_waiter_model(
- self._service_name, self._api_version).get_waiter(waiter_name)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import sys
+from collections import namedtuple
+
+
+_ServiceContext = namedtuple(
+ 'ServiceContext',
+ ['service_name', 'service_model', 'service_waiter_model',
+ 'resource_json_definitions']
+)
+
+
+class ServiceContext(_ServiceContext):
+ """Provides important service-wide, read-only information about a service
+
+ :type service_name: str
+ :param service_name: The name of the service
+
+ :type service_model: :py:class:`botocore.model.ServiceModel`
+ :param service_model: The model of the service.
+
+ :type service_waiter_model: :py:class:`botocore.waiter.WaiterModel` or
+ a waiter model-like object such as
+ :py:class:`boto3.utils.LazyLoadedWaiterModel`
+ :param service_waiter_model: The waiter model of the service.
+
+ :type resource_json_definitions: dict
+ :param resource_json_definitions: The loaded json models of all resource
+ shapes for a service. It is the equivalent of loading a
+ ``resource-1.json`` and retrieving the value at the key "resources".
+ """
+ pass
+
+
+def import_module(name):
+ """Import module given a name.
+
+ Does not support relative imports.
+
+ """
+ __import__(name)
+ return sys.modules[name]
+
+
+def lazy_call(full_name, **kwargs):
+ parent_kwargs = kwargs
+
+ def _handler(**kwargs):
+ module, function_name = full_name.rsplit('.', 1)
+ module = import_module(module)
+ kwargs.update(parent_kwargs)
+ return getattr(module, function_name)(**kwargs)
+
+ return _handler
+
+
+def inject_attribute(class_attributes, name, value):
+ if name in class_attributes:
+ raise RuntimeError(
+ 'Cannot inject class attribute "%s", attribute '
+ 'already exists in class dict.' % name)
+ else:
+ class_attributes[name] = value
+
+
+class LazyLoadedWaiterModel(object):
+ """A lazily loaded waiter model
+
+ This does not load the service waiter model until an attempt is made
+ to retrieve the waiter model for a specific waiter. This is helpful
+ in docstring generation where we do not actually need to grab
+ the waiter-2.json until it is accessed through a ``get_waiter`` call
+ when the docstring is generated/accessed.
+ """
+ def __init__(self, bc_session, service_name, api_version):
+ self._session = bc_session
+ self._service_name = service_name
+ self._api_version = api_version
+
+ def get_waiter(self, waiter_name):
+ return self._session.get_waiter_model(
+ self._service_name, self._api_version).get_waiter(waiter_name)
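
As a quick sketch of the helpers above: lazy_call defers the import until the returned handler is invoked, and import_module is a thin __import__ wrapper. json.dumps is used here only as a stand-in target; nothing in this module registers it:

from boto3.utils import import_module, lazy_call

handler = lazy_call('json.dumps', indent=2)  # nothing imported yet
print(handler(obj={'x': 1}))                 # imports json, then calls json.dumps(obj={'x': 1}, indent=2)

path_mod = import_module('os.path')          # plain __import__-based lookup; relative names unsupported

LazyLoadedWaiterModel applies the same idea to waiter data: waiter-2.json is only loaded when get_waiter() is first called, which keeps docstring generation cheap.
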
diff --git a/contrib/python/boto3/ya.make b/contrib/python/boto3/ya.make
index 7d0f9b8662..bd12ae12b7 100644
--- a/contrib/python/boto3/ya.make
+++ b/contrib/python/boto3/ya.make
@@ -1,11 +1,11 @@
PY23_LIBRARY()
-
+
OWNER(g:python-contrib)
VERSION(1.17.112)
-
+
LICENSE(Apache-2.0)
-
+
PEERDIR(
contrib/python/botocore
contrib/python/jmespath
@@ -14,20 +14,20 @@ PEERDIR(
NO_LINT()
-PY_SRCS(
- TOP_LEVEL
- boto3/__init__.py
+PY_SRCS(
+ TOP_LEVEL
+ boto3/__init__.py
boto3/compat.py
boto3/docs/__init__.py
boto3/docs/action.py
boto3/docs/attr.py
- boto3/docs/base.py
+ boto3/docs/base.py
boto3/docs/client.py
- boto3/docs/collection.py
+ boto3/docs/collection.py
boto3/docs/docstring.py
- boto3/docs/method.py
+ boto3/docs/method.py
boto3/docs/resource.py
- boto3/docs/service.py
+ boto3/docs/service.py
boto3/docs/subresource.py
boto3/docs/utils.py
boto3/docs/waiter.py
@@ -42,19 +42,19 @@ PY_SRCS(
boto3/exceptions.py
boto3/resources/__init__.py
boto3/resources/action.py
- boto3/resources/base.py
- boto3/resources/collection.py
- boto3/resources/factory.py
+ boto3/resources/base.py
+ boto3/resources/collection.py
+ boto3/resources/factory.py
boto3/resources/model.py
- boto3/resources/params.py
- boto3/resources/response.py
+ boto3/resources/params.py
+ boto3/resources/response.py
boto3/s3/__init__.py
boto3/s3/inject.py
boto3/s3/transfer.py
- boto3/session.py
+ boto3/session.py
boto3/utils.py
-)
-
+)
+
RESOURCE_FILES(
PREFIX contrib/python/boto3/
.dist-info/METADATA
@@ -79,4 +79,4 @@ RESOURCE_FILES(
boto3/examples/s3.rst
)
-END()
+END()
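
For reference, the skeleton the file above follows, reduced to its parts; the library paths, version, and file names here are hypothetical. PY_SRCS lists the Python sources and RESOURCE_FILES embeds data files under the given prefix:

PY23_LIBRARY()

OWNER(g:python-contrib)
VERSION(0.0.1)
LICENSE(Apache-2.0)

PEERDIR(
    contrib/python/some-dependency
)

NO_LINT()

PY_SRCS(
    TOP_LEVEL
    mypkg/__init__.py
)

RESOURCE_FILES(
    PREFIX contrib/python/mypkg/
    mypkg/data/example.json
)

END()
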
diff --git a/contrib/python/botocore/botocore/__init__.py b/contrib/python/botocore/botocore/__init__.py
index 7d99f860ae..bafd38dc3f 100644
--- a/contrib/python/botocore/botocore/__init__.py
+++ b/contrib/python/botocore/botocore/__init__.py
@@ -1,98 +1,98 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import os
-import re
-import logging
-
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import os
+import re
+import logging
+
__version__ = '1.20.112'
-
-
-class NullHandler(logging.Handler):
- def emit(self, record):
- pass
-
-# Configure default logger to do nothing
-log = logging.getLogger('botocore')
-log.addHandler(NullHandler())
-
-
-_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
-_end_cap_regex = re.compile('([a-z0-9])([A-Z])')
+
+
+class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+# Configure default logger to do nothing
+log = logging.getLogger('botocore')
+log.addHandler(NullHandler())
+
+
+_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
+_end_cap_regex = re.compile('([a-z0-9])([A-Z])')
# The regex below handles the special case where some acronym
-# name is pluralized, e.g. GatewayARNs, ListWebACLs, SomeCNAMEs.
+# name is pluralized, e.g. GatewayARNs, ListWebACLs, SomeCNAMEs.
_special_case_transform = re.compile('[A-Z]{2,}s$')
-# Prepopulate the cache with special cases that don't match
-# our regular transformation.
-_xform_cache = {
- ('CreateCachediSCSIVolume', '_'): 'create_cached_iscsi_volume',
- ('CreateCachediSCSIVolume', '-'): 'create-cached-iscsi-volume',
- ('DescribeCachediSCSIVolumes', '_'): 'describe_cached_iscsi_volumes',
- ('DescribeCachediSCSIVolumes', '-'): 'describe-cached-iscsi-volumes',
- ('DescribeStorediSCSIVolumes', '_'): 'describe_stored_iscsi_volumes',
- ('DescribeStorediSCSIVolumes', '-'): 'describe-stored-iscsi-volumes',
- ('CreateStorediSCSIVolume', '_'): 'create_stored_iscsi_volume',
- ('CreateStorediSCSIVolume', '-'): 'create-stored-iscsi-volume',
- ('ListHITsForQualificationType', '_'): 'list_hits_for_qualification_type',
- ('ListHITsForQualificationType', '-'): 'list-hits-for-qualification-type',
+# Prepopulate the cache with special cases that don't match
+# our regular transformation.
+_xform_cache = {
+ ('CreateCachediSCSIVolume', '_'): 'create_cached_iscsi_volume',
+ ('CreateCachediSCSIVolume', '-'): 'create-cached-iscsi-volume',
+ ('DescribeCachediSCSIVolumes', '_'): 'describe_cached_iscsi_volumes',
+ ('DescribeCachediSCSIVolumes', '-'): 'describe-cached-iscsi-volumes',
+ ('DescribeStorediSCSIVolumes', '_'): 'describe_stored_iscsi_volumes',
+ ('DescribeStorediSCSIVolumes', '-'): 'describe-stored-iscsi-volumes',
+ ('CreateStorediSCSIVolume', '_'): 'create_stored_iscsi_volume',
+ ('CreateStorediSCSIVolume', '-'): 'create-stored-iscsi-volume',
+ ('ListHITsForQualificationType', '_'): 'list_hits_for_qualification_type',
+ ('ListHITsForQualificationType', '-'): 'list-hits-for-qualification-type',
('ExecutePartiQLStatement', '_'): 'execute_partiql_statement',
('ExecutePartiQLStatement', '-'): 'execute-partiql-statement',
('ExecutePartiQLTransaction', '_'): 'execute_partiql_transaction',
('ExecutePartiQLTransaction', '-'): 'execute-partiql-transaction',
('ExecutePartiQLBatch', '_'): 'execute_partiql_batch',
('ExecutePartiQLBatch', '-'): 'execute-partiql-batch',
-}
-# The items in this dict represent partial renames to apply globally to all
-# services which might have a matching argument or operation. This way a
-# common mis-translation can be fixed without having to call out each
-# individual case.
-ScalarTypes = ('string', 'integer', 'boolean', 'timestamp', 'float', 'double')
-
+}
+# The items in this dict represent partial renames to apply globally to all
+# services which might have a matching argument or operation. This way a
+# common mis-translation can be fixed without having to call out each
+# individual case.
+ScalarTypes = ('string', 'integer', 'boolean', 'timestamp', 'float', 'double')
+
BOTOCORE_ROOT = os.path.dirname(__file__)
-
-
-# Used to specify anonymous (unsigned) request signature
-class UNSIGNED(object):
- def __copy__(self):
- return self
-
- def __deepcopy__(self, memodict):
- return self
-
-
-UNSIGNED = UNSIGNED()
-
-
+
+
+# Used to specify anonymous (unsigned) request signature
+class UNSIGNED(object):
+ def __copy__(self):
+ return self
+
+ def __deepcopy__(self, memodict):
+ return self
+
+
+UNSIGNED = UNSIGNED()
+
+
def xform_name(name, sep='_', _xform_cache=_xform_cache):
- """Convert camel case to a "pythonic" name.
-
- If the name contains the ``sep`` character, then it is
- returned unchanged.
-
- """
- if sep in name:
- # If the sep is in the name, assume that it's already
- # transformed and return the string unchanged.
- return name
- key = (name, sep)
- if key not in _xform_cache:
- if _special_case_transform.search(name) is not None:
- is_special = _special_case_transform.search(name)
- matched = is_special.group()
- # Replace something like ARNs, ACLs with _arns, _acls.
- name = name[:-len(matched)] + sep + matched.lower()
- s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name)
+ """Convert camel case to a "pythonic" name.
+
+ If the name contains the ``sep`` character, then it is
+ returned unchanged.
+
+ """
+ if sep in name:
+ # If the sep is in the name, assume that it's already
+ # transformed and return the string unchanged.
+ return name
+ key = (name, sep)
+ if key not in _xform_cache:
+ if _special_case_transform.search(name) is not None:
+ is_special = _special_case_transform.search(name)
+ matched = is_special.group()
+ # Replace something like ARNs, ACLs with _arns, _acls.
+ name = name[:-len(matched)] + sep + matched.lower()
+ s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name)
transformed = _end_cap_regex.sub(r'\1' + sep + r'\2', s1).lower()
- _xform_cache[key] = transformed
- return _xform_cache[key]
+ _xform_cache[key] = transformed
+ return _xform_cache[key]
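
The name transform and the UNSIGNED sentinel above are both importable from botocore; a short sketch of their observable behavior (the Config usage is just an example of where UNSIGNED goes):

from botocore import UNSIGNED, xform_name
from botocore.config import Config

xform_name('DescribeInstances')        # 'describe_instances'
xform_name('ListWebACLs')              # 'list_web_acls' (pluralized-acronym special case)
xform_name('CreateCachediSCSIVolume')  # 'create_cached_iscsi_volume' (prepopulated cache)
xform_name('already_snake_case')       # contains the separator, so returned unchanged

anon = Config(signature_version=UNSIGNED)  # request signing is skipped entirely
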
diff --git a/contrib/python/botocore/botocore/args.py b/contrib/python/botocore/botocore/args.py
index edf7176142..3baaa559dc 100644
--- a/contrib/python/botocore/botocore/args.py
+++ b/contrib/python/botocore/botocore/args.py
@@ -1,36 +1,36 @@
-# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""Internal module to help with normalizing botocore client args.
-
-This module (and all functions/classes within this module) should be
-considered internal, and *not* a public API.
-
-"""
-import copy
-import logging
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Internal module to help with normalizing botocore client args.
+
+This module (and all functions/classes within this module) should be
+considered internal, and *not* a public API.
+
+"""
+import copy
+import logging
import socket
-
+
import botocore.exceptions
-import botocore.serialize
+import botocore.serialize
import botocore.utils
-from botocore.signers import RequestSigner
-from botocore.config import Config
-from botocore.endpoint import EndpointCreator
-
-
-logger = logging.getLogger(__name__)
-
-
+from botocore.signers import RequestSigner
+from botocore.config import Config
+from botocore.endpoint import EndpointCreator
+
+
+logger = logging.getLogger(__name__)
+
+
VALID_REGIONAL_ENDPOINTS_CONFIG = [
'legacy',
'regional',
@@ -55,95 +55,95 @@ LEGACY_GLOBAL_STS_REGIONS = [
]
-class ClientArgsCreator(object):
- def __init__(self, event_emitter, user_agent, response_parser_factory,
+class ClientArgsCreator(object):
+ def __init__(self, event_emitter, user_agent, response_parser_factory,
loader, exceptions_factory, config_store):
- self._event_emitter = event_emitter
- self._user_agent = user_agent
- self._response_parser_factory = response_parser_factory
- self._loader = loader
- self._exceptions_factory = exceptions_factory
+ self._event_emitter = event_emitter
+ self._user_agent = user_agent
+ self._response_parser_factory = response_parser_factory
+ self._loader = loader
+ self._exceptions_factory = exceptions_factory
self._config_store = config_store
-
- def get_client_args(self, service_model, region_name, is_secure,
- endpoint_url, verify, credentials, scoped_config,
- client_config, endpoint_bridge):
- final_args = self.compute_client_args(
- service_model, client_config, endpoint_bridge, region_name,
- endpoint_url, is_secure, scoped_config)
-
- service_name = final_args['service_name']
- parameter_validation = final_args['parameter_validation']
- endpoint_config = final_args['endpoint_config']
- protocol = final_args['protocol']
- config_kwargs = final_args['config_kwargs']
- s3_config = final_args['s3_config']
- partition = endpoint_config['metadata'].get('partition', None)
+
+ def get_client_args(self, service_model, region_name, is_secure,
+ endpoint_url, verify, credentials, scoped_config,
+ client_config, endpoint_bridge):
+ final_args = self.compute_client_args(
+ service_model, client_config, endpoint_bridge, region_name,
+ endpoint_url, is_secure, scoped_config)
+
+ service_name = final_args['service_name']
+ parameter_validation = final_args['parameter_validation']
+ endpoint_config = final_args['endpoint_config']
+ protocol = final_args['protocol']
+ config_kwargs = final_args['config_kwargs']
+ s3_config = final_args['s3_config']
+ partition = endpoint_config['metadata'].get('partition', None)
socket_options = final_args['socket_options']
-
- signing_region = endpoint_config['signing_region']
- endpoint_region_name = endpoint_config['region_name']
-
- event_emitter = copy.copy(self._event_emitter)
- signer = RequestSigner(
+
+ signing_region = endpoint_config['signing_region']
+ endpoint_region_name = endpoint_config['region_name']
+
+ event_emitter = copy.copy(self._event_emitter)
+ signer = RequestSigner(
service_model.service_id, signing_region,
- endpoint_config['signing_name'],
- endpoint_config['signature_version'],
+ endpoint_config['signing_name'],
+ endpoint_config['signature_version'],
credentials, event_emitter
)
-
- config_kwargs['s3'] = s3_config
- new_config = Config(**config_kwargs)
- endpoint_creator = EndpointCreator(event_emitter)
-
- endpoint = endpoint_creator.create_endpoint(
- service_model, region_name=endpoint_region_name,
- endpoint_url=endpoint_config['endpoint_url'], verify=verify,
- response_parser_factory=self._response_parser_factory,
- max_pool_connections=new_config.max_pool_connections,
- proxies=new_config.proxies,
+
+ config_kwargs['s3'] = s3_config
+ new_config = Config(**config_kwargs)
+ endpoint_creator = EndpointCreator(event_emitter)
+
+ endpoint = endpoint_creator.create_endpoint(
+ service_model, region_name=endpoint_region_name,
+ endpoint_url=endpoint_config['endpoint_url'], verify=verify,
+ response_parser_factory=self._response_parser_factory,
+ max_pool_connections=new_config.max_pool_connections,
+ proxies=new_config.proxies,
timeout=(new_config.connect_timeout, new_config.read_timeout),
socket_options=socket_options,
client_cert=new_config.client_cert,
proxies_config=new_config.proxies_config)
-
- serializer = botocore.serialize.create_serializer(
- protocol, parameter_validation)
- response_parser = botocore.parsers.create_parser(protocol)
- return {
- 'serializer': serializer,
- 'endpoint': endpoint,
- 'response_parser': response_parser,
- 'event_emitter': event_emitter,
- 'request_signer': signer,
- 'service_model': service_model,
- 'loader': self._loader,
- 'client_config': new_config,
- 'partition': partition,
- 'exceptions_factory': self._exceptions_factory
- }
-
- def compute_client_args(self, service_model, client_config,
- endpoint_bridge, region_name, endpoint_url,
- is_secure, scoped_config):
- service_name = service_model.endpoint_prefix
- protocol = service_model.metadata['protocol']
- parameter_validation = True
- if client_config and not client_config.parameter_validation:
- parameter_validation = False
- elif scoped_config:
+
+ serializer = botocore.serialize.create_serializer(
+ protocol, parameter_validation)
+ response_parser = botocore.parsers.create_parser(protocol)
+ return {
+ 'serializer': serializer,
+ 'endpoint': endpoint,
+ 'response_parser': response_parser,
+ 'event_emitter': event_emitter,
+ 'request_signer': signer,
+ 'service_model': service_model,
+ 'loader': self._loader,
+ 'client_config': new_config,
+ 'partition': partition,
+ 'exceptions_factory': self._exceptions_factory
+ }
+
+ def compute_client_args(self, service_model, client_config,
+ endpoint_bridge, region_name, endpoint_url,
+ is_secure, scoped_config):
+ service_name = service_model.endpoint_prefix
+ protocol = service_model.metadata['protocol']
+ parameter_validation = True
+ if client_config and not client_config.parameter_validation:
+ parameter_validation = False
+ elif scoped_config:
raw_value = scoped_config.get('parameter_validation')
if raw_value is not None:
parameter_validation = botocore.utils.ensure_boolean(raw_value)
-
- # Override the user agent if specified in the client config.
- user_agent = self._user_agent
- if client_config is not None:
- if client_config.user_agent is not None:
- user_agent = client_config.user_agent
- if client_config.user_agent_extra is not None:
- user_agent += ' %s' % client_config.user_agent_extra
-
+
+ # Override the user agent if specified in the client config.
+ user_agent = self._user_agent
+ if client_config is not None:
+ if client_config.user_agent is not None:
+ user_agent = client_config.user_agent
+ if client_config.user_agent_extra is not None:
+ user_agent += ' %s' % client_config.user_agent_extra
+
s3_config = self.compute_s3_config(client_config)
endpoint_config = self._compute_endpoint_config(
service_name=service_name,
@@ -153,56 +153,56 @@ class ClientArgsCreator(object):
endpoint_bridge=endpoint_bridge,
s3_config=s3_config,
)
- # Create a new client config to be passed to the client based
- # on the final values. We do not want the user to be able
- # to try to modify an existing client with a client config.
- config_kwargs = dict(
- region_name=endpoint_config['region_name'],
- signature_version=endpoint_config['signature_version'],
- user_agent=user_agent)
- if client_config is not None:
- config_kwargs.update(
- connect_timeout=client_config.connect_timeout,
- read_timeout=client_config.read_timeout,
- max_pool_connections=client_config.max_pool_connections,
- proxies=client_config.proxies,
+ # Create a new client config to be passed to the client based
+ # on the final values. We do not want the user to be able
+ # to try to modify an existing client with a client config.
+ config_kwargs = dict(
+ region_name=endpoint_config['region_name'],
+ signature_version=endpoint_config['signature_version'],
+ user_agent=user_agent)
+ if client_config is not None:
+ config_kwargs.update(
+ connect_timeout=client_config.connect_timeout,
+ read_timeout=client_config.read_timeout,
+ max_pool_connections=client_config.max_pool_connections,
+ proxies=client_config.proxies,
proxies_config=client_config.proxies_config,
retries=client_config.retries,
client_cert=client_config.client_cert,
inject_host_prefix=client_config.inject_host_prefix,
- )
+ )
self._compute_retry_config(config_kwargs)
s3_config = self.compute_s3_config(client_config)
- return {
- 'service_name': service_name,
- 'parameter_validation': parameter_validation,
- 'user_agent': user_agent,
- 'endpoint_config': endpoint_config,
- 'protocol': protocol,
- 'config_kwargs': config_kwargs,
- 's3_config': s3_config,
+ return {
+ 'service_name': service_name,
+ 'parameter_validation': parameter_validation,
+ 'user_agent': user_agent,
+ 'endpoint_config': endpoint_config,
+ 'protocol': protocol,
+ 'config_kwargs': config_kwargs,
+ 's3_config': s3_config,
'socket_options': self._compute_socket_options(scoped_config)
- }
-
+ }
+
def compute_s3_config(self, client_config):
s3_configuration = self._config_store.get_config_variable('s3')
-
- # Next, specific client config values take precedence over
- # specific values in the scoped config.
- if client_config is not None:
- if client_config.s3 is not None:
- if s3_configuration is None:
- s3_configuration = client_config.s3
- else:
- # The current s3_configuration dictionary may be
- # from a source that should only be read from, so
- # to be safe we make a copy of it to modify
- # before it actually gets updated.
- s3_configuration = s3_configuration.copy()
- s3_configuration.update(client_config.s3)
-
- return s3_configuration
-
+
+ # Next, specific client config values take precedence over
+ # specific values in the scoped config.
+ if client_config is not None:
+ if client_config.s3 is not None:
+ if s3_configuration is None:
+ s3_configuration = client_config.s3
+ else:
+ # The current s3_configuration dictionary may be
+ # from a source that should only be read from, so
+ # to be safe we make a copy of it to modify
+ # before it actually gets updated.
+ s3_configuration = s3_configuration.copy()
+ s3_configuration.update(client_config.s3)
+
+ return s3_configuration
+
def _compute_endpoint_config(self, service_name, region_name, endpoint_url,
is_secure, endpoint_bridge, s3_config):
resolve_endpoint_kwargs = {
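
compute_s3_config above merges the config-store value with the client Config's s3 dictionary key by key, with client values winning; the copy-then-update keeps the stored dictionary untouched. A sketch of the caller-facing side, where the addressing style is only an example key:

import boto3
from botocore.config import Config

cfg = Config(s3={'addressing_style': 'virtual'})  # overrides just this key; other stored s3 keys survive the merge
client = boto3.client('s3', config=cfg)
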
@@ -218,7 +218,7 @@ class ClientArgsCreator(object):
if service_name == 'sts':
return self._compute_sts_endpoint_config(**resolve_endpoint_kwargs)
return self._resolve_endpoint(**resolve_endpoint_kwargs)
-
+
def _compute_s3_endpoint_config(self, s3_config,
**resolve_endpoint_kwargs):
force_s3_global = self._should_force_s3_global(
@@ -255,12 +255,12 @@ class ClientArgsCreator(object):
def _set_region_if_custom_s3_endpoint(self, endpoint_config,
endpoint_bridge):
- # If a user is providing a custom URL, the endpoint resolver will
- # refuse to infer a signing region. If we want to default to s3v4,
- # we have to account for this.
+ # If a user is providing a custom URL, the endpoint resolver will
+ # refuse to infer a signing region. If we want to default to s3v4,
+ # we have to account for this.
if endpoint_config['signing_region'] is None \
and endpoint_config['region_name'] is None:
- endpoint = endpoint_bridge.resolve('s3')
+ endpoint = endpoint_bridge.resolve('s3')
endpoint_config['signing_region'] = endpoint['signing_region']
endpoint_config['region_name'] = endpoint['region_name']
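
_set_region_if_custom_s3_endpoint above exists because a custom endpoint_url gives the endpoint resolver nothing to infer a signing region from. One way to be explicit on both counts; the URL here is a placeholder for any S3-compatible endpoint:

import boto3
from botocore.config import Config

s3 = boto3.client(
    's3',
    endpoint_url='http://localhost:9000',     # hypothetical custom endpoint
    region_name='us-east-1',                  # supply what the resolver cannot infer
    config=Config(signature_version='s3v4'),  # pin SigV4 instead of relying on fallbacks
)
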
diff --git a/contrib/python/botocore/botocore/auth.py b/contrib/python/botocore/botocore/auth.py
index 6b4f376314..80f8e8bd30 100644
--- a/contrib/python/botocore/botocore/auth.py
+++ b/contrib/python/botocore/botocore/auth.py
@@ -1,54 +1,54 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import base64
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import base64
import calendar
-import datetime
+import datetime
import functools
from email.utils import formatdate
from hashlib import sha1, sha256
-import hmac
+import hmac
from io import BytesIO
-import logging
-from operator import itemgetter
-import time
-
+import logging
+from operator import itemgetter
+import time
+
from botocore.compat import(
encodebytes, ensure_unicode, HTTPHeaders, json, parse_qs, quote,
six, unquote, urlsplit, urlunsplit, HAS_CRT, MD5_AVAILABLE
)
-from botocore.exceptions import NoCredentialsError
-from botocore.utils import normalize_url_path, percent_encode_sequence
-
-logger = logging.getLogger(__name__)
-
-
-EMPTY_SHA256_HASH = (
- 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
-# This is the buffer size used when calculating sha256 checksums.
-# Experimenting with various buffer sizes showed that this value generally
-# gave the best result (in terms of performance).
-PAYLOAD_BUFFER = 1024 * 1024
-ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
-SIGV4_TIMESTAMP = '%Y%m%dT%H%M%SZ'
-SIGNED_HEADERS_BLACKLIST = [
- 'expect',
- 'user-agent',
- 'x-amzn-trace-id',
-]
-UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD'
-
-
+from botocore.exceptions import NoCredentialsError
+from botocore.utils import normalize_url_path, percent_encode_sequence
+
+logger = logging.getLogger(__name__)
+
+
+EMPTY_SHA256_HASH = (
+ 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
+# This is the buffer size used when calculating sha256 checksums.
+# Experimenting with various buffer sizes showed that this value generally
+# gave the best result (in terms of performance).
+PAYLOAD_BUFFER = 1024 * 1024
+ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
+SIGV4_TIMESTAMP = '%Y%m%dT%H%M%SZ'
+SIGNED_HEADERS_BLACKLIST = [
+ 'expect',
+ 'user-agent',
+ 'x-amzn-trace-id',
+]
+UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD'
+
+
def _host_from_url(url):
# Given URL, derive value for host header. Ensure that value:
# 1) is lowercase
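
Ahead of the signer classes below, a self-contained sketch of the SigV2 string-to-sign that SigV2Auth.calc_signature constructs; the method, host, parameters, and secret key are made-up values:

import base64
import hmac
from hashlib import sha256
from urllib.parse import quote

method, host, path = 'POST', 'ec2.amazonaws.com', '/'
params = {'Action': 'DescribeInstances', 'Version': '2016-11-15'}

# Keys are sorted; keys are fully percent-encoded while values keep -_~
# unescaped, mirroring calc_signature below.
pairs = ['%s=%s' % (quote(k.encode('utf-8'), safe=''),
                    quote(str(params[k]).encode('utf-8'), safe='-_~'))
         for k in sorted(params)]
string_to_sign = '%s\n%s\n%s\n%s' % (method, host, path, '&'.join(pairs))

signature = base64.b64encode(
    hmac.new(b'dummy-secret-key', string_to_sign.encode('utf-8'), sha256).digest()
).strip().decode('utf-8')
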
@@ -79,148 +79,148 @@ def _get_body_as_dict(request):
return data
-class BaseSigner(object):
- REQUIRES_REGION = False
-
- def add_auth(self, request):
- raise NotImplementedError("add_auth")
-
-
-class SigV2Auth(BaseSigner):
- """
- Sign a request with Signature V2.
- """
-
- def __init__(self, credentials):
- self.credentials = credentials
-
- def calc_signature(self, request, params):
- logger.debug("Calculating signature using v2 auth.")
- split = urlsplit(request.url)
- path = split.path
- if len(path) == 0:
- path = '/'
- string_to_sign = '%s\n%s\n%s\n' % (request.method,
- split.netloc,
- path)
- lhmac = hmac.new(self.credentials.secret_key.encode('utf-8'),
- digestmod=sha256)
- pairs = []
- for key in sorted(params):
- # Any previous signature should not be a part of this
- # one, so we skip that particular key. This prevents
- # issues during retries.
- if key == 'Signature':
- continue
- value = six.text_type(params[key])
- pairs.append(quote(key.encode('utf-8'), safe='') + '=' +
- quote(value.encode('utf-8'), safe='-_~'))
- qs = '&'.join(pairs)
- string_to_sign += qs
- logger.debug('String to sign: %s', string_to_sign)
- lhmac.update(string_to_sign.encode('utf-8'))
- b64 = base64.b64encode(lhmac.digest()).strip().decode('utf-8')
- return (qs, b64)
-
- def add_auth(self, request):
- # The auth handler is the last thing called in the
- # preparation phase of a prepared request.
- # Because of this we have to parse the query params
- # from the request body so we can update them with
- # the sigv2 auth params.
- if self.credentials is None:
+class BaseSigner(object):
+ REQUIRES_REGION = False
+
+ def add_auth(self, request):
+ raise NotImplementedError("add_auth")
+
+
+class SigV2Auth(BaseSigner):
+ """
+ Sign a request with Signature V2.
+ """
+
+ def __init__(self, credentials):
+ self.credentials = credentials
+
+ def calc_signature(self, request, params):
+ logger.debug("Calculating signature using v2 auth.")
+ split = urlsplit(request.url)
+ path = split.path
+ if len(path) == 0:
+ path = '/'
+ string_to_sign = '%s\n%s\n%s\n' % (request.method,
+ split.netloc,
+ path)
+ lhmac = hmac.new(self.credentials.secret_key.encode('utf-8'),
+ digestmod=sha256)
+ pairs = []
+ for key in sorted(params):
+ # Any previous signature should not be a part of this
+ # one, so we skip that particular key. This prevents
+ # issues during retries.
+ if key == 'Signature':
+ continue
+ value = six.text_type(params[key])
+ pairs.append(quote(key.encode('utf-8'), safe='') + '=' +
+ quote(value.encode('utf-8'), safe='-_~'))
+ qs = '&'.join(pairs)
+ string_to_sign += qs
+ logger.debug('String to sign: %s', string_to_sign)
+ lhmac.update(string_to_sign.encode('utf-8'))
+ b64 = base64.b64encode(lhmac.digest()).strip().decode('utf-8')
+ return (qs, b64)
+
+ def add_auth(self, request):
+ # The auth handler is the last thing called in the
+ # preparation phase of a prepared request.
+ # Because of this we have to parse the query params
+ # from the request body so we can update them with
+ # the sigv2 auth params.
+ if self.credentials is None:
raise NoCredentialsError()
- if request.data:
- # POST
- params = request.data
- else:
- # GET
- params = request.params
- params['AWSAccessKeyId'] = self.credentials.access_key
- params['SignatureVersion'] = '2'
- params['SignatureMethod'] = 'HmacSHA256'
- params['Timestamp'] = time.strftime(ISO8601, time.gmtime())
- if self.credentials.token:
- params['SecurityToken'] = self.credentials.token
- qs, signature = self.calc_signature(request, params)
- params['Signature'] = signature
- return request
-
-
-class SigV3Auth(BaseSigner):
- def __init__(self, credentials):
- self.credentials = credentials
-
- def add_auth(self, request):
- if self.credentials is None:
+ if request.data:
+ # POST
+ params = request.data
+ else:
+ # GET
+ params = request.params
+ params['AWSAccessKeyId'] = self.credentials.access_key
+ params['SignatureVersion'] = '2'
+ params['SignatureMethod'] = 'HmacSHA256'
+ params['Timestamp'] = time.strftime(ISO8601, time.gmtime())
+ if self.credentials.token:
+ params['SecurityToken'] = self.credentials.token
+ qs, signature = self.calc_signature(request, params)
+ params['Signature'] = signature
+ return request
+
+
+class SigV3Auth(BaseSigner):
+ def __init__(self, credentials):
+ self.credentials = credentials
+
+ def add_auth(self, request):
+ if self.credentials is None:
raise NoCredentialsError()
- if 'Date' in request.headers:
- del request.headers['Date']
- request.headers['Date'] = formatdate(usegmt=True)
- if self.credentials.token:
- if 'X-Amz-Security-Token' in request.headers:
- del request.headers['X-Amz-Security-Token']
- request.headers['X-Amz-Security-Token'] = self.credentials.token
- new_hmac = hmac.new(self.credentials.secret_key.encode('utf-8'),
- digestmod=sha256)
- new_hmac.update(request.headers['Date'].encode('utf-8'))
- encoded_signature = encodebytes(new_hmac.digest()).strip()
- signature = ('AWS3-HTTPS AWSAccessKeyId=%s,Algorithm=%s,Signature=%s' %
- (self.credentials.access_key, 'HmacSHA256',
- encoded_signature.decode('utf-8')))
- if 'X-Amzn-Authorization' in request.headers:
- del request.headers['X-Amzn-Authorization']
- request.headers['X-Amzn-Authorization'] = signature
-
-
-class SigV4Auth(BaseSigner):
- """
- Sign a request with Signature V4.
- """
- REQUIRES_REGION = True
-
- def __init__(self, credentials, service_name, region_name):
- self.credentials = credentials
- # We initialize these values here so the unit tests can have
- # valid values. But these will get overridden in ``add_auth``
- # later for real requests.
- self._region_name = region_name
- self._service_name = service_name
-
- def _sign(self, key, msg, hex=False):
- if hex:
- sig = hmac.new(key, msg.encode('utf-8'), sha256).hexdigest()
- else:
- sig = hmac.new(key, msg.encode('utf-8'), sha256).digest()
- return sig
-
- def headers_to_sign(self, request):
- """
- Select the headers from the request that need to be included
- in the StringToSign.
- """
- header_map = HTTPHeaders()
- for name, value in request.headers.items():
- lname = name.lower()
- if lname not in SIGNED_HEADERS_BLACKLIST:
- header_map[lname] = value
- if 'host' not in header_map:
+ if 'Date' in request.headers:
+ del request.headers['Date']
+ request.headers['Date'] = formatdate(usegmt=True)
+ if self.credentials.token:
+ if 'X-Amz-Security-Token' in request.headers:
+ del request.headers['X-Amz-Security-Token']
+ request.headers['X-Amz-Security-Token'] = self.credentials.token
+ new_hmac = hmac.new(self.credentials.secret_key.encode('utf-8'),
+ digestmod=sha256)
+ new_hmac.update(request.headers['Date'].encode('utf-8'))
+ encoded_signature = encodebytes(new_hmac.digest()).strip()
+ signature = ('AWS3-HTTPS AWSAccessKeyId=%s,Algorithm=%s,Signature=%s' %
+ (self.credentials.access_key, 'HmacSHA256',
+ encoded_signature.decode('utf-8')))
+ if 'X-Amzn-Authorization' in request.headers:
+ del request.headers['X-Amzn-Authorization']
+ request.headers['X-Amzn-Authorization'] = signature
+
+
+class SigV4Auth(BaseSigner):
+ """
+ Sign a request with Signature V4.
+ """
+ REQUIRES_REGION = True
+
+ def __init__(self, credentials, service_name, region_name):
+ self.credentials = credentials
+ # We initialize these values here so the unit tests can have
+ # valid values. But these will get overridden in ``add_auth``
+ # later for real requests.
+ self._region_name = region_name
+ self._service_name = service_name
+
+ def _sign(self, key, msg, hex=False):
+ if hex:
+ sig = hmac.new(key, msg.encode('utf-8'), sha256).hexdigest()
+ else:
+ sig = hmac.new(key, msg.encode('utf-8'), sha256).digest()
+ return sig
+
+ def headers_to_sign(self, request):
+ """
+ Select the headers from the request that need to be included
+ in the StringToSign.
+ """
+ header_map = HTTPHeaders()
+ for name, value in request.headers.items():
+ lname = name.lower()
+ if lname not in SIGNED_HEADERS_BLACKLIST:
+ header_map[lname] = value
+ if 'host' not in header_map:
# TODO: We should set the host ourselves, instead of relying on our
# HTTP client to set it for us.
header_map['host'] = _host_from_url(request.url)
- return header_map
-
- def canonical_query_string(self, request):
- # The query string can come from two parts. One is the
- # params attribute of the request. The other is from the request
- # url (in which case we have to re-split the url into its components
- # and parse out the query string component).
- if request.params:
- return self._canonical_query_string_params(request.params)
- else:
- return self._canonical_query_string_url(urlsplit(request.url))
-
- def _canonical_query_string_params(self, params):
+ return header_map
+
+ def canonical_query_string(self, request):
+ # The query string can come from two parts. One is the
+ # params attribute of the request. The other is from the request
+ # url (in which case we have to re-split the url into its components
+ # and parse out the query string component).
+ if request.params:
+ return self._canonical_query_string_params(request.params)
+ else:
+ return self._canonical_query_string_url(urlsplit(request.url))
+
+ def _canonical_query_string_params(self, params):
# [(key, value), (key2, value2)]
key_val_pairs = []
for key in params:
@@ -234,255 +234,255 @@ class SigV4Auth(BaseSigner):
sorted_key_vals.append('%s=%s' % (key, value))
canonical_query_string = '&'.join(sorted_key_vals)
return canonical_query_string
-
- def _canonical_query_string_url(self, parts):
- canonical_query_string = ''
- if parts.query:
- # [(key, value), (key2, value2)]
- key_val_pairs = []
- for pair in parts.query.split('&'):
- key, _, value = pair.partition('=')
- key_val_pairs.append((key, value))
- sorted_key_vals = []
+
+ def _canonical_query_string_url(self, parts):
+ canonical_query_string = ''
+ if parts.query:
+ # [(key, value), (key2, value2)]
+ key_val_pairs = []
+ for pair in parts.query.split('&'):
+ key, _, value = pair.partition('=')
+ key_val_pairs.append((key, value))
+ sorted_key_vals = []
# Sort by the URI-encoded key names, and in the case of
- # repeated keys, then sort by the value.
- for key, value in sorted(key_val_pairs):
- sorted_key_vals.append('%s=%s' % (key, value))
- canonical_query_string = '&'.join(sorted_key_vals)
- return canonical_query_string
-
- def canonical_headers(self, headers_to_sign):
- """
- Return the headers that need to be included in the StringToSign
- in their canonical form by converting all header keys to lower
- case, sorting them in alphabetical order and then joining
- them into a string, separated by newlines.
- """
- headers = []
- sorted_header_names = sorted(set(headers_to_sign))
- for key in sorted_header_names:
- value = ','.join(self._header_value(v) for v in
+ # repeated keys, then sort by the value.
+ for key, value in sorted(key_val_pairs):
+ sorted_key_vals.append('%s=%s' % (key, value))
+ canonical_query_string = '&'.join(sorted_key_vals)
+ return canonical_query_string
+
+ def canonical_headers(self, headers_to_sign):
+ """
+ Return the headers that need to be included in the StringToSign
+ in their canonical form by converting all header keys to lower
+ case, sorting them in alphabetical order and then joining
+ them into a string, separated by newlines.
+ """
+ headers = []
+ sorted_header_names = sorted(set(headers_to_sign))
+ for key in sorted_header_names:
+ value = ','.join(self._header_value(v) for v in
headers_to_sign.get_all(key))
- headers.append('%s:%s' % (key, ensure_unicode(value)))
- return '\n'.join(headers)
-
- def _header_value(self, value):
- # From the sigv4 docs:
- # Lowercase(HeaderName) + ':' + Trimall(HeaderValue)
- #
- # The Trimall function removes excess white space before and after
- # values, and converts sequential spaces to a single space.
- return ' '.join(value.split())
-
- def signed_headers(self, headers_to_sign):
- l = ['%s' % n.lower().strip() for n in set(headers_to_sign)]
- l = sorted(l)
- return ';'.join(l)
-
- def payload(self, request):
- if not self._should_sha256_sign_payload(request):
- # When payload signing is disabled, we use this static string in
- # place of the payload checksum.
- return UNSIGNED_PAYLOAD
+ headers.append('%s:%s' % (key, ensure_unicode(value)))
+ return '\n'.join(headers)
+
+ def _header_value(self, value):
+ # From the sigv4 docs:
+ # Lowercase(HeaderName) + ':' + Trimall(HeaderValue)
+ #
+ # The Trimall function removes excess white space before and after
+ # values, and converts sequential spaces to a single space.
+ return ' '.join(value.split())
+
+ def signed_headers(self, headers_to_sign):
+ l = ['%s' % n.lower().strip() for n in set(headers_to_sign)]
+ l = sorted(l)
+ return ';'.join(l)
+
+ def payload(self, request):
+ if not self._should_sha256_sign_payload(request):
+ # When payload signing is disabled, we use this static string in
+ # place of the payload checksum.
+ return UNSIGNED_PAYLOAD
request_body = request.body
if request_body and hasattr(request_body, 'seek'):
position = request_body.tell()
read_chunksize = functools.partial(request_body.read,
- PAYLOAD_BUFFER)
- checksum = sha256()
- for chunk in iter(read_chunksize, b''):
- checksum.update(chunk)
- hex_checksum = checksum.hexdigest()
+ PAYLOAD_BUFFER)
+ checksum = sha256()
+ for chunk in iter(read_chunksize, b''):
+ checksum.update(chunk)
+ hex_checksum = checksum.hexdigest()
request_body.seek(position)
- return hex_checksum
+ return hex_checksum
elif request_body:
- # The request serialization has ensured that
- # request.body is a bytes() type.
+ # The request serialization has ensured that
+ # request.body is a bytes() type.
return sha256(request_body).hexdigest()
- else:
- return EMPTY_SHA256_HASH
-
- def _should_sha256_sign_payload(self, request):
- # Payloads will always be signed over insecure connections.
- if not request.url.startswith('https'):
- return True
-
- # Certain operations may have payload signing disabled by default.
- # Since we don't have access to the operation model, we pass in this
- # bit of metadata through the request context.
- return request.context.get('payload_signing_enabled', True)
-
- def canonical_request(self, request):
- cr = [request.method.upper()]
- path = self._normalize_url_path(urlsplit(request.url).path)
- cr.append(path)
- cr.append(self.canonical_query_string(request))
- headers_to_sign = self.headers_to_sign(request)
- cr.append(self.canonical_headers(headers_to_sign) + '\n')
- cr.append(self.signed_headers(headers_to_sign))
- if 'X-Amz-Content-SHA256' in request.headers:
- body_checksum = request.headers['X-Amz-Content-SHA256']
- else:
- body_checksum = self.payload(request)
- cr.append(body_checksum)
- return '\n'.join(cr)
-
- def _normalize_url_path(self, path):
- normalized_path = quote(normalize_url_path(path), safe='/~')
- return normalized_path
-
- def scope(self, request):
- scope = [self.credentials.access_key]
- scope.append(request.context['timestamp'][0:8])
- scope.append(self._region_name)
- scope.append(self._service_name)
- scope.append('aws4_request')
- return '/'.join(scope)
-
- def credential_scope(self, request):
- scope = []
- scope.append(request.context['timestamp'][0:8])
- scope.append(self._region_name)
- scope.append(self._service_name)
- scope.append('aws4_request')
- return '/'.join(scope)
-
- def string_to_sign(self, request, canonical_request):
- """
- Return the canonical StringToSign: the algorithm name, request
- timestamp, credential scope, and hash of the canonical request,
- joined by newlines.
- """
- sts = ['AWS4-HMAC-SHA256']
- sts.append(request.context['timestamp'])
- sts.append(self.credential_scope(request))
- sts.append(sha256(canonical_request.encode('utf-8')).hexdigest())
- return '\n'.join(sts)
-
- def signature(self, string_to_sign, request):
- key = self.credentials.secret_key
- k_date = self._sign(('AWS4' + key).encode('utf-8'),
- request.context['timestamp'][0:8])
- k_region = self._sign(k_date, self._region_name)
- k_service = self._sign(k_region, self._service_name)
- k_signing = self._sign(k_service, 'aws4_request')
- return self._sign(k_signing, string_to_sign, hex=True)
-
- def add_auth(self, request):
- if self.credentials is None:
+ else:
+ return EMPTY_SHA256_HASH
+
+ def _should_sha256_sign_payload(self, request):
+ # Payloads will always be signed over insecure connections.
+ if not request.url.startswith('https'):
+ return True
+
+ # Certain operations may have payload signing disabled by default.
+ # Since we don't have access to the operation model, we pass in this
+ # bit of metadata through the request context.
+ return request.context.get('payload_signing_enabled', True)
+
+ def canonical_request(self, request):
+ cr = [request.method.upper()]
+ path = self._normalize_url_path(urlsplit(request.url).path)
+ cr.append(path)
+ cr.append(self.canonical_query_string(request))
+ headers_to_sign = self.headers_to_sign(request)
+ cr.append(self.canonical_headers(headers_to_sign) + '\n')
+ cr.append(self.signed_headers(headers_to_sign))
+ if 'X-Amz-Content-SHA256' in request.headers:
+ body_checksum = request.headers['X-Amz-Content-SHA256']
+ else:
+ body_checksum = self.payload(request)
+ cr.append(body_checksum)
+ return '\n'.join(cr)
+
+ def _normalize_url_path(self, path):
+ normalized_path = quote(normalize_url_path(path), safe='/~')
+ return normalized_path
+
+ def scope(self, request):
+ scope = [self.credentials.access_key]
+ scope.append(request.context['timestamp'][0:8])
+ scope.append(self._region_name)
+ scope.append(self._service_name)
+ scope.append('aws4_request')
+ return '/'.join(scope)
+
+ def credential_scope(self, request):
+ scope = []
+ scope.append(request.context['timestamp'][0:8])
+ scope.append(self._region_name)
+ scope.append(self._service_name)
+ scope.append('aws4_request')
+ return '/'.join(scope)
+
+ def string_to_sign(self, request, canonical_request):
+ """
+ Return the canonical StringToSign: the algorithm name, request
+ timestamp, credential scope, and hash of the canonical request,
+ joined by newlines.
+ """
+ sts = ['AWS4-HMAC-SHA256']
+ sts.append(request.context['timestamp'])
+ sts.append(self.credential_scope(request))
+ sts.append(sha256(canonical_request.encode('utf-8')).hexdigest())
+ return '\n'.join(sts)
+
+ def signature(self, string_to_sign, request):
+ key = self.credentials.secret_key
+ k_date = self._sign(('AWS4' + key).encode('utf-8'),
+ request.context['timestamp'][0:8])
+ k_region = self._sign(k_date, self._region_name)
+ k_service = self._sign(k_region, self._service_name)
+ k_signing = self._sign(k_service, 'aws4_request')
+ return self._sign(k_signing, string_to_sign, hex=True)
+
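To make the derivation chain in signature() above concrete, here is the same cascade as straight-line code; the date, region, service, and secret are placeholder values.

import hmac
from hashlib import sha256

def _hmac(key, msg):
    return hmac.new(key, msg.encode('utf-8'), sha256).digest()

k_date = _hmac(b'AWS4' + b'SECRET_KEY', '20220210')   # 'AWS4' + secret key
k_region = _hmac(k_date, 'us-east-1')
k_service = _hmac(k_region, 's3')
k_signing = _hmac(k_service, 'aws4_request')
signature = hmac.new(k_signing, b'<string to sign>', sha256).hexdigest()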
+ def add_auth(self, request):
+ if self.credentials is None:
raise NoCredentialsError()
- datetime_now = datetime.datetime.utcnow()
- request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)
- # This could be a retry. Make sure the previous
- # authorization header is removed first.
- self._modify_request_before_signing(request)
- canonical_request = self.canonical_request(request)
- logger.debug("Calculating signature using v4 auth.")
- logger.debug('CanonicalRequest:\n%s', canonical_request)
- string_to_sign = self.string_to_sign(request, canonical_request)
- logger.debug('StringToSign:\n%s', string_to_sign)
- signature = self.signature(string_to_sign, request)
- logger.debug('Signature:\n%s', signature)
-
- self._inject_signature_to_request(request, signature)
-
- def _inject_signature_to_request(self, request, signature):
- l = ['AWS4-HMAC-SHA256 Credential=%s' % self.scope(request)]
- headers_to_sign = self.headers_to_sign(request)
- l.append('SignedHeaders=%s' % self.signed_headers(headers_to_sign))
- l.append('Signature=%s' % signature)
- request.headers['Authorization'] = ', '.join(l)
- return request
-
- def _modify_request_before_signing(self, request):
- if 'Authorization' in request.headers:
- del request.headers['Authorization']
- self._set_necessary_date_headers(request)
- if self.credentials.token:
- if 'X-Amz-Security-Token' in request.headers:
- del request.headers['X-Amz-Security-Token']
- request.headers['X-Amz-Security-Token'] = self.credentials.token
-
- if not request.context.get('payload_signing_enabled', True):
- if 'X-Amz-Content-SHA256' in request.headers:
- del request.headers['X-Amz-Content-SHA256']
- request.headers['X-Amz-Content-SHA256'] = UNSIGNED_PAYLOAD
-
- def _set_necessary_date_headers(self, request):
- # The spec allows for either the Date _or_ the X-Amz-Date value to be
- # used so we check both. If there's a Date header, we use the date
- # header. Otherwise we use the X-Amz-Date header.
- if 'Date' in request.headers:
- del request.headers['Date']
- datetime_timestamp = datetime.datetime.strptime(
- request.context['timestamp'], SIGV4_TIMESTAMP)
- request.headers['Date'] = formatdate(
- int(calendar.timegm(datetime_timestamp.timetuple())))
- if 'X-Amz-Date' in request.headers:
- del request.headers['X-Amz-Date']
- else:
- if 'X-Amz-Date' in request.headers:
- del request.headers['X-Amz-Date']
- request.headers['X-Amz-Date'] = request.context['timestamp']
-
-
-class S3SigV4Auth(SigV4Auth):
- def _modify_request_before_signing(self, request):
- super(S3SigV4Auth, self)._modify_request_before_signing(request)
- if 'X-Amz-Content-SHA256' in request.headers:
- del request.headers['X-Amz-Content-SHA256']
-
- request.headers['X-Amz-Content-SHA256'] = self.payload(request)
-
- def _should_sha256_sign_payload(self, request):
- # S3 allows optional body signing, so to minimize the performance
- # impact, we opt to not SHA256 sign the body on streaming uploads,
- # provided that we're on https.
- client_config = request.context.get('client_config')
- s3_config = getattr(client_config, 's3', None)
-
- # The config could be None if it isn't set, or if the customer sets it
- # to None.
- if s3_config is None:
- s3_config = {}
-
- # The explicit configuration takes precedence over any implicit
- # configuration.
- sign_payload = s3_config.get('payload_signing_enabled', None)
- if sign_payload is not None:
- return sign_payload
-
- # We require that both content-md5 be present and https be enabled
- # to implicitly disable body signing. The combination of TLS and
- # content-md5 is sufficiently secure and durable for us to be
- # confident in the request without body signing.
- if not request.url.startswith('https') or \
- 'Content-MD5' not in request.headers:
- return True
-
- # If the input is streaming we disable body signing by default.
- if request.context.get('has_streaming_input', False):
- return False
-
- # If the S3-specific checks had no results, delegate to the generic
- # checks.
- return super(S3SigV4Auth, self)._should_sha256_sign_payload(request)
-
- def _normalize_url_path(self, path):
- # For S3, we do not normalize the path.
- return path
-
-
-class SigV4QueryAuth(SigV4Auth):
- DEFAULT_EXPIRES = 3600
-
- def __init__(self, credentials, service_name, region_name,
- expires=DEFAULT_EXPIRES):
- super(SigV4QueryAuth, self).__init__(credentials, service_name,
- region_name)
- self._expires = expires
-
- def _modify_request_before_signing(self, request):
+ datetime_now = datetime.datetime.utcnow()
+ request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)
+ # This could be a retry. Make sure the previous
+ # authorization header is removed first.
+ self._modify_request_before_signing(request)
+ canonical_request = self.canonical_request(request)
+ logger.debug("Calculating signature using v4 auth.")
+ logger.debug('CanonicalRequest:\n%s', canonical_request)
+ string_to_sign = self.string_to_sign(request, canonical_request)
+ logger.debug('StringToSign:\n%s', string_to_sign)
+ signature = self.signature(string_to_sign, request)
+ logger.debug('Signature:\n%s', signature)
+
+ self._inject_signature_to_request(request, signature)
+
+ def _inject_signature_to_request(self, request, signature):
+ l = ['AWS4-HMAC-SHA256 Credential=%s' % self.scope(request)]
+ headers_to_sign = self.headers_to_sign(request)
+ l.append('SignedHeaders=%s' % self.signed_headers(headers_to_sign))
+ l.append('Signature=%s' % signature)
+ request.headers['Authorization'] = ', '.join(l)
+ return request
+
+ def _modify_request_before_signing(self, request):
+ if 'Authorization' in request.headers:
+ del request.headers['Authorization']
+ self._set_necessary_date_headers(request)
+ if self.credentials.token:
+ if 'X-Amz-Security-Token' in request.headers:
+ del request.headers['X-Amz-Security-Token']
+ request.headers['X-Amz-Security-Token'] = self.credentials.token
+
+ if not request.context.get('payload_signing_enabled', True):
+ if 'X-Amz-Content-SHA256' in request.headers:
+ del request.headers['X-Amz-Content-SHA256']
+ request.headers['X-Amz-Content-SHA256'] = UNSIGNED_PAYLOAD
+
+ def _set_necessary_date_headers(self, request):
+ # The spec allows for either the Date _or_ the X-Amz-Date value to be
+ # used so we check both. If there's a Date header, we use the date
+ # header. Otherwise we use the X-Amz-Date header.
+ if 'Date' in request.headers:
+ del request.headers['Date']
+ datetime_timestamp = datetime.datetime.strptime(
+ request.context['timestamp'], SIGV4_TIMESTAMP)
+ request.headers['Date'] = formatdate(
+ int(calendar.timegm(datetime_timestamp.timetuple())))
+ if 'X-Amz-Date' in request.headers:
+ del request.headers['X-Amz-Date']
+ else:
+ if 'X-Amz-Date' in request.headers:
+ del request.headers['X-Amz-Date']
+ request.headers['X-Amz-Date'] = request.context['timestamp']
+
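Putting the pieces of canonical_request() together, a worked illustration of the string it assembles; every value here is hypothetical, and the final line is the well-known SHA-256 of an empty payload.

canonical_request = '\n'.join([
    'GET',                                    # HTTP method
    '/test.txt',                              # normalized URL path
    '',                                       # canonical query string
    'host:examplebucket.s3.amazonaws.com\n'
    'x-amz-date:20220210T164827Z\n',          # canonical headers + '\n'
    'host;x-amz-date',                        # signed headers
    'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
])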
+
+class S3SigV4Auth(SigV4Auth):
+ def _modify_request_before_signing(self, request):
+ super(S3SigV4Auth, self)._modify_request_before_signing(request)
+ if 'X-Amz-Content-SHA256' in request.headers:
+ del request.headers['X-Amz-Content-SHA256']
+
+ request.headers['X-Amz-Content-SHA256'] = self.payload(request)
+
+ def _should_sha256_sign_payload(self, request):
+ # S3 allows optional body signing, so to minimize the performance
+ # impact, we opt to not SHA256 sign the body on streaming uploads,
+ # provided that we're on https.
+ client_config = request.context.get('client_config')
+ s3_config = getattr(client_config, 's3', None)
+
+ # The config could be None if it isn't set, or if the customer sets it
+ # to None.
+ if s3_config is None:
+ s3_config = {}
+
+ # The explicit configuration takes precedence over any implicit
+ # configuration.
+ sign_payload = s3_config.get('payload_signing_enabled', None)
+ if sign_payload is not None:
+ return sign_payload
+
+ # We require that both content-md5 be present and https be enabled
+ # to implicitly disable body signing. The combination of TLS and
+ # content-md5 is sufficiently secure and durable for us to be
+ # confident in the request without body signing.
+ if not request.url.startswith('https') or \
+ 'Content-MD5' not in request.headers:
+ return True
+
+ # If the input is streaming we disable body signing by default.
+ if request.context.get('has_streaming_input', False):
+ return False
+
+ # If the S3-specific checks had no results, delegate to the generic
+ # checks.
+ return super(S3SigV4Auth, self)._should_sha256_sign_payload(request)
+
+ def _normalize_url_path(self, path):
+ # For S3, we do not normalize the path.
+ return path
+
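The client_config consulted by _should_sha256_sign_payload above is the standard botocore Config; a usage sketch of setting the flag explicitly (assumes boto3 is available and credentials are configured).

import boto3
from botocore.config import Config

# Explicitly disable SHA-256 body signing for S3 requests over HTTPS.
s3 = boto3.client('s3', config=Config(s3={'payload_signing_enabled': False}))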
+
+class SigV4QueryAuth(SigV4Auth):
+ DEFAULT_EXPIRES = 3600
+
+ def __init__(self, credentials, service_name, region_name,
+ expires=DEFAULT_EXPIRES):
+ super(SigV4QueryAuth, self).__init__(credentials, service_name,
+ region_name)
+ self._expires = expires
+
+ def _modify_request_before_signing(self, request):
# We automatically set this header, so if it's the auto-set value we
# want to get rid of it since it doesn't make sense for presigned urls.
content_type = request.headers.get('content-type')
@@ -492,369 +492,369 @@ class SigV4QueryAuth(SigV4Auth):
if content_type == blacklisted_content_type:
del request.headers['content-type']
- # Note that we're not including X-Amz-Signature.
- # From the docs: "The Canonical Query String must include all the query
- # parameters from the preceding table except for X-Amz-Signature."
- signed_headers = self.signed_headers(self.headers_to_sign(request))
-
- auth_params = {
- 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
- 'X-Amz-Credential': self.scope(request),
- 'X-Amz-Date': request.context['timestamp'],
- 'X-Amz-Expires': self._expires,
- 'X-Amz-SignedHeaders': signed_headers,
- }
- if self.credentials.token is not None:
- auth_params['X-Amz-Security-Token'] = self.credentials.token
- # Now parse the original query string to a dict, inject our new query
- # params, and serialize back to a query string.
- url_parts = urlsplit(request.url)
- # parse_qs makes each value a list, but in our case we know we won't
- # have repeated keys so we know we have single element lists which we
- # can convert back to scalar values.
- query_dict = dict(
- [(k, v[0]) for k, v in
- parse_qs(url_parts.query, keep_blank_values=True).items()])
- # The spec is particular about this. It *has* to be:
- # https://<endpoint>?<operation params>&<auth params>
- # You can't mix the two types of params together, i.e. just keep doing
- # new_query_params.update(op_params)
- # new_query_params.update(auth_params)
- # percent_encode_sequence(new_query_params)
- operation_params = ''
- if request.data:
- # We also need to move the body params into the query string. To
- # do this, we first have to convert it to a dict.
+ # Note that we're not including X-Amz-Signature.
+ # From the docs: "The Canonical Query String must include all the query
+ # parameters from the preceding table except for X-Amz-Signature."
+ signed_headers = self.signed_headers(self.headers_to_sign(request))
+
+ auth_params = {
+ 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
+ 'X-Amz-Credential': self.scope(request),
+ 'X-Amz-Date': request.context['timestamp'],
+ 'X-Amz-Expires': self._expires,
+ 'X-Amz-SignedHeaders': signed_headers,
+ }
+ if self.credentials.token is not None:
+ auth_params['X-Amz-Security-Token'] = self.credentials.token
+ # Now parse the original query string to a dict, inject our new query
+ # params, and serialize back to a query string.
+ url_parts = urlsplit(request.url)
+ # parse_qs makes each value a list, but in our case we know we won't
+ # have repeated keys so we know we have single element lists which we
+ # can convert back to scalar values.
+ query_dict = dict(
+ [(k, v[0]) for k, v in
+ parse_qs(url_parts.query, keep_blank_values=True).items()])
+ # The spec is particular about this. It *has* to be:
+ # https://<endpoint>?<operation params>&<auth params>
+ # You can't mix the two types of params together, i.e. just keep doing
+ # new_query_params.update(op_params)
+ # new_query_params.update(auth_params)
+ # percent_encode_sequence(new_query_params)
+ operation_params = ''
+ if request.data:
+ # We also need to move the body params into the query string. To
+ # do this, we first have to convert it to a dict.
query_dict.update(_get_body_as_dict(request))
- request.data = ''
- if query_dict:
- operation_params = percent_encode_sequence(query_dict) + '&'
- new_query_string = (operation_params +
- percent_encode_sequence(auth_params))
- # url_parts is a tuple (and therefore immutable) so we need to create
- # a new url_parts with the new query string.
- # <part> - <index>
- # scheme - 0
- # netloc - 1
- # path - 2
- # query - 3 <-- we're replacing this.
- # fragment - 4
- p = url_parts
- new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
- request.url = urlunsplit(new_url_parts)
-
- def _inject_signature_to_request(self, request, signature):
- # Rather than calculating an "Authorization" header, for the query
- # param auth, we just append an 'X-Amz-Signature' param to the end
- # of the query string.
- request.url += '&X-Amz-Signature=%s' % signature
-
-
-class S3SigV4QueryAuth(SigV4QueryAuth):
- """S3 SigV4 auth using query parameters.
-
- This signer will sign a request using query parameters and signature
- version 4, i.e. a "presigned url" signer.
-
- Based off of:
-
- http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
-
- """
- def _normalize_url_path(self, path):
- # For S3, we do not normalize the path.
- return path
-
- def payload(self, request):
- # From the doc link above:
- # "You don't include a payload hash in the Canonical Request, because
- # when you create a presigned URL, you don't know anything about the
- # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD"."
- return UNSIGNED_PAYLOAD
-
-
-class S3SigV4PostAuth(SigV4Auth):
- """
- Presigns an S3 POST request
-
- Implementation doc here:
- http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-UsingHTTPPOST.html
- """
- def add_auth(self, request):
- datetime_now = datetime.datetime.utcnow()
- request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)
-
- fields = {}
- if request.context.get('s3-presign-post-fields', None) is not None:
- fields = request.context['s3-presign-post-fields']
-
- policy = {}
- conditions = []
- if request.context.get('s3-presign-post-policy', None) is not None:
- policy = request.context['s3-presign-post-policy']
- if policy.get('conditions', None) is not None:
- conditions = policy['conditions']
-
- policy['conditions'] = conditions
-
- fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
- fields['x-amz-credential'] = self.scope(request)
- fields['x-amz-date'] = request.context['timestamp']
-
- conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'})
- conditions.append({'x-amz-credential': self.scope(request)})
- conditions.append({'x-amz-date': request.context['timestamp']})
-
- if self.credentials.token is not None:
- fields['x-amz-security-token'] = self.credentials.token
- conditions.append({'x-amz-security-token': self.credentials.token})
-
- # Dump the base64 encoded policy into the fields dictionary.
- fields['policy'] = base64.b64encode(
- json.dumps(policy).encode('utf-8')).decode('utf-8')
-
- fields['x-amz-signature'] = self.signature(fields['policy'], request)
-
- request.context['s3-presign-post-fields'] = fields
- request.context['s3-presign-post-policy'] = policy
-
-
-class HmacV1Auth(BaseSigner):
-
- # List of Query String Arguments of Interest
- QSAOfInterest = ['accelerate', 'acl', 'cors', 'defaultObjectAcl',
- 'location', 'logging', 'partNumber', 'policy',
- 'requestPayment', 'torrent',
- 'versioning', 'versionId', 'versions', 'website',
- 'uploads', 'uploadId', 'response-content-type',
- 'response-content-language', 'response-expires',
- 'response-cache-control', 'response-content-disposition',
- 'response-content-encoding', 'delete', 'lifecycle',
- 'tagging', 'restore', 'storageClass', 'notification',
- 'replication', 'requestPayment', 'analytics', 'metrics',
+ request.data = ''
+ if query_dict:
+ operation_params = percent_encode_sequence(query_dict) + '&'
+ new_query_string = (operation_params +
+ percent_encode_sequence(auth_params))
+ # url_parts is a tuple (and therefore immutable) so we need to create
+ # a new url_parts with the new query string.
+ # <part> - <index>
+ # scheme - 0
+ # netloc - 1
+ # path - 2
+ # query - 3 <-- we're replacing this.
+ # fragment - 4
+ p = url_parts
+ new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
+ request.url = urlunsplit(new_url_parts)
+
+ def _inject_signature_to_request(self, request, signature):
+ # Rather than calculating an "Authorization" header, for the query
+ # param auth, we just append an 'X-Amz-Signature' param to the end
+ # of the query string.
+ request.url += '&X-Amz-Signature=%s' % signature
+
+
+class S3SigV4QueryAuth(SigV4QueryAuth):
+ """S3 SigV4 auth using query parameters.
+
+ This signer will sign a request using query parameters and signature
+ version 4, i.e. a "presigned url" signer.
+
+ Based off of:
+
+ http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
+
+ """
+ def _normalize_url_path(self, path):
+ # For S3, we do not normalize the path.
+ return path
+
+ def payload(self, request):
+ # From the doc link above:
+ # "You don't include a payload hash in the Canonical Request, because
+ # when you create a presigned URL, you don't know anything about the
+ # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD"."
+ return UNSIGNED_PAYLOAD
+
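In practice S3SigV4QueryAuth backs SigV4 presigned URLs; a usage sketch via boto3 (bucket and key are hypothetical).

import boto3

s3 = boto3.client('s3')
url = s3.generate_presigned_url(
    'get_object',
    Params={'Bucket': 'examplebucket', 'Key': 'test.txt'},
    ExpiresIn=3600,  # surfaces as the X-Amz-Expires query param
)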
+
+class S3SigV4PostAuth(SigV4Auth):
+ """
+ Presigns an S3 POST request
+
+ Implementation doc here:
+ http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-UsingHTTPPOST.html
+ """
+ def add_auth(self, request):
+ datetime_now = datetime.datetime.utcnow()
+ request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)
+
+ fields = {}
+ if request.context.get('s3-presign-post-fields', None) is not None:
+ fields = request.context['s3-presign-post-fields']
+
+ policy = {}
+ conditions = []
+ if request.context.get('s3-presign-post-policy', None) is not None:
+ policy = request.context['s3-presign-post-policy']
+ if policy.get('conditions', None) is not None:
+ conditions = policy['conditions']
+
+ policy['conditions'] = conditions
+
+ fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
+ fields['x-amz-credential'] = self.scope(request)
+ fields['x-amz-date'] = request.context['timestamp']
+
+ conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'})
+ conditions.append({'x-amz-credential': self.scope(request)})
+ conditions.append({'x-amz-date': request.context['timestamp']})
+
+ if self.credentials.token is not None:
+ fields['x-amz-security-token'] = self.credentials.token
+ conditions.append({'x-amz-security-token': self.credentials.token})
+
+ # Dump the base64 encoded policy into the fields dictionary.
+ fields['policy'] = base64.b64encode(
+ json.dumps(policy).encode('utf-8')).decode('utf-8')
+
+ fields['x-amz-signature'] = self.signature(fields['policy'], request)
+
+ request.context['s3-presign-post-fields'] = fields
+ request.context['s3-presign-post-policy'] = policy
+
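A usage sketch of the presigned POST flow that S3SigV4PostAuth implements above; generate_presigned_post returns the endpoint URL plus the policy and x-amz-* fields assembled in add_auth (bucket and key are hypothetical).

import boto3

s3 = boto3.client('s3')
post = s3.generate_presigned_post('examplebucket', 'uploads/test.txt',
                                  ExpiresIn=3600)
# post['url'] is the form action; post['fields'] carries 'policy',
# 'x-amz-credential', 'x-amz-signature', etc. for the HTML form.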
+
+class HmacV1Auth(BaseSigner):
+
+ # List of Query String Arguments of Interest
+ QSAOfInterest = ['accelerate', 'acl', 'cors', 'defaultObjectAcl',
+ 'location', 'logging', 'partNumber', 'policy',
+ 'requestPayment', 'torrent',
+ 'versioning', 'versionId', 'versions', 'website',
+ 'uploads', 'uploadId', 'response-content-type',
+ 'response-content-language', 'response-expires',
+ 'response-cache-control', 'response-content-disposition',
+ 'response-content-encoding', 'delete', 'lifecycle',
+ 'tagging', 'restore', 'storageClass', 'notification',
+ 'replication', 'requestPayment', 'analytics', 'metrics',
'inventory', 'select', 'select-type']
-
- def __init__(self, credentials, service_name=None, region_name=None):
- self.credentials = credentials
-
- def sign_string(self, string_to_sign):
- new_hmac = hmac.new(self.credentials.secret_key.encode('utf-8'),
- digestmod=sha1)
- new_hmac.update(string_to_sign.encode('utf-8'))
- return encodebytes(new_hmac.digest()).strip().decode('utf-8')
-
- def canonical_standard_headers(self, headers):
- interesting_headers = ['content-md5', 'content-type', 'date']
- hoi = []
- if 'Date' in headers:
- del headers['Date']
- headers['Date'] = self._get_date()
- for ih in interesting_headers:
- found = False
- for key in headers:
- lk = key.lower()
- if headers[key] is not None and lk == ih:
- hoi.append(headers[key].strip())
- found = True
- if not found:
- hoi.append('')
- return '\n'.join(hoi)
-
- def canonical_custom_headers(self, headers):
- hoi = []
- custom_headers = {}
- for key in headers:
- lk = key.lower()
- if headers[key] is not None:
- if lk.startswith('x-amz-'):
- custom_headers[lk] = ','.join(v.strip() for v in
- headers.get_all(key))
- sorted_header_keys = sorted(custom_headers.keys())
- for key in sorted_header_keys:
- hoi.append("%s:%s" % (key, custom_headers[key]))
- return '\n'.join(hoi)
-
- def unquote_v(self, nv):
- """
- TODO: Do we need this?
- """
- if len(nv) == 1:
- return nv
- else:
- return (nv[0], unquote(nv[1]))
-
- def canonical_resource(self, split, auth_path=None):
- # don't include anything after the first ? in the resource...
- # unless it is one of the QSA of interest, defined above
- # NOTE:
- # The path in the canonical resource should always be the
- # full path including the bucket name, even for virtual-hosting
- # style addressing. The ``auth_path`` keeps track of the full
- # path for the canonical resource and would be passed in if
- # the client was using virtual-hosting style.
- if auth_path is not None:
- buf = auth_path
- else:
- buf = split.path
- if split.query:
- qsa = split.query.split('&')
- qsa = [a.split('=', 1) for a in qsa]
- qsa = [self.unquote_v(a) for a in qsa
- if a[0] in self.QSAOfInterest]
- if len(qsa) > 0:
- qsa.sort(key=itemgetter(0))
- qsa = ['='.join(a) for a in qsa]
- buf += '?'
- buf += '&'.join(qsa)
- return buf
-
- def canonical_string(self, method, split, headers, expires=None,
- auth_path=None):
- cs = method.upper() + '\n'
- cs += self.canonical_standard_headers(headers) + '\n'
- custom_headers = self.canonical_custom_headers(headers)
- if custom_headers:
- cs += custom_headers + '\n'
- cs += self.canonical_resource(split, auth_path=auth_path)
- return cs
-
- def get_signature(self, method, split, headers, expires=None,
- auth_path=None):
- if self.credentials.token:
- del headers['x-amz-security-token']
- headers['x-amz-security-token'] = self.credentials.token
- string_to_sign = self.canonical_string(method,
- split,
- headers,
- auth_path=auth_path)
- logger.debug('StringToSign:\n%s', string_to_sign)
- return self.sign_string(string_to_sign)
-
- def add_auth(self, request):
- if self.credentials is None:
- raise NoCredentialsError
- logger.debug("Calculating signature using hmacv1 auth.")
- split = urlsplit(request.url)
- logger.debug('HTTP request method: %s', request.method)
- signature = self.get_signature(request.method, split,
- request.headers,
- auth_path=request.auth_path)
- self._inject_signature(request, signature)
-
- def _get_date(self):
- return formatdate(usegmt=True)
-
- def _inject_signature(self, request, signature):
- if 'Authorization' in request.headers:
- # We have to do this because request.headers is not
- # a normal dictionary. It has the (unintuitive) behavior
- # of aggregating repeated setattr calls for the same
- # key value. For example:
- # headers['foo'] = 'a'; headers['foo'] = 'b'
- # list(headers) will print ['foo', 'foo'].
- del request.headers['Authorization']
- request.headers['Authorization'] = (
- "AWS %s:%s" % (self.credentials.access_key, signature))
-
-
-class HmacV1QueryAuth(HmacV1Auth):
- """
- Generates a presigned request for s3.
-
- Spec from this document:
-
- http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html
- #RESTAuthenticationQueryStringAuth
-
- """
- DEFAULT_EXPIRES = 3600
-
- def __init__(self, credentials, expires=DEFAULT_EXPIRES):
- self.credentials = credentials
- self._expires = expires
-
- def _get_date(self):
- return str(int(time.time() + int(self._expires)))
-
- def _inject_signature(self, request, signature):
- query_dict = {}
- query_dict['AWSAccessKeyId'] = self.credentials.access_key
- query_dict['Signature'] = signature
-
- for header_key in request.headers:
- lk = header_key.lower()
- # For query string requests, Expires is used instead of the
- # Date header.
- if header_key == 'Date':
- query_dict['Expires'] = request.headers['Date']
- # We only want to include relevant headers in the query string.
- # These can be anything that starts with x-amz, is Content-MD5,
- # or is Content-Type.
- elif lk.startswith('x-amz-') or lk in ['content-md5',
- 'content-type']:
- query_dict[lk] = request.headers[lk]
- # Combine all of the identified headers into an encoded
- # query string
- new_query_string = percent_encode_sequence(query_dict)
-
- # Create a new url with the presigned query string.
- p = urlsplit(request.url)
- if p[3]:
- # If there was a pre-existing query string, we should
- # add that back before injecting the new query string.
- new_query_string = '%s&%s' % (p[3], new_query_string)
- new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
- request.url = urlunsplit(new_url_parts)
-
-
-class HmacV1PostAuth(HmacV1Auth):
- """
- Generates a presigned post for s3.
-
- Spec from this document:
-
- http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingHTTPPOST.html
- """
- def add_auth(self, request):
- fields = {}
- if request.context.get('s3-presign-post-fields', None) is not None:
- fields = request.context['s3-presign-post-fields']
-
- policy = {}
- conditions = []
- if request.context.get('s3-presign-post-policy', None) is not None:
- policy = request.context['s3-presign-post-policy']
- if policy.get('conditions', None) is not None:
- conditions = policy['conditions']
-
- policy['conditions'] = conditions
-
- fields['AWSAccessKeyId'] = self.credentials.access_key
-
- if self.credentials.token is not None:
- fields['x-amz-security-token'] = self.credentials.token
- conditions.append({'x-amz-security-token': self.credentials.token})
-
- # Dump the base64 encoded policy into the fields dictionary.
- fields['policy'] = base64.b64encode(
- json.dumps(policy).encode('utf-8')).decode('utf-8')
-
- fields['signature'] = self.sign_string(fields['policy'])
-
- request.context['s3-presign-post-fields'] = fields
- request.context['s3-presign-post-policy'] = policy
-
-
-AUTH_TYPE_MAPS = {
- 'v2': SigV2Auth,
- 'v3': SigV3Auth,
- 'v3https': SigV3Auth,
- 's3': HmacV1Auth,
- 's3-query': HmacV1QueryAuth,
- 's3-presign-post': HmacV1PostAuth,
- 's3v4-presign-post': S3SigV4PostAuth,
+
+ def __init__(self, credentials, service_name=None, region_name=None):
+ self.credentials = credentials
+
+ def sign_string(self, string_to_sign):
+ new_hmac = hmac.new(self.credentials.secret_key.encode('utf-8'),
+ digestmod=sha1)
+ new_hmac.update(string_to_sign.encode('utf-8'))
+ return encodebytes(new_hmac.digest()).strip().decode('utf-8')
+
+ def canonical_standard_headers(self, headers):
+ interesting_headers = ['content-md5', 'content-type', 'date']
+ hoi = []
+ if 'Date' in headers:
+ del headers['Date']
+ headers['Date'] = self._get_date()
+ for ih in interesting_headers:
+ found = False
+ for key in headers:
+ lk = key.lower()
+ if headers[key] is not None and lk == ih:
+ hoi.append(headers[key].strip())
+ found = True
+ if not found:
+ hoi.append('')
+ return '\n'.join(hoi)
+
+ def canonical_custom_headers(self, headers):
+ hoi = []
+ custom_headers = {}
+ for key in headers:
+ lk = key.lower()
+ if headers[key] is not None:
+ if lk.startswith('x-amz-'):
+ custom_headers[lk] = ','.join(v.strip() for v in
+ headers.get_all(key))
+ sorted_header_keys = sorted(custom_headers.keys())
+ for key in sorted_header_keys:
+ hoi.append("%s:%s" % (key, custom_headers[key]))
+ return '\n'.join(hoi)
+
+ def unquote_v(self, nv):
+ """
+ TODO: Do we need this?
+ """
+ if len(nv) == 1:
+ return nv
+ else:
+ return (nv[0], unquote(nv[1]))
+
+ def canonical_resource(self, split, auth_path=None):
+ # don't include anything after the first ? in the resource...
+ # unless it is one of the QSA of interest, defined above
+ # NOTE:
+ # The path in the canonical resource should always be the
+ # full path including the bucket name, even for virtual-hosting
+ # style addressing. The ``auth_path`` keeps track of the full
+ # path for the canonical resource and would be passed in if
+ # the client was using virtual-hosting style.
+ if auth_path is not None:
+ buf = auth_path
+ else:
+ buf = split.path
+ if split.query:
+ qsa = split.query.split('&')
+ qsa = [a.split('=', 1) for a in qsa]
+ qsa = [self.unquote_v(a) for a in qsa
+ if a[0] in self.QSAOfInterest]
+ if len(qsa) > 0:
+ qsa.sort(key=itemgetter(0))
+ qsa = ['='.join(a) for a in qsa]
+ buf += '?'
+ buf += '&'.join(qsa)
+ return buf
+
+ def canonical_string(self, method, split, headers, expires=None,
+ auth_path=None):
+ cs = method.upper() + '\n'
+ cs += self.canonical_standard_headers(headers) + '\n'
+ custom_headers = self.canonical_custom_headers(headers)
+ if custom_headers:
+ cs += custom_headers + '\n'
+ cs += self.canonical_resource(split, auth_path=auth_path)
+ return cs
+
+ def get_signature(self, method, split, headers, expires=None,
+ auth_path=None):
+ if self.credentials.token:
+ del headers['x-amz-security-token']
+ headers['x-amz-security-token'] = self.credentials.token
+ string_to_sign = self.canonical_string(method,
+ split,
+ headers,
+ auth_path=auth_path)
+ logger.debug('StringToSign:\n%s', string_to_sign)
+ return self.sign_string(string_to_sign)
+
+ def add_auth(self, request):
+ if self.credentials is None:
+ raise NoCredentialsError
+ logger.debug("Calculating signature using hmacv1 auth.")
+ split = urlsplit(request.url)
+ logger.debug('HTTP request method: %s', request.method)
+ signature = self.get_signature(request.method, split,
+ request.headers,
+ auth_path=request.auth_path)
+ self._inject_signature(request, signature)
+
+ def _get_date(self):
+ return formatdate(usegmt=True)
+
+ def _inject_signature(self, request, signature):
+ if 'Authorization' in request.headers:
+ # We have to do this because request.headers is not
+ # a normal dictionary. It has the (unintuitive) behavior
+ # of aggregating repeated setattr calls for the same
+ # key value. For example:
+ # headers['foo'] = 'a'; headers['foo'] = 'b'
+ # list(headers) will print ['foo', 'foo'].
+ del request.headers['Authorization']
+ request.headers['Authorization'] = (
+ "AWS %s:%s" % (self.credentials.access_key, signature))
+
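A worked illustration of the legacy canonical string built by canonical_string() above, followed by the base64 HMAC-SHA1 computed by sign_string(); all values are hypothetical.

import hmac
from base64 import encodebytes
from hashlib import sha1

string_to_sign = '\n'.join([
    'GET',                               # method
    '',                                  # content-md5 (absent)
    '',                                  # content-type (absent)
    'Thu, 10 Feb 2022 16:48:27 GMT',     # date
    '/examplebucket/test.txt',           # canonical resource
])
sig = encodebytes(hmac.new(b'SECRET_KEY', string_to_sign.encode('utf-8'),
                           sha1).digest()).strip().decode('utf-8')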
+
+class HmacV1QueryAuth(HmacV1Auth):
+ """
+ Generates a presigned request for s3.
+
+ Spec from this document:
+
+ http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html
+ #RESTAuthenticationQueryStringAuth
+
+ """
+ DEFAULT_EXPIRES = 3600
+
+ def __init__(self, credentials, expires=DEFAULT_EXPIRES):
+ self.credentials = credentials
+ self._expires = expires
+
+ def _get_date(self):
+ return str(int(time.time() + int(self._expires)))
+
+ def _inject_signature(self, request, signature):
+ query_dict = {}
+ query_dict['AWSAccessKeyId'] = self.credentials.access_key
+ query_dict['Signature'] = signature
+
+ for header_key in request.headers:
+ lk = header_key.lower()
+ # For query string requests, Expires is used instead of the
+ # Date header.
+ if header_key == 'Date':
+ query_dict['Expires'] = request.headers['Date']
+ # We only want to include relevant headers in the query string.
+ # These can be anything that starts with x-amz, is Content-MD5,
+ # or is Content-Type.
+ elif lk.startswith('x-amz-') or lk in ['content-md5',
+ 'content-type']:
+ query_dict[lk] = request.headers[lk]
+ # Combine all of the identified headers into an encoded
+ # query string
+ new_query_string = percent_encode_sequence(query_dict)
+
+ # Create a new url with the presigned query string.
+ p = urlsplit(request.url)
+ if p[3]:
+ # If there was a pre-existing query string, we should
+ # add that back before injecting the new query string.
+ new_query_string = '%s&%s' % (p[3], new_query_string)
+ new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
+ request.url = urlunsplit(new_url_parts)
+
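The resulting presigned URL carries the three query params injected above; note that Expires is an absolute epoch timestamp computed in _get_date(), not a duration (all values hypothetical).

url = ('https://examplebucket.s3.amazonaws.com/test.txt'
       '?AWSAccessKeyId=AKIDEXAMPLE'
       '&Expires=1644511707'   # int(time.time() + self._expires)
       '&Signature=2jmj7l5rSw0yVb%2FvlWAYkK%2FYBwk%3D')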
+
+class HmacV1PostAuth(HmacV1Auth):
+ """
+ Generates a presigned post for s3.
+
+ Spec from this document:
+
+ http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingHTTPPOST.html
+ """
+ def add_auth(self, request):
+ fields = {}
+ if request.context.get('s3-presign-post-fields', None) is not None:
+ fields = request.context['s3-presign-post-fields']
+
+ policy = {}
+ conditions = []
+ if request.context.get('s3-presign-post-policy', None) is not None:
+ policy = request.context['s3-presign-post-policy']
+ if policy.get('conditions', None) is not None:
+ conditions = policy['conditions']
+
+ policy['conditions'] = conditions
+
+ fields['AWSAccessKeyId'] = self.credentials.access_key
+
+ if self.credentials.token is not None:
+ fields['x-amz-security-token'] = self.credentials.token
+ conditions.append({'x-amz-security-token': self.credentials.token})
+
+ # Dump the base64 encoded policy into the fields dictionary.
+ fields['policy'] = base64.b64encode(
+ json.dumps(policy).encode('utf-8')).decode('utf-8')
+
+ fields['signature'] = self.sign_string(fields['policy'])
+
+ request.context['s3-presign-post-fields'] = fields
+ request.context['s3-presign-post-policy'] = policy
+
+
+AUTH_TYPE_MAPS = {
+ 'v2': SigV2Auth,
+ 'v3': SigV3Auth,
+ 'v3https': SigV3Auth,
+ 's3': HmacV1Auth,
+ 's3-query': HmacV1QueryAuth,
+ 's3-presign-post': HmacV1PostAuth,
+ 's3v4-presign-post': S3SigV4PostAuth,
}
-
+
# Define v4 signers depending on if CRT is present
if HAS_CRT:
from botocore.crt.auth import CRT_AUTH_TYPE_MAPS
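A sketch of how AUTH_TYPE_MAPS is typically consumed; this is hypothetical wiring (botocore's real signer resolution goes through its signers machinery), but the classes and constructor shapes match the code above.

from botocore.auth import AUTH_TYPE_MAPS
from botocore.credentials import Credentials

credentials = Credentials('ACCESS_KEY', 'SECRET_KEY')
signer_cls = AUTH_TYPE_MAPS['s3']     # -> HmacV1Auth
signer = signer_cls(credentials)
# signer.add_auth(request) would then inject the Authorization header
# into the request in place.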
diff --git a/contrib/python/botocore/botocore/awsrequest.py b/contrib/python/botocore/botocore/awsrequest.py
index 8248415406..f47f0cc01e 100644
--- a/contrib/python/botocore/botocore/awsrequest.py
+++ b/contrib/python/botocore/botocore/awsrequest.py
@@ -1,22 +1,22 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
import io
-import sys
-import logging
-import functools
-import socket
-
+import sys
+import logging
+import functools
+import socket
+
import urllib3.util
from urllib3.connection import VerifiedHTTPSConnection
from urllib3.connection import HTTPConnection
@@ -24,201 +24,201 @@ from urllib3.connectionpool import HTTPConnectionPool
from urllib3.connectionpool import HTTPSConnectionPool
import botocore.utils
-from botocore.compat import six
+from botocore.compat import six
from botocore.compat import HTTPHeaders, HTTPResponse, urlunsplit, urlsplit, \
urlencode, MutableMapping
-from botocore.exceptions import UnseekableStreamError
-
-
-logger = logging.getLogger(__name__)
-
-
-class AWSHTTPResponse(HTTPResponse):
- # The *args, **kwargs are used because the args are slightly
- # different in py2.6 than in py2.7/py3.
- def __init__(self, *args, **kwargs):
- self._status_tuple = kwargs.pop('status_tuple')
- HTTPResponse.__init__(self, *args, **kwargs)
-
- def _read_status(self):
- if self._status_tuple is not None:
- status_tuple = self._status_tuple
- self._status_tuple = None
- return status_tuple
- else:
- return HTTPResponse._read_status(self)
-
-
+from botocore.exceptions import UnseekableStreamError
+
+
+logger = logging.getLogger(__name__)
+
+
+class AWSHTTPResponse(HTTPResponse):
+ # The *args, **kwargs are used because the args are slightly
+ # different in py2.6 than in py2.7/py3.
+ def __init__(self, *args, **kwargs):
+ self._status_tuple = kwargs.pop('status_tuple')
+ HTTPResponse.__init__(self, *args, **kwargs)
+
+ def _read_status(self):
+ if self._status_tuple is not None:
+ status_tuple = self._status_tuple
+ self._status_tuple = None
+ return status_tuple
+ else:
+ return HTTPResponse._read_status(self)
+
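A sketch of the status_tuple hook: _handle_expect_response (below) builds a partial like this so that the next response object replays an already-consumed status line from _read_status once, then resumes normal parsing (the status values are hypothetical).

import functools
from botocore.awsrequest import AWSHTTPResponse

response_class = functools.partial(
    AWSHTTPResponse, status_tuple=('HTTP/1.1', 403, 'Forbidden'))
# Assigning this to self.response_class makes the first _read_status call
# return ('HTTP/1.1', 403, 'Forbidden') without touching the socket.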
+
class AWSConnection(object):
"""Mixin for HTTPConnection that supports Expect 100-continue.
-
+
This is mixed in with a subclass of httplib.HTTPConnection (though
- technically we subclass from urllib3, which subclasses
- httplib.HTTPConnection); we only override this class to support Expect
- 100-continue, which we need for S3. As far as I can tell, this is
- general purpose enough to not be specific to S3, but I'm being
- tentative and keeping it in botocore because I've only tested
- this against AWS services.
-
- """
- def __init__(self, *args, **kwargs):
+ technically we subclass from urllib3, which subclasses
+ httplib.HTTPConnection); we only override this class to support Expect
+ 100-continue, which we need for S3. As far as I can tell, this is
+ general purpose enough to not be specific to S3, but I'm being
+ tentative and keeping it in botocore because I've only tested
+ this against AWS services.
+
+ """
+ def __init__(self, *args, **kwargs):
super(AWSConnection, self).__init__(*args, **kwargs)
- self._original_response_cls = self.response_class
- # We'd ideally hook into httplib's states, but they're all
- # __mangled_vars so we use our own state var. This variable is set
- # when we receive an early response from the server. If this value is
- # set to True, any calls to send() are noops. This value is reset to
- # false every time _send_request is called. This is to work around the
- # fact that py2.6 (and only py2.6) has a separate send() call for the
- # body in _send_request, as opposed to endheaders(), which is where the
- # body is sent in all versions > 2.6.
- self._response_received = False
- self._expect_header_set = False
-
- def close(self):
+ self._original_response_cls = self.response_class
+ # We'd ideally hook into httplib's states, but they're all
+ # __mangled_vars so we use our own state var. This variable is set
+ # when we receive an early response from the server. If this value is
+ # set to True, any calls to send() are noops. This value is reset to
+ # false every time _send_request is called. This is to work around the
+ # fact that py2.6 (and only py2.6) has a separate send() call for the
+ # body in _send_request, as opposed to endheaders(), which is where the
+ # body is sent in all versions > 2.6.
+ self._response_received = False
+ self._expect_header_set = False
+
+ def close(self):
super(AWSConnection, self).close()
- # Reset all of our instance state we were tracking.
- self._response_received = False
- self._expect_header_set = False
- self.response_class = self._original_response_cls
-
- def _send_request(self, method, url, body, headers, *args, **kwargs):
- self._response_received = False
- if headers.get('Expect', b'') == b'100-continue':
- self._expect_header_set = True
- else:
- self._expect_header_set = False
- self.response_class = self._original_response_cls
+ # Reset all of our instance state we were tracking.
+ self._response_received = False
+ self._expect_header_set = False
+ self.response_class = self._original_response_cls
+
+ def _send_request(self, method, url, body, headers, *args, **kwargs):
+ self._response_received = False
+ if headers.get('Expect', b'') == b'100-continue':
+ self._expect_header_set = True
+ else:
+ self._expect_header_set = False
+ self.response_class = self._original_response_cls
rval = super(AWSConnection, self)._send_request(
method, url, body, headers, *args, **kwargs)
- self._expect_header_set = False
- return rval
-
- def _convert_to_bytes(self, mixed_buffer):
- # Take a list of mixed str/bytes and convert it
- # all into a single bytestring.
- # Any six.text_types will be encoded as utf-8.
- bytes_buffer = []
- for chunk in mixed_buffer:
- if isinstance(chunk, six.text_type):
- bytes_buffer.append(chunk.encode('utf-8'))
- else:
- bytes_buffer.append(chunk)
- msg = b"\r\n".join(bytes_buffer)
- return msg
-
- def _send_output(self, message_body=None, *args, **kwargs):
- self._buffer.extend((b"", b""))
- msg = self._convert_to_bytes(self._buffer)
- del self._buffer[:]
- # If msg and message_body are sent in a single send() call,
- # it will avoid performance problems caused by the interaction
- # between delayed ack and the Nagle algorithm.
- if isinstance(message_body, bytes):
- msg += message_body
- message_body = None
- self.send(msg)
- if self._expect_header_set:
- # This is our custom behavior. If the Expect header was
- # set, it will trigger this custom behavior.
- logger.debug("Waiting for 100 Continue response.")
- # Wait for 1 second for the server to send a response.
+ self._expect_header_set = False
+ return rval
+
+ def _convert_to_bytes(self, mixed_buffer):
+ # Take a list of mixed str/bytes and convert it
+ # all into a single bytestring.
+ # Any six.text_types will be encoded as utf-8.
+ bytes_buffer = []
+ for chunk in mixed_buffer:
+ if isinstance(chunk, six.text_type):
+ bytes_buffer.append(chunk.encode('utf-8'))
+ else:
+ bytes_buffer.append(chunk)
+ msg = b"\r\n".join(bytes_buffer)
+ return msg
+
+ def _send_output(self, message_body=None, *args, **kwargs):
+ self._buffer.extend((b"", b""))
+ msg = self._convert_to_bytes(self._buffer)
+ del self._buffer[:]
+ # If msg and message_body are sent in a single send() call,
+ # it will avoid performance problems caused by the interaction
+ # between delayed ack and the Nagle algorithm.
+ if isinstance(message_body, bytes):
+ msg += message_body
+ message_body = None
+ self.send(msg)
+ if self._expect_header_set:
+ # This is our custom behavior. If the Expect header was
+ # set, it will trigger this custom behavior.
+ logger.debug("Waiting for 100 Continue response.")
+ # Wait for 1 second for the server to send a response.
if urllib3.util.wait_for_read(self.sock, 1):
- self._handle_expect_response(message_body)
- return
- else:
- # From the RFC:
- # Because of the presence of older implementations, the
- # protocol allows ambiguous situations in which a client may
- # send "Expect: 100-continue" without receiving either a 417
- # (Expectation Failed) status or a 100 (Continue) status.
- # Therefore, when a client sends this header field to an origin
- # server (possibly via a proxy) from which it has never seen a
- # 100 (Continue) status, the client SHOULD NOT wait for an
- # indefinite period before sending the request body.
- logger.debug("No response seen from server, continuing to "
- "send the response body.")
- if message_body is not None:
- # message_body was not a string (i.e. it is a file), and
- # we must run the risk of Nagle.
- self.send(message_body)
-
- def _consume_headers(self, fp):
- # Most servers (including S3) will just return
- # the CRLF after the 100 continue response. However,
- # some servers (I've specifically seen this for squid when
- # used as a straight HTTP proxy) will also inject a
- # Connection: keep-alive header. To account for this
- # we'll read until we read '\r\n', and ignore any headers
- # that come immediately after the 100 continue response.
- current = None
- while current != b'\r\n':
- current = fp.readline()
-
- def _handle_expect_response(self, message_body):
- # This is called when we sent the request headers containing
- # an Expect: 100-continue header and received a response.
- # We now need to figure out what to do.
- fp = self.sock.makefile('rb', 0)
- try:
- maybe_status_line = fp.readline()
- parts = maybe_status_line.split(None, 2)
- if self._is_100_continue_status(maybe_status_line):
- self._consume_headers(fp)
- logger.debug("100 Continue response seen, "
- "now sending request body.")
- self._send_message_body(message_body)
- elif len(parts) == 3 and parts[0].startswith(b'HTTP/'):
- # From the RFC:
- # Requirements for HTTP/1.1 origin servers:
- #
- # - Upon receiving a request which includes an Expect
- # request-header field with the "100-continue"
- # expectation, an origin server MUST either respond with
- # 100 (Continue) status and continue to read from the
- # input stream, or respond with a final status code.
- #
- # So if we don't get a 100 Continue response, then
- # whatever the server has sent back is the final response
- # and don't send the message_body.
- logger.debug("Received a non 100 Continue response "
- "from the server, NOT sending request body.")
- status_tuple = (parts[0].decode('ascii'),
- int(parts[1]), parts[2].decode('ascii'))
- response_class = functools.partial(
- AWSHTTPResponse, status_tuple=status_tuple)
- self.response_class = response_class
- self._response_received = True
- finally:
- fp.close()
-
- def _send_message_body(self, message_body):
- if message_body is not None:
- self.send(message_body)
-
- def send(self, str):
- if self._response_received:
- logger.debug("send() called, but reseponse already received. "
- "Not sending data.")
- return
+ self._handle_expect_response(message_body)
+ return
+ else:
+ # From the RFC:
+ # Because of the presence of older implementations, the
+ # protocol allows ambiguous situations in which a client may
+ # send "Expect: 100-continue" without receiving either a 417
+ # (Expectation Failed) status or a 100 (Continue) status.
+ # Therefore, when a client sends this header field to an origin
+ # server (possibly via a proxy) from which it has never seen a
+ # 100 (Continue) status, the client SHOULD NOT wait for an
+ # indefinite period before sending the request body.
+ logger.debug("No response seen from server, continuing to "
+ "send the response body.")
+ if message_body is not None:
+ # message_body was not a string (i.e. it is a file), and
+ # we must run the risk of Nagle.
+ self.send(message_body)
+
+ def _consume_headers(self, fp):
+ # Most servers (including S3) will just return
+ # the CRLF after the 100 continue response. However,
+ # some servers (I've specifically seen this for squid when
+ # used as a straight HTTP proxy) will also inject a
+ # Connection: keep-alive header. To account for this
+ # we'll read until we read '\r\n', and ignore any headers
+ # that come immediately after the 100 continue response.
+ current = None
+ while current != b'\r\n':
+ current = fp.readline()
+
+ def _handle_expect_response(self, message_body):
+ # This is called when we sent the request headers containing
+ # an Expect: 100-continue header and received a response.
+ # We now need to figure out what to do.
+ fp = self.sock.makefile('rb', 0)
+ try:
+ maybe_status_line = fp.readline()
+ parts = maybe_status_line.split(None, 2)
+ if self._is_100_continue_status(maybe_status_line):
+ self._consume_headers(fp)
+ logger.debug("100 Continue response seen, "
+ "now sending request body.")
+ self._send_message_body(message_body)
+ elif len(parts) == 3 and parts[0].startswith(b'HTTP/'):
+ # From the RFC:
+ # Requirements for HTTP/1.1 origin servers:
+ #
+ # - Upon receiving a request which includes an Expect
+ # request-header field with the "100-continue"
+ # expectation, an origin server MUST either respond with
+ # 100 (Continue) status and continue to read from the
+ # input stream, or respond with a final status code.
+ #
+ # So if we don't get a 100 Continue response, then
+ # whatever the server has sent back is the final response
+ # and don't send the message_body.
+ logger.debug("Received a non 100 Continue response "
+ "from the server, NOT sending request body.")
+ status_tuple = (parts[0].decode('ascii'),
+ int(parts[1]), parts[2].decode('ascii'))
+ response_class = functools.partial(
+ AWSHTTPResponse, status_tuple=status_tuple)
+ self.response_class = response_class
+ self._response_received = True
+ finally:
+ fp.close()
+
+ def _send_message_body(self, message_body):
+ if message_body is not None:
+ self.send(message_body)
+
+ def send(self, str):
+ if self._response_received:
+ logger.debug("send() called, but reseponse already received. "
+ "Not sending data.")
+ return
return super(AWSConnection, self).send(str)
-
- def _is_100_continue_status(self, maybe_status_line):
- parts = maybe_status_line.split(None, 2)
- # Check for HTTP/<version> 100 Continue\r\n
- return (
- len(parts) >= 3 and parts[0].startswith(b'HTTP/') and
- parts[1] == b'100')
-
-
+
+ def _is_100_continue_status(self, maybe_status_line):
+ parts = maybe_status_line.split(None, 2)
+ # Check for HTTP/<version> 100 Continue\r\n
+ return (
+ len(parts) >= 3 and parts[0].startswith(b'HTTP/') and
+ parts[1] == b'100')
+
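
Taken together, the methods above implement the client side of the Expect: 100-continue handshake: send the headers, wait up to one second for an interim response, and only then stream the body. A minimal standalone sketch of the status-line check (the is_100_continue helper is hypothetical, mirroring _is_100_continue_status above):

def is_100_continue(status_line):
    # A status line looks like b"HTTP/1.1 100 Continue\r\n".
    parts = status_line.split(None, 2)
    return (len(parts) >= 3 and parts[0].startswith(b'HTTP/') and
            parts[1] == b'100')

assert is_100_continue(b'HTTP/1.1 100 Continue\r\n')
assert not is_100_continue(b'HTTP/1.1 417 Expectation Failed\r\n')
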
+
class AWSHTTPConnection(AWSConnection, HTTPConnection):
""" An HTTPConnection that supports 100 Continue behavior. """
-
-
+
+
class AWSHTTPSConnection(AWSConnection, VerifiedHTTPSConnection):
""" An HTTPSConnection that supports 100 Continue behavior. """
-
-
+
+
class AWSHTTPConnectionPool(HTTPConnectionPool):
ConnectionCls = AWSHTTPConnection
@@ -227,94 +227,94 @@ class AWSHTTPSConnectionPool(HTTPSConnectionPool):
ConnectionCls = AWSHTTPSConnection
-def prepare_request_dict(request_dict, endpoint_url, context=None,
- user_agent=None):
- """
- This method prepares a request dict so that it can be turned into
- an AWSRequest object, adding the url and the user agent to the
- request dict.
-
- :type request_dict: dict
- :param request_dict: The request dict (created from the
- ``serialize`` module).
-
- :type user_agent: string
- :param user_agent: The user agent to use for this request.
-
- :type endpoint_url: string
- :param endpoint_url: The full endpoint url, which contains at least
- the scheme, the hostname, and optionally any path components.
- """
- r = request_dict
- if user_agent is not None:
- headers = r['headers']
- headers['User-Agent'] = user_agent
+def prepare_request_dict(request_dict, endpoint_url, context=None,
+ user_agent=None):
+ """
+ This method prepares a request dict so that it can be turned into
+ an AWSRequest object, adding the url and the user agent to the
+ request dict.
+
+ :type request_dict: dict
+ :param request_dict: The request dict (created from the
+ ``serialize`` module).
+
+ :type user_agent: string
+ :param user_agent: The user agent to use for this request.
+
+ :type endpoint_url: string
+ :param endpoint_url: The full endpoint url, which contains at least
+ the scheme, the hostname, and optionally any path components.
+ """
+ r = request_dict
+ if user_agent is not None:
+ headers = r['headers']
+ headers['User-Agent'] = user_agent
host_prefix = r.get('host_prefix')
url = _urljoin(endpoint_url, r['url_path'], host_prefix)
- if r['query_string']:
+ if r['query_string']:
# NOTE: This is to avoid circular import with utils. This is being
# done to avoid moving classes to different modules so as not to
# cause breaking changes.
percent_encode_sequence = botocore.utils.percent_encode_sequence
- encoded_query_string = percent_encode_sequence(r['query_string'])
- if '?' not in url:
- url += '?%s' % encoded_query_string
- else:
- url += '&%s' % encoded_query_string
- r['url'] = url
- r['context'] = context
- if context is None:
- r['context'] = {}
-
-
-def create_request_object(request_dict):
- """
- This method takes a request dict and creates an AWSRequest object
- from it.
-
- :type request_dict: dict
- :param request_dict: The request dict (created from the
- ``prepare_request_dict`` method).
-
- :rtype: ``botocore.awsrequest.AWSRequest``
- :return: An AWSRequest object based on the request_dict.
-
- """
- r = request_dict
- request_object = AWSRequest(
- method=r['method'], url=r['url'], data=r['body'], headers=r['headers'])
+ encoded_query_string = percent_encode_sequence(r['query_string'])
+ if '?' not in url:
+ url += '?%s' % encoded_query_string
+ else:
+ url += '&%s' % encoded_query_string
+ r['url'] = url
+ r['context'] = context
+ if context is None:
+ r['context'] = {}
+
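
A hedged usage sketch of prepare_request_dict as defined above; the endpoint, path, and header values are illustrative, not taken from the patch:

request_dict = {
    'method': 'GET', 'url_path': '/my-bucket', 'body': b'',
    'headers': {}, 'query_string': {'list-type': '2'},
    'host_prefix': None,
}
prepare_request_dict(request_dict,
                     endpoint_url='https://s3.us-east-1.amazonaws.com',
                     user_agent='my-app/1.0')
# request_dict now carries the full 'url' (with the encoded query
# string appended), an empty 'context' dict, and a User-Agent header.
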
+
+def create_request_object(request_dict):
+ """
+ This method takes a request dict and creates an AWSRequest object
+ from it.
+
+ :type request_dict: dict
+ :param request_dict: The request dict (created from the
+ ``prepare_request_dict`` method).
+
+ :rtype: ``botocore.awsrequest.AWSRequest``
+ :return: An AWSRequest object based on the request_dict.
+
+ """
+ r = request_dict
+ request_object = AWSRequest(
+ method=r['method'], url=r['url'], data=r['body'], headers=r['headers'])
request_object.context = r['context']
- return request_object
-
-
+ return request_object
+
+
def _urljoin(endpoint_url, url_path, host_prefix):
- p = urlsplit(endpoint_url)
- # <part> - <index>
- # scheme - p[0]
- # netloc - p[1]
- # path - p[2]
- # query - p[3]
- # fragment - p[4]
- if not url_path or url_path == '/':
- # If there's no path component, ensure the URL ends with
- # a '/' for backwards compatibility.
- if not p[2]:
+ p = urlsplit(endpoint_url)
+ # <part> - <index>
+ # scheme - p[0]
+ # netloc - p[1]
+ # path - p[2]
+ # query - p[3]
+ # fragment - p[4]
+ if not url_path or url_path == '/':
+ # If there's no path component, ensure the URL ends with
+ # a '/' for backwards compatibility.
+ if not p[2]:
new_path = '/'
else:
new_path = p[2]
elif p[2].endswith('/') and url_path.startswith('/'):
- new_path = p[2][:-1] + url_path
- else:
- new_path = p[2] + url_path
+ new_path = p[2][:-1] + url_path
+ else:
+ new_path = p[2] + url_path
new_netloc = p[1]
if host_prefix is not None:
new_netloc = host_prefix + new_netloc
reconstructed = urlunsplit((p[0], new_netloc, new_path, p[3], p[4]))
- return reconstructed
-
-
+ return reconstructed
+
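
A few illustrative inputs and the outputs _urljoin should produce, assuming it behaves as written above (hostnames are placeholders):

assert _urljoin('https://s3.amazonaws.com', '/', None) == \
    'https://s3.amazonaws.com/'
assert _urljoin('https://example.com/base/', '/path', None) == \
    'https://example.com/base/path'
assert _urljoin('https://example.com', '/path', 'bucket.') == \
    'https://bucket.example.com/path'
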
+
class AWSRequestPreparer(object):
"""
This class performs preparation on AWSRequest objects similar to that of
@@ -457,84 +457,84 @@ class AWSRequest(object):
for key, value in headers.items():
self.headers[key] = value
- # This is a dictionary to hold information that is used when
- # processing the request. What is inside of ``context`` is open-ended.
- # For example, it may have a timestamp key holding the timestamp
- # used when signing the request. Note that none
- # of the information that is inside of ``context`` is directly
- # sent over the wire; the information is only used to assist in
- # creating what is sent over the wire.
- self.context = {}
-
- def prepare(self):
- """Constructs a :class:`AWSPreparedRequest <AWSPreparedRequest>`."""
+ # This is a dictionary to hold information that is used when
+ # processing the request. What is inside of ``context`` is open-ended.
+ # For example, it may have a timestamp key holding the timestamp
+ # used when signing the request. Note that none
+ # of the information that is inside of ``context`` is directly
+ # sent over the wire; the information is only used to assist in
+ # creating what is sent over the wire.
+ self.context = {}
+
+ def prepare(self):
+ """Constructs a :class:`AWSPreparedRequest <AWSPreparedRequest>`."""
return self._request_preparer.prepare(self)
-
- @property
- def body(self):
+
+ @property
+ def body(self):
body = self.prepare().body
if isinstance(body, six.text_type):
body = body.encode('utf-8')
return body
-
-
+
+
class AWSPreparedRequest(object):
"""A data class representing a finalized request to be sent over the wire.
-
+
Requests at this stage should be treated as final, and the properties of
the request should not be modified.
:ivar method: The HTTP Method
- :ivar url: The full url
- :ivar headers: The HTTP headers to send.
- :ivar body: The HTTP body.
+ :ivar url: The full url
+ :ivar headers: The HTTP headers to send.
+ :ivar body: The HTTP body.
:ivar stream_output: If the response for this request should be streamed.
- """
+ """
def __init__(self, method, url, headers, body, stream_output):
self.method = method
self.url = url
self.headers = headers
self.body = body
self.stream_output = stream_output
-
+
def __repr__(self):
fmt = (
'<AWSPreparedRequest stream_output=%s, method=%s, url=%s, '
'headers=%s>'
)
return fmt % (self.stream_output, self.method, self.url, self.headers)
-
+
def reset_stream(self):
"""Resets the streaming body to it's initial position.
-
+
If the request contains a streaming body (a streamable file-like object),
seek to the object's initial position to ensure the entire contents of
the object are sent. This is a no-op for static bytes-like body types.
"""
- # Trying to reset a stream when there is no stream will
- # just immediately return. It's not an error, it will produce
- # the same result as if we had actually reset the stream (we'll send
- # the entire body contents again if we need to).
+ # Trying to reset a stream when there is no stream will
+ # just immediately return. It's not an error, it will produce
+ # the same result as if we had actually reset the stream (we'll send
+ # the entire body contents again if we need to).
# Same case if the body is a string/bytes/bytearray type.
non_seekable_types = (six.binary_type, six.text_type, bytearray)
if self.body is None or isinstance(self.body, non_seekable_types):
- return
- try:
- logger.debug("Rewinding stream: %s", self.body)
- self.body.seek(0)
- except Exception as e:
- logger.debug("Unable to rewind stream: %s", e)
- raise UnseekableStreamError(stream_object=self.body)
-
-
+ return
+ try:
+ logger.debug("Rewinding stream: %s", self.body)
+ self.body.seek(0)
+ except Exception as e:
+ logger.debug("Unable to rewind stream: %s", e)
+ raise UnseekableStreamError(stream_object=self.body)
+
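
A minimal sketch of reset_stream with a seekable body; the request values are hypothetical:

import io

body = io.BytesIO(b'payload')
req = AWSPreparedRequest('PUT', 'https://example.com/key', {}, body,
                         stream_output=False)
body.read()         # pretend a failed first attempt consumed the stream
req.reset_stream()  # seeks back to 0 so a retry resends the full body
assert body.tell() == 0
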
+
class AWSResponse(object):
"""A data class representing an HTTP response.
-
+
This class was originally inspired by requests.models.Response, but has
been boiled down to meet the specific use cases in botocore. This has
effectively been reduced to a named tuple.
-
+
:ivar url: The full url.
:ivar status_code: The status code of the HTTP response.
:ivar headers: The HTTP headers received.
diff --git a/contrib/python/botocore/botocore/client.py b/contrib/python/botocore/botocore/client.py
index 548442d490..9584658f86 100644
--- a/contrib/python/botocore/botocore/client.py
+++ b/contrib/python/botocore/botocore/client.py
@@ -1,96 +1,96 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import logging
-import functools
-
-from botocore import waiter, xform_name
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import logging
+import functools
+
+from botocore import waiter, xform_name
from botocore.args import ClientArgsCreator
-from botocore.auth import AUTH_TYPE_MAPS
-from botocore.awsrequest import prepare_request_dict
-from botocore.docs.docstring import ClientMethodDocstring
-from botocore.docs.docstring import PaginatorDocstring
+from botocore.auth import AUTH_TYPE_MAPS
+from botocore.awsrequest import prepare_request_dict
+from botocore.docs.docstring import ClientMethodDocstring
+from botocore.docs.docstring import PaginatorDocstring
from botocore.exceptions import (
ClientError, DataNotFoundError, OperationNotPageableError,
UnknownSignatureVersionError, InvalidEndpointDiscoveryConfigurationError,
UnknownFIPSEndpointError,
)
-from botocore.hooks import first_non_none_response
-from botocore.model import ServiceModel
-from botocore.paginate import Paginator
+from botocore.hooks import first_non_none_response
+from botocore.model import ServiceModel
+from botocore.paginate import Paginator
from botocore.utils import (
CachedProperty, get_service_module_name, S3RegionRedirector,
S3ArnParamHandler, S3EndpointSetter, ensure_boolean,
S3ControlArnParamHandler, S3ControlEndpointSetter,
)
from botocore.args import ClientArgsCreator
-from botocore import UNSIGNED
-# Keep this imported. There's pre-existing code that uses
-# "from botocore.client import Config".
-from botocore.config import Config
-from botocore.history import get_global_history_recorder
+from botocore import UNSIGNED
+# Keep this imported. There's pre-existing code that uses
+# "from botocore.client import Config".
+from botocore.config import Config
+from botocore.history import get_global_history_recorder
from botocore.discovery import (
EndpointDiscoveryHandler, EndpointDiscoveryManager,
block_endpoint_discovery_required_operations
)
from botocore.retries import standard
from botocore.retries import adaptive
-
-
-logger = logging.getLogger(__name__)
-history_recorder = get_global_history_recorder()
-
-
-class ClientCreator(object):
- """Creates client objects for a service."""
- def __init__(self, loader, endpoint_resolver, user_agent, event_emitter,
- retry_handler_factory, retry_config_translator,
+
+
+logger = logging.getLogger(__name__)
+history_recorder = get_global_history_recorder()
+
+
+class ClientCreator(object):
+ """Creates client objects for a service."""
+ def __init__(self, loader, endpoint_resolver, user_agent, event_emitter,
+ retry_handler_factory, retry_config_translator,
response_parser_factory=None, exceptions_factory=None,
config_store=None):
- self._loader = loader
- self._endpoint_resolver = endpoint_resolver
- self._user_agent = user_agent
- self._event_emitter = event_emitter
- self._retry_handler_factory = retry_handler_factory
- self._retry_config_translator = retry_config_translator
- self._response_parser_factory = response_parser_factory
- self._exceptions_factory = exceptions_factory
+ self._loader = loader
+ self._endpoint_resolver = endpoint_resolver
+ self._user_agent = user_agent
+ self._event_emitter = event_emitter
+ self._retry_handler_factory = retry_handler_factory
+ self._retry_config_translator = retry_config_translator
+ self._response_parser_factory = response_parser_factory
+ self._exceptions_factory = exceptions_factory
# TODO: Migrate things away from scoped_config in favor of the
# config_store. The config store can pull things from both the scoped
# config and environment variables (and potentially more in the
# future).
self._config_store = config_store
-
- def create_client(self, service_name, region_name, is_secure=True,
- endpoint_url=None, verify=None,
- credentials=None, scoped_config=None,
- api_version=None,
- client_config=None):
+
+ def create_client(self, service_name, region_name, is_secure=True,
+ endpoint_url=None, verify=None,
+ credentials=None, scoped_config=None,
+ api_version=None,
+ client_config=None):
responses = self._event_emitter.emit(
'choose-service-name', service_name=service_name)
service_name = first_non_none_response(responses, default=service_name)
- service_model = self._load_service_model(service_name, api_version)
- cls = self._create_client_class(service_name, service_model)
- endpoint_bridge = ClientEndpointBridge(
- self._endpoint_resolver, scoped_config, client_config,
- service_signing_name=service_model.metadata.get('signingName'))
- client_args = self._get_client_args(
- service_model, region_name, is_secure, endpoint_url,
- verify, credentials, scoped_config, client_config, endpoint_bridge)
- service_client = cls(**client_args)
- self._register_retries(service_client)
- self._register_s3_events(
- service_client, endpoint_bridge, endpoint_url, client_config,
- scoped_config)
+ service_model = self._load_service_model(service_name, api_version)
+ cls = self._create_client_class(service_name, service_model)
+ endpoint_bridge = ClientEndpointBridge(
+ self._endpoint_resolver, scoped_config, client_config,
+ service_signing_name=service_model.metadata.get('signingName'))
+ client_args = self._get_client_args(
+ service_model, region_name, is_secure, endpoint_url,
+ verify, credentials, scoped_config, client_config, endpoint_bridge)
+ service_client = cls(**client_args)
+ self._register_retries(service_client)
+ self._register_s3_events(
+ service_client, endpoint_bridge, endpoint_url, client_config,
+ scoped_config)
self._register_s3_control_events(
service_client, endpoint_bridge, endpoint_url, client_config,
scoped_config)
@@ -98,33 +98,33 @@ class ClientCreator(object):
service_client, endpoint_url, client_config
)
self._register_lazy_block_unknown_fips_pseudo_regions(service_client)
- return service_client
-
- def create_client_class(self, service_name, api_version=None):
- service_model = self._load_service_model(service_name, api_version)
- return self._create_client_class(service_name, service_model)
-
- def _create_client_class(self, service_name, service_model):
- class_attributes = self._create_methods(service_model)
- py_name_to_operation_name = self._create_name_mapping(service_model)
- class_attributes['_PY_TO_OP_NAME'] = py_name_to_operation_name
- bases = [BaseClient]
+ return service_client
+
+ def create_client_class(self, service_name, api_version=None):
+ service_model = self._load_service_model(service_name, api_version)
+ return self._create_client_class(service_name, service_model)
+
+ def _create_client_class(self, service_name, service_model):
+ class_attributes = self._create_methods(service_model)
+ py_name_to_operation_name = self._create_name_mapping(service_model)
+ class_attributes['_PY_TO_OP_NAME'] = py_name_to_operation_name
+ bases = [BaseClient]
service_id = service_model.service_id.hyphenize()
self._event_emitter.emit(
'creating-client-class.%s' % service_id,
class_attributes=class_attributes,
base_classes=bases)
- class_name = get_service_module_name(service_model)
- cls = type(str(class_name), tuple(bases), class_attributes)
- return cls
-
- def _load_service_model(self, service_name, api_version=None):
- json_model = self._loader.load_service_model(service_name, 'service-2',
- api_version=api_version)
- service_model = ServiceModel(json_model, service_name=service_name)
- return service_model
-
- def _register_retries(self, client):
+ class_name = get_service_module_name(service_model)
+ cls = type(str(class_name), tuple(bases), class_attributes)
+ return cls
+
+ def _load_service_model(self, service_name, api_version=None):
+ json_model = self._loader.load_service_model(service_name, 'service-2',
+ api_version=api_version)
+ service_model = ServiceModel(json_model, service_name=service_name)
+ return service_model
+
+ def _register_retries(self, client):
retry_mode = client.meta.config.retries['mode']
if retry_mode == 'standard':
self._register_v2_standard_retries(client)
@@ -145,33 +145,33 @@ class ClientCreator(object):
adaptive.register_retry_handler(client)
def _register_legacy_retries(self, client):
- endpoint_prefix = client.meta.service_model.endpoint_prefix
+ endpoint_prefix = client.meta.service_model.endpoint_prefix
service_id = client.meta.service_model.service_id
service_event_name = service_id.hyphenize()
-
- # First, we load the entire retry config for all services,
- # then pull out just the information we need.
- original_config = self._loader.load_data('_retry')
- if not original_config:
- return
-
+
+ # First, we load the entire retry config for all services,
+ # then pull out just the information we need.
+ original_config = self._loader.load_data('_retry')
+ if not original_config:
+ return
+
retries = self._transform_legacy_retries(client.meta.config.retries)
- retry_config = self._retry_config_translator.build_retry_config(
- endpoint_prefix, original_config.get('retry', {}),
- original_config.get('definitions', {}),
+ retry_config = self._retry_config_translator.build_retry_config(
+ endpoint_prefix, original_config.get('retry', {}),
+ original_config.get('definitions', {}),
retries
- )
-
- logger.debug("Registering retry handlers for service: %s",
- client.meta.service_model.service_name)
- handler = self._retry_handler_factory.create_retry_handler(
- retry_config, endpoint_prefix)
+ )
+
+ logger.debug("Registering retry handlers for service: %s",
+ client.meta.service_model.service_name)
+ handler = self._retry_handler_factory.create_retry_handler(
+ retry_config, endpoint_prefix)
unique_id = 'retry-config-%s' % service_event_name
client.meta.events.register(
'needs-retry.%s' % service_event_name, handler,
unique_id=unique_id
)
-
+
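
_register_retries dispatches on the configured retry mode ('standard', 'adaptive', or the legacy handler above). A usage sketch selecting the standard mode through the public Config API (region and limits are illustrative):

import boto3
from botocore.config import Config

client = boto3.client(
    's3', region_name='us-east-1',
    config=Config(retries={'max_attempts': 5, 'mode': 'standard'}),
)
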
def _transform_legacy_retries(self, retries):
if retries is None:
return
@@ -260,11 +260,11 @@ class ClientCreator(object):
)
client.meta.events.register('before-sign', _lazy_fips_exception)
- def _register_s3_events(self, client, endpoint_bridge, endpoint_url,
- client_config, scoped_config):
- if client.meta.service_model.service_name != 's3':
- return
- S3RegionRedirector(endpoint_bridge, client).register()
+ def _register_s3_events(self, client, endpoint_bridge, endpoint_url,
+ client_config, scoped_config):
+ if client.meta.service_model.service_name != 's3':
+ return
+ S3RegionRedirector(endpoint_bridge, client).register()
S3ArnParamHandler().register(client.meta.events)
S3EndpointSetter(
endpoint_resolver=self._endpoint_resolver,
@@ -273,9 +273,9 @@ class ClientCreator(object):
endpoint_url=endpoint_url,
partition=client.meta.partition
).register(client.meta.events)
- self._set_s3_presign_signature_version(
- client.meta, client_config, scoped_config)
-
+ self._set_s3_presign_signature_version(
+ client.meta, client_config, scoped_config)
+
def _register_s3_control_events(
self, client, endpoint_bridge,
endpoint_url, client_config, scoped_config
@@ -291,144 +291,144 @@ class ClientCreator(object):
partition=client.meta.partition
).register(client.meta.events)
- def _set_s3_presign_signature_version(self, client_meta,
- client_config, scoped_config):
- # This will return the manually configured signature version, or None
- # if none was manually set. If a customer manually sets the signature
- # version, we always want to use what they set.
- provided_signature_version = _get_configured_signature_version(
- 's3', client_config, scoped_config)
- if provided_signature_version is not None:
- return
-
- # Check to see if the region is a region that we know about. If we
- # don't know about a region, then we can safely assume it's a new
- # region that is sigv4 only, since all new S3 regions only allow sigv4.
+ def _set_s3_presign_signature_version(self, client_meta,
+ client_config, scoped_config):
+ # This will return the manually configured signature version, or None
+ # if none was manually set. If a customer manually sets the signature
+ # version, we always want to use what they set.
+ provided_signature_version = _get_configured_signature_version(
+ 's3', client_config, scoped_config)
+ if provided_signature_version is not None:
+ return
+
+ # Check to see if the region is a region that we know about. If we
+ # don't know about a region, then we can safely assume it's a new
+ # region that is sigv4 only, since all new S3 regions only allow sigv4.
# The only exception is aws-global. This is a pseudo-region for the
# global endpoint, we should respect the signature versions it
# supports, which includes v2.
- regions = self._endpoint_resolver.get_available_endpoints(
- 's3', client_meta.partition)
+ regions = self._endpoint_resolver.get_available_endpoints(
+ 's3', client_meta.partition)
if client_meta.region_name != 'aws-global' and \
client_meta.region_name not in regions:
- return
-
- # If it is a region we know about, we want to default to sigv2, so here
- # we check to see if it is available.
- endpoint = self._endpoint_resolver.construct_endpoint(
- 's3', client_meta.region_name)
- signature_versions = endpoint['signatureVersions']
- if 's3' not in signature_versions:
- return
-
- # We now know that we're in a known region that supports sigv2 and
- # the customer hasn't set a signature version so we default the
- # signature version to sigv2.
- client_meta.events.register(
- 'choose-signer.s3', self._default_s3_presign_to_sigv2)
-
- def _default_s3_presign_to_sigv2(self, signature_version, **kwargs):
- """
- Returns the 's3' (sigv2) signer if presigning an s3 request. This is
- intended to be used to set the default signature version for the signer
- to sigv2.
-
- :type signature_version: str
- :param signature_version: The current client signature version.
-
- :type signing_name: str
- :param signing_name: The signing name of the service.
-
- :return: 's3' if the request is an s3 presign request, None otherwise
- """
- for suffix in ['-query', '-presign-post']:
- if signature_version.endswith(suffix):
- return 's3' + suffix
-
- def _get_client_args(self, service_model, region_name, is_secure,
- endpoint_url, verify, credentials,
- scoped_config, client_config, endpoint_bridge):
- args_creator = ClientArgsCreator(
- self._event_emitter, self._user_agent,
- self._response_parser_factory, self._loader,
+ return
+
+ # If it is a region we know about, we want to default to sigv2, so here
+ # we check to see if it is available.
+ endpoint = self._endpoint_resolver.construct_endpoint(
+ 's3', client_meta.region_name)
+ signature_versions = endpoint['signatureVersions']
+ if 's3' not in signature_versions:
+ return
+
+ # We now know that we're in a known region that supports sigv2 and
+ # the customer hasn't set a signature version so we default the
+ # signature version to sigv2.
+ client_meta.events.register(
+ 'choose-signer.s3', self._default_s3_presign_to_sigv2)
+
+ def _default_s3_presign_to_sigv2(self, signature_version, **kwargs):
+ """
+ Returns the 's3' (sigv2) signer if presigning an s3 request. This is
+ intended to be used to set the default signature version for the signer
+ to sigv2.
+
+ :type signature_version: str
+ :param signature_version: The current client signature version.
+
+ :type signing_name: str
+ :param signing_name: The signing name of the service.
+
+ :return: 's3' if the request is an s3 presign request, None otherwise
+ """
+ for suffix in ['-query', '-presign-post']:
+ if signature_version.endswith(suffix):
+ return 's3' + suffix
+
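
A standalone mirror of the suffix mapping in _default_s3_presign_to_sigv2 above, to make the sigv2 fallback concrete (the helper name is hypothetical):

def default_presign(signature_version):
    for suffix in ['-query', '-presign-post']:
        if signature_version.endswith(suffix):
            return 's3' + suffix

assert default_presign('s3v4-query') == 's3-query'
assert default_presign('v4-presign-post') == 's3-presign-post'
assert default_presign('v4') is None   # non-presign: leave the signer alone
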
+ def _get_client_args(self, service_model, region_name, is_secure,
+ endpoint_url, verify, credentials,
+ scoped_config, client_config, endpoint_bridge):
+ args_creator = ClientArgsCreator(
+ self._event_emitter, self._user_agent,
+ self._response_parser_factory, self._loader,
self._exceptions_factory, config_store=self._config_store)
- return args_creator.get_client_args(
- service_model, region_name, is_secure, endpoint_url,
- verify, credentials, scoped_config, client_config, endpoint_bridge)
-
- def _create_methods(self, service_model):
- op_dict = {}
- for operation_name in service_model.operation_names:
- py_operation_name = xform_name(operation_name)
- op_dict[py_operation_name] = self._create_api_method(
- py_operation_name, operation_name, service_model)
- return op_dict
-
- def _create_name_mapping(self, service_model):
- # py_name -> OperationName, for every operation available
- # for a service.
- mapping = {}
- for operation_name in service_model.operation_names:
- py_operation_name = xform_name(operation_name)
- mapping[py_operation_name] = operation_name
- return mapping
-
- def _create_api_method(self, py_operation_name, operation_name,
- service_model):
- def _api_call(self, *args, **kwargs):
- # We're accepting *args so that we can give a more helpful
- # error message than TypeError: _api_call takes exactly
- # 1 argument.
- if args:
- raise TypeError(
- "%s() only accepts keyword arguments." % py_operation_name)
- # The "self" in this scope is referring to the BaseClient.
- return self._make_api_call(operation_name, kwargs)
-
- _api_call.__name__ = str(py_operation_name)
-
- # Add the docstring to the client method
- operation_model = service_model.operation_model(operation_name)
- docstring = ClientMethodDocstring(
- operation_model=operation_model,
- method_name=operation_name,
- event_emitter=self._event_emitter,
- method_description=operation_model.documentation,
- example_prefix='response = client.%s' % py_operation_name,
- include_signature=False
- )
- _api_call.__doc__ = docstring
- return _api_call
-
-
-class ClientEndpointBridge(object):
- """Bridges endpoint data and client creation
-
- This class handles taking out the relevant arguments from the endpoint
- resolver and determining which values to use, taking into account any
- client configuration options and scope configuration options.
-
- This class also handles determining what, if any, region to use if no
- explicit region setting is provided. For example, the Amazon S3
- client will use "us-east-1" by default if no region can be resolved."""
-
- DEFAULT_ENDPOINT = '{service}.{region}.amazonaws.com'
+ return args_creator.get_client_args(
+ service_model, region_name, is_secure, endpoint_url,
+ verify, credentials, scoped_config, client_config, endpoint_bridge)
+
+ def _create_methods(self, service_model):
+ op_dict = {}
+ for operation_name in service_model.operation_names:
+ py_operation_name = xform_name(operation_name)
+ op_dict[py_operation_name] = self._create_api_method(
+ py_operation_name, operation_name, service_model)
+ return op_dict
+
+ def _create_name_mapping(self, service_model):
+ # py_name -> OperationName, for every operation available
+ # for a service.
+ mapping = {}
+ for operation_name in service_model.operation_names:
+ py_operation_name = xform_name(operation_name)
+ mapping[py_operation_name] = operation_name
+ return mapping
+
+ def _create_api_method(self, py_operation_name, operation_name,
+ service_model):
+ def _api_call(self, *args, **kwargs):
+ # We're accepting *args so that we can give a more helpful
+ # error message than TypeError: _api_call takes exactly
+ # 1 argument.
+ if args:
+ raise TypeError(
+ "%s() only accepts keyword arguments." % py_operation_name)
+ # The "self" in this scope is referring to the BaseClient.
+ return self._make_api_call(operation_name, kwargs)
+
+ _api_call.__name__ = str(py_operation_name)
+
+ # Add the docstring to the client method
+ operation_model = service_model.operation_model(operation_name)
+ docstring = ClientMethodDocstring(
+ operation_model=operation_model,
+ method_name=operation_name,
+ event_emitter=self._event_emitter,
+ method_description=operation_model.documentation,
+ example_prefix='response = client.%s' % py_operation_name,
+ include_signature=False
+ )
+ _api_call.__doc__ = docstring
+ return _api_call
+
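
Because _create_api_method builds every client operation this way, positional arguments fail fast before any request is made. A sketch assuming boto3 is installed (the bucket name is illustrative):

import boto3

s3 = boto3.client('s3', region_name='us-east-1')
try:
    s3.list_objects_v2('my-bucket')          # positional args rejected
except TypeError as err:
    print(err)  # list_objects_v2() only accepts keyword arguments.
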
+
+class ClientEndpointBridge(object):
+ """Bridges endpoint data and client creation
+
+ This class handles taking out the relevant arguments from the endpoint
+ resolver and determining which values to use, taking into account any
+ client configuration options and scope configuration options.
+
+ This class also handles determining what, if any, region to use if no
+ explicit region setting is provided. For example, the Amazon S3
+ client will use "us-east-1" by default if no region can be resolved."""
+
+ DEFAULT_ENDPOINT = '{service}.{region}.amazonaws.com'
_DUALSTACK_ENABLED_SERVICES = ['s3', 's3-control']
-
- def __init__(self, endpoint_resolver, scoped_config=None,
- client_config=None, default_endpoint=None,
- service_signing_name=None):
- self.service_signing_name = service_signing_name
- self.endpoint_resolver = endpoint_resolver
- self.scoped_config = scoped_config
- self.client_config = client_config
- self.default_endpoint = default_endpoint or self.DEFAULT_ENDPOINT
-
- def resolve(self, service_name, region_name=None, endpoint_url=None,
- is_secure=True):
- region_name = self._check_default_region(service_name, region_name)
- resolved = self.endpoint_resolver.construct_endpoint(
- service_name, region_name)
+
+ def __init__(self, endpoint_resolver, scoped_config=None,
+ client_config=None, default_endpoint=None,
+ service_signing_name=None):
+ self.service_signing_name = service_signing_name
+ self.endpoint_resolver = endpoint_resolver
+ self.scoped_config = scoped_config
+ self.client_config = client_config
+ self.default_endpoint = default_endpoint or self.DEFAULT_ENDPOINT
+
+ def resolve(self, service_name, region_name=None, endpoint_url=None,
+ is_secure=True):
+ region_name = self._check_default_region(service_name, region_name)
+ resolved = self.endpoint_resolver.construct_endpoint(
+ service_name, region_name)
# If we can't resolve the region, we'll attempt to get a global
# endpoint for non-regionalized services (iam, route53, etc)
@@ -438,274 +438,274 @@ class ClientEndpointBridge(object):
resolved = self.endpoint_resolver.construct_endpoint(
service_name, region_name, partition_name='aws')
- if resolved:
- return self._create_endpoint(
- resolved, service_name, region_name, endpoint_url, is_secure)
- else:
- return self._assume_endpoint(service_name, region_name,
- endpoint_url, is_secure)
-
- def _check_default_region(self, service_name, region_name):
- if region_name is not None:
- return region_name
- # Use the client_config region if no explicit region was provided.
- if self.client_config and self.client_config.region_name is not None:
- return self.client_config.region_name
-
- def _create_endpoint(self, resolved, service_name, region_name,
- endpoint_url, is_secure):
+ if resolved:
+ return self._create_endpoint(
+ resolved, service_name, region_name, endpoint_url, is_secure)
+ else:
+ return self._assume_endpoint(service_name, region_name,
+ endpoint_url, is_secure)
+
+ def _check_default_region(self, service_name, region_name):
+ if region_name is not None:
+ return region_name
+ # Use the client_config region if no explicit region was provided.
+ if self.client_config and self.client_config.region_name is not None:
+ return self.client_config.region_name
+
+ def _create_endpoint(self, resolved, service_name, region_name,
+ endpoint_url, is_secure):
explicit_region = region_name is not None
- region_name, signing_region = self._pick_region_values(
- resolved, region_name, endpoint_url)
- if endpoint_url is None:
- if self._is_s3_dualstack_mode(service_name):
- endpoint_url = self._create_dualstack_endpoint(
- service_name, region_name,
+ region_name, signing_region = self._pick_region_values(
+ resolved, region_name, endpoint_url)
+ if endpoint_url is None:
+ if self._is_s3_dualstack_mode(service_name):
+ endpoint_url = self._create_dualstack_endpoint(
+ service_name, region_name,
resolved['dnsSuffix'], is_secure, explicit_region)
- else:
- # Use the sslCommonName over the hostname for Python 2.6 compat.
- hostname = resolved.get('sslCommonName', resolved.get('hostname'))
- endpoint_url = self._make_url(hostname, is_secure,
- resolved.get('protocols', []))
- signature_version = self._resolve_signature_version(
- service_name, resolved)
- signing_name = self._resolve_signing_name(service_name, resolved)
- return self._create_result(
- service_name=service_name, region_name=region_name,
- signing_region=signing_region, signing_name=signing_name,
- endpoint_url=endpoint_url, metadata=resolved,
- signature_version=signature_version)
-
- def _is_s3_dualstack_mode(self, service_name):
+ else:
+ # Use the sslCommonName over the hostname for Python 2.6 compat.
+ hostname = resolved.get('sslCommonName', resolved.get('hostname'))
+ endpoint_url = self._make_url(hostname, is_secure,
+ resolved.get('protocols', []))
+ signature_version = self._resolve_signature_version(
+ service_name, resolved)
+ signing_name = self._resolve_signing_name(service_name, resolved)
+ return self._create_result(
+ service_name=service_name, region_name=region_name,
+ signing_region=signing_region, signing_name=signing_name,
+ endpoint_url=endpoint_url, metadata=resolved,
+ signature_version=signature_version)
+
+ def _is_s3_dualstack_mode(self, service_name):
if service_name not in self._DUALSTACK_ENABLED_SERVICES:
- return False
- # TODO: This normalization logic is duplicated from the
- # ClientArgsCreator class. Consolidate everything to
- # ClientArgsCreator. _resolve_signature_version also has similarly
- # duplicated logic.
- client_config = self.client_config
- if client_config is not None and client_config.s3 is not None and \
- 'use_dualstack_endpoint' in client_config.s3:
- # Client config trumps scoped config.
- return client_config.s3['use_dualstack_endpoint']
- if self.scoped_config is None:
- return False
- enabled = self.scoped_config.get('s3', {}).get(
- 'use_dualstack_endpoint', False)
- if enabled in [True, 'True', 'true']:
- return True
- return False
-
- def _create_dualstack_endpoint(self, service_name, region_name,
+ return False
+ # TODO: This normalization logic is duplicated from the
+ # ClientArgsCreator class. Consolidate everything to
+ # ClientArgsCreator. _resolve_signature_version also has similarly
+ # duplicated logic.
+ client_config = self.client_config
+ if client_config is not None and client_config.s3 is not None and \
+ 'use_dualstack_endpoint' in client_config.s3:
+ # Client config trumps scoped config.
+ return client_config.s3['use_dualstack_endpoint']
+ if self.scoped_config is None:
+ return False
+ enabled = self.scoped_config.get('s3', {}).get(
+ 'use_dualstack_endpoint', False)
+ if enabled in [True, 'True', 'true']:
+ return True
+ return False
+
+ def _create_dualstack_endpoint(self, service_name, region_name,
dns_suffix, is_secure, explicit_region):
if not explicit_region and region_name == 'aws-global':
# If the region_name passed was not explicitly set, default to
# us-east-1 instead of the modeled default aws-global. Dualstack
# does not support aws-global
region_name = 'us-east-1'
- hostname = '{service}.dualstack.{region}.{dns_suffix}'.format(
- service=service_name, region=region_name,
- dns_suffix=dns_suffix)
- # Dualstack supports http and https so we're hardcoding this value for
- # now. This can potentially move into the endpoints.json file.
- return self._make_url(hostname, is_secure, ['http', 'https'])
-
- def _assume_endpoint(self, service_name, region_name, endpoint_url,
- is_secure):
- if endpoint_url is None:
- # Expand the default hostname URI template.
- hostname = self.default_endpoint.format(
- service=service_name, region=region_name)
- endpoint_url = self._make_url(hostname, is_secure,
- ['http', 'https'])
- logger.debug('Assuming an endpoint for %s, %s: %s',
- service_name, region_name, endpoint_url)
- # We still want to allow the user to provide an explicit version.
- signature_version = self._resolve_signature_version(
- service_name, {'signatureVersions': ['v4']})
- signing_name = self._resolve_signing_name(service_name, resolved={})
- return self._create_result(
- service_name=service_name, region_name=region_name,
- signing_region=region_name, signing_name=signing_name,
- signature_version=signature_version, endpoint_url=endpoint_url,
- metadata={})
-
- def _create_result(self, service_name, region_name, signing_region,
- signing_name, endpoint_url, signature_version,
- metadata):
- return {
- 'service_name': service_name,
- 'region_name': region_name,
- 'signing_region': signing_region,
- 'signing_name': signing_name,
- 'endpoint_url': endpoint_url,
- 'signature_version': signature_version,
- 'metadata': metadata
- }
-
- def _make_url(self, hostname, is_secure, supported_protocols):
- if is_secure and 'https' in supported_protocols:
- scheme = 'https'
- else:
- scheme = 'http'
- return '%s://%s' % (scheme, hostname)
-
- def _resolve_signing_name(self, service_name, resolved):
- # CredentialScope overrides everything else.
- if 'credentialScope' in resolved \
- and 'service' in resolved['credentialScope']:
- return resolved['credentialScope']['service']
- # Use the signingName from the model if present.
- if self.service_signing_name:
- return self.service_signing_name
- # Just assume it's the same as the service name.
- return service_name
-
- def _pick_region_values(self, resolved, region_name, endpoint_url):
- signing_region = region_name
- if endpoint_url is None:
- # Do not use the region name or signing name from the resolved
- # endpoint if the user explicitly provides an endpoint_url. This
- # would happen if we resolve to an endpoint where the service has
- # a "defaults" section that overrides all endpoint with a single
- # hostname and credentialScope. This has been the case historically
- # for how STS has worked. The only way to resolve an STS endpoint
- # was to provide a region_name and an endpoint_url. In that case,
- # we would still resolve an endpoint, but we would not use the
- # resolved endpointName or signingRegion because we want to allow
- # custom endpoints.
- region_name = resolved['endpointName']
- signing_region = region_name
- if 'credentialScope' in resolved \
- and 'region' in resolved['credentialScope']:
- signing_region = resolved['credentialScope']['region']
- return region_name, signing_region
-
- def _resolve_signature_version(self, service_name, resolved):
- configured_version = _get_configured_signature_version(
- service_name, self.client_config, self.scoped_config)
- if configured_version is not None:
- return configured_version
-
- # Pick a signature version from the endpoint metadata if present.
- if 'signatureVersions' in resolved:
- potential_versions = resolved['signatureVersions']
- if service_name == 's3':
- return 's3v4'
- if 'v4' in potential_versions:
- return 'v4'
- # Now just iterate over the signature versions in order until we
- # find the first one that is known to Botocore.
+ hostname = '{service}.dualstack.{region}.{dns_suffix}'.format(
+ service=service_name, region=region_name,
+ dns_suffix=dns_suffix)
+ # Dualstack supports http and https so we're hardcoding this value for
+ # now. This can potentially move into the endpoints.json file.
+ return self._make_url(hostname, is_secure, ['http', 'https'])
+
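
The dualstack hostname template expands like so (illustrative values):

hostname = '{service}.dualstack.{region}.{dns_suffix}'.format(
    service='s3', region='us-east-1', dns_suffix='amazonaws.com')
assert hostname == 's3.dualstack.us-east-1.amazonaws.com'
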
+ def _assume_endpoint(self, service_name, region_name, endpoint_url,
+ is_secure):
+ if endpoint_url is None:
+ # Expand the default hostname URI template.
+ hostname = self.default_endpoint.format(
+ service=service_name, region=region_name)
+ endpoint_url = self._make_url(hostname, is_secure,
+ ['http', 'https'])
+ logger.debug('Assuming an endpoint for %s, %s: %s',
+ service_name, region_name, endpoint_url)
+ # We still want to allow the user to provide an explicit version.
+ signature_version = self._resolve_signature_version(
+ service_name, {'signatureVersions': ['v4']})
+ signing_name = self._resolve_signing_name(service_name, resolved={})
+ return self._create_result(
+ service_name=service_name, region_name=region_name,
+ signing_region=region_name, signing_name=signing_name,
+ signature_version=signature_version, endpoint_url=endpoint_url,
+ metadata={})
+
+ def _create_result(self, service_name, region_name, signing_region,
+ signing_name, endpoint_url, signature_version,
+ metadata):
+ return {
+ 'service_name': service_name,
+ 'region_name': region_name,
+ 'signing_region': signing_region,
+ 'signing_name': signing_name,
+ 'endpoint_url': endpoint_url,
+ 'signature_version': signature_version,
+ 'metadata': metadata
+ }
+
+ def _make_url(self, hostname, is_secure, supported_protocols):
+ if is_secure and 'https' in supported_protocols:
+ scheme = 'https'
+ else:
+ scheme = 'http'
+ return '%s://%s' % (scheme, hostname)
+
+ def _resolve_signing_name(self, service_name, resolved):
+ # CredentialScope overrides everything else.
+ if 'credentialScope' in resolved \
+ and 'service' in resolved['credentialScope']:
+ return resolved['credentialScope']['service']
+ # Use the signingName from the model if present.
+ if self.service_signing_name:
+ return self.service_signing_name
+ # Just assume it's the same as the service name.
+ return service_name
+
+ def _pick_region_values(self, resolved, region_name, endpoint_url):
+ signing_region = region_name
+ if endpoint_url is None:
+ # Do not use the region name or signing name from the resolved
+ # endpoint if the user explicitly provides an endpoint_url. This
+ # would happen if we resolve to an endpoint where the service has
+ # a "defaults" section that overrides all endpoint with a single
+ # hostname and credentialScope. This has been the case historically
+ # for how STS has worked. The only way to resolve an STS endpoint
+ # was to provide a region_name and an endpoint_url. In that case,
+ # we would still resolve an endpoint, but we would not use the
+ # resolved endpointName or signingRegion because we want to allow
+ # custom endpoints.
+ region_name = resolved['endpointName']
+ signing_region = region_name
+ if 'credentialScope' in resolved \
+ and 'region' in resolved['credentialScope']:
+ signing_region = resolved['credentialScope']['region']
+ return region_name, signing_region
+
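
A standalone mirror of _pick_region_values above, showing how credentialScope overrides the signing region only when no custom endpoint_url is given (the data values are illustrative):

def pick_region_values(resolved, region_name, endpoint_url):
    signing_region = region_name
    if endpoint_url is None:
        region_name = resolved['endpointName']
        scope = resolved.get('credentialScope', {})
        signing_region = scope.get('region', region_name)
    return region_name, signing_region

resolved = {'endpointName': 'aws-global',
            'credentialScope': {'region': 'us-east-1'}}
assert pick_region_values(resolved, None, None) == ('aws-global', 'us-east-1')
assert pick_region_values(resolved, 'eu-west-1', 'https://proxy') == \
    ('eu-west-1', 'eu-west-1')
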
+ def _resolve_signature_version(self, service_name, resolved):
+ configured_version = _get_configured_signature_version(
+ service_name, self.client_config, self.scoped_config)
+ if configured_version is not None:
+ return configured_version
+
+ # Pick a signature version from the endpoint metadata if present.
+ if 'signatureVersions' in resolved:
+ potential_versions = resolved['signatureVersions']
+ if service_name == 's3':
+ return 's3v4'
+ if 'v4' in potential_versions:
+ return 'v4'
+ # Now just iterate over the signature versions in order until we
+ # find the first one that is known to Botocore.
for known in potential_versions:
if known in AUTH_TYPE_MAPS:
- return known
- raise UnknownSignatureVersionError(
- signature_version=resolved.get('signatureVersions'))
-
-
-class BaseClient(object):
-
- # This is actually reassigned with the py->op_name mapping
- # when the client creator creates the subclass. This value is used
- # because calls such as client.get_paginator('list_objects') use the
- # snake_case name, but we need to know the ListObjects form.
- # xform_name() does the ListObjects->list_objects conversion, but
- # we need the reverse mapping here.
- _PY_TO_OP_NAME = {}
-
- def __init__(self, serializer, endpoint, response_parser,
- event_emitter, request_signer, service_model, loader,
- client_config, partition, exceptions_factory):
- self._serializer = serializer
- self._endpoint = endpoint
- self._response_parser = response_parser
- self._request_signer = request_signer
- self._cache = {}
- self._loader = loader
- self._client_config = client_config
- self.meta = ClientMeta(event_emitter, self._client_config,
- endpoint.host, service_model,
- self._PY_TO_OP_NAME, partition)
- self._exceptions_factory = exceptions_factory
- self._exceptions = None
- self._register_handlers()
-
- def __getattr__(self, item):
+ return known
+ raise UnknownSignatureVersionError(
+ signature_version=resolved.get('signatureVersions'))
+
+
+class BaseClient(object):
+
+ # This is actually reassigned with the py->op_name mapping
+ # when the client creator creates the subclass. This value is used
+ # because calls such as client.get_paginator('list_objects') use the
+ # snake_case name, but we need to know the ListObjects form.
+ # xform_name() does the ListObjects->list_objects conversion, but
+ # we need the reverse mapping here.
+ _PY_TO_OP_NAME = {}
+
+ def __init__(self, serializer, endpoint, response_parser,
+ event_emitter, request_signer, service_model, loader,
+ client_config, partition, exceptions_factory):
+ self._serializer = serializer
+ self._endpoint = endpoint
+ self._response_parser = response_parser
+ self._request_signer = request_signer
+ self._cache = {}
+ self._loader = loader
+ self._client_config = client_config
+ self.meta = ClientMeta(event_emitter, self._client_config,
+ endpoint.host, service_model,
+ self._PY_TO_OP_NAME, partition)
+ self._exceptions_factory = exceptions_factory
+ self._exceptions = None
+ self._register_handlers()
+
+ def __getattr__(self, item):
event_name = 'getattr.%s.%s' % (
self._service_model.service_id.hyphenize(), item
)
- handler, event_response = self.meta.events.emit_until_response(
- event_name, client=self)
-
- if event_response is not None:
- return event_response
-
- raise AttributeError(
- "'%s' object has no attribute '%s'" % (
- self.__class__.__name__, item)
- )
-
- def _register_handlers(self):
- # Register the handler required to sign requests.
+ handler, event_response = self.meta.events.emit_until_response(
+ event_name, client=self)
+
+ if event_response is not None:
+ return event_response
+
+ raise AttributeError(
+ "'%s' object has no attribute '%s'" % (
+ self.__class__.__name__, item)
+ )
+
+ def _register_handlers(self):
+ # Register the handler required to sign requests.
service_id = self.meta.service_model.service_id.hyphenize()
self.meta.events.register(
'request-created.%s' % service_id,
self._request_signer.handler
)
-
- @property
- def _service_model(self):
- return self.meta.service_model
-
- def _make_api_call(self, operation_name, api_params):
- operation_model = self._service_model.operation_model(operation_name)
- service_name = self._service_model.service_name
- history_recorder.record('API_CALL', {
- 'service': service_name,
- 'operation': operation_name,
- 'params': api_params,
- })
- if operation_model.deprecated:
- logger.debug('Warning: %s.%s() is deprecated',
- service_name, operation_name)
- request_context = {
- 'client_region': self.meta.region_name,
- 'client_config': self.meta.config,
- 'has_streaming_input': operation_model.has_streaming_input,
- 'auth_type': operation_model.auth_type,
- }
- request_dict = self._convert_to_request_dict(
- api_params, operation_model, context=request_context)
-
+
+ @property
+ def _service_model(self):
+ return self.meta.service_model
+
+ def _make_api_call(self, operation_name, api_params):
+ operation_model = self._service_model.operation_model(operation_name)
+ service_name = self._service_model.service_name
+ history_recorder.record('API_CALL', {
+ 'service': service_name,
+ 'operation': operation_name,
+ 'params': api_params,
+ })
+ if operation_model.deprecated:
+ logger.debug('Warning: %s.%s() is deprecated',
+ service_name, operation_name)
+ request_context = {
+ 'client_region': self.meta.region_name,
+ 'client_config': self.meta.config,
+ 'has_streaming_input': operation_model.has_streaming_input,
+ 'auth_type': operation_model.auth_type,
+ }
+ request_dict = self._convert_to_request_dict(
+ api_params, operation_model, context=request_context)
+
service_id = self._service_model.service_id.hyphenize()
- handler, event_response = self.meta.events.emit_until_response(
+ handler, event_response = self.meta.events.emit_until_response(
'before-call.{service_id}.{operation_name}'.format(
service_id=service_id,
- operation_name=operation_name),
- model=operation_model, params=request_dict,
- request_signer=self._request_signer, context=request_context)
-
- if event_response is not None:
- http, parsed_response = event_response
- else:
+ operation_name=operation_name),
+ model=operation_model, params=request_dict,
+ request_signer=self._request_signer, context=request_context)
+
+ if event_response is not None:
+ http, parsed_response = event_response
+ else:
http, parsed_response = self._make_request(
operation_model, request_dict, request_context)
-
- self.meta.events.emit(
+
+ self.meta.events.emit(
'after-call.{service_id}.{operation_name}'.format(
service_id=service_id,
- operation_name=operation_name),
- http_response=http, parsed=parsed_response,
- model=operation_model, context=request_context
- )
-
- if http.status_code >= 300:
- error_code = parsed_response.get("Error", {}).get("Code")
- error_class = self.exceptions.from_code(error_code)
- raise error_class(parsed_response, operation_name)
- else:
- return parsed_response
-
+ operation_name=operation_name),
+ http_response=http, parsed=parsed_response,
+ model=operation_model, context=request_context
+ )
+
+ if http.status_code >= 300:
+ error_code = parsed_response.get("Error", {}).get("Code")
+ error_class = self.exceptions.from_code(error_code)
+ raise error_class(parsed_response, operation_name)
+ else:
+ return parsed_response
+
def _make_request(self, operation_model, request_dict, request_context):
try:
return self._endpoint.make_request(operation_model, request_dict)
@@ -718,8 +718,8 @@ class BaseClient(object):
)
raise
- def _convert_to_request_dict(self, api_params, operation_model,
- context=None):
+ def _convert_to_request_dict(self, api_params, operation_model,
+ context=None):
api_params = self._emit_api_params(
api_params, operation_model, context)
request_dict = self._serializer.serialize_to_request(
@@ -732,127 +732,127 @@ class BaseClient(object):
return request_dict
def _emit_api_params(self, api_params, operation_model, context):
- # Given the API params provided by the user and the operation_model
- # we can serialize the request to a request_dict.
- operation_name = operation_model.name
-
- # Emit an event that allows users to modify the parameters at the
- # beginning of the method. It allows handlers to modify existing
- # parameters or return a new set of parameters to use.
+ # Given the API params provided by the user and the operation_model
+ # we can serialize the request to a request_dict.
+ operation_name = operation_model.name
+
+ # Emit an event that allows users to modify the parameters at the
+ # beginning of the method. It allows handlers to modify existing
+ # parameters or return a new set of parameters to use.
service_id = self._service_model.service_id.hyphenize()
- responses = self.meta.events.emit(
+ responses = self.meta.events.emit(
'provide-client-params.{service_id}.{operation_name}'.format(
service_id=service_id,
- operation_name=operation_name),
- params=api_params, model=operation_model, context=context)
- api_params = first_non_none_response(responses, default=api_params)
-
- event_name = (
+ operation_name=operation_name),
+ params=api_params, model=operation_model, context=context)
+ api_params = first_non_none_response(responses, default=api_params)
+
+ event_name = (
'before-parameter-build.{service_id}.{operation_name}')
- self.meta.events.emit(
- event_name.format(
+ self.meta.events.emit(
+ event_name.format(
service_id=service_id,
- operation_name=operation_name),
- params=api_params, model=operation_model, context=context)
+ operation_name=operation_name),
+ params=api_params, model=operation_model, context=context)
return api_params
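The two events emitted in _emit_api_params are public extension points. A small sketch of hooking provide-client-params (the handler name and the default prefix are invented for illustration):

    def add_default_prefix(params, **kwargs):
        # Handlers may mutate params in place, or return a new dict;
        # first_non_none_response() picks the first non-None return value.
        params.setdefault('Prefix', 'logs/')

    client.meta.events.register(
        'provide-client-params.s3.ListObjectsV2', add_default_prefix)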
-
- def get_paginator(self, operation_name):
- """Create a paginator for an operation.
-
- :type operation_name: string
- :param operation_name: The operation name. This is the same name
- as the method name on the client. For example, if the
- method name is ``create_foo`` and you'd normally invoke the
- operation as ``client.create_foo(**kwargs)``, then, provided the
- ``create_foo`` operation can be paginated, you can call
- ``client.get_paginator("create_foo")``.
-
- :raise OperationNotPageableError: Raised if the operation is not
- pageable. You can use the ``client.can_paginate`` method to
- check if an operation is pageable.
-
- :rtype: L{botocore.paginate.Paginator}
- :return: A paginator object.
-
- """
- if not self.can_paginate(operation_name):
- raise OperationNotPageableError(operation_name=operation_name)
- else:
- actual_operation_name = self._PY_TO_OP_NAME[operation_name]
-
- # Create a new paginate method that will serve as a proxy to
- # the underlying Paginator.paginate method. This is needed to
- # attach a docstring to the method.
- def paginate(self, **kwargs):
- return Paginator.paginate(self, **kwargs)
-
- paginator_config = self._cache['page_config'][
- actual_operation_name]
- # Add the docstring for the paginate method.
- paginate.__doc__ = PaginatorDocstring(
- paginator_name=actual_operation_name,
- event_emitter=self.meta.events,
- service_model=self.meta.service_model,
- paginator_config=paginator_config,
- include_signature=False
- )
-
- # Rename the paginator class based on the type of paginator.
- paginator_class_name = str('%s.Paginator.%s' % (
- get_service_module_name(self.meta.service_model),
- actual_operation_name))
-
- # Create the new paginator class
- documented_paginator_cls = type(
- paginator_class_name, (Paginator,), {'paginate': paginate})
-
- operation_model = self._service_model.operation_model(actual_operation_name)
- paginator = documented_paginator_cls(
- getattr(self, operation_name),
- paginator_config,
- operation_model)
- return paginator
-
- def can_paginate(self, operation_name):
- """Check if an operation can be paginated.
-
- :type operation_name: string
- :param operation_name: The operation name. This is the same name
- as the method name on the client. For example, if the
- method name is ``create_foo`` and you'd normally invoke the
- operation as ``client.create_foo(**kwargs)``, then, provided the
- ``create_foo`` operation can be paginated, you can call
- ``client.get_paginator("create_foo")``.
-
- :return: ``True`` if the operation can be paginated,
- ``False`` otherwise.
-
- """
- if 'page_config' not in self._cache:
- try:
- page_config = self._loader.load_service_model(
- self._service_model.service_name,
- 'paginators-1',
- self._service_model.api_version)['pagination']
- self._cache['page_config'] = page_config
- except DataNotFoundError:
- self._cache['page_config'] = {}
- actual_operation_name = self._PY_TO_OP_NAME[operation_name]
- return actual_operation_name in self._cache['page_config']
-
- def _get_waiter_config(self):
- if 'waiter_config' not in self._cache:
- try:
- waiter_config = self._loader.load_service_model(
- self._service_model.service_name,
- 'waiters-2',
- self._service_model.api_version)
- self._cache['waiter_config'] = waiter_config
- except DataNotFoundError:
- self._cache['waiter_config'] = {}
- return self._cache['waiter_config']
-
- def get_waiter(self, waiter_name):
+
+ def get_paginator(self, operation_name):
+ """Create a paginator for an operation.
+
+ :type operation_name: string
+ :param operation_name: The operation name. This is the same name
+ as the method name on the client. For example, if the
+ method name is ``create_foo`` and you'd normally invoke the
+ operation as ``client.create_foo(**kwargs)``, then, provided the
+ ``create_foo`` operation can be paginated, you can call
+ ``client.get_paginator("create_foo")``.
+
+ :raise OperationNotPageableError: Raised if the operation is not
+ pageable. You can use the ``client.can_paginate`` method to
+ check if an operation is pageable.
+
+ :rtype: L{botocore.paginate.Paginator}
+ :return: A paginator object.
+
+ """
+ if not self.can_paginate(operation_name):
+ raise OperationNotPageableError(operation_name=operation_name)
+ else:
+ actual_operation_name = self._PY_TO_OP_NAME[operation_name]
+
+ # Create a new paginate method that will serve as a proxy to
+ # the underlying Paginator.paginate method. This is needed to
+ # attach a docstring to the method.
+ def paginate(self, **kwargs):
+ return Paginator.paginate(self, **kwargs)
+
+ paginator_config = self._cache['page_config'][
+ actual_operation_name]
+ # Add the docstring for the paginate method.
+ paginate.__doc__ = PaginatorDocstring(
+ paginator_name=actual_operation_name,
+ event_emitter=self.meta.events,
+ service_model=self.meta.service_model,
+ paginator_config=paginator_config,
+ include_signature=False
+ )
+
+ # Rename the paginator class based on the type of paginator.
+ paginator_class_name = str('%s.Paginator.%s' % (
+ get_service_module_name(self.meta.service_model),
+ actual_operation_name))
+
+ # Create the new paginator class
+ documented_paginator_cls = type(
+ paginator_class_name, (Paginator,), {'paginate': paginate})
+
+ operation_model = self._service_model.operation_model(actual_operation_name)
+ paginator = documented_paginator_cls(
+ getattr(self, operation_name),
+ paginator_config,
+ operation_model)
+ return paginator
+
+ def can_paginate(self, operation_name):
+ """Check if an operation can be paginated.
+
+ :type operation_name: string
+ :param operation_name: The operation name. This is the same name
+ as the method name on the client. For example, if the
+ method name is ``create_foo`` and you'd normally invoke the
+ operation as ``client.create_foo(**kwargs)``, then, provided the
+ ``create_foo`` operation can be paginated, you can call
+ ``client.get_paginator("create_foo")``.
+
+ :return: ``True`` if the operation can be paginated,
+ ``False`` otherwise.
+
+ """
+ if 'page_config' not in self._cache:
+ try:
+ page_config = self._loader.load_service_model(
+ self._service_model.service_name,
+ 'paginators-1',
+ self._service_model.api_version)['pagination']
+ self._cache['page_config'] = page_config
+ except DataNotFoundError:
+ self._cache['page_config'] = {}
+ actual_operation_name = self._PY_TO_OP_NAME[operation_name]
+ return actual_operation_name in self._cache['page_config']
+
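Taken together, can_paginate and get_paginator are used like this (the operation and bucket names are illustrative, reusing the S3 client sketched earlier):

    if client.can_paginate('list_objects_v2'):
        paginator = client.get_paginator('list_objects_v2')
        for page in paginator.paginate(Bucket='example-bucket'):
            # Each page is a parsed response dict for one request.
            for obj in page.get('Contents', []):
                print(obj['Key'])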
+ def _get_waiter_config(self):
+ if 'waiter_config' not in self._cache:
+ try:
+ waiter_config = self._loader.load_service_model(
+ self._service_model.service_name,
+ 'waiters-2',
+ self._service_model.api_version)
+ self._cache['waiter_config'] = waiter_config
+ except DataNotFoundError:
+ self._cache['waiter_config'] = {}
+ return self._cache['waiter_config']
+
+ def get_waiter(self, waiter_name):
"""Returns an object that can wait for some condition.
:type waiter_name: str
@@ -862,111 +862,111 @@ class BaseClient(object):
:returns: The specified waiter object.
:rtype: botocore.waiter.Waiter
"""
- config = self._get_waiter_config()
- if not config:
- raise ValueError("Waiter does not exist: %s" % waiter_name)
- model = waiter.WaiterModel(config)
- mapping = {}
- for name in model.waiter_names:
- mapping[xform_name(name)] = name
- if waiter_name not in mapping:
- raise ValueError("Waiter does not exist: %s" % waiter_name)
-
- return waiter.create_waiter_with_client(
- mapping[waiter_name], model, self)
-
- @CachedProperty
- def waiter_names(self):
- """Returns a list of all available waiters."""
- config = self._get_waiter_config()
- if not config:
- return []
- model = waiter.WaiterModel(config)
- # The waiter config is a dict; we just want the waiter names,
- # which are the keys in the dict.
- return [xform_name(name) for name in model.waiter_names]
-
- @property
- def exceptions(self):
- if self._exceptions is None:
- self._exceptions = self._load_exceptions()
- return self._exceptions
-
- def _load_exceptions(self):
- return self._exceptions_factory.create_client_exceptions(
- self._service_model)
-
-
-class ClientMeta(object):
- """Holds additional client methods.
-
- This class holds additional information for clients. It exists for
- two reasons:
-
- * To give advanced functionality to clients
- * To namespace additional client attributes from the operation
- names which are mapped to methods at runtime. This avoids
- ever running into collisions with operation names.
-
- """
-
- def __init__(self, events, client_config, endpoint_url, service_model,
- method_to_api_mapping, partition):
- self.events = events
- self._client_config = client_config
- self._endpoint_url = endpoint_url
- self._service_model = service_model
- self._method_to_api_mapping = method_to_api_mapping
- self._partition = partition
-
- @property
- def service_model(self):
- return self._service_model
-
- @property
- def region_name(self):
- return self._client_config.region_name
-
- @property
- def endpoint_url(self):
- return self._endpoint_url
-
- @property
- def config(self):
- return self._client_config
-
- @property
- def method_to_api_mapping(self):
- return self._method_to_api_mapping
-
- @property
- def partition(self):
- return self._partition
-
-
-def _get_configured_signature_version(service_name, client_config,
- scoped_config):
- """
- Gets the manually configured signature version.
-
- :returns: the customer configured signature version, or None if no
- signature version was configured.
- """
- # Client config overrides everything.
- if client_config and client_config.signature_version is not None:
- return client_config.signature_version
-
- # Scoped config overrides picking from the endpoint metadata.
- if scoped_config is not None:
- # A given service may have service specific configuration in the
- # config file, so we need to check there as well.
- service_config = scoped_config.get(service_name)
- if service_config is not None and isinstance(service_config, dict):
- version = service_config.get('signature_version')
- if version:
- logger.debug(
- "Switching signature version for service %s "
- "to version %s based on config file override.",
- service_name, version)
- return version
- return None
+ config = self._get_waiter_config()
+ if not config:
+ raise ValueError("Waiter does not exist: %s" % waiter_name)
+ model = waiter.WaiterModel(config)
+ mapping = {}
+ for name in model.waiter_names:
+ mapping[xform_name(name)] = name
+ if waiter_name not in mapping:
+ raise ValueError("Waiter does not exist: %s" % waiter_name)
+
+ return waiter.create_waiter_with_client(
+ mapping[waiter_name], model, self)
+
+ @CachedProperty
+ def waiter_names(self):
+ """Returns a list of all available waiters."""
+ config = self._get_waiter_config()
+ if not config:
+ return []
+ model = waiter.WaiterModel(config)
+ # The waiter config is a dict; we just want the waiter names,
+ # which are the keys in the dict.
+ return [xform_name(name) for name in model.waiter_names]
+
+ @property
+ def exceptions(self):
+ if self._exceptions is None:
+ self._exceptions = self._load_exceptions()
+ return self._exceptions
+
+ def _load_exceptions(self):
+ return self._exceptions_factory.create_client_exceptions(
+ self._service_model)
+
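A short usage sketch for the waiter methods above (this assumes an S3 client, for which 'bucket_exists' is one of the modeled waiters):

    # waiter_names exposes the snake_cased names, e.g. 'bucket_exists'.
    print(client.waiter_names)
    waiter = client.get_waiter('bucket_exists')
    # Polls until the bucket exists or the waiter gives up.
    waiter.wait(Bucket='example-bucket')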
+
+class ClientMeta(object):
+ """Holds additional client methods.
+
+ This class holds additional information for clients. It exists for
+ two reasons:
+
+ * To give advanced functionality to clients
+ * To namespace additional client attributes from the operation
+ names which are mapped to methods at runtime. This avoids
+ ever running into collisions with operation names.
+
+ """
+
+ def __init__(self, events, client_config, endpoint_url, service_model,
+ method_to_api_mapping, partition):
+ self.events = events
+ self._client_config = client_config
+ self._endpoint_url = endpoint_url
+ self._service_model = service_model
+ self._method_to_api_mapping = method_to_api_mapping
+ self._partition = partition
+
+ @property
+ def service_model(self):
+ return self._service_model
+
+ @property
+ def region_name(self):
+ return self._client_config.region_name
+
+ @property
+ def endpoint_url(self):
+ return self._endpoint_url
+
+ @property
+ def config(self):
+ return self._client_config
+
+ @property
+ def method_to_api_mapping(self):
+ return self._method_to_api_mapping
+
+ @property
+ def partition(self):
+ return self._partition
+
+
+def _get_configured_signature_version(service_name, client_config,
+ scoped_config):
+ """
+ Gets the manually configured signature version.
+
+ :returns: the customer configured signature version, or None if no
+ signature version was configured.
+ """
+ # Client config overrides everything.
+ if client_config and client_config.signature_version is not None:
+ return client_config.signature_version
+
+ # Scoped config overrides picking from the endpoint metadata.
+ if scoped_config is not None:
+ # A given service may have service specific configuration in the
+ # config file, so we need to check there as well.
+ service_config = scoped_config.get(service_name)
+ if service_config is not None and isinstance(service_config, dict):
+ version = service_config.get('signature_version')
+ if version:
+ logger.debug(
+ "Switching signature version for service %s "
+ "to version %s based on config file override.",
+ service_name, version)
+ return version
+ return None
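The precedence implemented above can be exercised from user code; a minimal sketch (the config file contents are hypothetical):

    from botocore.config import Config

    # Highest precedence: the client config object.
    client = session.create_client(
        's3', config=Config(signature_version='s3v4'))

    # Lower precedence: a service-specific block in the scoped config
    # file (~/.aws/config), e.g.:
    #
    #     [default]
    #     s3 =
    #         signature_version = s3v4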
diff --git a/contrib/python/botocore/botocore/compat.py b/contrib/python/botocore/botocore/compat.py
index 44188260fd..bdefb914d4 100644
--- a/contrib/python/botocore/botocore/compat.py
+++ b/contrib/python/botocore/botocore/compat.py
@@ -1,152 +1,152 @@
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import copy
-import datetime
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import copy
+import datetime
import sys
import inspect
import warnings
import hashlib
-import logging
-import shlex
+import logging
+import shlex
import os
-from math import floor
-
+from math import floor
+
import six
-from botocore.exceptions import MD5UnavailableError
+from botocore.exceptions import MD5UnavailableError
from dateutil.tz import tzlocal
from urllib3 import exceptions
-
-logger = logging.getLogger(__name__)
-
-
-if six.PY3:
+
+logger = logging.getLogger(__name__)
+
+
+if six.PY3:
from six.moves import http_client
-
- class HTTPHeaders(http_client.HTTPMessage):
- pass
-
- from urllib.parse import quote
- from urllib.parse import urlencode
- from urllib.parse import unquote
- from urllib.parse import unquote_plus
- from urllib.parse import urlparse
- from urllib.parse import urlsplit
- from urllib.parse import urlunsplit
- from urllib.parse import urljoin
- from urllib.parse import parse_qsl
- from urllib.parse import parse_qs
- from http.client import HTTPResponse
- from io import IOBase as _IOBase
- from base64 import encodebytes
- from email.utils import formatdate
- from itertools import zip_longest
- file_type = _IOBase
- zip = zip
-
- # In python3, unquote takes a str() object, url decodes it,
- # then takes the bytestring and decodes it to utf-8.
- # In Python 2 we'll have to do this ourselves (see below).
- unquote_str = unquote_plus
-
- def set_socket_timeout(http_response, timeout):
- """Set the timeout of the socket from an HTTPResponse.
-
- :param http_response: An instance of ``httplib.HTTPResponse``
-
- """
- http_response._fp.fp.raw._sock.settimeout(timeout)
-
- def accepts_kwargs(func):
- # In python3.4.1, there are backwards-incompatible
- # changes when using getargspec with functools.partial.
- return inspect.getfullargspec(func)[2]
-
- def ensure_unicode(s, encoding=None, errors=None):
- # NOOP in Python 3, because every string is already unicode
- return s
-
- def ensure_bytes(s, encoding='utf-8', errors='strict'):
- if isinstance(s, str):
- return s.encode(encoding, errors)
- if isinstance(s, bytes):
- return s
- raise ValueError("Expected str or bytes, received %s." % type(s))
-
-else:
- from urllib import quote
- from urllib import urlencode
- from urllib import unquote
- from urllib import unquote_plus
- from urlparse import urlparse
- from urlparse import urlsplit
- from urlparse import urlunsplit
- from urlparse import urljoin
- from urlparse import parse_qsl
- from urlparse import parse_qs
- from email.message import Message
- from email.Utils import formatdate
- file_type = file
- from itertools import izip as zip
- from itertools import izip_longest as zip_longest
- from httplib import HTTPResponse
- from base64 import encodestring as encodebytes
-
- class HTTPHeaders(Message):
-
- # The __iter__ method is not available in python2.x, so we have
- # to port the py3 version.
- def __iter__(self):
- for field, value in self._headers:
- yield field
-
- def unquote_str(value, encoding='utf-8'):
- # In python2, unquote() gives us a string back that has the urldecoded
- # bits, but not the unicode parts. We need to decode this manually.
- # unquote has special logic in which if it receives a unicode object it
- # will decode it to latin1. This is hard coded. To avoid this, we'll
- # encode the string with the passed in encoding before trying to
- # unquote it.
- byte_string = value.encode(encoding)
- return unquote_plus(byte_string).decode(encoding)
-
- def set_socket_timeout(http_response, timeout):
- """Set the timeout of the socket from an HTTPResponse.
-
- :param http_response: An instance of ``httplib.HTTPResponse``
-
- """
- http_response._fp.fp._sock.settimeout(timeout)
-
- def accepts_kwargs(func):
- return inspect.getargspec(func)[2]
-
- def ensure_unicode(s, encoding='utf-8', errors='strict'):
- if isinstance(s, six.text_type):
- return s
- return unicode(s, encoding, errors)
-
- def ensure_bytes(s, encoding='utf-8', errors='strict'):
- if isinstance(s, unicode):
- return s.encode(encoding, errors)
- if isinstance(s, str):
- return s
- raise ValueError("Expected str or unicode, received %s." % type(s))
-
-
+
+ class HTTPHeaders(http_client.HTTPMessage):
+ pass
+
+ from urllib.parse import quote
+ from urllib.parse import urlencode
+ from urllib.parse import unquote
+ from urllib.parse import unquote_plus
+ from urllib.parse import urlparse
+ from urllib.parse import urlsplit
+ from urllib.parse import urlunsplit
+ from urllib.parse import urljoin
+ from urllib.parse import parse_qsl
+ from urllib.parse import parse_qs
+ from http.client import HTTPResponse
+ from io import IOBase as _IOBase
+ from base64 import encodebytes
+ from email.utils import formatdate
+ from itertools import zip_longest
+ file_type = _IOBase
+ zip = zip
+
+ # In python3, unquote takes a str() object, url decodes it,
+ # then takes the bytestring and decodes it to utf-8.
+ # In Python 2 we'll have to do this ourselves (see below).
+ unquote_str = unquote_plus
+
+ def set_socket_timeout(http_response, timeout):
+ """Set the timeout of the socket from an HTTPResponse.
+
+ :param http_response: An instance of ``httplib.HTTPResponse``
+
+ """
+ http_response._fp.fp.raw._sock.settimeout(timeout)
+
+ def accepts_kwargs(func):
+ # In python3.4.1, there are backwards-incompatible
+ # changes when using getargspec with functools.partial.
+ return inspect.getfullargspec(func)[2]
+
+ def ensure_unicode(s, encoding=None, errors=None):
+ # NOOP in Python 3, because every string is already unicode
+ return s
+
+ def ensure_bytes(s, encoding='utf-8', errors='strict'):
+ if isinstance(s, str):
+ return s.encode(encoding, errors)
+ if isinstance(s, bytes):
+ return s
+ raise ValueError("Expected str or bytes, received %s." % type(s))
+
+else:
+ from urllib import quote
+ from urllib import urlencode
+ from urllib import unquote
+ from urllib import unquote_plus
+ from urlparse import urlparse
+ from urlparse import urlsplit
+ from urlparse import urlunsplit
+ from urlparse import urljoin
+ from urlparse import parse_qsl
+ from urlparse import parse_qs
+ from email.message import Message
+ from email.Utils import formatdate
+ file_type = file
+ from itertools import izip as zip
+ from itertools import izip_longest as zip_longest
+ from httplib import HTTPResponse
+ from base64 import encodestring as encodebytes
+
+ class HTTPHeaders(Message):
+
+ # The __iter__ method is not available in python2.x, so we have
+ # to port the py3 version.
+ def __iter__(self):
+ for field, value in self._headers:
+ yield field
+
+ def unquote_str(value, encoding='utf-8'):
+ # In python2, unquote() gives us a string back that has the urldecoded
+ # bits, but not the unicode parts. We need to decode this manually.
+ # unquote has special logic in which if it receives a unicode object it
+ # will decode it to latin1. This is hard coded. To avoid this, we'll
+ # encode the string with the passed in encoding before trying to
+ # unquote it.
+ byte_string = value.encode(encoding)
+ return unquote_plus(byte_string).decode(encoding)
+
+ def set_socket_timeout(http_response, timeout):
+ """Set the timeout of the socket from an HTTPResponse.
+
+ :param http_response: An instance of ``httplib.HTTPResponse``
+
+ """
+ http_response._fp.fp._sock.settimeout(timeout)
+
+ def accepts_kwargs(func):
+ return inspect.getargspec(func)[2]
+
+ def ensure_unicode(s, encoding='utf-8', errors='strict'):
+ if isinstance(s, six.text_type):
+ return s
+ return unicode(s, encoding, errors)
+
+ def ensure_bytes(s, encoding='utf-8', errors='strict'):
+ if isinstance(s, unicode):
+ return s.encode(encoding, errors)
+ if isinstance(s, str):
+ return s
+ raise ValueError("Expected str or unicode, received %s." % type(s))
+
+
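A quick sketch of how the helpers defined in both branches behave identically across interpreters (the values are chosen for illustration):

    from botocore.compat import ensure_bytes, unquote_str

    assert ensure_bytes(u'caf\xe9') == b'caf\xc3\xa9'   # utf-8 by default
    assert unquote_str('caf%C3%A9') == u'caf\xe9'       # url-decode to text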
from collections import OrderedDict
-
-
+
+
try:
import xml.etree.cElementTree as ETree
except ImportError:
@@ -154,185 +154,185 @@ except ImportError:
import xml.etree.ElementTree as ETree
XMLParseError = ETree.ParseError
import json
-
-
-def filter_ssl_warnings():
- # Ignore warnings related to SNI as it is not being used in validations.
- warnings.filterwarnings(
- 'ignore',
- message="A true SSLContext object is not available.*",
- category=exceptions.InsecurePlatformWarning,
- module=r".*urllib3\.util\.ssl_")
-
-
-@classmethod
-def from_dict(cls, d):
- new_instance = cls()
- for key, value in d.items():
- new_instance[key] = value
- return new_instance
-
-
-@classmethod
-def from_pairs(cls, pairs):
- new_instance = cls()
- for key, value in pairs:
- new_instance[key] = value
- return new_instance
-
-HTTPHeaders.from_dict = from_dict
-HTTPHeaders.from_pairs = from_pairs
-
-
-def copy_kwargs(kwargs):
- """
+
+
+def filter_ssl_warnings():
+ # Ignore warnings related to SNI as it is not being used in validations.
+ warnings.filterwarnings(
+ 'ignore',
+ message="A true SSLContext object is not available.*",
+ category=exceptions.InsecurePlatformWarning,
+ module=r".*urllib3\.util\.ssl_")
+
+
+@classmethod
+def from_dict(cls, d):
+ new_instance = cls()
+ for key, value in d.items():
+ new_instance[key] = value
+ return new_instance
+
+
+@classmethod
+def from_pairs(cls, pairs):
+ new_instance = cls()
+ for key, value in pairs:
+ new_instance[key] = value
+ return new_instance
+
+HTTPHeaders.from_dict = from_dict
+HTTPHeaders.from_pairs = from_pairs
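from_dict and from_pairs are patched onto HTTPHeaders so both Python versions share one construction API; a small sketch:

    headers = HTTPHeaders.from_dict({'Content-Type': 'application/json'})
    # Lookups inherit case-insensitivity from the underlying message class.
    assert headers['content-type'] == 'application/json'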
+
+
+def copy_kwargs(kwargs):
+ """
This used to be a compat shim for 2.6 but is now just an alias.
- """
+ """
copy_kwargs = copy.copy(kwargs)
- return copy_kwargs
-
-
-def total_seconds(delta):
- """
- Returns the total seconds in a ``datetime.timedelta``.
-
+ return copy_kwargs
+
+
+def total_seconds(delta):
+ """
+ Returns the total seconds in a ``datetime.timedelta``.
+
This used to be a compat shim for 2.6 but is now just an alias.
-
- :param delta: The timedelta object
- :type delta: ``datetime.timedelta``
- """
+
+ :param delta: The timedelta object
+ :type delta: ``datetime.timedelta``
+ """
return delta.total_seconds()
-
-
-# Checks to see if md5 is available on this system. A given system might not
-# have access to it for various reasons, such as FIPS mode being enabled.
-try:
- hashlib.md5()
- MD5_AVAILABLE = True
-except ValueError:
- MD5_AVAILABLE = False
-
-
-def get_md5(*args, **kwargs):
- """
- Attempts to get an md5 hashing object.
-
- :param raise_error_if_unavailable: raise an error if md5 is unavailable on
- this system. If False, None will be returned if it is unavailable.
- :type raise_error_if_unavailable: bool
- :param args: Args to pass to the MD5 constructor
- :param kwargs: Key word arguments to pass to the MD5 constructor
- :return: An MD5 hashing object if available. If it is unavailable, None
- is returned if raise_error_if_unavailable is set to False.
- """
- if MD5_AVAILABLE:
- return hashlib.md5(*args, **kwargs)
- else:
- raise MD5UnavailableError()
-
-
-def compat_shell_split(s, platform=None):
- if platform is None:
- platform = sys.platform
-
- if platform == "win32":
- return _windows_shell_split(s)
- else:
- return shlex.split(s)
-
-
-def _windows_shell_split(s):
- """Splits up a windows command as the built-in command parser would.
-
- Windows has potentially bizarre rules depending on where you look. When
- spawning a process via the Windows C runtime (which is what python does
- when you call popen) the rules are as follows:
-
- https://docs.microsoft.com/en-us/cpp/cpp/parsing-cpp-command-line-arguments
-
- To summarize:
-
- * Only space and tab are valid delimiters
- * Double quotes are the only valid quotes
- * Backslash is interpreted literally unless it is part of a chain that
- leads up to a double quote. Then the backslashes escape the backslashes,
- and if there is an odd number the final backslash escapes the quote.
-
- :param s: The command string to split up into parts.
- :return: A list of command components.
- """
- if not s:
- return []
-
- components = []
- buff = []
- is_quoted = False
- num_backslashes = 0
- for character in s:
- if character == '\\':
- # We can't simply append backslashes because we don't know if
- # they are being used as escape characters or not. Instead we
- # keep track of how many we've encountered and handle them when
- # we encounter a different character.
- num_backslashes += 1
- elif character == '"':
- if num_backslashes > 0:
- # The backslashes are in a chain leading up to a double
- # quote, so they are escaping each other.
- buff.append('\\' * int(floor(num_backslashes / 2)))
- remainder = num_backslashes % 2
- num_backslashes = 0
- if remainder == 1:
- # The number of backslashes is uneven, so they are also
- # escaping the double quote, so it needs to be added to
- # the current component buffer.
- buff.append('"')
- continue
-
- # We've encountered a double quote that is not escaped,
- # so we toggle is_quoted.
- is_quoted = not is_quoted
-
- # If there are quotes, then we may want an empty string. To be
- # safe, we add an empty string to the buffer so that we make
- # sure it sticks around if there's nothing else between quotes.
- # If there is other stuff between quotes, the empty string will
- # disappear during the joining process.
- buff.append('')
- elif character in [' ', '\t'] and not is_quoted:
- # Since the backslashes aren't leading up to a quote, we put in
- # the exact number of backslashes.
- if num_backslashes > 0:
- buff.append('\\' * num_backslashes)
- num_backslashes = 0
-
- # Excess whitespace is ignored, so only add the components list
- # if there is anything in the buffer.
- if buff:
- components.append(''.join(buff))
- buff = []
- else:
- # Since the backslashes aren't leading up to a quote, we put in
- # the exact number of backslashes.
- if num_backslashes > 0:
- buff.append('\\' * num_backslashes)
- num_backslashes = 0
- buff.append(character)
-
- # Quotes must be terminated.
- if is_quoted:
- raise ValueError('No closing quotation in string: %s' % s)
-
- # There may be some leftover backslashes, so we need to add them in.
- # There's no quote so we add the exact number.
- if num_backslashes > 0:
- buff.append('\\' * num_backslashes)
-
- # Add the final component in if there is anything in the buffer.
- if buff:
- components.append(''.join(buff))
-
- return components
+
+
+# Checks to see if md5 is available on this system. A given system might not
+# have access to it for various reasons, such as FIPS mode being enabled.
+try:
+ hashlib.md5()
+ MD5_AVAILABLE = True
+except ValueError:
+ MD5_AVAILABLE = False
+
+
+def get_md5(*args, **kwargs):
+ """
+ Attempts to get an md5 hashing object.
+
+ :param args: Args to pass to the MD5 constructor
+ :param kwargs: Keyword arguments to pass to the MD5 constructor
+ :return: An MD5 hashing object if available.
+ :raises MD5UnavailableError: if md5 is unavailable on this system,
+ for example because FIPS mode is enabled.
+ """
+ if MD5_AVAILABLE:
+ return hashlib.md5(*args, **kwargs)
+ else:
+ raise MD5UnavailableError()
+
+
+def compat_shell_split(s, platform=None):
+ if platform is None:
+ platform = sys.platform
+
+ if platform == "win32":
+ return _windows_shell_split(s)
+ else:
+ return shlex.split(s)
+
+
+def _windows_shell_split(s):
+ """Splits up a windows command as the built-in command parser would.
+
+ Windows has potentially bizarre rules depending on where you look. When
+ spawning a process via the Windows C runtime (which is what python does
+ when you call popen) the rules are as follows:
+
+ https://docs.microsoft.com/en-us/cpp/cpp/parsing-cpp-command-line-arguments
+
+ To summarize:
+
+ * Only space and tab are valid delimiters
+ * Double quotes are the only valid quotes
+ * Backslash is interpreted literally unless it is part of a chain that
+ leads up to a double quote. Then the backslashes escape the backslashes,
+ and if there is an odd number the final backslash escapes the quote.
+
+ :param s: The command string to split up into parts.
+ :return: A list of command components.
+ """
+ if not s:
+ return []
+
+ components = []
+ buff = []
+ is_quoted = False
+ num_backslashes = 0
+ for character in s:
+ if character == '\\':
+ # We can't simply append backslashes because we don't know if
+ # they are being used as escape characters or not. Instead we
+ # keep track of how many we've encountered and handle them when
+ # we encounter a different character.
+ num_backslashes += 1
+ elif character == '"':
+ if num_backslashes > 0:
+ # The backslashes are in a chain leading up to a double
+ # quote, so they are escaping each other.
+ buff.append('\\' * int(floor(num_backslashes / 2)))
+ remainder = num_backslashes % 2
+ num_backslashes = 0
+ if remainder == 1:
+ # The number of backslashes is uneven, so they are also
+ # escaping the double quote, so it needs to be added to
+ # the current component buffer.
+ buff.append('"')
+ continue
+
+ # We've encountered a double quote that is not escaped,
+ # so we toggle is_quoted.
+ is_quoted = not is_quoted
+
+ # If there are quotes, then we may want an empty string. To be
+ # safe, we add an empty string to the buffer so that we make
+ # sure it sticks around if there's nothing else between quotes.
+ # If there is other stuff between quotes, the empty string will
+ # disappear during the joining process.
+ buff.append('')
+ elif character in [' ', '\t'] and not is_quoted:
+ # Since the backslashes aren't leading up to a quote, we put in
+ # the exact number of backslashes.
+ if num_backslashes > 0:
+ buff.append('\\' * num_backslashes)
+ num_backslashes = 0
+
+ # Excess whitespace is ignored, so only add the components list
+ # if there is anything in the buffer.
+ if buff:
+ components.append(''.join(buff))
+ buff = []
+ else:
+ # Since the backslashes aren't leading up to a quote, we put in
+ # the exact number of backslashes.
+ if num_backslashes > 0:
+ buff.append('\\' * num_backslashes)
+ num_backslashes = 0
+ buff.append(character)
+
+ # Quotes must be terminated.
+ if is_quoted:
+ raise ValueError('No closing quotation in string: %s' % s)
+
+ # There may be some leftover backslashes, so we need to add them in.
+ # There's no quote so we add the exact number.
+ if num_backslashes > 0:
+ buff.append('\\' * num_backslashes)
+
+ # Add the final component in if there is anything in the buffer.
+ if buff:
+ components.append(''.join(buff))
+
+ return components
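A sketch of the splitting rules described in the docstring (the inputs are illustrative):

    from botocore.compat import compat_shell_split

    # Backslashes not leading up to a quote are literal:
    compat_shell_split(r'C:\tools\app.exe "a b" c', platform='win32')
    # -> ['C:\\tools\\app.exe', 'a b', 'c']

    # An odd backslash count escapes the closing quote:
    compat_shell_split('say \\"hi\\"', platform='win32')
    # -> ['say', '"hi"']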
def get_tzinfo_options():
diff --git a/contrib/python/botocore/botocore/config.py b/contrib/python/botocore/botocore/config.py
index aa2ff7514f..13f58f7edb 100644
--- a/contrib/python/botocore/botocore/config.py
+++ b/contrib/python/botocore/botocore/config.py
@@ -1,68 +1,68 @@
-# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import copy
-from botocore.compat import OrderedDict
-
-from botocore.endpoint import DEFAULT_TIMEOUT, MAX_POOL_CONNECTIONS
-from botocore.exceptions import InvalidS3AddressingStyleError
-from botocore.exceptions import InvalidRetryConfigurationError
-from botocore.exceptions import InvalidMaxRetryAttemptsError
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy
+from botocore.compat import OrderedDict
+
+from botocore.endpoint import DEFAULT_TIMEOUT, MAX_POOL_CONNECTIONS
+from botocore.exceptions import InvalidS3AddressingStyleError
+from botocore.exceptions import InvalidRetryConfigurationError
+from botocore.exceptions import InvalidMaxRetryAttemptsError
from botocore.exceptions import InvalidRetryModeError
-
-
-class Config(object):
- """Advanced configuration for Botocore clients.
-
- :type region_name: str
- :param region_name: The region to use in instantiating the client
-
- :type signature_version: str
- :param signature_version: The signature version when signing requests.
-
- :type user_agent: str
- :param user_agent: The value to use in the User-Agent header.
-
- :type user_agent_extra: str
- :param user_agent_extra: The value to append to the current User-Agent
- header value.
-
+
+
+class Config(object):
+ """Advanced configuration for Botocore clients.
+
+ :type region_name: str
+ :param region_name: The region to use in instantiating the client
+
+ :type signature_version: str
+ :param signature_version: The signature version when signing requests.
+
+ :type user_agent: str
+ :param user_agent: The value to use in the User-Agent header.
+
+ :type user_agent_extra: str
+ :param user_agent_extra: The value to append to the current User-Agent
+ header value.
+
:type connect_timeout: float or int
- :param connect_timeout: The time in seconds until a timeout exception is
- thrown when attempting to make a connection. The default is 60
- seconds.
-
+ :param connect_timeout: The time in seconds until a timeout exception is
+ thrown when attempting to make a connection. The default is 60
+ seconds.
+
:type read_timeout: float or int
- :param read_timeout: The time in seconds until a timeout exception is
- thrown when attempting to read from a connection. The default is
- 60 seconds.
-
- :type parameter_validation: bool
- :param parameter_validation: Whether parameter validation should occur
- when serializing requests. The default is True. You can disable
- parameter validation for performance reasons. Otherwise, it's
- recommended to leave parameter validation enabled.
-
- :type max_pool_connections: int
- :param max_pool_connections: The maximum number of connections to
- keep in a connection pool. If this value is not set, the default
- value of 10 is used.
-
- :type proxies: dict
- :param proxies: A dictionary of proxy servers to use by protocol or
- endpoint, e.g.:
- {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.
- The proxies are used on each request.
-
+ :param read_timeout: The time in seconds until a timeout exception is
+ thrown when attempting to read from a connection. The default is
+ 60 seconds.
+
+ :type parameter_validation: bool
+ :param parameter_validation: Whether parameter validation should occur
+ when serializing requests. The default is True. You can disable
+ parameter validation for performance reasons. Otherwise, it's
+ recommended to leave parameter validation enabled.
+
+ :type max_pool_connections: int
+ :param max_pool_connections: The maximum number of connections to
+ keep in a connection pool. If this value is not set, the default
+ value of 10 is used.
+
+ :type proxies: dict
+ :param proxies: A dictionary of proxy servers to use by protocol or
+ endpoint, e.g.:
+ {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.
+ The proxies are used on each request.
+
:type proxies_config: dict
:param proxies_config: A dictionary of additional proxy configurations.
Valid keys are:
@@ -83,35 +83,35 @@ class Config(object):
URI. We strongly recommend you only use this option with
trusted or corporate proxies. Value must be boolean.
- :type s3: dict
- :param s3: A dictionary of s3 specific configurations.
- Valid keys are:
-
- * 'use_accelerate_endpoint' -- Refers to whether to use the S3
- Accelerate endpoint. The value must be a boolean. If True, the
- client will use the S3 Accelerate endpoint. If the S3 Accelerate
- endpoint is being used then the addressing style will always
- be virtual.
-
- * 'payload_signing_enabled' -- Refers to whether or not to SHA256
- sign sigv4 payloads. By default, this is disabled for streaming
- uploads (UploadPart and PutObject).
-
- * 'addressing_style' -- Refers to the style in which to address
- s3 endpoints. Values must be a string that equals:
-
- * auto -- Addressing style is chosen for the user. Depending
- on the configuration of the client, the endpoint may be addressed in
- the virtual or the path style. Note that this is the default
- behavior if no style is specified.
-
- * virtual -- Addressing style is always virtual. The name of the
- bucket must be DNS compatible or an exception will be thrown.
- Endpoints will be addressed as such: mybucket.s3.amazonaws.com
-
- * path -- Addressing style is always by path. Endpoints will be
- addressed as such: s3.amazonaws.com/mybucket
-
+ :type s3: dict
+ :param s3: A dictionary of s3 specific configurations.
+ Valid keys are:
+
+ * 'use_accelerate_endpoint' -- Refers to whether to use the S3
+ Accelerate endpoint. The value must be a boolean. If True, the
+ client will use the S3 Accelerate endpoint. If the S3 Accelerate
+ endpoint is being used then the addressing style will always
+ be virtual.
+
+ * 'payload_signing_enabled' -- Refers to whether or not to SHA256
+ sign sigv4 payloads. By default, this is disabled for streaming
+ uploads (UploadPart and PutObject).
+
+ * 'addressing_style' -- Refers to the style in which to address
+ s3 endpoints. Values must be a string that equals:
+
+ * auto -- Addressing style is chosen for the user. Depending
+ on the configuration of the client, the endpoint may be addressed in
+ the virtual or the path style. Note that this is the default
+ behavior if no style is specified.
+
+ * virtual -- Addressing style is always virtual. The name of the
+ bucket must be DNS compatible or an exception will be thrown.
+ Endpoints will be addressed as such: mybucket.s3.amazonaws.com
+
+ * path -- Addressing style is always by path. Endpoints will be
+ addressed as such: s3.amazonaws.com/mybucket
+
* 'us_east_1_regional_endpoint' - Refers to what S3 endpoint to use
when the region is configured to be us-east-1. Values must be a
string that equals:
@@ -124,10 +124,10 @@ class Config(object):
the configuration option is not specified.
- :type retries: dict
- :param retries: A dictionary for retry specific configurations.
- Valid keys are:
-
+ :type retries: dict
+ :param retries: A dictionary for retry specific configurations.
+ Valid keys are:
+
* 'total_max_attempts' -- An integer representing the maximum number of
total attempts that will be made on a single request. This includes
the initial request, so a value of 1 indicates that no requests
@@ -136,13 +136,13 @@ class Config(object):
``total_max_attempts`` is preferred over ``max_attempts`` because
it maps to the ``AWS_MAX_ATTEMPTS`` environment variable and
the ``max_attempts`` config file value.
- * 'max_attempts' -- An integer representing the maximum number of
- retry attempts that will be made on a single request. For
- example, setting this value to 2 will result in the request
- being retried at most two times after the initial request. Setting
- this value to 0 will result in no retries ever being attempted on
- the initial request. If not provided, the number of retries will
- default to whatever is modeled, which is typically four retries.
+ * 'max_attempts' -- An integer representing the maximum number of
+ retry attempts that will be made on a single request. For
+ example, setting this value to 2 will result in the request
+ being retried at most two times after the initial request. Setting
+ this value to 0 will result in no retries ever being attempted on
+ the initial request. If not provided, the number of retries will
+ default to whatever is modeled, which is typically four retries.
* 'mode' -- A string representing the type of retry mode botocore
should use. Valid values are:
* ``legacy`` - The pre-existing retry behavior.
@@ -169,93 +169,93 @@ class Config(object):
Setting this to False disables the injection of operation parameters
into the prefix of the hostname. This is useful for clients providing
custom endpoints that should not have their host prefix modified.
- """
- OPTION_DEFAULTS = OrderedDict([
- ('region_name', None),
- ('signature_version', None),
- ('user_agent', None),
- ('user_agent_extra', None),
- ('connect_timeout', DEFAULT_TIMEOUT),
- ('read_timeout', DEFAULT_TIMEOUT),
- ('parameter_validation', True),
- ('max_pool_connections', MAX_POOL_CONNECTIONS),
- ('proxies', None),
+ """
+ OPTION_DEFAULTS = OrderedDict([
+ ('region_name', None),
+ ('signature_version', None),
+ ('user_agent', None),
+ ('user_agent_extra', None),
+ ('connect_timeout', DEFAULT_TIMEOUT),
+ ('read_timeout', DEFAULT_TIMEOUT),
+ ('parameter_validation', True),
+ ('max_pool_connections', MAX_POOL_CONNECTIONS),
+ ('proxies', None),
('proxies_config', None),
- ('s3', None),
+ ('s3', None),
('retries', None),
('client_cert', None),
('inject_host_prefix', True),
('endpoint_discovery_enabled', None),
- ])
-
- def __init__(self, *args, **kwargs):
- self._user_provided_options = self._record_user_provided_options(
- args, kwargs)
-
- # Merge the user_provided options onto the default options
- config_vars = copy.copy(self.OPTION_DEFAULTS)
- config_vars.update(self._user_provided_options)
-
- # Set the attributes based on the config_vars
- for key, value in config_vars.items():
- setattr(self, key, value)
-
- # Validate the s3 options
- self._validate_s3_configuration(self.s3)
-
- self._validate_retry_configuration(self.retries)
-
- def _record_user_provided_options(self, args, kwargs):
- option_order = list(self.OPTION_DEFAULTS)
- user_provided_options = {}
-
- # Iterate through the kwargs passed through to the constructor and
- # map valid keys to the dictionary
- for key, value in kwargs.items():
- if key in self.OPTION_DEFAULTS:
- user_provided_options[key] = value
- # The key must exist in the available options
- else:
- raise TypeError(
- 'Got unexpected keyword argument \'%s\'' % key)
-
- # The number of positional args should not exceed the number
- # of allowed options
- if len(args) > len(option_order):
- raise TypeError(
- 'Takes at most %s arguments (%s given)' % (
- len(option_order), len(args)))
-
- # Iterate through the args passed through to the constructor and map
- # them to appropriate keys.
- for i, arg in enumerate(args):
- # If a kwarg was also specified for the arg, then error out
- if option_order[i] in user_provided_options:
- raise TypeError(
- 'Got multiple values for keyword argument \'%s\'' % (
- option_order[i]))
- user_provided_options[option_order[i]] = arg
-
- return user_provided_options
-
- def _validate_s3_configuration(self, s3):
- if s3 is not None:
- addressing_style = s3.get('addressing_style')
- if addressing_style not in ['virtual', 'auto', 'path', None]:
- raise InvalidS3AddressingStyleError(
- s3_addressing_style=addressing_style)
-
- def _validate_retry_configuration(self, retries):
- if retries is not None:
+ ])
+
+ def __init__(self, *args, **kwargs):
+ self._user_provided_options = self._record_user_provided_options(
+ args, kwargs)
+
+ # Merge the user_provided options onto the default options
+ config_vars = copy.copy(self.OPTION_DEFAULTS)
+ config_vars.update(self._user_provided_options)
+
+ # Set the attributes based on the config_vars
+ for key, value in config_vars.items():
+ setattr(self, key, value)
+
+ # Validate the s3 options
+ self._validate_s3_configuration(self.s3)
+
+ self._validate_retry_configuration(self.retries)
+
+ def _record_user_provided_options(self, args, kwargs):
+ option_order = list(self.OPTION_DEFAULTS)
+ user_provided_options = {}
+
+ # Iterate through the kwargs passed through to the constructor and
+ # map valid keys to the dictionary
+ for key, value in kwargs.items():
+ if key in self.OPTION_DEFAULTS:
+ user_provided_options[key] = value
+ # The key must exist in the available options
+ else:
+ raise TypeError(
+ 'Got unexpected keyword argument \'%s\'' % key)
+
+ # The number of positional args should not exceed the number
+ # of allowed options
+ if len(args) > len(option_order):
+ raise TypeError(
+ 'Takes at most %s arguments (%s given)' % (
+ len(option_order), len(args)))
+
+ # Iterate through the args passed through to the constructor and map
+ # them to appropriate keys.
+ for i, arg in enumerate(args):
+ # If a kwarg was also specified for the arg, then error out
+ if option_order[i] in user_provided_options:
+ raise TypeError(
+ 'Got multiple values for keyword argument \'%s\'' % (
+ option_order[i]))
+ user_provided_options[option_order[i]] = arg
+
+ return user_provided_options
+
+ def _validate_s3_configuration(self, s3):
+ if s3 is not None:
+ addressing_style = s3.get('addressing_style')
+ if addressing_style not in ['virtual', 'auto', 'path', None]:
+ raise InvalidS3AddressingStyleError(
+ s3_addressing_style=addressing_style)
+
+ def _validate_retry_configuration(self, retries):
+ if retries is not None:
for key, value in retries.items():
if key not in ['max_attempts', 'mode', 'total_max_attempts']:
- raise InvalidRetryConfigurationError(
- retry_config_option=key)
+ raise InvalidRetryConfigurationError(
+ retry_config_option=key)
if key == 'max_attempts' and value < 0:
- raise InvalidMaxRetryAttemptsError(
+ raise InvalidMaxRetryAttemptsError(
provided_max_attempts=value,
min_value=0,
- )
+ )
if key == 'total_max_attempts' and value < 1:
raise InvalidMaxRetryAttemptsError(
provided_max_attempts=value,
@@ -266,25 +266,25 @@ class Config(object):
raise InvalidRetryModeError(
provided_retry_mode=value
)
-
- def merge(self, other_config):
- """Merges the config object with another config object
-
- This will merge in all non-default values from the provided config
- and return a new config object
-
- :type other_config: botocore.config.Config
- :param other_config: Another config object to merge with. The values
- in the provided config object will take precedence in the merging
-
- :returns: A config object built from the merged values of both
- config objects.
- """
- # Make a copy of the current attributes in the config object.
- config_options = copy.copy(self._user_provided_options)
-
- # Merge in the user provided options from the other config
- config_options.update(other_config._user_provided_options)
-
- # Return a new config object with the merged properties.
- return Config(**config_options)
+
+ def merge(self, other_config):
+ """Merges the config object with another config object
+
+ This will merge in all non-default values from the provided config
+ and return a new config object
+
+ :type other_config: botocore.config.Config
+ :param other_config: Another config object to merge with. The values
+ in the provided config object will take precedence in the merging
+
+ :returns: A config object built from the merged values of both
+ config objects.
+ """
+ # Make a copy of the current attributes in the config object.
+ config_options = copy.copy(self._user_provided_options)
+
+ # Merge in the user provided options from the other config
+ config_options.update(other_config._user_provided_options)
+
+ # Return a new config object with the merged properties.
+ return Config(**config_options)
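A minimal sketch of constructing and merging Config objects per the rules above (the option values are arbitrary):

    from botocore.config import Config

    base = Config(
        region_name='us-east-1',
        retries={'total_max_attempts': 3, 'mode': 'standard'})
    override = Config(s3={'addressing_style': 'path'})
    # Values from the passed-in config take precedence on conflicts.
    merged = base.merge(override)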
diff --git a/contrib/python/botocore/botocore/configloader.py b/contrib/python/botocore/botocore/configloader.py
index 9cb2483f44..5e0002ce48 100644
--- a/contrib/python/botocore/botocore/configloader.py
+++ b/contrib/python/botocore/botocore/configloader.py
@@ -1,272 +1,272 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import os
-import shlex
-import copy
-import sys
-
-from botocore.compat import six
-
-import botocore.exceptions
-
-
-def multi_file_load_config(*filenames):
- """Load and combine multiple INI configs with profiles.
-
- This function will take a list of filenames and return
- a single dictionary that represents the merging of the loaded
- config files.
-
- If any of the provided filenames does not exist, then that file
- is ignored. It is therefore ok to provide a list of filenames,
- some of which may not exist.
-
- Configuration files are **not** deep merged, only the top level
- keys are merged. The filenames should be passed in order of
- precedence. The first config file has precedence over the
- second config file, which has precedence over the third config file,
- etc. The only exception to this is that the "profiles" key is
- merged to combine profiles from multiple config files into a
- single profiles mapping. However, if a profile is defined in
- multiple config files, then the config file with the highest
- precedence is used. Profile values themselves are not merged.
- For example::
-
- FileA FileB FileC
- [foo] [foo] [bar]
- a=1 a=2 a=3
- b=2
-
- [bar] [baz] [profile a]
- a=2 a=3 region=e
-
- [profile a] [profile b] [profile c]
- region=c region=d region=f
-
- The final result of ``multi_file_load_config(FileA, FileB, FileC)``
- would be::
-
- {"foo": {"a": 1}, "bar": {"a": 2}, "baz": {"a": 3},
- "profiles": {"a": {"region": "c"}}, {"b": {"region": d"}},
- {"c": {"region": "f"}}}
-
- Note that the "foo" key comes from A, even though it's defined in both
- FileA and FileB. Because "foo" was defined in FileA first, then the values
- for "foo" from FileA are used and the values for "foo" from FileB are
- ignored. Also note where the profiles originate from. Profile "a"
- comes from FileA, profile "b" comes from FileB, and profile "c" comes
- from FileC.
-
- """
- configs = []
- profiles = []
- for filename in filenames:
- try:
- loaded = load_config(filename)
- except botocore.exceptions.ConfigNotFound:
- continue
- profiles.append(loaded.pop('profiles'))
- configs.append(loaded)
- merged_config = _merge_list_of_dicts(configs)
- merged_profiles = _merge_list_of_dicts(profiles)
- merged_config['profiles'] = merged_profiles
- return merged_config
-
-
-def _merge_list_of_dicts(list_of_dicts):
- merged_dicts = {}
- for single_dict in list_of_dicts:
- for key, value in single_dict.items():
- if key not in merged_dicts:
- merged_dicts[key] = value
- return merged_dicts
-
-
-def load_config(config_filename):
- """Parse a INI config with profiles.
-
- This will parse an INI config file and map top level profiles
- into a top level "profiles" key.
-
- If you want to parse an INI file and map all section names to
- top level keys, use ``raw_config_parse`` instead.
-
- """
- parsed = raw_config_parse(config_filename)
- return build_profile_map(parsed)
-
-
-def raw_config_parse(config_filename, parse_subsections=True):
- """Returns the parsed INI config contents.
-
- Each section name is a top level key.
-
- :param config_filename: The name of the INI file to parse
-
- :param parse_subsections: If True, parse indented blocks as
- subsections that represent their own configuration dictionary.
- For example, if the config file had the contents::
-
- s3 =
- signature_version = s3v4
- addressing_style = path
-
- The resulting ``raw_config_parse`` would be::
-
- {'s3': {'signature_version': 's3v4', 'addressing_style': 'path'}}
-
- If False, do not try to parse subsections and return the indented
- block as its literal value::
-
- {'s3': '\nsignature_version = s3v4\naddressing_style = path'}
-
- :returns: A dict with keys for each profile found in the config
- file and the value of each key being a dict containing name
- value pairs found in that profile.
-
- :raises: ConfigNotFound, ConfigParseError
- """
- config = {}
- path = config_filename
- if path is not None:
- path = os.path.expandvars(path)
- path = os.path.expanduser(path)
- if not os.path.isfile(path):
- raise botocore.exceptions.ConfigNotFound(path=_unicode_path(path))
- cp = six.moves.configparser.RawConfigParser()
- try:
- cp.read([path])
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+import shlex
+import copy
+import sys
+
+from botocore.compat import six
+
+import botocore.exceptions
+
+
+def multi_file_load_config(*filenames):
+ """Load and combine multiple INI configs with profiles.
+
+    This function will take a list of filenames and return
+ a single dictionary that represents the merging of the loaded
+ config files.
+
+ If any of the provided filenames does not exist, then that file
+ is ignored. It is therefore ok to provide a list of filenames,
+ some of which may not exist.
+
+ Configuration files are **not** deep merged, only the top level
+ keys are merged. The filenames should be passed in order of
+ precedence. The first config file has precedence over the
+ second config file, which has precedence over the third config file,
+ etc. The only exception to this is that the "profiles" key is
+ merged to combine profiles from multiple config files into a
+ single profiles mapping. However, if a profile is defined in
+ multiple config files, then the config file with the highest
+ precedence is used. Profile values themselves are not merged.
+ For example::
+
+ FileA FileB FileC
+ [foo] [foo] [bar]
+ a=1 a=2 a=3
+ b=2
+
+ [bar] [baz] [profile a]
+ a=2 a=3 region=e
+
+ [profile a] [profile b] [profile c]
+ region=c region=d region=f
+
+ The final result of ``multi_file_load_config(FileA, FileB, FileC)``
+ would be::
+
+ {"foo": {"a": 1}, "bar": {"a": 2}, "baz": {"a": 3},
+         "profiles": {"a": {"region": "c"}, "b": {"region": "d"},
+                      "c": {"region": "f"}}}
+
+    Note that the "foo" key comes from FileA, even though it's defined in
+    both FileA and FileB. Because "foo" is defined in FileA first, the
+    values for "foo" from FileA are used and the values for "foo" from
+    FileB are ignored. Also note where the profiles originate from.
+    Profile "a" comes from FileA, profile "b" comes from FileB, and
+    profile "c" comes from FileC.
+
+ """
+ configs = []
+ profiles = []
+ for filename in filenames:
+ try:
+ loaded = load_config(filename)
+ except botocore.exceptions.ConfigNotFound:
+ continue
+ profiles.append(loaded.pop('profiles'))
+ configs.append(loaded)
+ merged_config = _merge_list_of_dicts(configs)
+ merged_profiles = _merge_list_of_dicts(profiles)
+ merged_config['profiles'] = merged_profiles
+ return merged_config
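+
+# Editor's sketch (not part of the original module): a minimal usage
+# example of multi_file_load_config; the file names and contents are
+# illustrative placeholders.
+def _demo_multi_file_precedence(tmpdir):
+    file_a = os.path.join(tmpdir, 'a.ini')
+    file_b = os.path.join(tmpdir, 'b.ini')
+    with open(file_a, 'w') as f:
+        f.write('[profile dev]\nregion = us-east-1\n')
+    with open(file_b, 'w') as f:
+        f.write('[profile dev]\nregion = eu-west-1\n')
+    merged = multi_file_load_config(file_a, file_b)
+    # file_a was passed first, so its "dev" profile wins outright.
+    assert merged['profiles']['dev']['region'] == 'us-east-1'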
+
+
+def _merge_list_of_dicts(list_of_dicts):
+ merged_dicts = {}
+ for single_dict in list_of_dicts:
+ for key, value in single_dict.items():
+ if key not in merged_dicts:
+ merged_dicts[key] = value
+ return merged_dicts
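+
+# Editor's sketch (not part of the original module): the helper above is
+# strictly first-wins, with no deep merging of values.
+def _demo_merge_list_of_dicts():
+    merged = _merge_list_of_dicts([{'a': 1}, {'a': 2, 'b': 3}])
+    # 'a' keeps the value from the first dict; 'b' is simply added.
+    assert merged == {'a': 1, 'b': 3}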
+
+
+def load_config(config_filename):
+    """Parse an INI config with profiles.
+
+    This will parse an INI config file and map top level profiles
+    into a top level "profiles" key.
+
+ If you want to parse an INI file and map all section names to
+ top level keys, use ``raw_config_parse`` instead.
+
+ """
+ parsed = raw_config_parse(config_filename)
+ return build_profile_map(parsed)
+
+
+def raw_config_parse(config_filename, parse_subsections=True):
+ """Returns the parsed INI config contents.
+
+ Each section name is a top level key.
+
+ :param config_filename: The name of the INI file to parse
+
+ :param parse_subsections: If True, parse indented blocks as
+ subsections that represent their own configuration dictionary.
+ For example, if the config file had the contents::
+
+ s3 =
+ signature_version = s3v4
+ addressing_style = path
+
+ The resulting ``raw_config_parse`` would be::
+
+ {'s3': {'signature_version': 's3v4', 'addressing_style': 'path'}}
+
+ If False, do not try to parse subsections and return the indented
+ block as its literal value::
+
+ {'s3': '\nsignature_version = s3v4\naddressing_style = path'}
+
+    :returns: A dict with keys for each section found in the config
+        file, the value of each key being a dict containing the name
+        value pairs found in that section.
+
+ :raises: ConfigNotFound, ConfigParseError
+ """
+ config = {}
+ path = config_filename
+ if path is not None:
+ path = os.path.expandvars(path)
+ path = os.path.expanduser(path)
+ if not os.path.isfile(path):
+ raise botocore.exceptions.ConfigNotFound(path=_unicode_path(path))
+ cp = six.moves.configparser.RawConfigParser()
+ try:
+ cp.read([path])
except (six.moves.configparser.Error, UnicodeDecodeError):
- raise botocore.exceptions.ConfigParseError(
- path=_unicode_path(path))
- else:
- for section in cp.sections():
- config[section] = {}
- for option in cp.options(section):
- config_value = cp.get(section, option)
- if parse_subsections and config_value.startswith('\n'):
- # Then we need to parse the inner contents as
- # hierarchical. We support a single level
- # of nesting for now.
- try:
- config_value = _parse_nested(config_value)
- except ValueError:
- raise botocore.exceptions.ConfigParseError(
- path=_unicode_path(path))
- config[section][option] = config_value
- return config
-
-
-def _unicode_path(path):
- if isinstance(path, six.text_type):
- return path
+ raise botocore.exceptions.ConfigParseError(
+ path=_unicode_path(path))
+ else:
+ for section in cp.sections():
+ config[section] = {}
+ for option in cp.options(section):
+ config_value = cp.get(section, option)
+ if parse_subsections and config_value.startswith('\n'):
+ # Then we need to parse the inner contents as
+ # hierarchical. We support a single level
+ # of nesting for now.
+ try:
+ config_value = _parse_nested(config_value)
+ except ValueError:
+ raise botocore.exceptions.ConfigParseError(
+ path=_unicode_path(path))
+ config[section][option] = config_value
+ return config
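+
+# Editor's sketch (not part of the original module): contrasting the two
+# entry points above on the same file; the path is a placeholder.
+def _demo_raw_vs_load(config_path):
+    raw = raw_config_parse(config_path)
+    # Section names are kept verbatim, e.g. raw.get('profile dev').
+    mapped = load_config(config_path)
+    # Profiles are gathered under a single key,
+    # e.g. mapped['profiles'].get('dev').
+    return raw, mapped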
+
+
+def _unicode_path(path):
+ if isinstance(path, six.text_type):
+ return path
# According to the documentation getfilesystemencoding can return None
# on unix in which case the default encoding is used instead.
filesystem_encoding = sys.getfilesystemencoding()
if filesystem_encoding is None:
filesystem_encoding = sys.getdefaultencoding()
return path.decode(filesystem_encoding, 'replace')
-
-
-def _parse_nested(config_value):
- # Given a value like this:
- # \n
- # foo = bar
- # bar = baz
- # We need to parse this into
-    # {'foo': 'bar', 'bar': 'baz'}
- parsed = {}
- for line in config_value.splitlines():
- line = line.strip()
- if not line:
- continue
- # The caller will catch ValueError
- # and raise an appropriate error
- # if this fails.
- key, value = line.split('=', 1)
- parsed[key.strip()] = value.strip()
- return parsed
-
-
-def build_profile_map(parsed_ini_config):
- """Convert the parsed INI config into a profile map.
-
-    The config file format requires that every profile except the
-    default be prepended with "profile", e.g.::
-
- [profile test]
- aws_... = foo
- aws_... = bar
-
- [profile bar]
- aws_... = foo
- aws_... = bar
-
- # This is *not* a profile
- [preview]
- otherstuff = 1
-
- # Neither is this
- [foobar]
- morestuff = 2
-
-    The build_profile_map function takes a parsed INI config file where each top
-    level key represents a section name, and converts it into a format where all
- the profiles are under a single top level "profiles" key, and each key in
- the sub dictionary is a profile name. For example, the above config file
- would be converted from::
-
- {"profile test": {"aws_...": "foo", "aws...": "bar"},
- "profile bar": {"aws...": "foo", "aws...": "bar"},
- "preview": {"otherstuff": ...},
- "foobar": {"morestuff": ...},
- }
-
- into::
-
- {"profiles": {"test": {"aws_...": "foo", "aws...": "bar"},
- "bar": {"aws...": "foo", "aws...": "bar"},
- "preview": {"otherstuff": ...},
- "foobar": {"morestuff": ...},
- }
-
- If there are no profiles in the provided parsed INI contents, then
- an empty dict will be the value associated with the ``profiles`` key.
-
- .. note::
-
- This will not mutate the passed in parsed_ini_config. Instead it will
- make a deepcopy and return that value.
-
- """
- parsed_config = copy.deepcopy(parsed_ini_config)
- profiles = {}
- final_config = {}
- for key, values in parsed_config.items():
- if key.startswith("profile"):
- try:
- parts = shlex.split(key)
- except ValueError:
- continue
- if len(parts) == 2:
- profiles[parts[1]] = values
- elif key == 'default':
- # default section is special and is considered a profile
- # name but we don't require you use 'profile "default"'
- # as a section.
- profiles[key] = values
- else:
- final_config[key] = values
- final_config['profiles'] = profiles
- return final_config
+
+
+def _parse_nested(config_value):
+ # Given a value like this:
+ # \n
+ # foo = bar
+ # bar = baz
+ # We need to parse this into
+    # {'foo': 'bar', 'bar': 'baz'}
+ parsed = {}
+ for line in config_value.splitlines():
+ line = line.strip()
+ if not line:
+ continue
+ # The caller will catch ValueError
+ # and raise an appropriate error
+ # if this fails.
+ key, value = line.split('=', 1)
+ parsed[key.strip()] = value.strip()
+ return parsed
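+
+# Editor's sketch (not part of the original module): the single level of
+# nesting that _parse_nested supports.
+def _demo_parse_nested():
+    value = '\nsignature_version = s3v4\naddressing_style = path'
+    assert _parse_nested(value) == {
+        'signature_version': 's3v4',
+        'addressing_style': 'path',
+    }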
+
+
+def build_profile_map(parsed_ini_config):
+ """Convert the parsed INI config into a profile map.
+
+    The config file format requires that every profile except the
+    default be prepended with "profile", e.g.::
+
+ [profile test]
+ aws_... = foo
+ aws_... = bar
+
+ [profile bar]
+ aws_... = foo
+ aws_... = bar
+
+ # This is *not* a profile
+ [preview]
+ otherstuff = 1
+
+ # Neither is this
+ [foobar]
+ morestuff = 2
+
+    The build_profile_map function takes a parsed INI config file where each top
+    level key represents a section name, and converts it into a format where all
+ the profiles are under a single top level "profiles" key, and each key in
+ the sub dictionary is a profile name. For example, the above config file
+ would be converted from::
+
+ {"profile test": {"aws_...": "foo", "aws...": "bar"},
+ "profile bar": {"aws...": "foo", "aws...": "bar"},
+ "preview": {"otherstuff": ...},
+ "foobar": {"morestuff": ...},
+ }
+
+ into::
+
+ {"profiles": {"test": {"aws_...": "foo", "aws...": "bar"},
+ "bar": {"aws...": "foo", "aws...": "bar"},
+ "preview": {"otherstuff": ...},
+ "foobar": {"morestuff": ...},
+ }
+
+ If there are no profiles in the provided parsed INI contents, then
+ an empty dict will be the value associated with the ``profiles`` key.
+
+ .. note::
+
+ This will not mutate the passed in parsed_ini_config. Instead it will
+ make a deepcopy and return that value.
+
+ """
+ parsed_config = copy.deepcopy(parsed_ini_config)
+ profiles = {}
+ final_config = {}
+ for key, values in parsed_config.items():
+ if key.startswith("profile"):
+ try:
+ parts = shlex.split(key)
+ except ValueError:
+ continue
+ if len(parts) == 2:
+ profiles[parts[1]] = values
+ elif key == 'default':
+ # default section is special and is considered a profile
+ # name but we don't require you use 'profile "default"'
+ # as a section.
+ profiles[key] = values
+ else:
+ final_config[key] = values
+ final_config['profiles'] = profiles
+ return final_config
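+
+# Editor's sketch (not part of the original module): how section names
+# are classified by build_profile_map.
+def _demo_build_profile_map():
+    parsed = {
+        'profile dev': {'region': 'us-east-1'},
+        'default': {'region': 'us-west-2'},
+        'preview': {'cloudfront': 'true'},
+    }
+    result = build_profile_map(parsed)
+    # 'profile dev' and 'default' become profiles; 'preview' stays a
+    # plain top level key.
+    assert sorted(result['profiles']) == ['default', 'dev']
+    assert result['preview'] == {'cloudfront': 'true'}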
diff --git a/contrib/python/botocore/botocore/credentials.py b/contrib/python/botocore/botocore/credentials.py
index d3ab487d46..1b0eb8202c 100644
--- a/contrib/python/botocore/botocore/credentials.py
+++ b/contrib/python/botocore/botocore/credentials.py
@@ -1,68 +1,68 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import time
-import datetime
-import logging
-import os
-import getpass
-import threading
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import time
+import datetime
+import logging
+import os
+import getpass
+import threading
import json
-import subprocess
-from collections import namedtuple
-from copy import deepcopy
-from hashlib import sha1
-
-from dateutil.parser import parse
+import subprocess
+from collections import namedtuple
+from copy import deepcopy
+from hashlib import sha1
+
+from dateutil.parser import parse
from dateutil.tz import tzlocal, tzutc
-
-import botocore.configloader
-import botocore.compat
+
+import botocore.configloader
+import botocore.compat
from botocore import UNSIGNED
-from botocore.compat import total_seconds
-from botocore.compat import compat_shell_split
+from botocore.compat import total_seconds
+from botocore.compat import compat_shell_split
from botocore.config import Config
-from botocore.exceptions import UnknownCredentialError
-from botocore.exceptions import PartialCredentialsError
-from botocore.exceptions import ConfigNotFound
-from botocore.exceptions import InvalidConfigError
-from botocore.exceptions import InfiniteLoopConfigError
-from botocore.exceptions import RefreshWithMFAUnsupportedError
-from botocore.exceptions import MetadataRetrievalError
-from botocore.exceptions import CredentialRetrievalError
+from botocore.exceptions import UnknownCredentialError
+from botocore.exceptions import PartialCredentialsError
+from botocore.exceptions import ConfigNotFound
+from botocore.exceptions import InvalidConfigError
+from botocore.exceptions import InfiniteLoopConfigError
+from botocore.exceptions import RefreshWithMFAUnsupportedError
+from botocore.exceptions import MetadataRetrievalError
+from botocore.exceptions import CredentialRetrievalError
from botocore.exceptions import UnauthorizedSSOTokenError
-from botocore.utils import InstanceMetadataFetcher, parse_key_val_file
-from botocore.utils import ContainerMetadataFetcher
+from botocore.utils import InstanceMetadataFetcher, parse_key_val_file
+from botocore.utils import ContainerMetadataFetcher
from botocore.utils import FileWebIdentityTokenLoader
from botocore.utils import SSOTokenLoader
-
-
-logger = logging.getLogger(__name__)
-ReadOnlyCredentials = namedtuple('ReadOnlyCredentials',
- ['access_key', 'secret_key', 'token'])
-
-
+
+
+logger = logging.getLogger(__name__)
+ReadOnlyCredentials = namedtuple('ReadOnlyCredentials',
+ ['access_key', 'secret_key', 'token'])
+
+
def create_credential_resolver(session, cache=None, region_name=None):
- """Create a default credential resolver.
-
- This creates a pre-configured credential resolver
- that includes the default lookup chain for
- credentials.
-
- """
- profile_name = session.get_config_variable('profile') or 'default'
- metadata_timeout = session.get_config_variable('metadata_service_timeout')
- num_attempts = session.get_config_variable('metadata_service_num_attempts')
+ """Create a default credential resolver.
+
+ This creates a pre-configured credential resolver
+ that includes the default lookup chain for
+ credentials.
+
+ """
+ profile_name = session.get_config_variable('profile') or 'default'
+ metadata_timeout = session.get_config_variable('metadata_service_timeout')
+ num_attempts = session.get_config_variable('metadata_service_num_attempts')
disable_env_vars = session.instance_variables().get('profile') is not None
imds_config = {
@@ -73,70 +73,70 @@ def create_credential_resolver(session, cache=None, region_name=None):
if cache is None:
cache = {}
-
- env_provider = EnvProvider()
- container_provider = ContainerProvider()
- instance_metadata_provider = InstanceMetadataProvider(
- iam_role_fetcher=InstanceMetadataFetcher(
- timeout=metadata_timeout,
+
+ env_provider = EnvProvider()
+ container_provider = ContainerProvider()
+ instance_metadata_provider = InstanceMetadataProvider(
+ iam_role_fetcher=InstanceMetadataFetcher(
+ timeout=metadata_timeout,
num_attempts=num_attempts,
user_agent=session.user_agent(),
config=imds_config)
- )
+ )
profile_provider_builder = ProfileProviderBuilder(
session, cache=cache, region_name=region_name)
- assume_role_provider = AssumeRoleProvider(
- load_config=lambda: session.full_config,
+ assume_role_provider = AssumeRoleProvider(
+ load_config=lambda: session.full_config,
client_creator=_get_client_creator(session, region_name),
cache=cache,
- profile_name=profile_name,
- credential_sourcer=CanonicalNameCredentialSourcer([
- env_provider, container_provider, instance_metadata_provider
+ profile_name=profile_name,
+ credential_sourcer=CanonicalNameCredentialSourcer([
+ env_provider, container_provider, instance_metadata_provider
]),
profile_provider_builder=profile_provider_builder,
- )
+ )
pre_profile = [
- env_provider,
- assume_role_provider,
+ env_provider,
+ assume_role_provider,
]
profile_providers = profile_provider_builder.providers(
profile_name=profile_name,
disable_env_vars=disable_env_vars,
)
post_profile = [
- OriginalEC2Provider(),
- BotoProvider(),
- container_provider,
+ OriginalEC2Provider(),
+ BotoProvider(),
+ container_provider,
instance_metadata_provider,
- ]
+ ]
providers = pre_profile + profile_providers + post_profile
if disable_env_vars:
- # An explicitly provided profile will negate an EnvProvider.
- # We will defer to providers that understand the "profile"
- # concept to retrieve credentials.
-        # The one edge case is if all three values are provided via
- # env vars:
- # export AWS_ACCESS_KEY_ID=foo
- # export AWS_SECRET_ACCESS_KEY=bar
- # export AWS_PROFILE=baz
- # Then, just like our client() calls, the explicit credentials
- # will take precedence.
- #
- # This precedence is enforced by leaving the EnvProvider in the chain.
- # This means that the only way a "profile" would win is if the
- # EnvProvider does not return credentials, which is what we want
- # in this scenario.
- providers.remove(env_provider)
- logger.debug('Skipping environment variable credential check'
- ' because profile name was explicitly set.')
-
- resolver = CredentialResolver(providers=providers)
- return resolver
-
-
+ # An explicitly provided profile will negate an EnvProvider.
+ # We will defer to providers that understand the "profile"
+ # concept to retrieve credentials.
+        # The one edge case is if all three values are provided via
+ # env vars:
+ # export AWS_ACCESS_KEY_ID=foo
+ # export AWS_SECRET_ACCESS_KEY=bar
+ # export AWS_PROFILE=baz
+ # Then, just like our client() calls, the explicit credentials
+ # will take precedence.
+ #
+ # This precedence is enforced by leaving the EnvProvider in the chain.
+ # This means that the only way a "profile" would win is if the
+ # EnvProvider does not return credentials, which is what we want
+ # in this scenario.
+ providers.remove(env_provider)
+ logger.debug('Skipping environment variable credential check'
+ ' because profile name was explicitly set.')
+
+ resolver = CredentialResolver(providers=providers)
+ return resolver
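+
+# Editor's sketch (not part of the original module): typical wiring of
+# the resolver; the session import is local to avoid a circular import.
+def _demo_resolve_credentials():
+    import botocore.session
+    session = botocore.session.Session()
+    resolver = create_credential_resolver(session)
+    # Returns a Credentials object from the first provider that has
+    # credentials, or None if the whole chain comes up empty.
+    return resolver.load_credentials()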
+
+
class ProfileProviderBuilder(object):
"""This class handles the creation of profile based providers.
@@ -205,29 +205,29 @@ class ProfileProviderBuilder(object):
)
-def get_credentials(session):
- resolver = create_credential_resolver(session)
- return resolver.load_credentials()
-
-
-def _local_now():
- return datetime.datetime.now(tzlocal())
-
-
-def _parse_if_needed(value):
- if isinstance(value, datetime.datetime):
- return value
- return parse(value)
-
-
-def _serialize_if_needed(value, iso=False):
- if isinstance(value, datetime.datetime):
- if iso:
- return value.isoformat()
- return value.strftime('%Y-%m-%dT%H:%M:%S%Z')
- return value
-
-
+def get_credentials(session):
+ resolver = create_credential_resolver(session)
+ return resolver.load_credentials()
+
+
+def _local_now():
+ return datetime.datetime.now(tzlocal())
+
+
+def _parse_if_needed(value):
+ if isinstance(value, datetime.datetime):
+ return value
+ return parse(value)
+
+
+def _serialize_if_needed(value, iso=False):
+ if isinstance(value, datetime.datetime):
+ if iso:
+ return value.isoformat()
+ return value.strftime('%Y-%m-%dT%H:%M:%S%Z')
+ return value
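+
+# Editor's sketch (not part of the original module): behaviour of the
+# serializer above.
+def _demo_serialize_if_needed():
+    dt = datetime.datetime(2021, 1, 1, 12, 0, 0)
+    assert _serialize_if_needed(dt, iso=True) == '2021-01-01T12:00:00'
+    # Non-datetime values pass through unchanged.
+    assert _serialize_if_needed('already-a-string') == 'already-a-string'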
+
+
def _get_client_creator(session, region_name):
def client_creator(service_name, **kwargs):
create_client_kwargs = {
@@ -239,40 +239,40 @@ def _get_client_creator(session, region_name):
return client_creator
-def create_assume_role_refresher(client, params):
- def refresh():
- response = client.assume_role(**params)
- credentials = response['Credentials']
- # We need to normalize the credential names to
- # the values expected by the refresh creds.
- return {
- 'access_key': credentials['AccessKeyId'],
- 'secret_key': credentials['SecretAccessKey'],
- 'token': credentials['SessionToken'],
- 'expiry_time': _serialize_if_needed(credentials['Expiration']),
- }
- return refresh
-
-
-def create_mfa_serial_refresher(actual_refresh):
-
- class _Refresher(object):
- def __init__(self, refresh):
- self._refresh = refresh
- self._has_been_called = False
-
- def __call__(self):
- if self._has_been_called:
- # We can explore an option in the future to support
- # reprompting for MFA, but for now we just error out
- # when the temp creds expire.
- raise RefreshWithMFAUnsupportedError()
- self._has_been_called = True
- return self._refresh()
-
- return _Refresher(actual_refresh)
-
-
+def create_assume_role_refresher(client, params):
+ def refresh():
+ response = client.assume_role(**params)
+ credentials = response['Credentials']
+ # We need to normalize the credential names to
+ # the values expected by the refresh creds.
+ return {
+ 'access_key': credentials['AccessKeyId'],
+ 'secret_key': credentials['SecretAccessKey'],
+ 'token': credentials['SessionToken'],
+ 'expiry_time': _serialize_if_needed(credentials['Expiration']),
+ }
+ return refresh
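+
+# Editor's sketch (not part of the original module): wiring the refresher
+# above to an STS client; the role ARN and session name are placeholders.
+def _demo_assume_role_refresher(sts_client):
+    refresh = create_assume_role_refresher(sts_client, {
+        'RoleArn': 'arn:aws:iam::123456789012:role/Example',
+        'RoleSessionName': 'example-session',
+    })
+    # Each call performs AssumeRole and returns the normalized keys
+    # expected by RefreshableCredentials: access_key, secret_key,
+    # token and expiry_time.
+    return refresh()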
+
+
+def create_mfa_serial_refresher(actual_refresh):
+
+ class _Refresher(object):
+ def __init__(self, refresh):
+ self._refresh = refresh
+ self._has_been_called = False
+
+ def __call__(self):
+ if self._has_been_called:
+ # We can explore an option in the future to support
+ # reprompting for MFA, but for now we just error out
+ # when the temp creds expire.
+ raise RefreshWithMFAUnsupportedError()
+ self._has_been_called = True
+ return self._refresh()
+
+ return _Refresher(actual_refresh)
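+
+# Editor's sketch (not part of the original module): the wrapper above
+# refreshes once and raises on any later attempt.
+def _demo_mfa_serial_refresher():
+    refresher = create_mfa_serial_refresher(lambda: {'token': 'fresh'})
+    creds = refresher()  # first refresh succeeds
+    try:
+        refresher()  # a second refresh is unsupported with MFA
+    except RefreshWithMFAUnsupportedError:
+        pass
+    return creds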
+
+
class JSONFileCache(object):
"""JSON file cache.
This provides a dict like interface that stores JSON serializable
@@ -324,242 +324,242 @@ class JSONFileCache(object):
return full_path
-class Credentials(object):
- """
- Holds the credentials needed to authenticate requests.
-
- :ivar access_key: The access key part of the credentials.
- :ivar secret_key: The secret key part of the credentials.
- :ivar token: The security token, valid only for session credentials.
- :ivar method: A string which identifies where the credentials
- were found.
- """
-
- def __init__(self, access_key, secret_key, token=None,
- method=None):
- self.access_key = access_key
- self.secret_key = secret_key
- self.token = token
-
- if method is None:
- method = 'explicit'
- self.method = method
-
- self._normalize()
-
- def _normalize(self):
- # Keys would sometimes (accidentally) contain non-ascii characters.
- # It would cause a confusing UnicodeDecodeError in Python 2.
-        # We explicitly convert them into unicode to avoid such errors.
- #
- # Eventually the service will decide whether to accept the credential.
- # This also complies with the behavior in Python 3.
- self.access_key = botocore.compat.ensure_unicode(self.access_key)
- self.secret_key = botocore.compat.ensure_unicode(self.secret_key)
-
- def get_frozen_credentials(self):
- return ReadOnlyCredentials(self.access_key,
- self.secret_key,
- self.token)
-
-
-class RefreshableCredentials(Credentials):
- """
- Holds the credentials needed to authenticate requests. In addition, it
- knows how to refresh itself.
-
- :ivar access_key: The access key part of the credentials.
- :ivar secret_key: The secret key part of the credentials.
- :ivar token: The security token, valid only for session credentials.
- :ivar method: A string which identifies where the credentials
- were found.
- """
-    # The number of seconds before expiry at which we'll attempt to
-    # refresh, but not block if someone else is refreshing.
-    _advisory_refresh_timeout = 15 * 60
-    # The number of seconds before expiry at which all threads will
-    # block waiting for refreshed credentials.
- _mandatory_refresh_timeout = 10 * 60
-
- def __init__(self, access_key, secret_key, token,
- expiry_time, refresh_using, method,
- time_fetcher=_local_now):
- self._refresh_using = refresh_using
- self._access_key = access_key
- self._secret_key = secret_key
- self._token = token
- self._expiry_time = expiry_time
- self._time_fetcher = time_fetcher
- self._refresh_lock = threading.Lock()
- self.method = method
- self._frozen_credentials = ReadOnlyCredentials(
- access_key, secret_key, token)
- self._normalize()
-
- def _normalize(self):
- self._access_key = botocore.compat.ensure_unicode(self._access_key)
- self._secret_key = botocore.compat.ensure_unicode(self._secret_key)
-
- @classmethod
- def create_from_metadata(cls, metadata, refresh_using, method):
- instance = cls(
- access_key=metadata['access_key'],
- secret_key=metadata['secret_key'],
- token=metadata['token'],
- expiry_time=cls._expiry_datetime(metadata['expiry_time']),
- method=method,
- refresh_using=refresh_using
- )
- return instance
-
- @property
- def access_key(self):
- """Warning: Using this property can lead to race conditions if you
- access another property subsequently along the refresh boundary.
- Please use get_frozen_credentials instead.
- """
- self._refresh()
- return self._access_key
-
- @access_key.setter
- def access_key(self, value):
- self._access_key = value
-
- @property
- def secret_key(self):
- """Warning: Using this property can lead to race conditions if you
- access another property subsequently along the refresh boundary.
- Please use get_frozen_credentials instead.
- """
- self._refresh()
- return self._secret_key
-
- @secret_key.setter
- def secret_key(self, value):
- self._secret_key = value
-
- @property
- def token(self):
- """Warning: Using this property can lead to race conditions if you
- access another property subsequently along the refresh boundary.
- Please use get_frozen_credentials instead.
- """
- self._refresh()
- return self._token
-
- @token.setter
- def token(self, value):
- self._token = value
-
- def _seconds_remaining(self):
- delta = self._expiry_time - self._time_fetcher()
- return total_seconds(delta)
-
- def refresh_needed(self, refresh_in=None):
- """Check if a refresh is needed.
-
- A refresh is needed if the expiry time associated
- with the temporary credentials is less than the
-        provided ``refresh_in``. If ``refresh_in`` is not
-        provided, ``self._advisory_refresh_timeout`` will be used.
-
- For example, if your temporary credentials expire
- in 10 minutes and the provided ``refresh_in`` is
- ``15 * 60``, then this function will return ``True``.
-
- :type refresh_in: int
- :param refresh_in: The number of seconds before the
- credentials expire in which refresh attempts should
- be made.
-
+class Credentials(object):
+ """
+ Holds the credentials needed to authenticate requests.
+
+ :ivar access_key: The access key part of the credentials.
+ :ivar secret_key: The secret key part of the credentials.
+ :ivar token: The security token, valid only for session credentials.
+ :ivar method: A string which identifies where the credentials
+ were found.
+ """
+
+ def __init__(self, access_key, secret_key, token=None,
+ method=None):
+ self.access_key = access_key
+ self.secret_key = secret_key
+ self.token = token
+
+ if method is None:
+ method = 'explicit'
+ self.method = method
+
+ self._normalize()
+
+ def _normalize(self):
+ # Keys would sometimes (accidentally) contain non-ascii characters.
+ # It would cause a confusing UnicodeDecodeError in Python 2.
+        # We explicitly convert them into unicode to avoid such errors.
+ #
+ # Eventually the service will decide whether to accept the credential.
+ # This also complies with the behavior in Python 3.
+ self.access_key = botocore.compat.ensure_unicode(self.access_key)
+ self.secret_key = botocore.compat.ensure_unicode(self.secret_key)
+
+ def get_frozen_credentials(self):
+ return ReadOnlyCredentials(self.access_key,
+ self.secret_key,
+ self.token)
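+
+# Editor's sketch (not part of the original module): static credentials
+# with obvious placeholder values.
+def _demo_static_credentials():
+    creds = Credentials('AKIDEXAMPLE', 'example-secret')
+    frozen = creds.get_frozen_credentials()
+    # The frozen namedtuple is safe to hand to a signer as-is.
+    assert frozen.access_key == 'AKIDEXAMPLE'
+    assert frozen.token is None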
+
+
+class RefreshableCredentials(Credentials):
+ """
+ Holds the credentials needed to authenticate requests. In addition, it
+ knows how to refresh itself.
+
+ :ivar access_key: The access key part of the credentials.
+ :ivar secret_key: The secret key part of the credentials.
+ :ivar token: The security token, valid only for session credentials.
+ :ivar method: A string which identifies where the credentials
+ were found.
+ """
+    # The number of seconds before expiry at which we'll attempt to
+    # refresh, but not block if someone else is refreshing.
+    _advisory_refresh_timeout = 15 * 60
+    # The number of seconds before expiry at which all threads will
+    # block waiting for refreshed credentials.
+ _mandatory_refresh_timeout = 10 * 60
+
+ def __init__(self, access_key, secret_key, token,
+ expiry_time, refresh_using, method,
+ time_fetcher=_local_now):
+ self._refresh_using = refresh_using
+ self._access_key = access_key
+ self._secret_key = secret_key
+ self._token = token
+ self._expiry_time = expiry_time
+ self._time_fetcher = time_fetcher
+ self._refresh_lock = threading.Lock()
+ self.method = method
+ self._frozen_credentials = ReadOnlyCredentials(
+ access_key, secret_key, token)
+ self._normalize()
+
+ def _normalize(self):
+ self._access_key = botocore.compat.ensure_unicode(self._access_key)
+ self._secret_key = botocore.compat.ensure_unicode(self._secret_key)
+
+ @classmethod
+ def create_from_metadata(cls, metadata, refresh_using, method):
+ instance = cls(
+ access_key=metadata['access_key'],
+ secret_key=metadata['secret_key'],
+ token=metadata['token'],
+ expiry_time=cls._expiry_datetime(metadata['expiry_time']),
+ method=method,
+ refresh_using=refresh_using
+ )
+ return instance
+
+ @property
+ def access_key(self):
+ """Warning: Using this property can lead to race conditions if you
+ access another property subsequently along the refresh boundary.
+ Please use get_frozen_credentials instead.
+ """
+ self._refresh()
+ return self._access_key
+
+ @access_key.setter
+ def access_key(self, value):
+ self._access_key = value
+
+ @property
+ def secret_key(self):
+ """Warning: Using this property can lead to race conditions if you
+ access another property subsequently along the refresh boundary.
+ Please use get_frozen_credentials instead.
+ """
+ self._refresh()
+ return self._secret_key
+
+ @secret_key.setter
+ def secret_key(self, value):
+ self._secret_key = value
+
+ @property
+ def token(self):
+ """Warning: Using this property can lead to race conditions if you
+ access another property subsequently along the refresh boundary.
+ Please use get_frozen_credentials instead.
+ """
+ self._refresh()
+ return self._token
+
+ @token.setter
+ def token(self, value):
+ self._token = value
+
+ def _seconds_remaining(self):
+ delta = self._expiry_time - self._time_fetcher()
+ return total_seconds(delta)
+
+ def refresh_needed(self, refresh_in=None):
+ """Check if a refresh is needed.
+
+ A refresh is needed if the expiry time associated
+ with the temporary credentials is less than the
+        provided ``refresh_in``. If ``refresh_in`` is not
+        provided, ``self._advisory_refresh_timeout`` will be used.
+
+ For example, if your temporary credentials expire
+ in 10 minutes and the provided ``refresh_in`` is
+ ``15 * 60``, then this function will return ``True``.
+
+ :type refresh_in: int
+ :param refresh_in: The number of seconds before the
+ credentials expire in which refresh attempts should
+ be made.
+
:return: True if refresh needed, False otherwise.
-
- """
- if self._expiry_time is None:
- # No expiration, so assume we don't need to refresh.
- return False
-
- if refresh_in is None:
- refresh_in = self._advisory_refresh_timeout
-        # The credentials should be refreshed if they're going to expire
-        # within the given refresh_in window.
- if self._seconds_remaining() >= refresh_in:
- # There's enough time left. Don't refresh.
- return False
- logger.debug("Credentials need to be refreshed.")
- return True
-
- def _is_expired(self):
- # Checks if the current credentials are expired.
- return self.refresh_needed(refresh_in=0)
-
- def _refresh(self):
- # In the common case where we don't need a refresh, we
- # can immediately exit and not require acquiring the
- # refresh lock.
- if not self.refresh_needed(self._advisory_refresh_timeout):
- return
-
-        # acquire() doesn't accept kwargs; passing False indicates
-        # that we should not block if we can't acquire the lock.
- # If we aren't able to acquire the lock, we'll trigger
- # the else clause.
- if self._refresh_lock.acquire(False):
- try:
- if not self.refresh_needed(self._advisory_refresh_timeout):
- return
- is_mandatory_refresh = self.refresh_needed(
- self._mandatory_refresh_timeout)
- self._protected_refresh(is_mandatory=is_mandatory_refresh)
- return
- finally:
- self._refresh_lock.release()
- elif self.refresh_needed(self._mandatory_refresh_timeout):
- # If we're within the mandatory refresh window,
- # we must block until we get refreshed credentials.
- with self._refresh_lock:
- if not self.refresh_needed(self._mandatory_refresh_timeout):
- return
- self._protected_refresh(is_mandatory=True)
-
- def _protected_refresh(self, is_mandatory):
- # precondition: this method should only be called if you've acquired
- # the self._refresh_lock.
- try:
- metadata = self._refresh_using()
- except Exception as e:
- period_name = 'mandatory' if is_mandatory else 'advisory'
- logger.warning("Refreshing temporary credentials failed "
- "during %s refresh period.",
- period_name, exc_info=True)
- if is_mandatory:
- # If this is a mandatory refresh, then
- # all errors that occur when we attempt to refresh
- # credentials are propagated back to the user.
- raise
- # Otherwise we'll just return.
- # The end result will be that we'll use the current
- # set of temporary credentials we have.
- return
- self._set_from_data(metadata)
+
+ """
+ if self._expiry_time is None:
+ # No expiration, so assume we don't need to refresh.
+ return False
+
+ if refresh_in is None:
+ refresh_in = self._advisory_refresh_timeout
+        # The credentials should be refreshed if they're going to expire
+        # within the given refresh_in window.
+ if self._seconds_remaining() >= refresh_in:
+ # There's enough time left. Don't refresh.
+ return False
+ logger.debug("Credentials need to be refreshed.")
+ return True
+
+ def _is_expired(self):
+ # Checks if the current credentials are expired.
+ return self.refresh_needed(refresh_in=0)
+
+ def _refresh(self):
+ # In the common case where we don't need a refresh, we
+ # can immediately exit and not require acquiring the
+ # refresh lock.
+ if not self.refresh_needed(self._advisory_refresh_timeout):
+ return
+
+        # acquire() doesn't accept kwargs; passing False indicates
+        # that we should not block if we can't acquire the lock.
+ # If we aren't able to acquire the lock, we'll trigger
+ # the else clause.
+ if self._refresh_lock.acquire(False):
+ try:
+ if not self.refresh_needed(self._advisory_refresh_timeout):
+ return
+ is_mandatory_refresh = self.refresh_needed(
+ self._mandatory_refresh_timeout)
+ self._protected_refresh(is_mandatory=is_mandatory_refresh)
+ return
+ finally:
+ self._refresh_lock.release()
+ elif self.refresh_needed(self._mandatory_refresh_timeout):
+ # If we're within the mandatory refresh window,
+ # we must block until we get refreshed credentials.
+ with self._refresh_lock:
+ if not self.refresh_needed(self._mandatory_refresh_timeout):
+ return
+ self._protected_refresh(is_mandatory=True)
+
+ def _protected_refresh(self, is_mandatory):
+ # precondition: this method should only be called if you've acquired
+ # the self._refresh_lock.
+ try:
+ metadata = self._refresh_using()
+ except Exception as e:
+ period_name = 'mandatory' if is_mandatory else 'advisory'
+ logger.warning("Refreshing temporary credentials failed "
+ "during %s refresh period.",
+ period_name, exc_info=True)
+ if is_mandatory:
+ # If this is a mandatory refresh, then
+ # all errors that occur when we attempt to refresh
+ # credentials are propagated back to the user.
+ raise
+ # Otherwise we'll just return.
+ # The end result will be that we'll use the current
+ # set of temporary credentials we have.
+ return
+ self._set_from_data(metadata)
self._frozen_credentials = ReadOnlyCredentials(
self._access_key, self._secret_key, self._token)
- if self._is_expired():
- # We successfully refreshed credentials but for whatever
- # reason, our refreshing function returned credentials
- # that are still expired. In this scenario, the only
- # thing we can do is let the user know and raise
- # an exception.
- msg = ("Credentials were refreshed, but the "
- "refreshed credentials are still expired.")
- logger.warning(msg)
- raise RuntimeError(msg)
-
- @staticmethod
- def _expiry_datetime(time_str):
- return parse(time_str)
-
- def _set_from_data(self, data):
+ if self._is_expired():
+ # We successfully refreshed credentials but for whatever
+ # reason, our refreshing function returned credentials
+ # that are still expired. In this scenario, the only
+ # thing we can do is let the user know and raise
+ # an exception.
+ msg = ("Credentials were refreshed, but the "
+ "refreshed credentials are still expired.")
+ logger.warning(msg)
+ raise RuntimeError(msg)
+
+ @staticmethod
+ def _expiry_datetime(time_str):
+ return parse(time_str)
+
+ def _set_from_data(self, data):
expected_keys = ['access_key', 'secret_key', 'token', 'expiry_time']
if not data:
missing_keys = expected_keys
@@ -573,145 +573,145 @@ class RefreshableCredentials(Credentials):
error_msg=message % ', '.join(missing_keys),
)
- self.access_key = data['access_key']
- self.secret_key = data['secret_key']
- self.token = data['token']
- self._expiry_time = parse(data['expiry_time'])
- logger.debug("Retrieved credentials will expire at: %s",
- self._expiry_time)
- self._normalize()
-
- def get_frozen_credentials(self):
- """Return immutable credentials.
-
- The ``access_key``, ``secret_key``, and ``token`` properties
- on this class will always check and refresh credentials if
- needed before returning the particular credentials.
-
- This has an edge case where you can get inconsistent
- credentials. Imagine this:
-
- # Current creds are "t1"
- tmp.access_key ---> expired? no, so return t1.access_key
- # ---- time is now expired, creds need refreshing to "t2" ----
- tmp.secret_key ---> expired? yes, refresh and return t2.secret_key
-
- This means we're using the access key from t1 with the secret key
- from t2. To fix this issue, you can request a frozen credential object
- which is guaranteed not to change.
-
- The frozen credentials returned from this method should be used
- immediately and then discarded. The typical usage pattern would
- be::
-
- creds = RefreshableCredentials(...)
- some_code = SomeSignerObject()
- # I'm about to sign the request.
- # The frozen credentials are only used for the
- # duration of generate_presigned_url and will be
- # immediately thrown away.
- request = some_code.sign_some_request(
- with_credentials=creds.get_frozen_credentials())
- print("Signed request:", request)
-
- """
- self._refresh()
- return self._frozen_credentials
-
-
-class DeferredRefreshableCredentials(RefreshableCredentials):
- """Refreshable credentials that don't require initial credentials.
-
- refresh_using will be called upon first access.
- """
- def __init__(self, refresh_using, method, time_fetcher=_local_now):
- self._refresh_using = refresh_using
- self._access_key = None
- self._secret_key = None
- self._token = None
- self._expiry_time = None
- self._time_fetcher = time_fetcher
- self._refresh_lock = threading.Lock()
- self.method = method
- self._frozen_credentials = None
-
- def refresh_needed(self, refresh_in=None):
+ self.access_key = data['access_key']
+ self.secret_key = data['secret_key']
+ self.token = data['token']
+ self._expiry_time = parse(data['expiry_time'])
+ logger.debug("Retrieved credentials will expire at: %s",
+ self._expiry_time)
+ self._normalize()
+
+ def get_frozen_credentials(self):
+ """Return immutable credentials.
+
+ The ``access_key``, ``secret_key``, and ``token`` properties
+ on this class will always check and refresh credentials if
+ needed before returning the particular credentials.
+
+ This has an edge case where you can get inconsistent
+ credentials. Imagine this:
+
+ # Current creds are "t1"
+ tmp.access_key ---> expired? no, so return t1.access_key
+ # ---- time is now expired, creds need refreshing to "t2" ----
+ tmp.secret_key ---> expired? yes, refresh and return t2.secret_key
+
+ This means we're using the access key from t1 with the secret key
+ from t2. To fix this issue, you can request a frozen credential object
+ which is guaranteed not to change.
+
+ The frozen credentials returned from this method should be used
+ immediately and then discarded. The typical usage pattern would
+ be::
+
+ creds = RefreshableCredentials(...)
+ some_code = SomeSignerObject()
+ # I'm about to sign the request.
+ # The frozen credentials are only used for the
+ # duration of generate_presigned_url and will be
+ # immediately thrown away.
+ request = some_code.sign_some_request(
+ with_credentials=creds.get_frozen_credentials())
+ print("Signed request:", request)
+
+ """
+ self._refresh()
+ return self._frozen_credentials
+
+
+class DeferredRefreshableCredentials(RefreshableCredentials):
+ """Refreshable credentials that don't require initial credentials.
+
+ refresh_using will be called upon first access.
+ """
+ def __init__(self, refresh_using, method, time_fetcher=_local_now):
+ self._refresh_using = refresh_using
+ self._access_key = None
+ self._secret_key = None
+ self._token = None
+ self._expiry_time = None
+ self._time_fetcher = time_fetcher
+ self._refresh_lock = threading.Lock()
+ self.method = method
+ self._frozen_credentials = None
+
+ def refresh_needed(self, refresh_in=None):
if self._frozen_credentials is None:
- return True
- return super(DeferredRefreshableCredentials, self).refresh_needed(
- refresh_in
- )
-
-
-class CachedCredentialFetcher(object):
+ return True
+ return super(DeferredRefreshableCredentials, self).refresh_needed(
+ refresh_in
+ )
+
+
+class CachedCredentialFetcher(object):
DEFAULT_EXPIRY_WINDOW_SECONDS = 60 * 15
def __init__(self, cache=None, expiry_window_seconds=None):
- if cache is None:
- cache = {}
- self._cache = cache
- self._cache_key = self._create_cache_key()
+ if cache is None:
+ cache = {}
+ self._cache = cache
+ self._cache_key = self._create_cache_key()
if expiry_window_seconds is None:
expiry_window_seconds = self.DEFAULT_EXPIRY_WINDOW_SECONDS
- self._expiry_window_seconds = expiry_window_seconds
-
- def _create_cache_key(self):
- raise NotImplementedError('_create_cache_key()')
-
- def _make_file_safe(self, filename):
-        # Replace ':', the path separator, and '/' to make the string filename-safe.
- filename = filename.replace(':', '_').replace(os.path.sep, '_')
- return filename.replace('/', '_')
-
- def _get_credentials(self):
- raise NotImplementedError('_get_credentials()')
-
- def fetch_credentials(self):
- return self._get_cached_credentials()
-
- def _get_cached_credentials(self):
- """Get up-to-date credentials.
-
- This will check the cache for up-to-date credentials, calling assume
- role if none are available.
- """
- response = self._load_from_cache()
- if response is None:
- response = self._get_credentials()
- self._write_to_cache(response)
- else:
- logger.debug("Credentials for role retrieved from cache.")
-
- creds = response['Credentials']
- expiration = _serialize_if_needed(creds['Expiration'], iso=True)
- return {
- 'access_key': creds['AccessKeyId'],
- 'secret_key': creds['SecretAccessKey'],
- 'token': creds['SessionToken'],
- 'expiry_time': expiration,
- }
-
- def _load_from_cache(self):
- if self._cache_key in self._cache:
- creds = deepcopy(self._cache[self._cache_key])
- if not self._is_expired(creds):
- return creds
- else:
- logger.debug(
- "Credentials were found in cache, but they are expired."
- )
- return None
-
- def _write_to_cache(self, response):
- self._cache[self._cache_key] = deepcopy(response)
-
- def _is_expired(self, credentials):
- """Check if credentials are expired."""
- end_time = _parse_if_needed(credentials['Credentials']['Expiration'])
- seconds = total_seconds(end_time - _local_now())
- return seconds < self._expiry_window_seconds
-
-
+ self._expiry_window_seconds = expiry_window_seconds
+
+ def _create_cache_key(self):
+ raise NotImplementedError('_create_cache_key()')
+
+ def _make_file_safe(self, filename):
+        # Replace ':', the path separator, and '/' to make the string filename-safe.
+ filename = filename.replace(':', '_').replace(os.path.sep, '_')
+ return filename.replace('/', '_')
+
+ def _get_credentials(self):
+ raise NotImplementedError('_get_credentials()')
+
+ def fetch_credentials(self):
+ return self._get_cached_credentials()
+
+ def _get_cached_credentials(self):
+ """Get up-to-date credentials.
+
+ This will check the cache for up-to-date credentials, calling assume
+ role if none are available.
+ """
+ response = self._load_from_cache()
+ if response is None:
+ response = self._get_credentials()
+ self._write_to_cache(response)
+ else:
+ logger.debug("Credentials for role retrieved from cache.")
+
+ creds = response['Credentials']
+ expiration = _serialize_if_needed(creds['Expiration'], iso=True)
+ return {
+ 'access_key': creds['AccessKeyId'],
+ 'secret_key': creds['SecretAccessKey'],
+ 'token': creds['SessionToken'],
+ 'expiry_time': expiration,
+ }
+
+ def _load_from_cache(self):
+ if self._cache_key in self._cache:
+ creds = deepcopy(self._cache[self._cache_key])
+ if not self._is_expired(creds):
+ return creds
+ else:
+ logger.debug(
+ "Credentials were found in cache, but they are expired."
+ )
+ return None
+
+ def _write_to_cache(self, response):
+ self._cache[self._cache_key] = deepcopy(response)
+
+ def _is_expired(self, credentials):
+ """Check if credentials are expired."""
+ end_time = _parse_if_needed(credentials['Credentials']['Expiration'])
+ seconds = total_seconds(end_time - _local_now())
+ return seconds < self._expiry_window_seconds
+
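+# Editor's sketch (not part of the original module): the subclass
+# contract for CachedCredentialFetcher; the stubbed response uses
+# placeholder values.
+class _DemoCredentialFetcher(CachedCredentialFetcher):
+    def _create_cache_key(self):
+        return self._make_file_safe('demo:fetcher')
+
+    def _get_credentials(self):
+        # A real subclass would call an AWS API here.
+        return {'Credentials': {
+            'AccessKeyId': 'AKIDEXAMPLE',
+            'SecretAccessKey': 'example-secret',
+            'SessionToken': 'example-token',
+            'Expiration': '2030-01-01T00:00:00Z',
+        }}
+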
+
class BaseAssumeRoleCredentialFetcher(CachedCredentialFetcher):
def __init__(self, client_creator, role_arn, extra_args=None,
cache=None, expiry_window_seconds=None):
@@ -762,85 +762,85 @@ class BaseAssumeRoleCredentialFetcher(CachedCredentialFetcher):
class AssumeRoleCredentialFetcher(BaseAssumeRoleCredentialFetcher):
- def __init__(self, client_creator, source_credentials, role_arn,
- extra_args=None, mfa_prompter=None, cache=None,
+ def __init__(self, client_creator, source_credentials, role_arn,
+ extra_args=None, mfa_prompter=None, cache=None,
expiry_window_seconds=None):
- """
- :type client_creator: callable
- :param client_creator: A callable that creates a client taking
- arguments like ``Session.create_client``.
-
- :type source_credentials: Credentials
- :param source_credentials: The credentials to use to create the
- client for the call to AssumeRole.
-
- :type role_arn: str
- :param role_arn: The ARN of the role to be assumed.
-
- :type extra_args: dict
- :param extra_args: Any additional arguments to add to the assume
- role request using the format of the botocore operation.
- Possible keys include, but may not be limited to,
- DurationSeconds, Policy, SerialNumber, ExternalId and
- RoleSessionName.
-
- :type mfa_prompter: callable
- :param mfa_prompter: A callable that returns input provided by the
-            user (e.g. raw_input, getpass.getpass, etc.).
-
- :type cache: dict
- :param cache: An object that supports ``__getitem__``,
- ``__setitem__``, and ``__contains__``. An example of this is
- the ``JSONFileCache`` class in aws-cli.
-
- :type expiry_window_seconds: int
-        :param expiry_window_seconds: The amount of time, in seconds,
-            before the stated expiration at which the cached credentials
-            are treated as expired and refreshed.
- """
- self._source_credentials = source_credentials
- self._mfa_prompter = mfa_prompter
- if self._mfa_prompter is None:
- self._mfa_prompter = getpass.getpass
-
- super(AssumeRoleCredentialFetcher, self).__init__(
+ """
+ :type client_creator: callable
+ :param client_creator: A callable that creates a client taking
+ arguments like ``Session.create_client``.
+
+ :type source_credentials: Credentials
+ :param source_credentials: The credentials to use to create the
+ client for the call to AssumeRole.
+
+ :type role_arn: str
+ :param role_arn: The ARN of the role to be assumed.
+
+ :type extra_args: dict
+ :param extra_args: Any additional arguments to add to the assume
+ role request using the format of the botocore operation.
+ Possible keys include, but may not be limited to,
+ DurationSeconds, Policy, SerialNumber, ExternalId and
+ RoleSessionName.
+
+ :type mfa_prompter: callable
+ :param mfa_prompter: A callable that returns input provided by the
+            user (e.g. raw_input, getpass.getpass, etc.).
+
+ :type cache: dict
+ :param cache: An object that supports ``__getitem__``,
+ ``__setitem__``, and ``__contains__``. An example of this is
+ the ``JSONFileCache`` class in aws-cli.
+
+ :type expiry_window_seconds: int
+        :param expiry_window_seconds: The amount of time, in seconds,
+            before the stated expiration at which the cached credentials
+            are treated as expired and refreshed.
+ """
+ self._source_credentials = source_credentials
+ self._mfa_prompter = mfa_prompter
+ if self._mfa_prompter is None:
+ self._mfa_prompter = getpass.getpass
+
+ super(AssumeRoleCredentialFetcher, self).__init__(
client_creator, role_arn, extra_args=extra_args,
cache=cache, expiry_window_seconds=expiry_window_seconds
- )
-
- def _get_credentials(self):
- """Get credentials by calling assume role."""
- kwargs = self._assume_role_kwargs()
- client = self._create_client()
- return client.assume_role(**kwargs)
-
- def _assume_role_kwargs(self):
- """Get the arguments for assume role based on current configuration."""
+ )
+
+ def _get_credentials(self):
+ """Get credentials by calling assume role."""
+ kwargs = self._assume_role_kwargs()
+ client = self._create_client()
+ return client.assume_role(**kwargs)
+
+ def _assume_role_kwargs(self):
+ """Get the arguments for assume role based on current configuration."""
assume_role_kwargs = deepcopy(self._assume_kwargs)
- mfa_serial = assume_role_kwargs.get('SerialNumber')
-
- if mfa_serial is not None:
- prompt = 'Enter MFA code for %s: ' % mfa_serial
- token_code = self._mfa_prompter(prompt)
- assume_role_kwargs['TokenCode'] = token_code
-
+ mfa_serial = assume_role_kwargs.get('SerialNumber')
+
+ if mfa_serial is not None:
+ prompt = 'Enter MFA code for %s: ' % mfa_serial
+ token_code = self._mfa_prompter(prompt)
+ assume_role_kwargs['TokenCode'] = token_code
+
duration_seconds = assume_role_kwargs.get('DurationSeconds')
if duration_seconds is not None:
assume_role_kwargs['DurationSeconds'] = duration_seconds
- return assume_role_kwargs
-
- def _create_client(self):
- """Create an STS client using the source credentials."""
- frozen_credentials = self._source_credentials.get_frozen_credentials()
- return self._client_creator(
- 'sts',
- aws_access_key_id=frozen_credentials.access_key,
- aws_secret_access_key=frozen_credentials.secret_key,
- aws_session_token=frozen_credentials.token,
- )
-
-
+ return assume_role_kwargs
+
+ def _create_client(self):
+ """Create an STS client using the source credentials."""
+ frozen_credentials = self._source_credentials.get_frozen_credentials()
+ return self._client_creator(
+ 'sts',
+ aws_access_key_id=frozen_credentials.access_key,
+ aws_secret_access_key=frozen_credentials.secret_key,
+ aws_session_token=frozen_credentials.token,
+ )
+
+
class AssumeRoleWithWebIdentityCredentialFetcher(
BaseAssumeRoleCredentialFetcher
):
@@ -898,515 +898,515 @@ class AssumeRoleWithWebIdentityCredentialFetcher(
return assume_role_kwargs
-class CredentialProvider(object):
- # A short name to identify the provider within botocore.
- METHOD = None
-
- # A name to identify the provider for use in cross-sdk features like
- # assume role's `credential_source` configuration option. These names
- # are to be treated in a case-insensitive way. NOTE: any providers not
- # implemented in botocore MUST prefix their canonical names with
- # 'custom' or we DO NOT guarantee that it will work with any features
- # that this provides.
- CANONICAL_NAME = None
-
- def __init__(self, session=None):
- self.session = session
-
- def load(self):
- """
- Loads the credentials from their source & sets them on the object.
-
- Subclasses should implement this method (by reading from disk, the
- environment, the network or wherever), returning ``True`` if they were
- found & loaded.
-
-        If not found, this method should return ``False``, indicating that the
- ``CredentialResolver`` should fall back to the next available method.
-
- The default implementation does nothing, assuming the user has set the
- ``access_key/secret_key/token`` themselves.
-
- :returns: Whether credentials were found & set
- :rtype: Credentials
- """
- return True
-
- def _extract_creds_from_mapping(self, mapping, *key_names):
- found = []
- for key_name in key_names:
- try:
- found.append(mapping[key_name])
- except KeyError:
- raise PartialCredentialsError(provider=self.METHOD,
- cred_var=key_name)
- return found
-
-
-class ProcessProvider(CredentialProvider):
-
- METHOD = 'custom-process'
-
- def __init__(self, profile_name, load_config, popen=subprocess.Popen):
- self._profile_name = profile_name
- self._load_config = load_config
- self._loaded_config = None
- self._popen = popen
-
- def load(self):
- credential_process = self._credential_process
- if credential_process is None:
- return
-
- creds_dict = self._retrieve_credentials_using(credential_process)
- if creds_dict.get('expiry_time') is not None:
- return RefreshableCredentials.create_from_metadata(
- creds_dict,
- lambda: self._retrieve_credentials_using(credential_process),
- self.METHOD
- )
-
- return Credentials(
- access_key=creds_dict['access_key'],
- secret_key=creds_dict['secret_key'],
- token=creds_dict.get('token'),
- method=self.METHOD
- )
-
- def _retrieve_credentials_using(self, credential_process):
- # We're not using shell=True, so we need to pass the
- # command and all arguments as a list.
- process_list = compat_shell_split(credential_process)
- p = self._popen(process_list,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = p.communicate()
- if p.returncode != 0:
- raise CredentialRetrievalError(
- provider=self.METHOD, error_msg=stderr.decode('utf-8'))
- parsed = botocore.compat.json.loads(stdout.decode('utf-8'))
- version = parsed.get('Version', '<Version key not provided>')
- if version != 1:
- raise CredentialRetrievalError(
- provider=self.METHOD,
- error_msg=("Unsupported version '%s' for credential process "
- "provider, supported versions: 1" % version))
- try:
- return {
- 'access_key': parsed['AccessKeyId'],
- 'secret_key': parsed['SecretAccessKey'],
- 'token': parsed.get('SessionToken'),
- 'expiry_time': parsed.get('Expiration'),
- }
- except KeyError as e:
- raise CredentialRetrievalError(
- provider=self.METHOD,
- error_msg="Missing required key in response: %s" % e
- )
-
- @property
- def _credential_process(self):
- if self._loaded_config is None:
- self._loaded_config = self._load_config()
- profile_config = self._loaded_config.get(
- 'profiles', {}).get(self._profile_name, {})
- return profile_config.get('credential_process')
-
-
-class InstanceMetadataProvider(CredentialProvider):
- METHOD = 'iam-role'
- CANONICAL_NAME = 'Ec2InstanceMetadata'
-
- def __init__(self, iam_role_fetcher):
- self._role_fetcher = iam_role_fetcher
-
- def load(self):
- fetcher = self._role_fetcher
- # We do the first request, to see if we get useful data back.
- # If not, we'll pass & move on to whatever's next in the credential
- # chain.
- metadata = fetcher.retrieve_iam_role_credentials()
- if not metadata:
- return None
- logger.debug('Found credentials from IAM Role: %s',
- metadata['role_name'])
- # We manually set the data here, since we already made the request &
- # have it. When the expiry is hit, the credentials will auto-refresh
- # themselves.
- creds = RefreshableCredentials.create_from_metadata(
- metadata,
- method=self.METHOD,
- refresh_using=fetcher.retrieve_iam_role_credentials,
- )
- return creds
-
-
-class EnvProvider(CredentialProvider):
- METHOD = 'env'
- CANONICAL_NAME = 'Environment'
- ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
- SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
- # The token can come from either of these env var.
-    # The token can come from either of these env vars.
- TOKENS = ['AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN']
- EXPIRY_TIME = 'AWS_CREDENTIAL_EXPIRATION'
-
- def __init__(self, environ=None, mapping=None):
- """
-
- :param environ: The environment variables (defaults to
- ``os.environ`` if no value is provided).
- :param mapping: An optional mapping of variable names to
- environment variable names. Use this if you want to
- change the mapping of access_key->AWS_ACCESS_KEY_ID, etc.
-            The dict can have up to 4 keys: ``access_key``, ``secret_key``,
-            ``token``, ``expiry_time``.
- """
- if environ is None:
- environ = os.environ
- self.environ = environ
- self._mapping = self._build_mapping(mapping)
-
- def _build_mapping(self, mapping):
- # Mapping of variable name to env var name.
- var_mapping = {}
- if mapping is None:
- # Use the class var default.
- var_mapping['access_key'] = self.ACCESS_KEY
- var_mapping['secret_key'] = self.SECRET_KEY
- var_mapping['token'] = self.TOKENS
- var_mapping['expiry_time'] = self.EXPIRY_TIME
- else:
- var_mapping['access_key'] = mapping.get(
- 'access_key', self.ACCESS_KEY)
- var_mapping['secret_key'] = mapping.get(
- 'secret_key', self.SECRET_KEY)
- var_mapping['token'] = mapping.get(
- 'token', self.TOKENS)
- if not isinstance(var_mapping['token'], list):
- var_mapping['token'] = [var_mapping['token']]
- var_mapping['expiry_time'] = mapping.get(
- 'expiry_time', self.EXPIRY_TIME)
- return var_mapping
-
- def load(self):
- """
- Search for credentials in explicit environment variables.
- """
+class CredentialProvider(object):
+ # A short name to identify the provider within botocore.
+ METHOD = None
+
+ # A name to identify the provider for use in cross-sdk features like
+ # assume role's `credential_source` configuration option. These names
+ # are to be treated in a case-insensitive way. NOTE: any providers not
+ # implemented in botocore MUST prefix their canonical names with
+ # 'custom' or we DO NOT guarantee that it will work with any features
+ # that this provides.
+ CANONICAL_NAME = None
+
+ def __init__(self, session=None):
+ self.session = session
+
+ def load(self):
+ """
+ Loads the credentials from their source & sets them on the object.
+
+ Subclasses should implement this method (by reading from disk, the
+ environment, the network or wherever), returning ``True`` if they were
+ found & loaded.
+
+        If not found, this method should return ``False``, indicating that the
+ ``CredentialResolver`` should fall back to the next available method.
+
+ The default implementation does nothing, assuming the user has set the
+ ``access_key/secret_key/token`` themselves.
+
+ :returns: Whether credentials were found & set
+ :rtype: Credentials
+ """
+ return True
+
+ def _extract_creds_from_mapping(self, mapping, *key_names):
+ found = []
+ for key_name in key_names:
+ try:
+ found.append(mapping[key_name])
+ except KeyError:
+ raise PartialCredentialsError(provider=self.METHOD,
+ cred_var=key_name)
+ return found
+
+
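The base class above defines the whole provider contract: a subclass sets METHOD (plus, for cross-SDK ``credential_source`` support, a CANONICAL_NAME with the mandated 'custom' prefix) and implements load(). A minimal sketch, assuming a hypothetical FixedCredentialProvider that serves static keys:

    from botocore.credentials import CredentialProvider, Credentials

    class FixedCredentialProvider(CredentialProvider):
        # METHOD and CANONICAL_NAME here are hypothetical illustration values.
        METHOD = 'fixed'
        CANONICAL_NAME = 'CustomFixed'  # non-botocore providers need a 'custom' prefix

        def __init__(self, access_key, secret_key):
            super(FixedCredentialProvider, self).__init__()
            self._access_key = access_key
            self._secret_key = secret_key

        def load(self):
            # Returning a Credentials object ends the resolver chain;
            # returning None lets the resolver try the next provider.
            return Credentials(self._access_key, self._secret_key,
                               method=self.METHOD)
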
+class ProcessProvider(CredentialProvider):
+
+ METHOD = 'custom-process'
+
+ def __init__(self, profile_name, load_config, popen=subprocess.Popen):
+ self._profile_name = profile_name
+ self._load_config = load_config
+ self._loaded_config = None
+ self._popen = popen
+
+ def load(self):
+ credential_process = self._credential_process
+ if credential_process is None:
+ return
+
+ creds_dict = self._retrieve_credentials_using(credential_process)
+ if creds_dict.get('expiry_time') is not None:
+ return RefreshableCredentials.create_from_metadata(
+ creds_dict,
+ lambda: self._retrieve_credentials_using(credential_process),
+ self.METHOD
+ )
+
+ return Credentials(
+ access_key=creds_dict['access_key'],
+ secret_key=creds_dict['secret_key'],
+ token=creds_dict.get('token'),
+ method=self.METHOD
+ )
+
+ def _retrieve_credentials_using(self, credential_process):
+ # We're not using shell=True, so we need to pass the
+ # command and all arguments as a list.
+ process_list = compat_shell_split(credential_process)
+ p = self._popen(process_list,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if p.returncode != 0:
+ raise CredentialRetrievalError(
+ provider=self.METHOD, error_msg=stderr.decode('utf-8'))
+ parsed = botocore.compat.json.loads(stdout.decode('utf-8'))
+ version = parsed.get('Version', '<Version key not provided>')
+ if version != 1:
+ raise CredentialRetrievalError(
+ provider=self.METHOD,
+ error_msg=("Unsupported version '%s' for credential process "
+ "provider, supported versions: 1" % version))
+ try:
+ return {
+ 'access_key': parsed['AccessKeyId'],
+ 'secret_key': parsed['SecretAccessKey'],
+ 'token': parsed.get('SessionToken'),
+ 'expiry_time': parsed.get('Expiration'),
+ }
+ except KeyError as e:
+ raise CredentialRetrievalError(
+ provider=self.METHOD,
+ error_msg="Missing required key in response: %s" % e
+ )
+
+ @property
+ def _credential_process(self):
+ if self._loaded_config is None:
+ self._loaded_config = self._load_config()
+ profile_config = self._loaded_config.get(
+ 'profiles', {}).get(self._profile_name, {})
+ return profile_config.get('credential_process')
+
+
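For context, the ``credential_process`` value resolved above is any executable configured in the profile; _retrieve_credentials_using() requires it to print a Version 1 JSON document on stdout. A sketch of a conforming helper script (key values are placeholders):

    #!/usr/bin/env python
    # Hypothetical credential_process target. The parser above rejects any
    # Version other than 1 and raises CredentialRetrievalError on missing keys.
    import json

    print(json.dumps({
        'Version': 1,
        'AccessKeyId': 'AKIAEXAMPLE',   # required
        'SecretAccessKey': 'secret',    # required
        'SessionToken': None,           # optional
        'Expiration': None,             # optional; a timestamp here yields
                                        # RefreshableCredentials instead
    }))
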
+class InstanceMetadataProvider(CredentialProvider):
+ METHOD = 'iam-role'
+ CANONICAL_NAME = 'Ec2InstanceMetadata'
+
+ def __init__(self, iam_role_fetcher):
+ self._role_fetcher = iam_role_fetcher
+
+ def load(self):
+ fetcher = self._role_fetcher
+ # We do the first request, to see if we get useful data back.
+ # If not, we'll pass & move on to whatever's next in the credential
+ # chain.
+ metadata = fetcher.retrieve_iam_role_credentials()
+ if not metadata:
+ return None
+ logger.debug('Found credentials from IAM Role: %s',
+ metadata['role_name'])
+ # We manually set the data here, since we already made the request &
+ # have it. When the expiry is hit, the credentials will auto-refresh
+ # themselves.
+ creds = RefreshableCredentials.create_from_metadata(
+ metadata,
+ method=self.METHOD,
+ refresh_using=fetcher.retrieve_iam_role_credentials,
+ )
+ return creds
+
+
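A sketch of how this provider is typically constructed, assuming botocore's InstanceMetadataFetcher from botocore.utils; it only yields credentials on an EC2 instance with an attached role:

    from botocore.credentials import InstanceMetadataProvider
    from botocore.utils import InstanceMetadataFetcher

    provider = InstanceMetadataProvider(
        iam_role_fetcher=InstanceMetadataFetcher(timeout=1.0, num_attempts=2))
    creds = provider.load()  # None when instance metadata is unreachable
    if creds is not None:
        print(creds.method)  # 'iam-role'
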
+class EnvProvider(CredentialProvider):
+ METHOD = 'env'
+ CANONICAL_NAME = 'Environment'
+ ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
+ SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
+    # The token can come from either of these env vars.
+ # AWS_SESSION_TOKEN is what other AWS SDKs have standardized on.
+ TOKENS = ['AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN']
+ EXPIRY_TIME = 'AWS_CREDENTIAL_EXPIRATION'
+
+ def __init__(self, environ=None, mapping=None):
+ """
+
+ :param environ: The environment variables (defaults to
+ ``os.environ`` if no value is provided).
+ :param mapping: An optional mapping of variable names to
+ environment variable names. Use this if you want to
+ change the mapping of access_key->AWS_ACCESS_KEY_ID, etc.
+            The dict can have up to 4 keys: ``access_key``, ``secret_key``,
+            ``token``, ``expiry_time``.
+ """
+ if environ is None:
+ environ = os.environ
+ self.environ = environ
+ self._mapping = self._build_mapping(mapping)
+
+ def _build_mapping(self, mapping):
+ # Mapping of variable name to env var name.
+ var_mapping = {}
+ if mapping is None:
+ # Use the class var default.
+ var_mapping['access_key'] = self.ACCESS_KEY
+ var_mapping['secret_key'] = self.SECRET_KEY
+ var_mapping['token'] = self.TOKENS
+ var_mapping['expiry_time'] = self.EXPIRY_TIME
+ else:
+ var_mapping['access_key'] = mapping.get(
+ 'access_key', self.ACCESS_KEY)
+ var_mapping['secret_key'] = mapping.get(
+ 'secret_key', self.SECRET_KEY)
+ var_mapping['token'] = mapping.get(
+ 'token', self.TOKENS)
+ if not isinstance(var_mapping['token'], list):
+ var_mapping['token'] = [var_mapping['token']]
+ var_mapping['expiry_time'] = mapping.get(
+ 'expiry_time', self.EXPIRY_TIME)
+ return var_mapping
+
+ def load(self):
+ """
+ Search for credentials in explicit environment variables.
+ """
access_key = self.environ.get(self._mapping['access_key'], '')
if access_key:
- logger.info('Found credentials in environment variables.')
- fetcher = self._create_credentials_fetcher()
- credentials = fetcher(require_expiry=False)
-
- expiry_time = credentials['expiry_time']
- if expiry_time is not None:
- expiry_time = parse(expiry_time)
- return RefreshableCredentials(
- credentials['access_key'], credentials['secret_key'],
- credentials['token'], expiry_time,
- refresh_using=fetcher, method=self.METHOD
- )
-
- return Credentials(
- credentials['access_key'], credentials['secret_key'],
- credentials['token'], method=self.METHOD
- )
- else:
- return None
-
- def _create_credentials_fetcher(self):
- mapping = self._mapping
- method = self.METHOD
- environ = self.environ
-
- def fetch_credentials(require_expiry=True):
- credentials = {}
-
+ logger.info('Found credentials in environment variables.')
+ fetcher = self._create_credentials_fetcher()
+ credentials = fetcher(require_expiry=False)
+
+ expiry_time = credentials['expiry_time']
+ if expiry_time is not None:
+ expiry_time = parse(expiry_time)
+ return RefreshableCredentials(
+ credentials['access_key'], credentials['secret_key'],
+ credentials['token'], expiry_time,
+ refresh_using=fetcher, method=self.METHOD
+ )
+
+ return Credentials(
+ credentials['access_key'], credentials['secret_key'],
+ credentials['token'], method=self.METHOD
+ )
+ else:
+ return None
+
+ def _create_credentials_fetcher(self):
+ mapping = self._mapping
+ method = self.METHOD
+ environ = self.environ
+
+ def fetch_credentials(require_expiry=True):
+ credentials = {}
+
access_key = environ.get(mapping['access_key'], '')
if not access_key:
- raise PartialCredentialsError(
- provider=method, cred_var=mapping['access_key'])
- credentials['access_key'] = access_key
-
+ raise PartialCredentialsError(
+ provider=method, cred_var=mapping['access_key'])
+ credentials['access_key'] = access_key
+
secret_key = environ.get(mapping['secret_key'], '')
if not secret_key:
- raise PartialCredentialsError(
- provider=method, cred_var=mapping['secret_key'])
- credentials['secret_key'] = secret_key
-
+ raise PartialCredentialsError(
+ provider=method, cred_var=mapping['secret_key'])
+ credentials['secret_key'] = secret_key
+
credentials['token'] = None
- for token_env_var in mapping['token']:
+ for token_env_var in mapping['token']:
token = environ.get(token_env_var, '')
if token:
credentials['token'] = token
- break
-
+ break
+
credentials['expiry_time'] = None
expiry_time = environ.get(mapping['expiry_time'], '')
if expiry_time:
credentials['expiry_time'] = expiry_time
if require_expiry and not expiry_time:
- raise PartialCredentialsError(
- provider=method, cred_var=mapping['expiry_time'])
-
- return credentials
-
- return fetch_credentials
-
-
-class OriginalEC2Provider(CredentialProvider):
- METHOD = 'ec2-credentials-file'
- CANONICAL_NAME = 'Ec2Config'
-
- CRED_FILE_ENV = 'AWS_CREDENTIAL_FILE'
- ACCESS_KEY = 'AWSAccessKeyId'
- SECRET_KEY = 'AWSSecretKey'
-
- def __init__(self, environ=None, parser=None):
- if environ is None:
- environ = os.environ
- if parser is None:
- parser = parse_key_val_file
- self._environ = environ
- self._parser = parser
-
- def load(self):
- """
- Search for a credential file used by original EC2 CLI tools.
- """
- if 'AWS_CREDENTIAL_FILE' in self._environ:
- full_path = os.path.expanduser(
- self._environ['AWS_CREDENTIAL_FILE'])
- creds = self._parser(full_path)
- if self.ACCESS_KEY in creds:
- logger.info('Found credentials in AWS_CREDENTIAL_FILE.')
- access_key = creds[self.ACCESS_KEY]
- secret_key = creds[self.SECRET_KEY]
- # EC2 creds file doesn't support session tokens.
- return Credentials(access_key, secret_key, method=self.METHOD)
- else:
- return None
-
-
-class SharedCredentialProvider(CredentialProvider):
- METHOD = 'shared-credentials-file'
- CANONICAL_NAME = 'SharedCredentials'
-
- ACCESS_KEY = 'aws_access_key_id'
- SECRET_KEY = 'aws_secret_access_key'
- # Same deal as the EnvProvider above. Botocore originally supported
- # aws_security_token, but the SDKs are standardizing on aws_session_token
- # so we support both.
- TOKENS = ['aws_security_token', 'aws_session_token']
-
- def __init__(self, creds_filename, profile_name=None, ini_parser=None):
- self._creds_filename = creds_filename
- if profile_name is None:
- profile_name = 'default'
- self._profile_name = profile_name
- if ini_parser is None:
- ini_parser = botocore.configloader.raw_config_parse
- self._ini_parser = ini_parser
-
- def load(self):
- try:
- available_creds = self._ini_parser(self._creds_filename)
- except ConfigNotFound:
- return None
- if self._profile_name in available_creds:
- config = available_creds[self._profile_name]
- if self.ACCESS_KEY in config:
- logger.info("Found credentials in shared credentials file: %s",
- self._creds_filename)
- access_key, secret_key = self._extract_creds_from_mapping(
- config, self.ACCESS_KEY, self.SECRET_KEY)
- token = self._get_session_token(config)
- return Credentials(access_key, secret_key, token,
- method=self.METHOD)
-
- def _get_session_token(self, config):
- for token_envvar in self.TOKENS:
- if token_envvar in config:
- return config[token_envvar]
-
-
-class ConfigProvider(CredentialProvider):
- """INI based config provider with profile sections."""
- METHOD = 'config-file'
- CANONICAL_NAME = 'SharedConfig'
-
- ACCESS_KEY = 'aws_access_key_id'
- SECRET_KEY = 'aws_secret_access_key'
- # Same deal as the EnvProvider above. Botocore originally supported
- # aws_security_token, but the SDKs are standardizing on aws_session_token
- # so we support both.
- TOKENS = ['aws_security_token', 'aws_session_token']
-
- def __init__(self, config_filename, profile_name, config_parser=None):
- """
-
- :param config_filename: The session configuration scoped to the current
- profile. This is available via ``session.config``.
- :param profile_name: The name of the current profile.
- :param config_parser: A config parser callable.
-
- """
- self._config_filename = config_filename
- self._profile_name = profile_name
- if config_parser is None:
- config_parser = botocore.configloader.load_config
- self._config_parser = config_parser
-
- def load(self):
- """
-        If there are credentials in the configuration associated with
- the session, use those.
- """
- try:
- full_config = self._config_parser(self._config_filename)
- except ConfigNotFound:
- return None
- if self._profile_name in full_config['profiles']:
- profile_config = full_config['profiles'][self._profile_name]
- if self.ACCESS_KEY in profile_config:
- logger.info("Credentials found in config file: %s",
- self._config_filename)
- access_key, secret_key = self._extract_creds_from_mapping(
- profile_config, self.ACCESS_KEY, self.SECRET_KEY)
- token = self._get_session_token(profile_config)
- return Credentials(access_key, secret_key, token,
- method=self.METHOD)
- else:
- return None
-
- def _get_session_token(self, profile_config):
- for token_name in self.TOKENS:
- if token_name in profile_config:
- return profile_config[token_name]
-
-
-class BotoProvider(CredentialProvider):
- METHOD = 'boto-config'
- CANONICAL_NAME = 'Boto2Config'
-
- BOTO_CONFIG_ENV = 'BOTO_CONFIG'
- DEFAULT_CONFIG_FILENAMES = ['/etc/boto.cfg', '~/.boto']
- ACCESS_KEY = 'aws_access_key_id'
- SECRET_KEY = 'aws_secret_access_key'
-
- def __init__(self, environ=None, ini_parser=None):
- if environ is None:
- environ = os.environ
- if ini_parser is None:
- ini_parser = botocore.configloader.raw_config_parse
- self._environ = environ
- self._ini_parser = ini_parser
-
- def load(self):
- """
- Look for credentials in boto config file.
- """
- if self.BOTO_CONFIG_ENV in self._environ:
- potential_locations = [self._environ[self.BOTO_CONFIG_ENV]]
- else:
- potential_locations = self.DEFAULT_CONFIG_FILENAMES
- for filename in potential_locations:
- try:
- config = self._ini_parser(filename)
- except ConfigNotFound:
- # Move on to the next potential config file name.
- continue
- if 'Credentials' in config:
- credentials = config['Credentials']
- if self.ACCESS_KEY in credentials:
- logger.info("Found credentials in boto config file: %s",
- filename)
- access_key, secret_key = self._extract_creds_from_mapping(
- credentials, self.ACCESS_KEY, self.SECRET_KEY)
- return Credentials(access_key, secret_key,
- method=self.METHOD)
-
-
-class AssumeRoleProvider(CredentialProvider):
- METHOD = 'assume-role'
- # The AssumeRole provider is logically part of the SharedConfig and
- # SharedCredentials providers. Since the purpose of the canonical name
- # is to provide cross-sdk compatibility, calling code will need to be
- # aware that either of those providers should be tied to the AssumeRole
- # provider as much as possible.
- CANONICAL_NAME = None
- ROLE_CONFIG_VAR = 'role_arn'
+ raise PartialCredentialsError(
+ provider=method, cred_var=mapping['expiry_time'])
+
+ return credentials
+
+ return fetch_credentials
+
+
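A small sketch of the variable mapping above in action; passing an explicit environ dict keeps os.environ untouched (key values are placeholders):

    from botocore.credentials import EnvProvider

    fake_env = {
        'AWS_ACCESS_KEY_ID': 'AKIAEXAMPLE',
        'AWS_SECRET_ACCESS_KEY': 'secret',
        'AWS_SESSION_TOKEN': 'token',  # AWS_SECURITY_TOKEN is honored too
    }
    creds = EnvProvider(environ=fake_env).load()
    print(creds.method)  # 'env'; adding AWS_CREDENTIAL_EXPIRATION would make
                         # load() return RefreshableCredentials instead
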
+class OriginalEC2Provider(CredentialProvider):
+ METHOD = 'ec2-credentials-file'
+ CANONICAL_NAME = 'Ec2Config'
+
+ CRED_FILE_ENV = 'AWS_CREDENTIAL_FILE'
+ ACCESS_KEY = 'AWSAccessKeyId'
+ SECRET_KEY = 'AWSSecretKey'
+
+ def __init__(self, environ=None, parser=None):
+ if environ is None:
+ environ = os.environ
+ if parser is None:
+ parser = parse_key_val_file
+ self._environ = environ
+ self._parser = parser
+
+ def load(self):
+ """
+ Search for a credential file used by original EC2 CLI tools.
+ """
+ if 'AWS_CREDENTIAL_FILE' in self._environ:
+ full_path = os.path.expanduser(
+ self._environ['AWS_CREDENTIAL_FILE'])
+ creds = self._parser(full_path)
+ if self.ACCESS_KEY in creds:
+ logger.info('Found credentials in AWS_CREDENTIAL_FILE.')
+ access_key = creds[self.ACCESS_KEY]
+ secret_key = creds[self.SECRET_KEY]
+ # EC2 creds file doesn't support session tokens.
+ return Credentials(access_key, secret_key, method=self.METHOD)
+ else:
+ return None
+
+
+class SharedCredentialProvider(CredentialProvider):
+ METHOD = 'shared-credentials-file'
+ CANONICAL_NAME = 'SharedCredentials'
+
+ ACCESS_KEY = 'aws_access_key_id'
+ SECRET_KEY = 'aws_secret_access_key'
+ # Same deal as the EnvProvider above. Botocore originally supported
+ # aws_security_token, but the SDKs are standardizing on aws_session_token
+ # so we support both.
+ TOKENS = ['aws_security_token', 'aws_session_token']
+
+ def __init__(self, creds_filename, profile_name=None, ini_parser=None):
+ self._creds_filename = creds_filename
+ if profile_name is None:
+ profile_name = 'default'
+ self._profile_name = profile_name
+ if ini_parser is None:
+ ini_parser = botocore.configloader.raw_config_parse
+ self._ini_parser = ini_parser
+
+ def load(self):
+ try:
+ available_creds = self._ini_parser(self._creds_filename)
+ except ConfigNotFound:
+ return None
+ if self._profile_name in available_creds:
+ config = available_creds[self._profile_name]
+ if self.ACCESS_KEY in config:
+ logger.info("Found credentials in shared credentials file: %s",
+ self._creds_filename)
+ access_key, secret_key = self._extract_creds_from_mapping(
+ config, self.ACCESS_KEY, self.SECRET_KEY)
+ token = self._get_session_token(config)
+ return Credentials(access_key, secret_key, token,
+ method=self.METHOD)
+
+ def _get_session_token(self, config):
+ for token_envvar in self.TOKENS:
+ if token_envvar in config:
+ return config[token_envvar]
+
+
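A sketch of the provider above run against a throwaway INI file; the profile name and key values are placeholders:

    import tempfile
    import textwrap

    from botocore.credentials import SharedCredentialProvider

    with tempfile.NamedTemporaryFile('w', suffix='.ini', delete=False) as f:
        f.write(textwrap.dedent('''\
            [dev]
            aws_access_key_id = AKIAEXAMPLE
            aws_secret_access_key = secret
            aws_session_token = token
        '''))
    creds = SharedCredentialProvider(f.name, profile_name='dev').load()
    print(creds.method)  # 'shared-credentials-file'
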
+class ConfigProvider(CredentialProvider):
+ """INI based config provider with profile sections."""
+ METHOD = 'config-file'
+ CANONICAL_NAME = 'SharedConfig'
+
+ ACCESS_KEY = 'aws_access_key_id'
+ SECRET_KEY = 'aws_secret_access_key'
+ # Same deal as the EnvProvider above. Botocore originally supported
+ # aws_security_token, but the SDKs are standardizing on aws_session_token
+ # so we support both.
+ TOKENS = ['aws_security_token', 'aws_session_token']
+
+ def __init__(self, config_filename, profile_name, config_parser=None):
+ """
+
+ :param config_filename: The session configuration scoped to the current
+ profile. This is available via ``session.config``.
+ :param profile_name: The name of the current profile.
+ :param config_parser: A config parser callable.
+
+ """
+ self._config_filename = config_filename
+ self._profile_name = profile_name
+ if config_parser is None:
+ config_parser = botocore.configloader.load_config
+ self._config_parser = config_parser
+
+ def load(self):
+ """
+        If there are credentials in the configuration associated with
+ the session, use those.
+ """
+ try:
+ full_config = self._config_parser(self._config_filename)
+ except ConfigNotFound:
+ return None
+ if self._profile_name in full_config['profiles']:
+ profile_config = full_config['profiles'][self._profile_name]
+ if self.ACCESS_KEY in profile_config:
+ logger.info("Credentials found in config file: %s",
+ self._config_filename)
+ access_key, secret_key = self._extract_creds_from_mapping(
+ profile_config, self.ACCESS_KEY, self.SECRET_KEY)
+ token = self._get_session_token(profile_config)
+ return Credentials(access_key, secret_key, token,
+ method=self.METHOD)
+ else:
+ return None
+
+ def _get_session_token(self, profile_config):
+ for token_name in self.TOKENS:
+ if token_name in profile_config:
+ return profile_config[token_name]
+
+
+class BotoProvider(CredentialProvider):
+ METHOD = 'boto-config'
+ CANONICAL_NAME = 'Boto2Config'
+
+ BOTO_CONFIG_ENV = 'BOTO_CONFIG'
+ DEFAULT_CONFIG_FILENAMES = ['/etc/boto.cfg', '~/.boto']
+ ACCESS_KEY = 'aws_access_key_id'
+ SECRET_KEY = 'aws_secret_access_key'
+
+ def __init__(self, environ=None, ini_parser=None):
+ if environ is None:
+ environ = os.environ
+ if ini_parser is None:
+ ini_parser = botocore.configloader.raw_config_parse
+ self._environ = environ
+ self._ini_parser = ini_parser
+
+ def load(self):
+ """
+ Look for credentials in boto config file.
+ """
+ if self.BOTO_CONFIG_ENV in self._environ:
+ potential_locations = [self._environ[self.BOTO_CONFIG_ENV]]
+ else:
+ potential_locations = self.DEFAULT_CONFIG_FILENAMES
+ for filename in potential_locations:
+ try:
+ config = self._ini_parser(filename)
+ except ConfigNotFound:
+ # Move on to the next potential config file name.
+ continue
+ if 'Credentials' in config:
+ credentials = config['Credentials']
+ if self.ACCESS_KEY in credentials:
+ logger.info("Found credentials in boto config file: %s",
+ filename)
+ access_key, secret_key = self._extract_creds_from_mapping(
+ credentials, self.ACCESS_KEY, self.SECRET_KEY)
+ return Credentials(access_key, secret_key,
+ method=self.METHOD)
+
+
+class AssumeRoleProvider(CredentialProvider):
+ METHOD = 'assume-role'
+ # The AssumeRole provider is logically part of the SharedConfig and
+ # SharedCredentials providers. Since the purpose of the canonical name
+ # is to provide cross-sdk compatibility, calling code will need to be
+ # aware that either of those providers should be tied to the AssumeRole
+ # provider as much as possible.
+ CANONICAL_NAME = None
+ ROLE_CONFIG_VAR = 'role_arn'
WEB_IDENTITY_TOKE_FILE_VAR = 'web_identity_token_file'
-    # Credentials are considered expired (and will be refreshed) once the
-    # remaining time until the credentials expire is less than the
- # EXPIRY_WINDOW.
- EXPIRY_WINDOW_SECONDS = 60 * 15
-
- def __init__(self, load_config, client_creator, cache, profile_name,
+    # Credentials are considered expired (and will be refreshed) once the
+    # remaining time until the credentials expire is less than the
+ # EXPIRY_WINDOW.
+ EXPIRY_WINDOW_SECONDS = 60 * 15
+
+ def __init__(self, load_config, client_creator, cache, profile_name,
prompter=getpass.getpass, credential_sourcer=None,
profile_provider_builder=None):
- """
- :type load_config: callable
- :param load_config: A function that accepts no arguments, and
- when called, will return the full configuration dictionary
- for the session (``session.full_config``).
-
- :type client_creator: callable
- :param client_creator: A factory function that will create
- a client when called. Has the same interface as
- ``botocore.session.Session.create_client``.
-
- :type cache: dict
- :param cache: An object that supports ``__getitem__``,
- ``__setitem__``, and ``__contains__``. An example
- of this is the ``JSONFileCache`` class in the CLI.
-
- :type profile_name: str
- :param profile_name: The name of the profile.
-
- :type prompter: callable
- :param prompter: A callable that returns input provided
-            by the user (e.g. raw_input, getpass.getpass, etc.).
-
- :type credential_sourcer: CanonicalNameCredentialSourcer
- :param credential_sourcer: A credential provider that takes a
- configuration, which is used to provide the source credentials
- for the STS call.
- """
- #: The cache used to first check for assumed credentials.
- #: This is checked before making the AssumeRole API
- #: calls and can be useful if you have short lived
- #: scripts and you'd like to avoid calling AssumeRole
- #: until the credentials are expired.
- self.cache = cache
- self._load_config = load_config
-        # client_creator is a callable that creates a client;
-        # it's basically session.create_client.
- self._client_creator = client_creator
- self._profile_name = profile_name
- self._prompter = prompter
- # The _loaded_config attribute will be populated from the
- # load_config() function once the configuration is actually
- # loaded. The reason we go through all this instead of just
-        # requiring that the loaded_config be passed to us is so that
-        # we can defer configuration loading until we actually try
- # to load credentials (as opposed to when the object is
- # instantiated).
- self._loaded_config = {}
- self._credential_sourcer = credential_sourcer
+ """
+ :type load_config: callable
+ :param load_config: A function that accepts no arguments, and
+ when called, will return the full configuration dictionary
+ for the session (``session.full_config``).
+
+ :type client_creator: callable
+ :param client_creator: A factory function that will create
+ a client when called. Has the same interface as
+ ``botocore.session.Session.create_client``.
+
+ :type cache: dict
+ :param cache: An object that supports ``__getitem__``,
+ ``__setitem__``, and ``__contains__``. An example
+ of this is the ``JSONFileCache`` class in the CLI.
+
+ :type profile_name: str
+ :param profile_name: The name of the profile.
+
+ :type prompter: callable
+ :param prompter: A callable that returns input provided
+            by the user (e.g. raw_input, getpass.getpass, etc.).
+
+ :type credential_sourcer: CanonicalNameCredentialSourcer
+ :param credential_sourcer: A credential provider that takes a
+ configuration, which is used to provide the source credentials
+ for the STS call.
+ """
+ #: The cache used to first check for assumed credentials.
+ #: This is checked before making the AssumeRole API
+ #: calls and can be useful if you have short lived
+ #: scripts and you'd like to avoid calling AssumeRole
+ #: until the credentials are expired.
+ self.cache = cache
+ self._load_config = load_config
+        # client_creator is a callable that creates a client;
+        # it's basically session.create_client.
+ self._client_creator = client_creator
+ self._profile_name = profile_name
+ self._prompter = prompter
+ # The _loaded_config attribute will be populated from the
+ # load_config() function once the configuration is actually
+ # loaded. The reason we go through all this instead of just
+        # requiring that the loaded_config be passed to us is so that
+        # we can defer configuration loading until we actually try
+ # to load credentials (as opposed to when the object is
+ # instantiated).
+ self._loaded_config = {}
+ self._credential_sourcer = credential_sourcer
self._profile_provider_builder = profile_provider_builder
- self._visited_profiles = [self._profile_name]
-
- def load(self):
- self._loaded_config = self._load_config()
+ self._visited_profiles = [self._profile_name]
+
+ def load(self):
+ self._loaded_config = self._load_config()
profiles = self._loaded_config.get('profiles', {})
profile = profiles.get(self._profile_name, {})
if self._has_assume_role_config_vars(profile):
- return self._load_creds_via_assume_role(self._profile_name)
-
+ return self._load_creds_via_assume_role(self._profile_name)
+
def _has_assume_role_config_vars(self, profile):
return (
self.ROLE_CONFIG_VAR in profile and
@@ -1416,114 +1416,114 @@ class AssumeRoleProvider(CredentialProvider):
# prevent the case when we're doing an assume role chain.
self.WEB_IDENTITY_TOKE_FILE_VAR not in profile
)
-
- def _load_creds_via_assume_role(self, profile_name):
- role_config = self._get_role_config(profile_name)
- source_credentials = self._resolve_source_credentials(
- role_config, profile_name
- )
-
- extra_args = {}
- role_session_name = role_config.get('role_session_name')
- if role_session_name is not None:
- extra_args['RoleSessionName'] = role_session_name
-
- external_id = role_config.get('external_id')
- if external_id is not None:
- extra_args['ExternalId'] = external_id
-
- mfa_serial = role_config.get('mfa_serial')
- if mfa_serial is not None:
- extra_args['SerialNumber'] = mfa_serial
-
+
+ def _load_creds_via_assume_role(self, profile_name):
+ role_config = self._get_role_config(profile_name)
+ source_credentials = self._resolve_source_credentials(
+ role_config, profile_name
+ )
+
+ extra_args = {}
+ role_session_name = role_config.get('role_session_name')
+ if role_session_name is not None:
+ extra_args['RoleSessionName'] = role_session_name
+
+ external_id = role_config.get('external_id')
+ if external_id is not None:
+ extra_args['ExternalId'] = external_id
+
+ mfa_serial = role_config.get('mfa_serial')
+ if mfa_serial is not None:
+ extra_args['SerialNumber'] = mfa_serial
+
duration_seconds = role_config.get('duration_seconds')
if duration_seconds is not None:
extra_args['DurationSeconds'] = duration_seconds
- fetcher = AssumeRoleCredentialFetcher(
- client_creator=self._client_creator,
- source_credentials=source_credentials,
- role_arn=role_config['role_arn'],
- extra_args=extra_args,
- mfa_prompter=self._prompter,
- cache=self.cache,
- )
- refresher = fetcher.fetch_credentials
- if mfa_serial is not None:
- refresher = create_mfa_serial_refresher(refresher)
-
- # The initial credentials are empty and the expiration time is set
- # to now so that we can delay the call to assume role until it is
- # strictly needed.
- return DeferredRefreshableCredentials(
- method=self.METHOD,
- refresh_using=refresher,
- time_fetcher=_local_now
- )
-
- def _get_role_config(self, profile_name):
- """Retrieves and validates the role configuration for the profile."""
- profiles = self._loaded_config.get('profiles', {})
-
- profile = profiles[profile_name]
- source_profile = profile.get('source_profile')
- role_arn = profile['role_arn']
- credential_source = profile.get('credential_source')
- mfa_serial = profile.get('mfa_serial')
- external_id = profile.get('external_id')
- role_session_name = profile.get('role_session_name')
+ fetcher = AssumeRoleCredentialFetcher(
+ client_creator=self._client_creator,
+ source_credentials=source_credentials,
+ role_arn=role_config['role_arn'],
+ extra_args=extra_args,
+ mfa_prompter=self._prompter,
+ cache=self.cache,
+ )
+ refresher = fetcher.fetch_credentials
+ if mfa_serial is not None:
+ refresher = create_mfa_serial_refresher(refresher)
+
+ # The initial credentials are empty and the expiration time is set
+ # to now so that we can delay the call to assume role until it is
+ # strictly needed.
+ return DeferredRefreshableCredentials(
+ method=self.METHOD,
+ refresh_using=refresher,
+ time_fetcher=_local_now
+ )
+
+ def _get_role_config(self, profile_name):
+ """Retrieves and validates the role configuration for the profile."""
+ profiles = self._loaded_config.get('profiles', {})
+
+ profile = profiles[profile_name]
+ source_profile = profile.get('source_profile')
+ role_arn = profile['role_arn']
+ credential_source = profile.get('credential_source')
+ mfa_serial = profile.get('mfa_serial')
+ external_id = profile.get('external_id')
+ role_session_name = profile.get('role_session_name')
duration_seconds = profile.get('duration_seconds')
-
- role_config = {
- 'role_arn': role_arn,
- 'external_id': external_id,
- 'mfa_serial': mfa_serial,
- 'role_session_name': role_session_name,
- 'source_profile': source_profile,
- 'credential_source': credential_source
- }
-
+
+ role_config = {
+ 'role_arn': role_arn,
+ 'external_id': external_id,
+ 'mfa_serial': mfa_serial,
+ 'role_session_name': role_session_name,
+ 'source_profile': source_profile,
+ 'credential_source': credential_source
+ }
+
if duration_seconds is not None:
try:
role_config['duration_seconds'] = int(duration_seconds)
except ValueError:
pass
- # Either the credential source or the source profile must be
- # specified, but not both.
- if credential_source is not None and source_profile is not None:
- raise InvalidConfigError(
- error_msg=(
- 'The profile "%s" contains both source_profile and '
- 'credential_source.' % profile_name
- )
- )
- elif credential_source is None and source_profile is None:
- raise PartialCredentialsError(
- provider=self.METHOD,
- cred_var='source_profile or credential_source'
- )
- elif credential_source is not None:
- self._validate_credential_source(
- profile_name, credential_source)
- else:
- self._validate_source_profile(profile_name, source_profile)
-
- return role_config
-
- def _validate_credential_source(self, parent_profile, credential_source):
- if self._credential_sourcer is None:
- raise InvalidConfigError(error_msg=(
- 'The credential_source "%s" is specified in profile "%s", '
- 'but no source provider was configured.' % (
- credential_source, parent_profile)
- ))
- if not self._credential_sourcer.is_supported(credential_source):
- raise InvalidConfigError(error_msg=(
- 'The credential source "%s" referenced in profile "%s" is not '
- 'valid.' % (credential_source, parent_profile)
- ))
-
+ # Either the credential source or the source profile must be
+ # specified, but not both.
+ if credential_source is not None and source_profile is not None:
+ raise InvalidConfigError(
+ error_msg=(
+ 'The profile "%s" contains both source_profile and '
+ 'credential_source.' % profile_name
+ )
+ )
+ elif credential_source is None and source_profile is None:
+ raise PartialCredentialsError(
+ provider=self.METHOD,
+ cred_var='source_profile or credential_source'
+ )
+ elif credential_source is not None:
+ self._validate_credential_source(
+ profile_name, credential_source)
+ else:
+ self._validate_source_profile(profile_name, source_profile)
+
+ return role_config
+
+ def _validate_credential_source(self, parent_profile, credential_source):
+ if self._credential_sourcer is None:
+ raise InvalidConfigError(error_msg=(
+ 'The credential_source "%s" is specified in profile "%s", '
+ 'but no source provider was configured.' % (
+ credential_source, parent_profile)
+ ))
+ if not self._credential_sourcer.is_supported(credential_source):
+ raise InvalidConfigError(error_msg=(
+ 'The credential source "%s" referenced in profile "%s" is not '
+ 'valid.' % (credential_source, parent_profile)
+ ))
+
def _source_profile_has_credentials(self, profile):
return any([
self._has_static_credentials(profile),
@@ -1532,68 +1532,68 @@ class AssumeRoleProvider(CredentialProvider):
def _validate_source_profile(self, parent_profile_name,
source_profile_name):
- profiles = self._loaded_config.get('profiles', {})
+ profiles = self._loaded_config.get('profiles', {})
if source_profile_name not in profiles:
- raise InvalidConfigError(
- error_msg=(
- 'The source_profile "%s" referenced in '
- 'the profile "%s" does not exist.' % (
+ raise InvalidConfigError(
+ error_msg=(
+ 'The source_profile "%s" referenced in '
+ 'the profile "%s" does not exist.' % (
source_profile_name, parent_profile_name)
- )
- )
-
+ )
+ )
+
source_profile = profiles[source_profile_name]
- # Make sure we aren't going into an infinite loop. If we haven't
- # visited the profile yet, we're good.
+ # Make sure we aren't going into an infinite loop. If we haven't
+ # visited the profile yet, we're good.
if source_profile_name not in self._visited_profiles:
- return
-
- # If we have visited the profile and the profile isn't simply
- # referencing itself, that's an infinite loop.
+ return
+
+ # If we have visited the profile and the profile isn't simply
+ # referencing itself, that's an infinite loop.
if source_profile_name != parent_profile_name:
- raise InfiniteLoopConfigError(
+ raise InfiniteLoopConfigError(
source_profile=source_profile_name,
- visited_profiles=self._visited_profiles
- )
-
- # A profile is allowed to reference itself so that it can source
- # static credentials and have configuration all in the same
- # profile. This will only ever work for the top level assume
- # role because the static credentials will otherwise take
- # precedence.
+ visited_profiles=self._visited_profiles
+ )
+
+ # A profile is allowed to reference itself so that it can source
+ # static credentials and have configuration all in the same
+ # profile. This will only ever work for the top level assume
+ # role because the static credentials will otherwise take
+ # precedence.
if not self._has_static_credentials(source_profile):
- raise InfiniteLoopConfigError(
+ raise InfiniteLoopConfigError(
source_profile=source_profile_name,
- visited_profiles=self._visited_profiles
- )
-
- def _has_static_credentials(self, profile):
- static_keys = ['aws_secret_access_key', 'aws_access_key_id']
- return any(static_key in profile for static_key in static_keys)
-
- def _resolve_source_credentials(self, role_config, profile_name):
- credential_source = role_config.get('credential_source')
- if credential_source is not None:
- return self._resolve_credentials_from_source(
- credential_source, profile_name
- )
-
- source_profile = role_config['source_profile']
- self._visited_profiles.append(source_profile)
- return self._resolve_credentials_from_profile(source_profile)
-
- def _resolve_credentials_from_profile(self, profile_name):
- profiles = self._loaded_config.get('profiles', {})
- profile = profiles[profile_name]
-
+ visited_profiles=self._visited_profiles
+ )
+
+ def _has_static_credentials(self, profile):
+ static_keys = ['aws_secret_access_key', 'aws_access_key_id']
+ return any(static_key in profile for static_key in static_keys)
+
+ def _resolve_source_credentials(self, role_config, profile_name):
+ credential_source = role_config.get('credential_source')
+ if credential_source is not None:
+ return self._resolve_credentials_from_source(
+ credential_source, profile_name
+ )
+
+ source_profile = role_config['source_profile']
+ self._visited_profiles.append(source_profile)
+ return self._resolve_credentials_from_profile(source_profile)
+
+ def _resolve_credentials_from_profile(self, profile_name):
+ profiles = self._loaded_config.get('profiles', {})
+ profile = profiles[profile_name]
+
if self._has_static_credentials(profile) and \
not self._profile_provider_builder:
# This is only here for backwards compatibility. If this provider
            # isn't given a profile provider builder we still want to be able
            # to handle the basic static credential case as we would before
            # the profile provider builder parameter was added.
- return self._resolve_static_credentials_from_profile(profile)
+ return self._resolve_static_credentials_from_profile(profile)
elif self._has_static_credentials(profile) or \
not self._has_assume_role_config_vars(profile):
profile_providers = self._profile_provider_builder.providers(
@@ -1610,35 +1610,35 @@ class AssumeRoleProvider(CredentialProvider):
error_msg=error_message % profile_name,
)
return credentials
-
- return self._load_creds_via_assume_role(profile_name)
-
- def _resolve_static_credentials_from_profile(self, profile):
- try:
- return Credentials(
- access_key=profile['aws_access_key_id'],
- secret_key=profile['aws_secret_access_key'],
- token=profile.get('aws_session_token')
- )
- except KeyError as e:
- raise PartialCredentialsError(
- provider=self.METHOD, cred_var=str(e))
-
- def _resolve_credentials_from_source(self, credential_source,
- profile_name):
- credentials = self._credential_sourcer.source_credentials(
- credential_source)
- if credentials is None:
- raise CredentialRetrievalError(
- provider=credential_source,
- error_msg=(
- 'No credentials found in credential_source referenced '
- 'in profile %s' % profile_name
- )
- )
- return credentials
-
-
+
+ return self._load_creds_via_assume_role(profile_name)
+
+ def _resolve_static_credentials_from_profile(self, profile):
+ try:
+ return Credentials(
+ access_key=profile['aws_access_key_id'],
+ secret_key=profile['aws_secret_access_key'],
+ token=profile.get('aws_session_token')
+ )
+ except KeyError as e:
+ raise PartialCredentialsError(
+ provider=self.METHOD, cred_var=str(e))
+
+ def _resolve_credentials_from_source(self, credential_source,
+ profile_name):
+ credentials = self._credential_sourcer.source_credentials(
+ credential_source)
+ if credentials is None:
+ raise CredentialRetrievalError(
+ provider=credential_source,
+ error_msg=(
+ 'No credentials found in credential_source referenced '
+ 'in profile %s' % profile_name
+ )
+ )
+ return credentials
+
+
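To make the resolution rules above concrete, here is a sketch that drives the provider with an in-memory config: the 'dev' profile assumes a role using static keys from its 'base' source profile (the ARN and keys are placeholders). Note that _get_role_config() insists on exactly one of source_profile / credential_source, and the returned DeferredRefreshableCredentials postpone the AssumeRole call until first use:

    import botocore.session
    from botocore.credentials import AssumeRoleProvider

    profiles = {'profiles': {
        'dev': {
            'role_arn': 'arn:aws:iam::123456789012:role/dev',
            'source_profile': 'base',
        },
        'base': {
            'aws_access_key_id': 'AKIAEXAMPLE',
            'aws_secret_access_key': 'secret',
        },
    }}
    session = botocore.session.Session()
    provider = AssumeRoleProvider(
        load_config=lambda: profiles,
        client_creator=session.create_client,
        cache={},
        profile_name='dev',
    )
    creds = provider.load()  # no STS traffic yet; deferred until first use
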
class AssumeRoleWithWebIdentityProvider(CredentialProvider):
METHOD = 'assume-role-with-web-identity'
CANONICAL_NAME = None
@@ -1728,261 +1728,261 @@ class AssumeRoleWithWebIdentityProvider(CredentialProvider):
)
-class CanonicalNameCredentialSourcer(object):
- def __init__(self, providers):
- self._providers = providers
-
- def is_supported(self, source_name):
- """Validates a given source name.
-
- :type source_name: str
- :param source_name: The value of credential_source in the config
- file. This is the canonical name of the credential provider.
-
- :rtype: bool
- :returns: True if the credential provider is supported,
- False otherwise.
- """
- return source_name in [p.CANONICAL_NAME for p in self._providers]
-
- def source_credentials(self, source_name):
- """Loads source credentials based on the provided configuration.
-
- :type source_name: str
- :param source_name: The value of credential_source in the config
- file. This is the canonical name of the credential provider.
-
- :rtype: Credentials
- """
- source = self._get_provider(source_name)
- if isinstance(source, CredentialResolver):
- return source.load_credentials()
- return source.load()
-
- def _get_provider(self, canonical_name):
- """Return a credential provider by its canonical name.
-
- :type canonical_name: str
- :param canonical_name: The canonical name of the provider.
-
- :raises UnknownCredentialError: Raised if no
- credential provider by the provided name
- is found.
- """
- provider = self._get_provider_by_canonical_name(canonical_name)
-
- # The AssumeRole provider should really be part of the SharedConfig
- # provider rather than being its own thing, but it is not. It is
- # effectively part of both the SharedConfig provider and the
- # SharedCredentials provider now due to the way it behaves.
- # Therefore if we want either of those providers we should return
- # the AssumeRole provider with it.
- if canonical_name.lower() in ['sharedconfig', 'sharedcredentials']:
- assume_role_provider = self._get_provider_by_method('assume-role')
- if assume_role_provider is not None:
- # The SharedConfig or SharedCredentials provider may not be
- # present if it was removed for some reason, but the
- # AssumeRole provider could still be present. In that case,
- # return the assume role provider by itself.
- if provider is None:
- return assume_role_provider
-
- # If both are present, return them both as a
- # CredentialResolver so that calling code can treat them as
- # a single entity.
- return CredentialResolver([assume_role_provider, provider])
-
- if provider is None:
- raise UnknownCredentialError(name=canonical_name)
-
- return provider
-
- def _get_provider_by_canonical_name(self, canonical_name):
- """Return a credential provider by its canonical name.
-
- This function is strict, it does not attempt to address
- compatibility issues.
- """
- for provider in self._providers:
- name = provider.CANONICAL_NAME
- # Canonical names are case-insensitive
- if name and name.lower() == canonical_name.lower():
- return provider
-
- def _get_provider_by_method(self, method):
- """Return a credential provider by its METHOD name."""
- for provider in self._providers:
- if provider.METHOD == method:
- return provider
-
-
-class ContainerProvider(CredentialProvider):
- METHOD = 'container-role'
- CANONICAL_NAME = 'EcsContainer'
- ENV_VAR = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI'
- ENV_VAR_FULL = 'AWS_CONTAINER_CREDENTIALS_FULL_URI'
- ENV_VAR_AUTH_TOKEN = 'AWS_CONTAINER_AUTHORIZATION_TOKEN'
-
- def __init__(self, environ=None, fetcher=None):
- if environ is None:
- environ = os.environ
- if fetcher is None:
- fetcher = ContainerMetadataFetcher()
- self._environ = environ
- self._fetcher = fetcher
-
- def load(self):
- # This cred provider is only triggered if the self.ENV_VAR is set,
- # which only happens if you opt into this feature.
- if self.ENV_VAR in self._environ or self.ENV_VAR_FULL in self._environ:
- return self._retrieve_or_fail()
-
- def _retrieve_or_fail(self):
- if self._provided_relative_uri():
- full_uri = self._fetcher.full_url(self._environ[self.ENV_VAR])
- else:
- full_uri = self._environ[self.ENV_VAR_FULL]
- headers = self._build_headers()
- fetcher = self._create_fetcher(full_uri, headers)
- creds = fetcher()
- return RefreshableCredentials(
- access_key=creds['access_key'],
- secret_key=creds['secret_key'],
- token=creds['token'],
- method=self.METHOD,
- expiry_time=_parse_if_needed(creds['expiry_time']),
- refresh_using=fetcher,
- )
-
- def _build_headers(self):
- headers = {}
- auth_token = self._environ.get(self.ENV_VAR_AUTH_TOKEN)
- if auth_token is not None:
- return {
- 'Authorization': auth_token
- }
-
- def _create_fetcher(self, full_uri, headers):
- def fetch_creds():
- try:
- response = self._fetcher.retrieve_full_uri(
- full_uri, headers=headers)
- except MetadataRetrievalError as e:
- logger.debug("Error retrieving container metadata: %s", e,
- exc_info=True)
- raise CredentialRetrievalError(provider=self.METHOD,
- error_msg=str(e))
- return {
- 'access_key': response['AccessKeyId'],
- 'secret_key': response['SecretAccessKey'],
- 'token': response['Token'],
- 'expiry_time': response['Expiration'],
- }
-
- return fetch_creds
-
- def _provided_relative_uri(self):
- return self.ENV_VAR in self._environ
-
-
-class CredentialResolver(object):
- def __init__(self, providers):
- """
-
- :param providers: A list of ``CredentialProvider`` instances.
-
- """
- self.providers = providers
-
- def insert_before(self, name, credential_provider):
- """
- Inserts a new instance of ``CredentialProvider`` into the chain that
- will be tried before an existing one.
-
- :param name: The short name of the credentials you'd like to insert the
- new credentials before. (ex. ``env`` or ``config``). Existing names
- & ordering can be discovered via ``self.available_methods``.
- :type name: string
-
-        :param credential_provider: The new ``CredentialProvider`` instance
-            you'd like to add to the chain.
-        :type credential_provider: A subclass of ``CredentialProvider``
- """
- try:
- offset = [p.METHOD for p in self.providers].index(name)
- except ValueError:
- raise UnknownCredentialError(name=name)
- self.providers.insert(offset, credential_provider)
-
- def insert_after(self, name, credential_provider):
- """
-        Inserts a new ``CredentialProvider`` instance into the chain that will
- be tried after an existing one.
-
- :param name: The short name of the credentials you'd like to insert the
- new credentials after. (ex. ``env`` or ``config``). Existing names
- & ordering can be discovered via ``self.available_methods``.
- :type name: string
-
-        :param credential_provider: The new ``CredentialProvider`` instance
-            you'd like to add to the chain.
-        :type credential_provider: A subclass of ``CredentialProvider``
- """
- offset = self._get_provider_offset(name)
- self.providers.insert(offset + 1, credential_provider)
-
- def remove(self, name):
- """
-        Removes a given credential provider from the chain.
-
-        :param name: The short name of the provider to remove.
- :type name: string
- """
- available_methods = [p.METHOD for p in self.providers]
- if name not in available_methods:
- # It's not present. Fail silently.
- return
-
- offset = available_methods.index(name)
- self.providers.pop(offset)
-
- def get_provider(self, name):
- """Return a credential provider by name.
-
- :type name: str
- :param name: The name of the provider.
-
- :raises UnknownCredentialError: Raised if no
- credential provider by the provided name
- is found.
- """
- return self.providers[self._get_provider_offset(name)]
-
- def _get_provider_offset(self, name):
- try:
- return [p.METHOD for p in self.providers].index(name)
- except ValueError:
- raise UnknownCredentialError(name=name)
-
- def load_credentials(self):
- """
- Goes through the credentials chain, returning the first ``Credentials``
- that could be loaded.
- """
- # First provider to return a non-None response wins.
- for provider in self.providers:
- logger.debug("Looking for credentials via: %s", provider.METHOD)
- creds = provider.load()
- if creds is not None:
- return creds
-
- # If we got here, no credentials could be found.
- # This feels like it should be an exception, but historically, ``None``
- # is returned.
- #
- # +1
- # -js
- return None
+class CanonicalNameCredentialSourcer(object):
+ def __init__(self, providers):
+ self._providers = providers
+
+ def is_supported(self, source_name):
+ """Validates a given source name.
+
+ :type source_name: str
+ :param source_name: The value of credential_source in the config
+ file. This is the canonical name of the credential provider.
+
+ :rtype: bool
+ :returns: True if the credential provider is supported,
+ False otherwise.
+ """
+ return source_name in [p.CANONICAL_NAME for p in self._providers]
+
+ def source_credentials(self, source_name):
+ """Loads source credentials based on the provided configuration.
+
+ :type source_name: str
+ :param source_name: The value of credential_source in the config
+ file. This is the canonical name of the credential provider.
+
+ :rtype: Credentials
+ """
+ source = self._get_provider(source_name)
+ if isinstance(source, CredentialResolver):
+ return source.load_credentials()
+ return source.load()
+
+ def _get_provider(self, canonical_name):
+ """Return a credential provider by its canonical name.
+
+ :type canonical_name: str
+ :param canonical_name: The canonical name of the provider.
+
+ :raises UnknownCredentialError: Raised if no
+ credential provider by the provided name
+ is found.
+ """
+ provider = self._get_provider_by_canonical_name(canonical_name)
+
+ # The AssumeRole provider should really be part of the SharedConfig
+ # provider rather than being its own thing, but it is not. It is
+ # effectively part of both the SharedConfig provider and the
+ # SharedCredentials provider now due to the way it behaves.
+ # Therefore if we want either of those providers we should return
+ # the AssumeRole provider with it.
+ if canonical_name.lower() in ['sharedconfig', 'sharedcredentials']:
+ assume_role_provider = self._get_provider_by_method('assume-role')
+ if assume_role_provider is not None:
+ # The SharedConfig or SharedCredentials provider may not be
+ # present if it was removed for some reason, but the
+ # AssumeRole provider could still be present. In that case,
+ # return the assume role provider by itself.
+ if provider is None:
+ return assume_role_provider
+
+ # If both are present, return them both as a
+ # CredentialResolver so that calling code can treat them as
+ # a single entity.
+ return CredentialResolver([assume_role_provider, provider])
+
+ if provider is None:
+ raise UnknownCredentialError(name=canonical_name)
+
+ return provider
+
+ def _get_provider_by_canonical_name(self, canonical_name):
+ """Return a credential provider by its canonical name.
+
+ This function is strict, it does not attempt to address
+ compatibility issues.
+ """
+ for provider in self._providers:
+ name = provider.CANONICAL_NAME
+ # Canonical names are case-insensitive
+ if name and name.lower() == canonical_name.lower():
+ return provider
+
+ def _get_provider_by_method(self, method):
+ """Return a credential provider by its METHOD name."""
+ for provider in self._providers:
+ if provider.METHOD == method:
+ return provider
+
+
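A sketch of the sourcer above. is_supported() compares the registered CANONICAL_NAME strings exactly, while the provider lookup behind source_credentials() is case-insensitive:

    from botocore.credentials import CanonicalNameCredentialSourcer, EnvProvider

    sourcer = CanonicalNameCredentialSourcer([EnvProvider()])
    print(sourcer.is_supported('Environment'))         # True: exact name match
    creds = sourcer.source_credentials('environment')  # lookup lowercases names
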
+class ContainerProvider(CredentialProvider):
+ METHOD = 'container-role'
+ CANONICAL_NAME = 'EcsContainer'
+ ENV_VAR = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI'
+ ENV_VAR_FULL = 'AWS_CONTAINER_CREDENTIALS_FULL_URI'
+ ENV_VAR_AUTH_TOKEN = 'AWS_CONTAINER_AUTHORIZATION_TOKEN'
+
+ def __init__(self, environ=None, fetcher=None):
+ if environ is None:
+ environ = os.environ
+ if fetcher is None:
+ fetcher = ContainerMetadataFetcher()
+ self._environ = environ
+ self._fetcher = fetcher
+
+ def load(self):
+ # This cred provider is only triggered if the self.ENV_VAR is set,
+ # which only happens if you opt into this feature.
+ if self.ENV_VAR in self._environ or self.ENV_VAR_FULL in self._environ:
+ return self._retrieve_or_fail()
+
+ def _retrieve_or_fail(self):
+ if self._provided_relative_uri():
+ full_uri = self._fetcher.full_url(self._environ[self.ENV_VAR])
+ else:
+ full_uri = self._environ[self.ENV_VAR_FULL]
+ headers = self._build_headers()
+ fetcher = self._create_fetcher(full_uri, headers)
+ creds = fetcher()
+ return RefreshableCredentials(
+ access_key=creds['access_key'],
+ secret_key=creds['secret_key'],
+ token=creds['token'],
+ method=self.METHOD,
+ expiry_time=_parse_if_needed(creds['expiry_time']),
+ refresh_using=fetcher,
+ )
+
+ def _build_headers(self):
+ headers = {}
+ auth_token = self._environ.get(self.ENV_VAR_AUTH_TOKEN)
+ if auth_token is not None:
+ return {
+ 'Authorization': auth_token
+ }
+
+ def _create_fetcher(self, full_uri, headers):
+ def fetch_creds():
+ try:
+ response = self._fetcher.retrieve_full_uri(
+ full_uri, headers=headers)
+ except MetadataRetrievalError as e:
+ logger.debug("Error retrieving container metadata: %s", e,
+ exc_info=True)
+ raise CredentialRetrievalError(provider=self.METHOD,
+ error_msg=str(e))
+ return {
+ 'access_key': response['AccessKeyId'],
+ 'secret_key': response['SecretAccessKey'],
+ 'token': response['Token'],
+ 'expiry_time': response['Expiration'],
+ }
+
+ return fetch_creds
+
+ def _provided_relative_uri(self):
+ return self.ENV_VAR in self._environ
+
+
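A sketch of the opt-in trigger described above; the relative URI is a placeholder, and load() performs a real HTTP request against the container metadata endpoint, so it only succeeds inside a suitably configured container:

    from botocore.credentials import ContainerProvider

    env = {'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI': '/v2/credentials/abcd'}
    provider = ContainerProvider(environ=env)
    # With neither env var set, load() returns None and the chain moves on;
    # here it fetches and wraps the result in RefreshableCredentials.
    creds = provider.load()
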
+class CredentialResolver(object):
+ def __init__(self, providers):
+ """
+
+ :param providers: A list of ``CredentialProvider`` instances.
+
+ """
+ self.providers = providers
+
+ def insert_before(self, name, credential_provider):
+ """
+ Inserts a new instance of ``CredentialProvider`` into the chain that
+ will be tried before an existing one.
+
+ :param name: The short name of the credentials you'd like to insert the
+ new credentials before. (ex. ``env`` or ``config``). Existing names
+ & ordering can be discovered via ``self.available_methods``.
+ :type name: string
+
+        :param credential_provider: The new ``CredentialProvider`` instance
+            you'd like to add to the chain.
+        :type credential_provider: A subclass of ``CredentialProvider``
+ """
+ try:
+ offset = [p.METHOD for p in self.providers].index(name)
+ except ValueError:
+ raise UnknownCredentialError(name=name)
+ self.providers.insert(offset, credential_provider)
+
+ def insert_after(self, name, credential_provider):
+ """
+        Inserts a new ``CredentialProvider`` instance into the chain that will
+ be tried after an existing one.
+
+ :param name: The short name of the credentials you'd like to insert the
+ new credentials after. (ex. ``env`` or ``config``). Existing names
+ & ordering can be discovered via ``self.available_methods``.
+ :type name: string
+
+        :param credential_provider: The new ``CredentialProvider`` instance
+            you'd like to add to the chain.
+        :type credential_provider: A subclass of ``CredentialProvider``
+ """
+ offset = self._get_provider_offset(name)
+ self.providers.insert(offset + 1, credential_provider)
+
+ def remove(self, name):
+ """
+        Removes a given credential provider from the chain.
+
+        :param name: The short name of the provider to remove.
+ :type name: string
+ """
+ available_methods = [p.METHOD for p in self.providers]
+ if name not in available_methods:
+ # It's not present. Fail silently.
+ return
+
+ offset = available_methods.index(name)
+ self.providers.pop(offset)
+
+ def get_provider(self, name):
+ """Return a credential provider by name.
+
+ :type name: str
+ :param name: The name of the provider.
+
+ :raises UnknownCredentialError: Raised if no
+ credential provider by the provided name
+ is found.
+ """
+ return self.providers[self._get_provider_offset(name)]
+
+ def _get_provider_offset(self, name):
+ try:
+ return [p.METHOD for p in self.providers].index(name)
+ except ValueError:
+ raise UnknownCredentialError(name=name)
+
+ def load_credentials(self):
+ """
+ Goes through the credentials chain, returning the first ``Credentials``
+ that could be loaded.
+ """
+ # First provider to return a non-None response wins.
+ for provider in self.providers:
+ logger.debug("Looking for credentials via: %s", provider.METHOD)
+ creds = provider.load()
+ if creds is not None:
+ return creds
+
+ # If we got here, no credentials could be found.
+ # This feels like it should be an exception, but historically, ``None``
+ # is returned.
+ return None
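
To make the chain semantics above concrete, here is a minimal, hedged sketch of driving a ``CredentialResolver`` directly; ``FakeProvider`` is an illustrative stand-in for any object exposing a ``METHOD`` name and a ``load()`` method, and is not part of botocore:

    # Illustrative only: a stand-in provider for the resolver chain.
    class FakeProvider(object):
        def __init__(self, method, creds=None):
            self.METHOD = method
            self._creds = creds

        def load(self):
            # Real providers return a Credentials object or None.
            return self._creds

    resolver = CredentialResolver(providers=[
        FakeProvider('env'),
        FakeProvider('config', creds='config-creds'),
    ])
    resolver.insert_before('env', FakeProvider('custom'))
    # The first provider to return a non-None value wins: 'config-creds'.
    print(resolver.load_credentials())
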
class SSOCredentialFetcher(CachedCredentialFetcher):
diff --git a/contrib/python/botocore/botocore/docs/__init__.py b/contrib/python/botocore/botocore/docs/__init__.py
index 3f27b4bbf8..b76f7990e8 100644
--- a/contrib/python/botocore/botocore/docs/__init__.py
+++ b/contrib/python/botocore/botocore/docs/__init__.py
@@ -1,38 +1,38 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import os
-
-from botocore.docs.service import ServiceDocumenter
-
-
-def generate_docs(root_dir, session):
- """Generates the reference documentation for botocore
-
- This will go through every available AWS service and output ReSTructured
- text files documenting each service.
-
- :param root_dir: The directory to write the reference files to. Each
- service's reference documentation is loacated at
- root_dir/reference/services/service-name.rst
- """
- services_doc_path = os.path.join(root_dir, 'reference', 'services')
- if not os.path.exists(services_doc_path):
- os.makedirs(services_doc_path)
-
- # Generate reference docs and write them out.
- for service_name in session.get_available_services():
- docs = ServiceDocumenter(service_name, session).document_service()
- service_doc_path = os.path.join(
- services_doc_path, service_name + '.rst')
- with open(service_doc_path, 'wb') as f:
- f.write(docs)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+from botocore.docs.service import ServiceDocumenter
+
+
+def generate_docs(root_dir, session):
+ """Generates the reference documentation for botocore
+
+ This will go through every available AWS service and output ReSTructured
+ text files documenting each service.
+
+ :param root_dir: The directory to write the reference files to. Each
+        service's reference documentation is located at
+ root_dir/reference/services/service-name.rst
+ """
+ services_doc_path = os.path.join(root_dir, 'reference', 'services')
+ if not os.path.exists(services_doc_path):
+ os.makedirs(services_doc_path)
+
+ # Generate reference docs and write them out.
+ for service_name in session.get_available_services():
+ docs = ServiceDocumenter(service_name, session).document_service()
+ service_doc_path = os.path.join(
+ services_doc_path, service_name + '.rst')
+ with open(service_doc_path, 'wb') as f:
+ f.write(docs)
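
For reference, a short usage sketch of the function above, assuming a default ``botocore.session``; the output directory is arbitrary:

    import botocore.session

    from botocore.docs import generate_docs

    session = botocore.session.get_session()
    # Writes reference/services/<service-name>.rst for every available
    # service under the given root directory.
    generate_docs('./build', session)
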
diff --git a/contrib/python/botocore/botocore/docs/bcdoc/__init__.py b/contrib/python/botocore/botocore/docs/bcdoc/__init__.py
index f6f469079f..b687f69da2 100644
--- a/contrib/python/botocore/botocore/docs/bcdoc/__init__.py
+++ b/contrib/python/botocore/botocore/docs/bcdoc/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-__version__ = '0.16.0'
+# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+__version__ = '0.16.0'
diff --git a/contrib/python/botocore/botocore/docs/bcdoc/docstringparser.py b/contrib/python/botocore/botocore/docs/bcdoc/docstringparser.py
index 1da540469f..868bd5d891 100644
--- a/contrib/python/botocore/botocore/docs/bcdoc/docstringparser.py
+++ b/contrib/python/botocore/botocore/docs/bcdoc/docstringparser.py
@@ -1,200 +1,200 @@
-# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.compat import six
-
-
-class DocStringParser(six.moves.html_parser.HTMLParser):
- """
- A simple HTML parser. Focused on converting the subset of HTML
- that appears in the documentation strings of the JSON models into
- simple ReST format.
- """
-
- def __init__(self, doc):
- self.tree = None
- self.doc = doc
- six.moves.html_parser.HTMLParser.__init__(self)
-
- def reset(self):
- six.moves.html_parser.HTMLParser.reset(self)
- self.tree = HTMLTree(self.doc)
-
- def feed(self, data):
- # HTMLParser is an old style class, so the super() method will not work.
- six.moves.html_parser.HTMLParser.feed(self, data)
- self.tree.write()
- self.tree = HTMLTree(self.doc)
-
- def close(self):
- six.moves.html_parser.HTMLParser.close(self)
- # Write if there is anything remaining.
- self.tree.write()
- self.tree = HTMLTree(self.doc)
-
- def handle_starttag(self, tag, attrs):
- self.tree.add_tag(tag, attrs=attrs)
-
- def handle_endtag(self, tag):
- self.tree.add_tag(tag, is_start=False)
-
- def handle_data(self, data):
- self.tree.add_data(data)
-
-
-class HTMLTree(object):
- """
- A tree which handles HTML nodes. Designed to work with a python HTML parser,
- meaning that the current_node will be the most recently opened tag. When
- a tag is closed, the current_node moves up to the parent node.
- """
- def __init__(self, doc):
- self.doc = doc
- self.head = StemNode()
- self.current_node = self.head
- self.unhandled_tags = []
-
- def add_tag(self, tag, attrs=None, is_start=True):
- if not self._doc_has_handler(tag, is_start):
- self.unhandled_tags.append(tag)
- return
-
- if is_start:
- if tag == 'li':
- node = LineItemNode(attrs)
- else:
- node = TagNode(tag, attrs)
- self.current_node.add_child(node)
- self.current_node = node
- else:
- self.current_node = self.current_node.parent
-
- def _doc_has_handler(self, tag, is_start):
- if is_start:
- handler_name = 'start_%s' % tag
- else:
- handler_name = 'end_%s' % tag
-
- return hasattr(self.doc.style, handler_name)
-
- def add_data(self, data):
- self.current_node.add_child(DataNode(data))
-
- def write(self):
- self.head.write(self.doc)
-
-
-class Node(object):
- def __init__(self, parent=None):
- self.parent = parent
-
- def write(self, doc):
- raise NotImplementedError
-
-
-class StemNode(Node):
- def __init__(self, parent=None):
- super(StemNode, self).__init__(parent)
- self.children = []
-
- def add_child(self, child):
- child.parent = self
- self.children.append(child)
-
- def write(self, doc):
- self._write_children(doc)
-
- def _write_children(self, doc):
- for child in self.children:
- child.write(doc)
-
-
-class TagNode(StemNode):
- """
- A generic Tag node. It will verify that handlers exist before writing.
- """
- def __init__(self, tag, attrs=None, parent=None):
- super(TagNode, self).__init__(parent)
- self.attrs = attrs
- self.tag = tag
-
- def write(self, doc):
- self._write_start(doc)
- self._write_children(doc)
- self._write_end(doc)
-
- def _write_start(self, doc):
- handler_name = 'start_%s' % self.tag
- if hasattr(doc.style, handler_name):
- getattr(doc.style, handler_name)(self.attrs)
-
- def _write_end(self, doc):
- handler_name = 'end_%s' % self.tag
- if hasattr(doc.style, handler_name):
- getattr(doc.style, handler_name)()
-
-
-class LineItemNode(TagNode):
- def __init__(self, attrs=None, parent=None):
- super(LineItemNode, self).__init__('li', attrs, parent)
-
- def write(self, doc):
- self._lstrip(self)
- super(LineItemNode, self).write(doc)
-
- def _lstrip(self, node):
- """
- Traverses the tree, stripping out whitespace until text data is found
- :param node: The node to strip
- :return: True if non-whitespace data was found, False otherwise
- """
- for child in node.children:
- if isinstance(child, DataNode):
- child.lstrip()
- if child.data:
- return True
- else:
- found = self._lstrip(child)
- if found:
- return True
-
- return False
-
-
-class DataNode(Node):
- """
- A Node that contains only string data.
- """
- def __init__(self, data, parent=None):
- super(DataNode, self).__init__(parent)
- if not isinstance(data, six.string_types):
- raise ValueError("Expecting string type, %s given." % type(data))
- self.data = data
-
- def lstrip(self):
- self.data = self.data.lstrip()
-
- def write(self, doc):
- if not self.data:
- return
-
- if self.data.isspace():
- str_data = ' '
- else:
- end_space = self.data[-1].isspace()
- words = self.data.split()
- words = doc.translate_words(words)
- str_data = ' '.join(words)
- if end_space:
- str_data += ' '
-
- doc.handle_data(str_data)
+# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.compat import six
+
+
+class DocStringParser(six.moves.html_parser.HTMLParser):
+ """
+ A simple HTML parser. Focused on converting the subset of HTML
+ that appears in the documentation strings of the JSON models into
+ simple ReST format.
+ """
+
+ def __init__(self, doc):
+ self.tree = None
+ self.doc = doc
+ six.moves.html_parser.HTMLParser.__init__(self)
+
+ def reset(self):
+ six.moves.html_parser.HTMLParser.reset(self)
+ self.tree = HTMLTree(self.doc)
+
+ def feed(self, data):
+        # HTMLParser is an old-style class, so the super() method will not work.
+ six.moves.html_parser.HTMLParser.feed(self, data)
+ self.tree.write()
+ self.tree = HTMLTree(self.doc)
+
+ def close(self):
+ six.moves.html_parser.HTMLParser.close(self)
+ # Write if there is anything remaining.
+ self.tree.write()
+ self.tree = HTMLTree(self.doc)
+
+ def handle_starttag(self, tag, attrs):
+ self.tree.add_tag(tag, attrs=attrs)
+
+ def handle_endtag(self, tag):
+ self.tree.add_tag(tag, is_start=False)
+
+ def handle_data(self, data):
+ self.tree.add_data(data)
+
+
+class HTMLTree(object):
+ """
+    A tree that handles HTML nodes. Designed to work with a Python HTML
+    parser, meaning that the current_node will be the most recently opened
+    tag. When a tag is closed, the current_node moves up to the parent node.
+ """
+ def __init__(self, doc):
+ self.doc = doc
+ self.head = StemNode()
+ self.current_node = self.head
+ self.unhandled_tags = []
+
+ def add_tag(self, tag, attrs=None, is_start=True):
+ if not self._doc_has_handler(tag, is_start):
+ self.unhandled_tags.append(tag)
+ return
+
+ if is_start:
+ if tag == 'li':
+ node = LineItemNode(attrs)
+ else:
+ node = TagNode(tag, attrs)
+ self.current_node.add_child(node)
+ self.current_node = node
+ else:
+ self.current_node = self.current_node.parent
+
+ def _doc_has_handler(self, tag, is_start):
+ if is_start:
+ handler_name = 'start_%s' % tag
+ else:
+ handler_name = 'end_%s' % tag
+
+ return hasattr(self.doc.style, handler_name)
+
+ def add_data(self, data):
+ self.current_node.add_child(DataNode(data))
+
+ def write(self):
+ self.head.write(self.doc)
+
+
+class Node(object):
+ def __init__(self, parent=None):
+ self.parent = parent
+
+ def write(self, doc):
+ raise NotImplementedError
+
+
+class StemNode(Node):
+ def __init__(self, parent=None):
+ super(StemNode, self).__init__(parent)
+ self.children = []
+
+ def add_child(self, child):
+ child.parent = self
+ self.children.append(child)
+
+ def write(self, doc):
+ self._write_children(doc)
+
+ def _write_children(self, doc):
+ for child in self.children:
+ child.write(doc)
+
+
+class TagNode(StemNode):
+ """
+ A generic Tag node. It will verify that handlers exist before writing.
+ """
+ def __init__(self, tag, attrs=None, parent=None):
+ super(TagNode, self).__init__(parent)
+ self.attrs = attrs
+ self.tag = tag
+
+ def write(self, doc):
+ self._write_start(doc)
+ self._write_children(doc)
+ self._write_end(doc)
+
+ def _write_start(self, doc):
+ handler_name = 'start_%s' % self.tag
+ if hasattr(doc.style, handler_name):
+ getattr(doc.style, handler_name)(self.attrs)
+
+ def _write_end(self, doc):
+ handler_name = 'end_%s' % self.tag
+ if hasattr(doc.style, handler_name):
+ getattr(doc.style, handler_name)()
+
+
+class LineItemNode(TagNode):
+ def __init__(self, attrs=None, parent=None):
+ super(LineItemNode, self).__init__('li', attrs, parent)
+
+ def write(self, doc):
+ self._lstrip(self)
+ super(LineItemNode, self).write(doc)
+
+ def _lstrip(self, node):
+ """
+        Traverses the tree, stripping out whitespace until text data is
+        found.
+
+        :param node: The node to strip
+        :return: True if non-whitespace data was found, False otherwise
+ """
+ for child in node.children:
+ if isinstance(child, DataNode):
+ child.lstrip()
+ if child.data:
+ return True
+ else:
+ found = self._lstrip(child)
+ if found:
+ return True
+
+ return False
+
+
+class DataNode(Node):
+ """
+ A Node that contains only string data.
+ """
+ def __init__(self, data, parent=None):
+ super(DataNode, self).__init__(parent)
+ if not isinstance(data, six.string_types):
+ raise ValueError("Expecting string type, %s given." % type(data))
+ self.data = data
+
+ def lstrip(self):
+ self.data = self.data.lstrip()
+
+ def write(self, doc):
+ if not self.data:
+ return
+
+ if self.data.isspace():
+ str_data = ' '
+ else:
+ end_space = self.data[-1].isspace()
+ words = self.data.split()
+ words = doc.translate_words(words)
+ str_data = ' '.join(words)
+ if end_space:
+ str_data += ' '
+
+ doc.handle_data(str_data)
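
A hedged illustration of the parser in practice: ``ReSTDocument`` (see restdoc.py below) wires a ``DocStringParser`` to a ``ReSTStyle``, so feeding it service-model HTML produces ReST. The exact whitespace in the output is an implementation detail:

    from botocore.docs.bcdoc.restdoc import ReSTDocument

    doc = ReSTDocument()
    # Tags with start_/end_ handlers on the style (p, b, code, li, ...)
    # are rendered; unhandled tags are recorded and skipped.
    doc.include_doc_string('<p>Creates a <code>Table</code> resource.</p>')
    print(doc.getvalue().decode('utf-8'))
    # Roughly: 'Creates a ``Table`` resource.'
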
diff --git a/contrib/python/botocore/botocore/docs/bcdoc/restdoc.py b/contrib/python/botocore/botocore/docs/bcdoc/restdoc.py
index 5b671eb9f7..9f91e1c350 100644
--- a/contrib/python/botocore/botocore/docs/bcdoc/restdoc.py
+++ b/contrib/python/botocore/botocore/docs/bcdoc/restdoc.py
@@ -1,218 +1,218 @@
-# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import logging
-
-from botocore.compat import OrderedDict
-from botocore.docs.bcdoc.docstringparser import DocStringParser
-from botocore.docs.bcdoc.style import ReSTStyle
-
-LOG = logging.getLogger('bcdocs')
-
-
-class ReSTDocument(object):
-
- def __init__(self, target='man'):
- self.style = ReSTStyle(self)
- self.target = target
- self.parser = DocStringParser(self)
- self.keep_data = True
- self.do_translation = False
- self.translation_map = {}
- self.hrefs = {}
- self._writes = []
- self._last_doc_string = None
-
- def _write(self, s):
- if self.keep_data and s is not None:
- self._writes.append(s)
-
- def write(self, content):
- """
- Write content into the document.
- """
- self._write(content)
-
- def writeln(self, content):
- """
- Write content on a newline.
- """
- self._write('%s%s\n' % (self.style.spaces(), content))
-
- def peek_write(self):
- """
- Returns the last content written to the document without
- removing it from the stack.
- """
- return self._writes[-1]
-
- def pop_write(self):
- """
- Removes and returns the last content written to the stack.
- """
- return self._writes.pop()
-
- def push_write(self, s):
- """
- Places new content on the stack.
- """
- self._writes.append(s)
-
- def getvalue(self):
- """
- Returns the current content of the document as a string.
- """
- if self.hrefs:
- self.style.new_paragraph()
- for refname, link in self.hrefs.items():
- self.style.link_target_definition(refname, link)
- return ''.join(self._writes).encode('utf-8')
-
- def translate_words(self, words):
- return [self.translation_map.get(w, w) for w in words]
-
- def handle_data(self, data):
- if data and self.keep_data:
- self._write(data)
-
- def include_doc_string(self, doc_string):
- if doc_string:
- try:
- start = len(self._writes)
- self.parser.feed(doc_string)
- self.parser.close()
- end = len(self._writes)
- self._last_doc_string = (start, end)
- except Exception:
- LOG.debug('Error parsing doc string', exc_info=True)
- LOG.debug(doc_string)
-
- def remove_last_doc_string(self):
- # Removes all writes inserted by last doc string
- if self._last_doc_string is not None:
- start, end = self._last_doc_string
- del self._writes[start:end]
-
-
-class DocumentStructure(ReSTDocument):
- def __init__(self, name, section_names=None, target='man', context=None):
- """Provides a Hierarichial structure to a ReSTDocument
-
- You can write to it similiar to as you can to a ReSTDocument but
- has an innate structure for more orginaztion and abstraction.
-
- :param name: The name of the document
- :param section_names: A list of sections to be included
- in the document.
- :param target: The target documentation of the Document structure
- :param context: A dictionary of data to store with the strucuture. These
- are only stored per section not the entire structure.
- """
- super(DocumentStructure, self).__init__(target=target)
- self._name = name
- self._structure = OrderedDict()
- self._path = [self._name]
- self._context = {}
- if context is not None:
- self._context = context
- if section_names is not None:
- self._generate_structure(section_names)
-
- @property
- def name(self):
- """The name of the document structure"""
- return self._name
-
- @property
- def path(self):
- """
- A list of where to find a particular document structure in the
- overlying document structure.
- """
- return self._path
-
- @path.setter
- def path(self, value):
- self._path = value
-
- @property
- def available_sections(self):
- return list(self._structure)
-
- @property
- def context(self):
- return self._context
-
- def _generate_structure(self, section_names):
- for section_name in section_names:
- self.add_new_section(section_name)
-
- def add_new_section(self, name, context=None):
- """Adds a new section to the current document structure
-
- This document structure will be considered a section to the
- current document structure but will in itself be an entirely
- new document structure that can be written to and have sections
- as well
-
- :param name: The name of the section.
- :param context: A dictionary of data to store with the strucuture. These
- are only stored per section not the entire structure.
- :rtype: DocumentStructure
- :returns: A new document structure to add to but lives as a section
- to the document structure it was instantiated from.
- """
- # Add a new section
- section = self.__class__(name=name, target=self.target,
- context=context)
- section.path = self.path + [name]
- # Indent the section apporpriately as well
- section.style.indentation = self.style.indentation
- section.translation_map = self.translation_map
- section.hrefs = self.hrefs
- self._structure[name] = section
- return section
-
- def get_section(self, name):
- """Retrieve a section"""
- return self._structure[name]
-
- def delete_section(self, name):
- """Delete a section"""
- del self._structure[name]
-
- def flush_structure(self):
- """Flushes a doc structure to a ReSTructed string
-
- The document is flushed out in a DFS style where sections and their
- subsections' values are added to the string as they are visited.
- """
- # We are at the root flush the links at the beginning of the
- # document
- if len(self.path) == 1:
- if self.hrefs:
- self.style.new_paragraph()
- for refname, link in self.hrefs.items():
- self.style.link_target_definition(refname, link)
- value = self.getvalue()
- for name, section in self._structure.items():
- value += section.flush_structure()
- return value
-
- def getvalue(self):
- return ''.join(self._writes).encode('utf-8')
-
- def remove_all_sections(self):
- self._structure = OrderedDict()
-
- def clear_text(self):
- self._writes = []
+# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import logging
+
+from botocore.compat import OrderedDict
+from botocore.docs.bcdoc.docstringparser import DocStringParser
+from botocore.docs.bcdoc.style import ReSTStyle
+
+LOG = logging.getLogger('bcdocs')
+
+
+class ReSTDocument(object):
+
+ def __init__(self, target='man'):
+ self.style = ReSTStyle(self)
+ self.target = target
+ self.parser = DocStringParser(self)
+ self.keep_data = True
+ self.do_translation = False
+ self.translation_map = {}
+ self.hrefs = {}
+ self._writes = []
+ self._last_doc_string = None
+
+ def _write(self, s):
+ if self.keep_data and s is not None:
+ self._writes.append(s)
+
+ def write(self, content):
+ """
+ Write content into the document.
+ """
+ self._write(content)
+
+ def writeln(self, content):
+ """
+ Write content on a newline.
+ """
+ self._write('%s%s\n' % (self.style.spaces(), content))
+
+ def peek_write(self):
+ """
+ Returns the last content written to the document without
+ removing it from the stack.
+ """
+ return self._writes[-1]
+
+ def pop_write(self):
+ """
+ Removes and returns the last content written to the stack.
+ """
+ return self._writes.pop()
+
+ def push_write(self, s):
+ """
+ Places new content on the stack.
+ """
+ self._writes.append(s)
+
+ def getvalue(self):
+ """
+ Returns the current content of the document as a string.
+ """
+ if self.hrefs:
+ self.style.new_paragraph()
+ for refname, link in self.hrefs.items():
+ self.style.link_target_definition(refname, link)
+ return ''.join(self._writes).encode('utf-8')
+
+ def translate_words(self, words):
+ return [self.translation_map.get(w, w) for w in words]
+
+ def handle_data(self, data):
+ if data and self.keep_data:
+ self._write(data)
+
+ def include_doc_string(self, doc_string):
+ if doc_string:
+ try:
+ start = len(self._writes)
+ self.parser.feed(doc_string)
+ self.parser.close()
+ end = len(self._writes)
+ self._last_doc_string = (start, end)
+ except Exception:
+ LOG.debug('Error parsing doc string', exc_info=True)
+ LOG.debug(doc_string)
+
+ def remove_last_doc_string(self):
+        # Removes all writes inserted by the last doc string.
+ if self._last_doc_string is not None:
+ start, end = self._last_doc_string
+ del self._writes[start:end]
+
+
+class DocumentStructure(ReSTDocument):
+ def __init__(self, name, section_names=None, target='man', context=None):
+ """Provides a Hierarichial structure to a ReSTDocument
+
+ You can write to it similiar to as you can to a ReSTDocument but
+ has an innate structure for more orginaztion and abstraction.
+
+ :param name: The name of the document
+ :param section_names: A list of sections to be included
+ in the document.
+ :param target: The target documentation of the Document structure
+ :param context: A dictionary of data to store with the strucuture. These
+ are only stored per section not the entire structure.
+ """
+ super(DocumentStructure, self).__init__(target=target)
+ self._name = name
+ self._structure = OrderedDict()
+ self._path = [self._name]
+ self._context = {}
+ if context is not None:
+ self._context = context
+ if section_names is not None:
+ self._generate_structure(section_names)
+
+ @property
+ def name(self):
+ """The name of the document structure"""
+ return self._name
+
+ @property
+ def path(self):
+ """
+        A list describing where this document structure sits within the
+        overall document structure.
+ return self._path
+
+ @path.setter
+ def path(self, value):
+ self._path = value
+
+ @property
+ def available_sections(self):
+ return list(self._structure)
+
+ @property
+ def context(self):
+ return self._context
+
+ def _generate_structure(self, section_names):
+ for section_name in section_names:
+ self.add_new_section(section_name)
+
+ def add_new_section(self, name, context=None):
+ """Adds a new section to the current document structure
+
+        The new document structure is considered a section of the current
+        document structure, but it is itself an entirely new document
+        structure that can be written to and can have sections of its own.
+
+        :param name: The name of the section.
+        :param context: A dictionary of data to store with the structure.
+            The data is stored per section, not for the entire structure.
+        :rtype: DocumentStructure
+        :returns: A new document structure that can be written to and that
+            lives as a section of the document structure it was created
+            from.
+ """
+ # Add a new section
+ section = self.__class__(name=name, target=self.target,
+ context=context)
+ section.path = self.path + [name]
+        # Indent the section appropriately as well
+ section.style.indentation = self.style.indentation
+ section.translation_map = self.translation_map
+ section.hrefs = self.hrefs
+ self._structure[name] = section
+ return section
+
+ def get_section(self, name):
+ """Retrieve a section"""
+ return self._structure[name]
+
+ def delete_section(self, name):
+ """Delete a section"""
+ del self._structure[name]
+
+ def flush_structure(self):
+ """Flushes a doc structure to a ReSTructed string
+
+ The document is flushed out in a DFS style where sections and their
+ subsections' values are added to the string as they are visited.
+ """
+        # We are at the root, so flush the links at the beginning of
+        # the document.
+ if len(self.path) == 1:
+ if self.hrefs:
+ self.style.new_paragraph()
+ for refname, link in self.hrefs.items():
+ self.style.link_target_definition(refname, link)
+ value = self.getvalue()
+ for name, section in self._structure.items():
+ value += section.flush_structure()
+ return value
+
+ def getvalue(self):
+ return ''.join(self._writes).encode('utf-8')
+
+ def remove_all_sections(self):
+ self._structure = OrderedDict()
+
+ def clear_text(self):
+ self._writes = []
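
A brief, hedged sketch of the section mechanics described above; the section names are arbitrary:

    from botocore.docs.bcdoc.restdoc import DocumentStructure

    doc = DocumentStructure('myservice', section_names=['title', 'intro'])
    doc.get_section('title').style.h1('MyService')
    doc.get_section('intro').write('A short description.')
    # flush_structure() walks the sections depth-first and returns the
    # rendered document as UTF-8 encoded bytes.
    print(doc.flush_structure().decode('utf-8'))
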
diff --git a/contrib/python/botocore/botocore/docs/bcdoc/style.py b/contrib/python/botocore/botocore/docs/bcdoc/style.py
index 4004b39dff..4470d65d3c 100644
--- a/contrib/python/botocore/botocore/docs/bcdoc/style.py
+++ b/contrib/python/botocore/botocore/docs/bcdoc/style.py
@@ -1,93 +1,93 @@
-# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import logging
-
-logger = logging.getLogger('bcdocs')
-
-
-class BaseStyle(object):
-
- def __init__(self, doc, indent_width=2):
- self.doc = doc
- self.indent_width = indent_width
- self._indent = 0
- self.keep_data = True
-
- @property
- def indentation(self):
- return self._indent
-
- @indentation.setter
- def indentation(self, value):
- self._indent = value
-
- def new_paragraph(self):
- return '\n%s' % self.spaces()
-
- def indent(self):
- self._indent += 1
-
- def dedent(self):
- if self._indent > 0:
- self._indent -= 1
-
- def spaces(self):
- return ' ' * (self._indent * self.indent_width)
-
- def bold(self, s):
- return s
-
- def ref(self, link, title=None):
- return link
-
- def h2(self, s):
- return s
-
- def h3(self, s):
- return s
-
- def underline(self, s):
- return s
-
- def italics(self, s):
- return s
-
-
-class ReSTStyle(BaseStyle):
-
- def __init__(self, doc, indent_width=2):
- BaseStyle.__init__(self, doc, indent_width)
- self.do_p = True
- self.a_href = None
- self.list_depth = 0
-
- def new_paragraph(self):
- self.doc.write('\n\n%s' % self.spaces())
-
- def new_line(self):
- self.doc.write('\n%s' % self.spaces())
-
- def _start_inline(self, markup):
- self.doc.write(markup)
-
- def _end_inline(self, markup):
- # Sometimes the HTML markup has whitespace between the end
- # of the text inside the inline markup and the closing element
- # (e.g. <b>foobar </b>). This trailing space will cause
- # problems in the ReST inline markup so we remove it here
- # by popping the last item written off the stack, striping
- # the whitespace and then pushing it back on the stack.
+# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import logging
+
+logger = logging.getLogger('bcdocs')
+
+
+class BaseStyle(object):
+
+ def __init__(self, doc, indent_width=2):
+ self.doc = doc
+ self.indent_width = indent_width
+ self._indent = 0
+ self.keep_data = True
+
+ @property
+ def indentation(self):
+ return self._indent
+
+ @indentation.setter
+ def indentation(self, value):
+ self._indent = value
+
+ def new_paragraph(self):
+ return '\n%s' % self.spaces()
+
+ def indent(self):
+ self._indent += 1
+
+ def dedent(self):
+ if self._indent > 0:
+ self._indent -= 1
+
+ def spaces(self):
+ return ' ' * (self._indent * self.indent_width)
+
+ def bold(self, s):
+ return s
+
+ def ref(self, link, title=None):
+ return link
+
+ def h2(self, s):
+ return s
+
+ def h3(self, s):
+ return s
+
+ def underline(self, s):
+ return s
+
+ def italics(self, s):
+ return s
+
+
+class ReSTStyle(BaseStyle):
+
+ def __init__(self, doc, indent_width=2):
+ BaseStyle.__init__(self, doc, indent_width)
+ self.do_p = True
+ self.a_href = None
+ self.list_depth = 0
+
+ def new_paragraph(self):
+ self.doc.write('\n\n%s' % self.spaces())
+
+ def new_line(self):
+ self.doc.write('\n%s' % self.spaces())
+
+ def _start_inline(self, markup):
+ self.doc.write(markup)
+
+ def _end_inline(self, markup):
+ # Sometimes the HTML markup has whitespace between the end
+ # of the text inside the inline markup and the closing element
+ # (e.g. <b>foobar </b>). This trailing space will cause
+ # problems in the ReST inline markup so we remove it here
+        # by popping the last item written off the stack, stripping
+ # the whitespace and then pushing it back on the stack.
last_write = self.doc.pop_write().rstrip(' ')
# Sometimes, for whatever reason, a tag like <b/> is present. This
@@ -98,318 +98,318 @@ class ReSTStyle(BaseStyle):
return
self.doc.push_write(last_write)
- self.doc.write(markup + ' ')
-
- def start_bold(self, attrs=None):
- self._start_inline('**')
-
- def end_bold(self):
- self._end_inline('**')
-
- def start_b(self, attrs=None):
- self.doc.do_translation = True
- self.start_bold(attrs)
-
- def end_b(self):
- self.doc.do_translation = False
- self.end_bold()
-
- def bold(self, s):
- if s:
- self.start_bold()
- self.doc.write(s)
- self.end_bold()
-
- def ref(self, title, link=None):
- if link is None:
- link = title
- self.doc.write(':doc:`%s <%s>`' % (title, link))
-
- def _heading(self, s, border_char):
- border = border_char * len(s)
- self.new_paragraph()
- self.doc.write('%s\n%s\n%s' % (border, s, border))
- self.new_paragraph()
-
- def h1(self, s):
- self._heading(s, '*')
-
- def h2(self, s):
- self._heading(s, '=')
-
- def h3(self, s):
- self._heading(s, '-')
-
- def start_italics(self, attrs=None):
- self._start_inline('*')
-
- def end_italics(self):
- self._end_inline('*')
-
- def italics(self, s):
- if s:
- self.start_italics()
- self.doc.write(s)
- self.end_italics()
-
- def start_p(self, attrs=None):
- if self.do_p:
- self.doc.write('\n\n%s' % self.spaces())
-
- def end_p(self):
- if self.do_p:
- self.doc.write('\n\n%s' % self.spaces())
-
- def start_code(self, attrs=None):
- self.doc.do_translation = True
- self._start_inline('``')
-
- def end_code(self):
- self.doc.do_translation = False
- self._end_inline('``')
-
- def code(self, s):
- if s:
- self.start_code()
- self.doc.write(s)
- self.end_code()
-
- def start_note(self, attrs=None):
- self.new_paragraph()
- self.doc.write('.. note::')
- self.indent()
- self.new_paragraph()
-
- def end_note(self):
- self.dedent()
- self.new_paragraph()
-
- def start_important(self, attrs=None):
- self.new_paragraph()
- self.doc.write('.. warning::')
- self.indent()
- self.new_paragraph()
-
- def end_important(self):
- self.dedent()
- self.new_paragraph()
-
- def start_danger(self, attrs=None):
- self.new_paragraph()
- self.doc.write('.. danger::')
- self.indent()
- self.new_paragraph()
-
- def end_danger(self):
- self.dedent()
- self.new_paragraph()
-
- def start_a(self, attrs=None):
- if attrs:
- for attr_key, attr_value in attrs:
- if attr_key == 'href':
- self.a_href = attr_value
- self.doc.write('`')
- else:
- # There are some model documentation that
- # looks like this: <a>DescribeInstances</a>.
- # In this case we just write out an empty
- # string.
- self.doc.write(' ')
- self.doc.do_translation = True
-
- def link_target_definition(self, refname, link):
- self.doc.writeln('.. _%s: %s' % (refname, link))
-
- def sphinx_reference_label(self, label, text=None):
- if text is None:
- text = label
- if self.doc.target == 'html':
- self.doc.write(':ref:`%s <%s>`' % (text, label))
- else:
- self.doc.write(text)
-
- def end_a(self):
- self.doc.do_translation = False
- if self.a_href:
- last_write = self.doc.pop_write()
- last_write = last_write.rstrip(' ')
- if last_write and last_write != '`':
- if ':' in last_write:
- last_write = last_write.replace(':', r'\:')
- self.doc.push_write(last_write)
- self.doc.push_write(' <%s>`__' % self.a_href)
- elif last_write == '`':
- # Look at start_a(). It will do a self.doc.write('`')
- # which is the start of the link title. If that is the
- # case then there was no link text. We should just
- # use an inline link. The syntax of this is
- # `<http://url>`_
- self.doc.push_write('`<%s>`__' % self.a_href)
- else:
- self.doc.push_write(self.a_href)
- self.doc.hrefs[self.a_href] = self.a_href
- self.doc.write('`__')
- self.a_href = None
- self.doc.write(' ')
-
- def start_i(self, attrs=None):
- self.doc.do_translation = True
- self.start_italics()
-
- def end_i(self):
- self.doc.do_translation = False
- self.end_italics()
-
- def start_li(self, attrs=None):
- self.new_line()
- self.do_p = False
- self.doc.write('* ')
-
- def end_li(self):
- self.do_p = True
- self.new_line()
-
- def li(self, s):
- if s:
- self.start_li()
- self.doc.writeln(s)
- self.end_li()
-
- def start_ul(self, attrs=None):
- if self.list_depth != 0:
- self.indent()
- self.list_depth += 1
- self.new_paragraph()
-
- def end_ul(self):
- self.list_depth -= 1
- if self.list_depth != 0:
- self.dedent()
- self.new_paragraph()
-
- def start_ol(self, attrs=None):
- # TODO: Need to control the bullets used for LI items
- if self.list_depth != 0:
- self.indent()
- self.list_depth += 1
- self.new_paragraph()
-
- def end_ol(self):
- self.list_depth -= 1
- if self.list_depth != 0:
- self.dedent()
- self.new_paragraph()
-
- def start_examples(self, attrs=None):
- self.doc.keep_data = False
-
- def end_examples(self):
- self.doc.keep_data = True
-
- def start_fullname(self, attrs=None):
- self.doc.keep_data = False
-
- def end_fullname(self):
- self.doc.keep_data = True
-
- def start_codeblock(self, attrs=None):
- self.doc.write('::')
- self.indent()
- self.new_paragraph()
-
- def end_codeblock(self):
- self.dedent()
- self.new_paragraph()
-
- def codeblock(self, code):
- """
- Literal code blocks are introduced by ending a paragraph with
- the special marker ::. The literal block must be indented
- (and, like all paragraphs, separated from the surrounding
- ones by blank lines).
- """
- self.start_codeblock()
- self.doc.writeln(code)
- self.end_codeblock()
-
- def toctree(self):
- if self.doc.target == 'html':
- self.doc.write('\n.. toctree::\n')
- self.doc.write(' :maxdepth: 1\n')
- self.doc.write(' :titlesonly:\n\n')
- else:
- self.start_ul()
-
- def tocitem(self, item, file_name=None):
- if self.doc.target == 'man':
- self.li(item)
- else:
- if file_name:
- self.doc.writeln(' %s' % file_name)
- else:
- self.doc.writeln(' %s' % item)
-
- def hidden_toctree(self):
- if self.doc.target == 'html':
- self.doc.write('\n.. toctree::\n')
- self.doc.write(' :maxdepth: 1\n')
- self.doc.write(' :hidden:\n\n')
-
- def hidden_tocitem(self, item):
- if self.doc.target == 'html':
- self.tocitem(item)
-
- def table_of_contents(self, title=None, depth=None):
- self.doc.write('.. contents:: ')
- if title is not None:
- self.doc.writeln(title)
- if depth is not None:
- self.doc.writeln(' :depth: %s' % depth)
-
- def start_sphinx_py_class(self, class_name):
- self.new_paragraph()
- self.doc.write('.. py:class:: %s' % class_name)
- self.indent()
- self.new_paragraph()
-
- def end_sphinx_py_class(self):
- self.dedent()
- self.new_paragraph()
-
- def start_sphinx_py_method(self, method_name, parameters=None):
- self.new_paragraph()
- content = '.. py:method:: %s' % method_name
- if parameters is not None:
- content += '(%s)' % parameters
- self.doc.write(content)
- self.indent()
- self.new_paragraph()
-
- def end_sphinx_py_method(self):
- self.dedent()
- self.new_paragraph()
-
- def start_sphinx_py_attr(self, attr_name):
- self.new_paragraph()
- self.doc.write('.. py:attribute:: %s' % attr_name)
- self.indent()
- self.new_paragraph()
-
- def end_sphinx_py_attr(self):
- self.dedent()
- self.new_paragraph()
-
- def write_py_doc_string(self, docstring):
- docstring_lines = docstring.splitlines()
- for docstring_line in docstring_lines:
- self.doc.writeln(docstring_line)
-
- def external_link(self, title, link):
- if self.doc.target == 'html':
- self.doc.write('`%s <%s>`_' % (title, link))
- else:
- self.doc.write(title)
+ self.doc.write(markup + ' ')
+
+ def start_bold(self, attrs=None):
+ self._start_inline('**')
+
+ def end_bold(self):
+ self._end_inline('**')
+
+ def start_b(self, attrs=None):
+ self.doc.do_translation = True
+ self.start_bold(attrs)
+
+ def end_b(self):
+ self.doc.do_translation = False
+ self.end_bold()
+
+ def bold(self, s):
+ if s:
+ self.start_bold()
+ self.doc.write(s)
+ self.end_bold()
+
+ def ref(self, title, link=None):
+ if link is None:
+ link = title
+ self.doc.write(':doc:`%s <%s>`' % (title, link))
+
+ def _heading(self, s, border_char):
+ border = border_char * len(s)
+ self.new_paragraph()
+ self.doc.write('%s\n%s\n%s' % (border, s, border))
+ self.new_paragraph()
+
+ def h1(self, s):
+ self._heading(s, '*')
+
+ def h2(self, s):
+ self._heading(s, '=')
+
+ def h3(self, s):
+ self._heading(s, '-')
+
+ def start_italics(self, attrs=None):
+ self._start_inline('*')
+
+ def end_italics(self):
+ self._end_inline('*')
+
+ def italics(self, s):
+ if s:
+ self.start_italics()
+ self.doc.write(s)
+ self.end_italics()
+
+ def start_p(self, attrs=None):
+ if self.do_p:
+ self.doc.write('\n\n%s' % self.spaces())
+
+ def end_p(self):
+ if self.do_p:
+ self.doc.write('\n\n%s' % self.spaces())
+
+ def start_code(self, attrs=None):
+ self.doc.do_translation = True
+ self._start_inline('``')
+
+ def end_code(self):
+ self.doc.do_translation = False
+ self._end_inline('``')
+
+ def code(self, s):
+ if s:
+ self.start_code()
+ self.doc.write(s)
+ self.end_code()
+
+ def start_note(self, attrs=None):
+ self.new_paragraph()
+ self.doc.write('.. note::')
+ self.indent()
+ self.new_paragraph()
+
+ def end_note(self):
+ self.dedent()
+ self.new_paragraph()
+
+ def start_important(self, attrs=None):
+ self.new_paragraph()
+ self.doc.write('.. warning::')
+ self.indent()
+ self.new_paragraph()
+
+ def end_important(self):
+ self.dedent()
+ self.new_paragraph()
+
+ def start_danger(self, attrs=None):
+ self.new_paragraph()
+ self.doc.write('.. danger::')
+ self.indent()
+ self.new_paragraph()
+
+ def end_danger(self):
+ self.dedent()
+ self.new_paragraph()
+
+ def start_a(self, attrs=None):
+ if attrs:
+ for attr_key, attr_value in attrs:
+ if attr_key == 'href':
+ self.a_href = attr_value
+ self.doc.write('`')
+ else:
+            # Some model documentation looks like this:
+            # <a>DescribeInstances</a>. In that case we just
+            # write out a single space.
+ self.doc.write(' ')
+ self.doc.do_translation = True
+
+ def link_target_definition(self, refname, link):
+ self.doc.writeln('.. _%s: %s' % (refname, link))
+
+ def sphinx_reference_label(self, label, text=None):
+ if text is None:
+ text = label
+ if self.doc.target == 'html':
+ self.doc.write(':ref:`%s <%s>`' % (text, label))
+ else:
+ self.doc.write(text)
+
+ def end_a(self):
+ self.doc.do_translation = False
+ if self.a_href:
+ last_write = self.doc.pop_write()
+ last_write = last_write.rstrip(' ')
+ if last_write and last_write != '`':
+ if ':' in last_write:
+ last_write = last_write.replace(':', r'\:')
+ self.doc.push_write(last_write)
+ self.doc.push_write(' <%s>`__' % self.a_href)
+ elif last_write == '`':
+ # Look at start_a(). It will do a self.doc.write('`')
+ # which is the start of the link title. If that is the
+ # case then there was no link text. We should just
+ # use an inline link. The syntax of this is
+ # `<http://url>`_
+ self.doc.push_write('`<%s>`__' % self.a_href)
+ else:
+ self.doc.push_write(self.a_href)
+ self.doc.hrefs[self.a_href] = self.a_href
+ self.doc.write('`__')
+ self.a_href = None
+ self.doc.write(' ')
+
+ def start_i(self, attrs=None):
+ self.doc.do_translation = True
+ self.start_italics()
+
+ def end_i(self):
+ self.doc.do_translation = False
+ self.end_italics()
+
+ def start_li(self, attrs=None):
+ self.new_line()
+ self.do_p = False
+ self.doc.write('* ')
+
+ def end_li(self):
+ self.do_p = True
+ self.new_line()
+
+ def li(self, s):
+ if s:
+ self.start_li()
+ self.doc.writeln(s)
+ self.end_li()
+
+ def start_ul(self, attrs=None):
+ if self.list_depth != 0:
+ self.indent()
+ self.list_depth += 1
+ self.new_paragraph()
+
+ def end_ul(self):
+ self.list_depth -= 1
+ if self.list_depth != 0:
+ self.dedent()
+ self.new_paragraph()
+
+ def start_ol(self, attrs=None):
+ # TODO: Need to control the bullets used for LI items
+ if self.list_depth != 0:
+ self.indent()
+ self.list_depth += 1
+ self.new_paragraph()
+
+ def end_ol(self):
+ self.list_depth -= 1
+ if self.list_depth != 0:
+ self.dedent()
+ self.new_paragraph()
+
+ def start_examples(self, attrs=None):
+ self.doc.keep_data = False
+
+ def end_examples(self):
+ self.doc.keep_data = True
+
+ def start_fullname(self, attrs=None):
+ self.doc.keep_data = False
+
+ def end_fullname(self):
+ self.doc.keep_data = True
+
+ def start_codeblock(self, attrs=None):
+ self.doc.write('::')
+ self.indent()
+ self.new_paragraph()
+
+ def end_codeblock(self):
+ self.dedent()
+ self.new_paragraph()
+
+ def codeblock(self, code):
+ """
+ Literal code blocks are introduced by ending a paragraph with
+ the special marker ::. The literal block must be indented
+ (and, like all paragraphs, separated from the surrounding
+ ones by blank lines).
+ """
+ self.start_codeblock()
+ self.doc.writeln(code)
+ self.end_codeblock()
+
+ def toctree(self):
+ if self.doc.target == 'html':
+ self.doc.write('\n.. toctree::\n')
+ self.doc.write(' :maxdepth: 1\n')
+ self.doc.write(' :titlesonly:\n\n')
+ else:
+ self.start_ul()
+
+ def tocitem(self, item, file_name=None):
+ if self.doc.target == 'man':
+ self.li(item)
+ else:
+ if file_name:
+ self.doc.writeln(' %s' % file_name)
+ else:
+ self.doc.writeln(' %s' % item)
+
+ def hidden_toctree(self):
+ if self.doc.target == 'html':
+ self.doc.write('\n.. toctree::\n')
+ self.doc.write(' :maxdepth: 1\n')
+ self.doc.write(' :hidden:\n\n')
+
+ def hidden_tocitem(self, item):
+ if self.doc.target == 'html':
+ self.tocitem(item)
+
+ def table_of_contents(self, title=None, depth=None):
+ self.doc.write('.. contents:: ')
+ if title is not None:
+ self.doc.writeln(title)
+ if depth is not None:
+ self.doc.writeln(' :depth: %s' % depth)
+
+ def start_sphinx_py_class(self, class_name):
+ self.new_paragraph()
+ self.doc.write('.. py:class:: %s' % class_name)
+ self.indent()
+ self.new_paragraph()
+
+ def end_sphinx_py_class(self):
+ self.dedent()
+ self.new_paragraph()
+
+ def start_sphinx_py_method(self, method_name, parameters=None):
+ self.new_paragraph()
+ content = '.. py:method:: %s' % method_name
+ if parameters is not None:
+ content += '(%s)' % parameters
+ self.doc.write(content)
+ self.indent()
+ self.new_paragraph()
+
+ def end_sphinx_py_method(self):
+ self.dedent()
+ self.new_paragraph()
+
+ def start_sphinx_py_attr(self, attr_name):
+ self.new_paragraph()
+ self.doc.write('.. py:attribute:: %s' % attr_name)
+ self.indent()
+ self.new_paragraph()
+
+ def end_sphinx_py_attr(self):
+ self.dedent()
+ self.new_paragraph()
+
+ def write_py_doc_string(self, docstring):
+ docstring_lines = docstring.splitlines()
+ for docstring_line in docstring_lines:
+ self.doc.writeln(docstring_line)
+
+ def external_link(self, title, link):
+ if self.doc.target == 'html':
+ self.doc.write('`%s <%s>`_' % (title, link))
+ else:
+ self.doc.write(title)
def internal_link(self, title, page):
if self.doc.target == 'html':
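
To show the style API above end to end, a small hedged example; ``ReSTDocument`` owns a ``ReSTStyle`` instance as ``doc.style``:

    from botocore.docs.bcdoc.restdoc import ReSTDocument

    doc = ReSTDocument(target='html')
    doc.style.h2('Quick start')
    # codeblock() emits the '::' marker, indents, and writes the literal
    # block on its own line.
    doc.style.codeblock("client = session.create_client('s3')")
    doc.style.external_link('Service docs', 'https://docs.aws.amazon.com/')
    print(doc.getvalue().decode('utf-8'))
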
diff --git a/contrib/python/botocore/botocore/docs/client.py b/contrib/python/botocore/botocore/docs/client.py
index 54bbea5b65..f136bcd2c1 100644
--- a/contrib/python/botocore/botocore/docs/client.py
+++ b/contrib/python/botocore/botocore/docs/client.py
@@ -1,104 +1,104 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import inspect
-
-from botocore.docs.utils import get_official_service_name
-from botocore.docs.method import document_custom_method
-from botocore.docs.method import document_model_driven_method
-from botocore.docs.method import get_instance_public_methods
-from botocore.docs.sharedexample import document_shared_examples
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import inspect
+
+from botocore.docs.utils import get_official_service_name
+from botocore.docs.method import document_custom_method
+from botocore.docs.method import document_model_driven_method
+from botocore.docs.method import get_instance_public_methods
+from botocore.docs.sharedexample import document_shared_examples
from botocore.docs.example import ResponseExampleDocumenter
from botocore.docs.params import ResponseParamsDocumenter
from botocore.docs.utils import DocumentedShape
from botocore.compat import OrderedDict
-
-
-class ClientDocumenter(object):
- def __init__(self, client, shared_examples=None):
- self._client = client
- self._shared_examples = shared_examples
- if self._shared_examples is None:
- self._shared_examples = {}
- self._service_name = self._client.meta.service_model.service_name
-
- def document_client(self, section):
- """Documents a client and its methods
-
- :param section: The section to write to.
- """
- self._add_title(section)
- self._add_class_signature(section)
- client_methods = get_instance_public_methods(self._client)
- self._add_client_intro(section, client_methods)
- self._add_client_methods(section, client_methods)
-
- def _add_title(self, section):
- section.style.h2('Client')
-
- def _add_client_intro(self, section, client_methods):
- section = section.add_new_section('intro')
+        # Write out the top-level description for the client.
- official_service_name = get_official_service_name(
- self._client.meta.service_model)
- section.write(
- 'A low-level client representing %s' % official_service_name)
+
+
+class ClientDocumenter(object):
+ def __init__(self, client, shared_examples=None):
+ self._client = client
+ self._shared_examples = shared_examples
+ if self._shared_examples is None:
+ self._shared_examples = {}
+ self._service_name = self._client.meta.service_model.service_name
+
+ def document_client(self, section):
+ """Documents a client and its methods
+
+ :param section: The section to write to.
+ """
+ self._add_title(section)
+ self._add_class_signature(section)
+ client_methods = get_instance_public_methods(self._client)
+ self._add_client_intro(section, client_methods)
+ self._add_client_methods(section, client_methods)
+
+ def _add_title(self, section):
+ section.style.h2('Client')
+
+ def _add_client_intro(self, section, client_methods):
+ section = section.add_new_section('intro')
+ # Write out the top level description for the client.
+ official_service_name = get_official_service_name(
+ self._client.meta.service_model)
+ section.write(
+ 'A low-level client representing %s' % official_service_name)
section.style.new_line()
section.include_doc_string(self._client.meta.service_model.documentation)
-
- # Write out the client example instantiation.
- self._add_client_creation_example(section)
-
- # List out all of the possible client methods.
- section.style.new_line()
- section.write('These are the available methods:')
- section.style.new_line()
- class_name = self._client.__class__.__name__
- for method_name in sorted(client_methods):
- section.style.li(':py:meth:`~%s.Client.%s`' % (
- class_name, method_name))
-
- def _add_class_signature(self, section):
- section.style.start_sphinx_py_class(
- class_name='%s.Client' % self._client.__class__.__name__)
-
- def _add_client_creation_example(self, section):
- section.style.start_codeblock()
- section.style.new_line()
- section.write(
- 'client = session.create_client(\'{service}\')'.format(
- service=self._service_name)
- )
- section.style.end_codeblock()
-
- def _add_client_methods(self, section, client_methods):
- section = section.add_new_section('methods')
- for method_name in sorted(client_methods):
- self._add_client_method(
- section, method_name, client_methods[method_name])
-
- def _add_client_method(self, section, method_name, method):
- section = section.add_new_section(method_name)
- if self._is_custom_method(method_name):
- self._add_custom_method(section, method_name, method)
- else:
- self._add_model_driven_method(section, method_name)
-
- def _is_custom_method(self, method_name):
- return method_name not in self._client.meta.method_to_api_mapping
-
- def _add_custom_method(self, section, method_name, method):
- document_custom_method(section, method_name, method)
-
+
+ # Write out the client example instantiation.
+ self._add_client_creation_example(section)
+
+ # List out all of the possible client methods.
+ section.style.new_line()
+ section.write('These are the available methods:')
+ section.style.new_line()
+ class_name = self._client.__class__.__name__
+ for method_name in sorted(client_methods):
+ section.style.li(':py:meth:`~%s.Client.%s`' % (
+ class_name, method_name))
+
+ def _add_class_signature(self, section):
+ section.style.start_sphinx_py_class(
+ class_name='%s.Client' % self._client.__class__.__name__)
+
+ def _add_client_creation_example(self, section):
+ section.style.start_codeblock()
+ section.style.new_line()
+ section.write(
+ 'client = session.create_client(\'{service}\')'.format(
+ service=self._service_name)
+ )
+ section.style.end_codeblock()
+
+ def _add_client_methods(self, section, client_methods):
+ section = section.add_new_section('methods')
+ for method_name in sorted(client_methods):
+ self._add_client_method(
+ section, method_name, client_methods[method_name])
+
+ def _add_client_method(self, section, method_name, method):
+ section = section.add_new_section(method_name)
+ if self._is_custom_method(method_name):
+ self._add_custom_method(section, method_name, method)
+ else:
+ self._add_model_driven_method(section, method_name)
+
+ def _is_custom_method(self, method_name):
+ return method_name not in self._client.meta.method_to_api_mapping
+
+ def _add_custom_method(self, section, method_name, method):
+ document_custom_method(section, method_name, method)
+
def _add_method_exceptions_list(self, section, operation_model):
error_section = section.add_new_section('exceptions')
error_section.style.new_line()
@@ -109,28 +109,28 @@ class ClientDocumenter(object):
class_name = '%s.Client.exceptions.%s' % (client_name, error.name)
error_section.style.li(':py:class:`%s`' % class_name)
- def _add_model_driven_method(self, section, method_name):
- service_model = self._client.meta.service_model
- operation_name = self._client.meta.method_to_api_mapping[method_name]
- operation_model = service_model.operation_model(operation_name)
-
- example_prefix = 'response = client.%s' % method_name
- document_model_driven_method(
- section, method_name, operation_model,
- event_emitter=self._client.meta.events,
- method_description=operation_model.documentation,
- example_prefix=example_prefix,
- )
-
+ def _add_model_driven_method(self, section, method_name):
+ service_model = self._client.meta.service_model
+ operation_name = self._client.meta.method_to_api_mapping[method_name]
+ operation_model = service_model.operation_model(operation_name)
+
+ example_prefix = 'response = client.%s' % method_name
+ document_model_driven_method(
+ section, method_name, operation_model,
+ event_emitter=self._client.meta.events,
+ method_description=operation_model.documentation,
+ example_prefix=example_prefix,
+ )
+
# Add any modeled exceptions
if operation_model.error_shapes:
self._add_method_exceptions_list(section, operation_model)
- # Add the shared examples
- shared_examples = self._shared_examples.get(operation_name)
- if shared_examples:
- document_shared_examples(
- section, operation_model, example_prefix, shared_examples)
+ # Add the shared examples
+ shared_examples = self._shared_examples.get(operation_name)
+ if shared_examples:
+ document_shared_examples(
+ section, operation_model, example_prefix, shared_examples)
class ClientExceptionsDocumenter(object):
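
Editor's note on the client documenter above: a minimal, hedged sketch of the runtime counterpart of the generated creation snippet. The 's3' service name and the region are illustrative assumptions, not taken from this diff.

    import botocore.session

    # Create a low-level client the way the generated docs describe it.
    session = botocore.session.get_session()
    client = session.create_client('s3', region_name='us-east-1')

    # method_to_api_mapping is the mapping ClientDocumenter consults in
    # _is_custom_method(): model-driven methods appear in it, injected
    # (custom) methods do not.
    print('list_buckets' in client.meta.method_to_api_mapping)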
diff --git a/contrib/python/botocore/botocore/docs/docstring.py b/contrib/python/botocore/botocore/docs/docstring.py
index fe8d02d4ca..33c68932e2 100644
--- a/contrib/python/botocore/botocore/docs/docstring.py
+++ b/contrib/python/botocore/botocore/docs/docstring.py
@@ -1,96 +1,96 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.docs.method import document_model_driven_method
-from botocore.docs.waiter import document_wait_method
-from botocore.docs.paginator import document_paginate_method
-from botocore.docs.bcdoc.restdoc import DocumentStructure
-
-
-class LazyLoadedDocstring(str):
- """Used for lazily loading docstrings
-
- You can instantiate this class and assign it to a __doc__ value.
-    The docstring will not be generated until accessed via __doc__ or
- help(). Note that all docstring classes **must** subclass from
- this class. It cannot be used directly as a docstring.
- """
- def __init__(self, *args, **kwargs):
- """
- The args and kwargs are the same as the underlying document
- generation function. These just get proxied to the underlying
- function.
- """
- super(LazyLoadedDocstring, self).__init__()
- self._gen_args = args
- self._gen_kwargs = kwargs
- self._docstring = None
-
- def __new__(cls, *args, **kwargs):
-        # Needed in order to subclass from str with args and kwargs
- return super(LazyLoadedDocstring, cls).__new__(cls)
-
- def _write_docstring(self, *args, **kwargs):
- raise NotImplementedError(
- '_write_docstring is not implemented. Please subclass from '
- 'this class and provide your own _write_docstring method'
- )
-
- def expandtabs(self, tabsize=8):
- """Expands tabs to spaces
-
-        So this is a big hack in order to get lazy loaded docstrings to work
- for the ``help()``. In the ``help()`` function, ``pydoc`` and
- ``inspect`` are used. At some point the ``inspect.cleandoc``
- method is called. To clean the docs ``expandtabs`` is called
- and that is where we override the method to generate and return the
- docstrings.
- """
- if self._docstring is None:
- self._generate()
- return self._docstring.expandtabs(tabsize)
-
- def __str__(self):
- return self._generate()
-
- # __doc__ of target will use either __repr__ or __str__ of this class.
- __repr__ = __str__
-
- def _generate(self):
- # Generate the docstring if it is not already cached.
- if self._docstring is None:
- self._docstring = self._create_docstring()
- return self._docstring
-
- def _create_docstring(self):
- docstring_structure = DocumentStructure('docstring', target='html')
- # Call the document method function with the args and kwargs
- # passed to the class.
- self._write_docstring(
- docstring_structure, *self._gen_args,
- **self._gen_kwargs)
- return docstring_structure.flush_structure().decode('utf-8')
-
-
-class ClientMethodDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_model_driven_method(*args, **kwargs)
-
-
-class WaiterDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_wait_method(*args, **kwargs)
-
-
-class PaginatorDocstring(LazyLoadedDocstring):
- def _write_docstring(self, *args, **kwargs):
- document_paginate_method(*args, **kwargs)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.method import document_model_driven_method
+from botocore.docs.waiter import document_wait_method
+from botocore.docs.paginator import document_paginate_method
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+
+
+class LazyLoadedDocstring(str):
+ """Used for lazily loading docstrings
+
+ You can instantiate this class and assign it to a __doc__ value.
+    The docstring will not be generated until accessed via __doc__ or
+ help(). Note that all docstring classes **must** subclass from
+ this class. It cannot be used directly as a docstring.
+ """
+ def __init__(self, *args, **kwargs):
+ """
+ The args and kwargs are the same as the underlying document
+ generation function. These just get proxied to the underlying
+ function.
+ """
+ super(LazyLoadedDocstring, self).__init__()
+ self._gen_args = args
+ self._gen_kwargs = kwargs
+ self._docstring = None
+
+ def __new__(cls, *args, **kwargs):
+        # Needed in order to subclass from str with args and kwargs
+ return super(LazyLoadedDocstring, cls).__new__(cls)
+
+ def _write_docstring(self, *args, **kwargs):
+ raise NotImplementedError(
+ '_write_docstring is not implemented. Please subclass from '
+ 'this class and provide your own _write_docstring method'
+ )
+
+ def expandtabs(self, tabsize=8):
+ """Expands tabs to spaces
+
+        So this is a big hack in order to get lazy loaded docstrings to work
+ for the ``help()``. In the ``help()`` function, ``pydoc`` and
+ ``inspect`` are used. At some point the ``inspect.cleandoc``
+ method is called. To clean the docs ``expandtabs`` is called
+ and that is where we override the method to generate and return the
+ docstrings.
+ """
+ if self._docstring is None:
+ self._generate()
+ return self._docstring.expandtabs(tabsize)
+
+ def __str__(self):
+ return self._generate()
+
+ # __doc__ of target will use either __repr__ or __str__ of this class.
+ __repr__ = __str__
+
+ def _generate(self):
+ # Generate the docstring if it is not already cached.
+ if self._docstring is None:
+ self._docstring = self._create_docstring()
+ return self._docstring
+
+ def _create_docstring(self):
+ docstring_structure = DocumentStructure('docstring', target='html')
+ # Call the document method function with the args and kwargs
+ # passed to the class.
+ self._write_docstring(
+ docstring_structure, *self._gen_args,
+ **self._gen_kwargs)
+ return docstring_structure.flush_structure().decode('utf-8')
+
+
+class ClientMethodDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_model_driven_method(*args, **kwargs)
+
+
+class WaiterDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_wait_method(*args, **kwargs)
+
+
+class PaginatorDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_paginate_method(*args, **kwargs)
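
Editor's note: to make the deferral mechanism above concrete, a minimal self-contained sketch; GreetingDocstring and greet are hypothetical names invented for illustration and are not part of botocore.

    from botocore.docs.docstring import LazyLoadedDocstring

    class GreetingDocstring(LazyLoadedDocstring):
        # Hypothetical subclass: _write_docstring receives the
        # DocumentStructure created by _create_docstring, followed by
        # the args/kwargs proxied from __init__.
        def _write_docstring(self, doc_structure, name):
            doc_structure.write('Greets %s.' % name)

    def greet():
        pass

    # Nothing is rendered here; the arguments are only stored.
    greet.__doc__ = GreetingDocstring('the world')

    # help() eventually calls expandtabs() on the docstring, which is
    # the hook that triggers _generate() and renders the text on first use.
    help(greet)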
diff --git a/contrib/python/botocore/botocore/docs/example.py b/contrib/python/botocore/botocore/docs/example.py
index fcb1fb7e08..c0f6bbb68a 100644
--- a/contrib/python/botocore/botocore/docs/example.py
+++ b/contrib/python/botocore/botocore/docs/example.py
@@ -1,208 +1,208 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.docs.shape import ShapeDocumenter
-from botocore.docs.utils import py_default
-
-
-class BaseExampleDocumenter(ShapeDocumenter):
- def document_example(self, section, shape, prefix=None, include=None,
- exclude=None):
- """Generates an example based on a shape
-
- :param section: The section to write the documentation to.
-
- :param shape: The shape of the operation.
-
- :param prefix: Anything to be included before the example
-
- :type include: Dictionary where keys are parameter names and
- values are the shapes of the parameter names.
- :param include: The parameter shapes to include in the documentation.
-
- :type exclude: List of the names of the parameters to exclude.
- :param exclude: The names of the parameters to exclude from
- documentation.
- """
- history = []
- section.style.new_line()
- section.style.start_codeblock()
- if prefix is not None:
- section.write(prefix)
- self.traverse_and_document_shape(
- section=section, shape=shape, history=history,
- include=include, exclude=exclude)
-
- def document_recursive_shape(self, section, shape, **kwargs):
- section.write('{\'... recursive ...\'}')
-
- def document_shape_default(self, section, shape, history, include=None,
- exclude=None, **kwargs):
- py_type = self._get_special_py_default(shape)
- if py_type is None:
- py_type = py_default(shape.type_name)
-
- if self._context.get('streaming_shape') == shape:
- py_type = 'StreamingBody()'
- section.write(py_type)
-
- def document_shape_type_string(self, section, shape, history,
- include=None, exclude=None, **kwargs):
- if 'enum' in shape.metadata:
- for i, enum in enumerate(shape.metadata['enum']):
- section.write('\'%s\'' % enum)
- if i < len(shape.metadata['enum']) - 1:
- section.write('|')
- else:
- self.document_shape_default(section, shape, history)
-
- def document_shape_type_list(self, section, shape, history, include=None,
- exclude=None, **kwargs):
- param_shape = shape.member
- list_section = section.add_new_section('list-value')
- self._start_nested_param(list_section, '[')
- param_section = list_section.add_new_section(
- 'member', context={'shape': param_shape.name})
- self.traverse_and_document_shape(
- section=param_section, shape=param_shape, history=history)
- ending_comma_section = list_section.add_new_section('ending-comma')
- ending_comma_section.write(',')
- ending_bracket_section = list_section.add_new_section(
- 'ending-bracket')
- self._end_nested_param(ending_bracket_section, ']')
-
- def document_shape_type_structure(self, section, shape, history,
- include=None, exclude=None, **kwargs):
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.shape import ShapeDocumenter
+from botocore.docs.utils import py_default
+
+
+class BaseExampleDocumenter(ShapeDocumenter):
+ def document_example(self, section, shape, prefix=None, include=None,
+ exclude=None):
+ """Generates an example based on a shape
+
+ :param section: The section to write the documentation to.
+
+ :param shape: The shape of the operation.
+
+ :param prefix: Anything to be included before the example
+
+ :type include: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+ :param include: The parameter shapes to include in the documentation.
+
+ :type exclude: List of the names of the parameters to exclude.
+ :param exclude: The names of the parameters to exclude from
+ documentation.
+ """
+ history = []
+ section.style.new_line()
+ section.style.start_codeblock()
+ if prefix is not None:
+ section.write(prefix)
+ self.traverse_and_document_shape(
+ section=section, shape=shape, history=history,
+ include=include, exclude=exclude)
+
+ def document_recursive_shape(self, section, shape, **kwargs):
+ section.write('{\'... recursive ...\'}')
+
+ def document_shape_default(self, section, shape, history, include=None,
+ exclude=None, **kwargs):
+ py_type = self._get_special_py_default(shape)
+ if py_type is None:
+ py_type = py_default(shape.type_name)
+
+ if self._context.get('streaming_shape') == shape:
+ py_type = 'StreamingBody()'
+ section.write(py_type)
+
+ def document_shape_type_string(self, section, shape, history,
+ include=None, exclude=None, **kwargs):
+ if 'enum' in shape.metadata:
+ for i, enum in enumerate(shape.metadata['enum']):
+ section.write('\'%s\'' % enum)
+ if i < len(shape.metadata['enum']) - 1:
+ section.write('|')
+ else:
+ self.document_shape_default(section, shape, history)
+
+ def document_shape_type_list(self, section, shape, history, include=None,
+ exclude=None, **kwargs):
+ param_shape = shape.member
+ list_section = section.add_new_section('list-value')
+ self._start_nested_param(list_section, '[')
+ param_section = list_section.add_new_section(
+ 'member', context={'shape': param_shape.name})
+ self.traverse_and_document_shape(
+ section=param_section, shape=param_shape, history=history)
+ ending_comma_section = list_section.add_new_section('ending-comma')
+ ending_comma_section.write(',')
+ ending_bracket_section = list_section.add_new_section(
+ 'ending-bracket')
+ self._end_nested_param(ending_bracket_section, ']')
+
+ def document_shape_type_structure(self, section, shape, history,
+ include=None, exclude=None, **kwargs):
if not shape.members:
section.write('{}')
return
- section = section.add_new_section('structure-value')
- self._start_nested_param(section, '{')
-
- input_members = self._add_members_to_shape(shape.members, include)
-
- for i, param in enumerate(input_members):
- if exclude and param in exclude:
- continue
- param_section = section.add_new_section(param)
- param_section.write('\'%s\': ' % param)
- param_shape = input_members[param]
- param_value_section = param_section.add_new_section(
- 'member-value', context={'shape': param_shape.name})
- self.traverse_and_document_shape(
- section=param_value_section, shape=param_shape,
- history=history, name=param)
- if i < len(input_members) - 1:
- ending_comma_section = param_section.add_new_section(
- 'ending-comma')
- ending_comma_section.write(',')
- ending_comma_section.style.new_line()
- self._end_structure(section, '{', '}')
-
- def document_shape_type_map(self, section, shape, history,
- include=None, exclude=None, **kwargs):
- map_section = section.add_new_section('map-value')
- self._start_nested_param(map_section, '{')
- value_shape = shape.value
- key_section = map_section.add_new_section(
- 'key', context={'shape': shape.key.name})
- key_section.write('\'string\': ')
- value_section = map_section.add_new_section(
- 'value', context={'shape': value_shape.name})
- self.traverse_and_document_shape(
- section=value_section, shape=value_shape, history=history)
- end_bracket_section = map_section.add_new_section('ending-bracket')
- self._end_nested_param(end_bracket_section, '}')
-
- def _add_members_to_shape(self, members, include):
- if include:
- members = members.copy()
- for param in include:
- members[param.name] = param
- return members
-
- def _start_nested_param(self, section, start=None):
- if start is not None:
- section.write(start)
- section.style.indent()
- section.style.indent()
- section.style.new_line()
-
- def _end_nested_param(self, section, end=None):
- section.style.dedent()
- section.style.dedent()
- section.style.new_line()
- if end is not None:
- section.write(end)
-
- def _end_structure(self, section, start, end):
-        # If there are no members in the structure, then make sure the
- # start and the end bracket are on the same line, by removing all
- # previous text and writing the start and end.
- if not section.available_sections:
- section.clear_text()
- section.write(start + end)
- self._end_nested_param(section)
- else:
- end_bracket_section = section.add_new_section('ending-bracket')
- self._end_nested_param(end_bracket_section, end)
-
-
-class ResponseExampleDocumenter(BaseExampleDocumenter):
- EVENT_NAME = 'response-example'
-
+ section = section.add_new_section('structure-value')
+ self._start_nested_param(section, '{')
+
+ input_members = self._add_members_to_shape(shape.members, include)
+
+ for i, param in enumerate(input_members):
+ if exclude and param in exclude:
+ continue
+ param_section = section.add_new_section(param)
+ param_section.write('\'%s\': ' % param)
+ param_shape = input_members[param]
+ param_value_section = param_section.add_new_section(
+ 'member-value', context={'shape': param_shape.name})
+ self.traverse_and_document_shape(
+ section=param_value_section, shape=param_shape,
+ history=history, name=param)
+ if i < len(input_members) - 1:
+ ending_comma_section = param_section.add_new_section(
+ 'ending-comma')
+ ending_comma_section.write(',')
+ ending_comma_section.style.new_line()
+ self._end_structure(section, '{', '}')
+
+ def document_shape_type_map(self, section, shape, history,
+ include=None, exclude=None, **kwargs):
+ map_section = section.add_new_section('map-value')
+ self._start_nested_param(map_section, '{')
+ value_shape = shape.value
+ key_section = map_section.add_new_section(
+ 'key', context={'shape': shape.key.name})
+ key_section.write('\'string\': ')
+ value_section = map_section.add_new_section(
+ 'value', context={'shape': value_shape.name})
+ self.traverse_and_document_shape(
+ section=value_section, shape=value_shape, history=history)
+ end_bracket_section = map_section.add_new_section('ending-bracket')
+ self._end_nested_param(end_bracket_section, '}')
+
+ def _add_members_to_shape(self, members, include):
+ if include:
+ members = members.copy()
+ for param in include:
+ members[param.name] = param
+ return members
+
+ def _start_nested_param(self, section, start=None):
+ if start is not None:
+ section.write(start)
+ section.style.indent()
+ section.style.indent()
+ section.style.new_line()
+
+ def _end_nested_param(self, section, end=None):
+ section.style.dedent()
+ section.style.dedent()
+ section.style.new_line()
+ if end is not None:
+ section.write(end)
+
+ def _end_structure(self, section, start, end):
+        # If there are no members in the structure, then make sure the
+ # start and the end bracket are on the same line, by removing all
+ # previous text and writing the start and end.
+ if not section.available_sections:
+ section.clear_text()
+ section.write(start + end)
+ self._end_nested_param(section)
+ else:
+ end_bracket_section = section.add_new_section('ending-bracket')
+ self._end_nested_param(end_bracket_section, end)
+
+
+class ResponseExampleDocumenter(BaseExampleDocumenter):
+ EVENT_NAME = 'response-example'
+
def document_shape_type_event_stream(self, section, shape, history,
**kwargs):
section.write('EventStream(')
self.document_shape_type_structure(section, shape, history, **kwargs)
end_section = section.add_new_section('event-stream-end')
end_section.write(')')
-
-
-class RequestExampleDocumenter(BaseExampleDocumenter):
- EVENT_NAME = 'request-example'
-
- def document_shape_type_structure(self, section, shape, history,
- include=None, exclude=None, **kwargs):
- param_format = '\'%s\''
- operator = ': '
- start = '{'
- end = '}'
-
- if len(history) <= 1:
- operator = '='
- start = '('
- end = ')'
- param_format = '%s'
- section = section.add_new_section('structure-value')
- self._start_nested_param(section, start)
- input_members = self._add_members_to_shape(shape.members, include)
-
- for i, param in enumerate(input_members):
- if exclude and param in exclude:
- continue
- param_section = section.add_new_section(param)
- param_section.write(param_format % param)
- param_section.write(operator)
- param_shape = input_members[param]
- param_value_section = param_section.add_new_section(
- 'member-value', context={'shape': param_shape.name})
- self.traverse_and_document_shape(
- section=param_value_section, shape=param_shape,
- history=history, name=param)
- if i < len(input_members) - 1:
- ending_comma_section = param_section.add_new_section(
- 'ending-comma')
- ending_comma_section.write(',')
- ending_comma_section.style.new_line()
- self._end_structure(section, start, end)
+
+
+class RequestExampleDocumenter(BaseExampleDocumenter):
+ EVENT_NAME = 'request-example'
+
+ def document_shape_type_structure(self, section, shape, history,
+ include=None, exclude=None, **kwargs):
+ param_format = '\'%s\''
+ operator = ': '
+ start = '{'
+ end = '}'
+
+ if len(history) <= 1:
+ operator = '='
+ start = '('
+ end = ')'
+ param_format = '%s'
+ section = section.add_new_section('structure-value')
+ self._start_nested_param(section, start)
+ input_members = self._add_members_to_shape(shape.members, include)
+
+ for i, param in enumerate(input_members):
+ if exclude and param in exclude:
+ continue
+ param_section = section.add_new_section(param)
+ param_section.write(param_format % param)
+ param_section.write(operator)
+ param_shape = input_members[param]
+ param_value_section = param_section.add_new_section(
+ 'member-value', context={'shape': param_shape.name})
+ self.traverse_and_document_shape(
+ section=param_value_section, shape=param_shape,
+ history=history, name=param)
+ if i < len(input_members) - 1:
+ ending_comma_section = param_section.add_new_section(
+ 'ending-comma')
+ ending_comma_section.write(',')
+ ending_comma_section.style.new_line()
+ self._end_structure(section, start, end)
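
Editor's note: the two structure renderers above are easiest to compare by their output. For a hypothetical operation with one string member and one nested structure member (names invented, and assuming botocore's default True|False placeholder for booleans), RequestExampleDocumenter would emit the top level as keyword arguments and nested levels as dict literals, roughly:

    response = client.example_operation(
        Name='string',
        Config={
            'Enabled': True|False
        }
    )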
diff --git a/contrib/python/botocore/botocore/docs/method.py b/contrib/python/botocore/botocore/docs/method.py
index 1667c4bf20..cbb4c74a6c 100644
--- a/contrib/python/botocore/botocore/docs/method.py
+++ b/contrib/python/botocore/botocore/docs/method.py
@@ -1,242 +1,242 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import inspect
-
-from botocore.docs.params import RequestParamsDocumenter
-from botocore.docs.params import ResponseParamsDocumenter
-from botocore.docs.example import ResponseExampleDocumenter
-from botocore.docs.example import RequestExampleDocumenter
-
-
-AWS_DOC_BASE = 'https://docs.aws.amazon.com/goto/WebAPI'
-
-
-def get_instance_public_methods(instance):
- """Retrieves an objects public methods
-
- :param instance: The instance of the class to inspect
- :rtype: dict
-    :returns: A dictionary that represents an instance's methods where
-        the keys are the names of the methods and the
-        values are the handlers to the methods.
- """
- instance_members = inspect.getmembers(instance)
- instance_methods = {}
- for name, member in instance_members:
- if not name.startswith('_'):
- if inspect.ismethod(member):
- instance_methods[name] = member
- return instance_methods
-
-
-def document_model_driven_signature(section, name, operation_model,
- include=None, exclude=None):
- """Documents the signature of a model-driven method
-
- :param section: The section to write the documentation to.
-
- :param name: The name of the method
-
- :param operation_model: The operation model for the method
-
- :type include: Dictionary where keys are parameter names and
- values are the shapes of the parameter names.
- :param include: The parameter shapes to include in the documentation.
-
- :type exclude: List of the names of the parameters to exclude.
- :param exclude: The names of the parameters to exclude from
- documentation.
- """
- params = {}
- if operation_model.input_shape:
- params = operation_model.input_shape.members
-
- parameter_names = list(params.keys())
-
- if include is not None:
- for member in include:
- parameter_names.append(member.name)
-
- if exclude is not None:
- for member in exclude:
- if member in parameter_names:
- parameter_names.remove(member)
-
- signature_params = ''
- if parameter_names:
- signature_params = '**kwargs'
- section.style.start_sphinx_py_method(name, signature_params)
-
-
-def document_custom_signature(section, name, method,
- include=None, exclude=None):
- """Documents the signature of a custom method
-
- :param section: The section to write the documentation to.
-
- :param name: The name of the method
-
- :param method: The handle to the method being documented
-
- :type include: Dictionary where keys are parameter names and
- values are the shapes of the parameter names.
- :param include: The parameter shapes to include in the documentation.
-
- :type exclude: List of the names of the parameters to exclude.
- :param exclude: The names of the parameters to exclude from
- documentation.
- """
- args, varargs, keywords, defaults = inspect.getargspec(method)
- args = args[1:]
- signature_params = inspect.formatargspec(
- args, varargs, keywords, defaults)
- signature_params = signature_params.lstrip('(')
- signature_params = signature_params.rstrip(')')
- section.style.start_sphinx_py_method(name, signature_params)
-
-
-def document_custom_method(section, method_name, method):
- """Documents a non-data driven method
-
- :param section: The section to write the documentation to.
-
- :param method_name: The name of the method
-
- :param method: The handle to the method being documented
- """
- document_custom_signature(
- section, method_name, method)
- method_intro_section = section.add_new_section('method-intro')
- method_intro_section.writeln('')
- doc_string = inspect.getdoc(method)
- if doc_string is not None:
- method_intro_section.style.write_py_doc_string(doc_string)
-
-
-def document_model_driven_method(section, method_name, operation_model,
- event_emitter, method_description=None,
- example_prefix=None, include_input=None,
- include_output=None, exclude_input=None,
- exclude_output=None, document_output=True,
- include_signature=True):
- """Documents an individual method
-
- :param section: The section to write to
-
- :param method_name: The name of the method
-
- :param operation_model: The model of the operation
-
- :param event_emitter: The event emitter to use to emit events
-
- :param example_prefix: The prefix to use in the method example.
-
- :type include_input: Dictionary where keys are parameter names and
- values are the shapes of the parameter names.
- :param include_input: The parameter shapes to include in the
- input documentation.
-
- :type include_output: Dictionary where keys are parameter names and
- values are the shapes of the parameter names.
-    :param include_output: The parameter shapes to include in the
- output documentation.
-
- :type exclude_input: List of the names of the parameters to exclude.
- :param exclude_input: The names of the parameters to exclude from
- input documentation.
-
- :type exclude_output: List of the names of the parameters to exclude.
-    :param exclude_output: The names of the parameters to exclude from
- output documentation.
-
- :param document_output: A boolean flag to indicate whether to
- document the output.
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- # Add the signature if specified.
- if include_signature:
- document_model_driven_signature(
- section, method_name, operation_model, include=include_input,
- exclude=exclude_input)
-
- # Add the description for the method.
- method_intro_section = section.add_new_section('method-intro')
- method_intro_section.include_doc_string(method_description)
- if operation_model.deprecated:
- method_intro_section.style.start_danger()
- method_intro_section.writeln(
- 'This operation is deprecated and may not function as '
- 'expected. This operation should not be used going forward '
-            'and is only kept for the purpose of backwards compatibility.')
- method_intro_section.style.end_danger()
- service_uid = operation_model.service_model.metadata.get('uid')
- if service_uid is not None:
- method_intro_section.style.new_paragraph()
- method_intro_section.write("See also: ")
- link = '%s/%s/%s' % (AWS_DOC_BASE, service_uid,
- operation_model.name)
- method_intro_section.style.external_link(title="AWS API Documentation",
- link=link)
- method_intro_section.writeln('')
-
- # Add the example section.
- example_section = section.add_new_section('example')
- example_section.style.new_paragraph()
- example_section.style.bold('Request Syntax')
-
- context = {
- 'special_shape_types': {
- 'streaming_input_shape': operation_model.get_streaming_input(),
- 'streaming_output_shape': operation_model.get_streaming_output(),
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import inspect
+
+from botocore.docs.params import RequestParamsDocumenter
+from botocore.docs.params import ResponseParamsDocumenter
+from botocore.docs.example import ResponseExampleDocumenter
+from botocore.docs.example import RequestExampleDocumenter
+
+
+AWS_DOC_BASE = 'https://docs.aws.amazon.com/goto/WebAPI'
+
+
+def get_instance_public_methods(instance):
+ """Retrieves an objects public methods
+
+ :param instance: The instance of the class to inspect
+ :rtype: dict
+    :returns: A dictionary that represents an instance's methods where
+        the keys are the names of the methods and the
+        values are the handlers to the methods.
+ """
+ instance_members = inspect.getmembers(instance)
+ instance_methods = {}
+ for name, member in instance_members:
+ if not name.startswith('_'):
+ if inspect.ismethod(member):
+ instance_methods[name] = member
+ return instance_methods
+
+
+def document_model_driven_signature(section, name, operation_model,
+ include=None, exclude=None):
+ """Documents the signature of a model-driven method
+
+ :param section: The section to write the documentation to.
+
+ :param name: The name of the method
+
+ :param operation_model: The operation model for the method
+
+ :type include: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+ :param include: The parameter shapes to include in the documentation.
+
+ :type exclude: List of the names of the parameters to exclude.
+ :param exclude: The names of the parameters to exclude from
+ documentation.
+ """
+ params = {}
+ if operation_model.input_shape:
+ params = operation_model.input_shape.members
+
+ parameter_names = list(params.keys())
+
+ if include is not None:
+ for member in include:
+ parameter_names.append(member.name)
+
+ if exclude is not None:
+ for member in exclude:
+ if member in parameter_names:
+ parameter_names.remove(member)
+
+ signature_params = ''
+ if parameter_names:
+ signature_params = '**kwargs'
+ section.style.start_sphinx_py_method(name, signature_params)
+
+
+def document_custom_signature(section, name, method,
+ include=None, exclude=None):
+ """Documents the signature of a custom method
+
+ :param section: The section to write the documentation to.
+
+ :param name: The name of the method
+
+ :param method: The handle to the method being documented
+
+ :type include: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+ :param include: The parameter shapes to include in the documentation.
+
+ :type exclude: List of the names of the parameters to exclude.
+ :param exclude: The names of the parameters to exclude from
+ documentation.
+ """
+ args, varargs, keywords, defaults = inspect.getargspec(method)
+ args = args[1:]
+ signature_params = inspect.formatargspec(
+ args, varargs, keywords, defaults)
+ signature_params = signature_params.lstrip('(')
+ signature_params = signature_params.rstrip(')')
+ section.style.start_sphinx_py_method(name, signature_params)
+
+
+def document_custom_method(section, method_name, method):
+ """Documents a non-data driven method
+
+ :param section: The section to write the documentation to.
+
+ :param method_name: The name of the method
+
+ :param method: The handle to the method being documented
+ """
+ document_custom_signature(
+ section, method_name, method)
+ method_intro_section = section.add_new_section('method-intro')
+ method_intro_section.writeln('')
+ doc_string = inspect.getdoc(method)
+ if doc_string is not None:
+ method_intro_section.style.write_py_doc_string(doc_string)
+
+
+def document_model_driven_method(section, method_name, operation_model,
+ event_emitter, method_description=None,
+ example_prefix=None, include_input=None,
+ include_output=None, exclude_input=None,
+ exclude_output=None, document_output=True,
+ include_signature=True):
+ """Documents an individual method
+
+ :param section: The section to write to
+
+ :param method_name: The name of the method
+
+ :param operation_model: The model of the operation
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param example_prefix: The prefix to use in the method example.
+
+ :type include_input: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+ :param include_input: The parameter shapes to include in the
+ input documentation.
+
+ :type include_output: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+    :param include_output: The parameter shapes to include in the
+ output documentation.
+
+ :type exclude_input: List of the names of the parameters to exclude.
+ :param exclude_input: The names of the parameters to exclude from
+ input documentation.
+
+ :type exclude_output: List of the names of the parameters to exclude.
+    :param exclude_output: The names of the parameters to exclude from
+ output documentation.
+
+ :param document_output: A boolean flag to indicate whether to
+ document the output.
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ # Add the signature if specified.
+ if include_signature:
+ document_model_driven_signature(
+ section, method_name, operation_model, include=include_input,
+ exclude=exclude_input)
+
+ # Add the description for the method.
+ method_intro_section = section.add_new_section('method-intro')
+ method_intro_section.include_doc_string(method_description)
+ if operation_model.deprecated:
+ method_intro_section.style.start_danger()
+ method_intro_section.writeln(
+ 'This operation is deprecated and may not function as '
+ 'expected. This operation should not be used going forward '
+            'and is only kept for the purpose of backwards compatibility.')
+ method_intro_section.style.end_danger()
+ service_uid = operation_model.service_model.metadata.get('uid')
+ if service_uid is not None:
+ method_intro_section.style.new_paragraph()
+ method_intro_section.write("See also: ")
+ link = '%s/%s/%s' % (AWS_DOC_BASE, service_uid,
+ operation_model.name)
+ method_intro_section.style.external_link(title="AWS API Documentation",
+ link=link)
+ method_intro_section.writeln('')
+
+ # Add the example section.
+ example_section = section.add_new_section('example')
+ example_section.style.new_paragraph()
+ example_section.style.bold('Request Syntax')
+
+ context = {
+ 'special_shape_types': {
+ 'streaming_input_shape': operation_model.get_streaming_input(),
+ 'streaming_output_shape': operation_model.get_streaming_output(),
'eventstream_output_shape': operation_model.get_event_stream_output(),
- },
- }
-
- if operation_model.input_shape:
- RequestExampleDocumenter(
- service_name=operation_model.service_model.service_name,
- operation_name=operation_model.name,
- event_emitter=event_emitter, context=context).document_example(
- example_section, operation_model.input_shape,
- prefix=example_prefix, include=include_input,
- exclude=exclude_input)
- else:
- example_section.style.new_paragraph()
- example_section.style.start_codeblock()
- example_section.write(example_prefix + '()')
-
- # Add the request parameter documentation.
- request_params_section = section.add_new_section('request-params')
- if operation_model.input_shape:
- RequestParamsDocumenter(
- service_name=operation_model.service_model.service_name,
- operation_name=operation_model.name,
- event_emitter=event_emitter, context=context).document_params(
- request_params_section, operation_model.input_shape,
- include=include_input, exclude=exclude_input)
-
- # Add the return value documentation
- return_section = section.add_new_section('return')
- return_section.style.new_line()
- if operation_model.output_shape is not None and document_output:
- return_section.write(':rtype: dict')
- return_section.style.new_line()
- return_section.write(':returns: ')
- return_section.style.indent()
- return_section.style.new_line()
-
+ },
+ }
+
+ if operation_model.input_shape:
+ RequestExampleDocumenter(
+ service_name=operation_model.service_model.service_name,
+ operation_name=operation_model.name,
+ event_emitter=event_emitter, context=context).document_example(
+ example_section, operation_model.input_shape,
+ prefix=example_prefix, include=include_input,
+ exclude=exclude_input)
+ else:
+ example_section.style.new_paragraph()
+ example_section.style.start_codeblock()
+ example_section.write(example_prefix + '()')
+
+ # Add the request parameter documentation.
+ request_params_section = section.add_new_section('request-params')
+ if operation_model.input_shape:
+ RequestParamsDocumenter(
+ service_name=operation_model.service_model.service_name,
+ operation_name=operation_model.name,
+ event_emitter=event_emitter, context=context).document_params(
+ request_params_section, operation_model.input_shape,
+ include=include_input, exclude=exclude_input)
+
+ # Add the return value documentation
+ return_section = section.add_new_section('return')
+ return_section.style.new_line()
+ if operation_model.output_shape is not None and document_output:
+ return_section.write(':rtype: dict')
+ return_section.style.new_line()
+ return_section.write(':returns: ')
+ return_section.style.indent()
+ return_section.style.new_line()
+
# If the operation is an event stream, describe the tagged union
event_stream_output = operation_model.get_event_stream_output()
if event_stream_output:
@@ -251,31 +251,31 @@ def document_model_driven_method(section, method_name, operation_model,
)
event_section.style.new_line()
- # Add an example return value
- return_example_section = return_section.add_new_section('example')
- return_example_section.style.new_line()
- return_example_section.style.bold('Response Syntax')
- return_example_section.style.new_paragraph()
- ResponseExampleDocumenter(
- service_name=operation_model.service_model.service_name,
- operation_name=operation_model.name,
- event_emitter=event_emitter,
- context=context).document_example(
- return_example_section, operation_model.output_shape,
- include=include_output, exclude=exclude_output)
-
- # Add a description for the return value
- return_description_section = return_section.add_new_section(
- 'description')
- return_description_section.style.new_line()
- return_description_section.style.bold('Response Structure')
- return_description_section.style.new_paragraph()
- ResponseParamsDocumenter(
- service_name=operation_model.service_model.service_name,
- operation_name=operation_model.name,
- event_emitter=event_emitter,
- context=context).document_params(
- return_description_section, operation_model.output_shape,
- include=include_output, exclude=exclude_output)
- else:
- return_section.write(':returns: None')
+ # Add an example return value
+ return_example_section = return_section.add_new_section('example')
+ return_example_section.style.new_line()
+ return_example_section.style.bold('Response Syntax')
+ return_example_section.style.new_paragraph()
+ ResponseExampleDocumenter(
+ service_name=operation_model.service_model.service_name,
+ operation_name=operation_model.name,
+ event_emitter=event_emitter,
+ context=context).document_example(
+ return_example_section, operation_model.output_shape,
+ include=include_output, exclude=exclude_output)
+
+ # Add a description for the return value
+ return_description_section = return_section.add_new_section(
+ 'description')
+ return_description_section.style.new_line()
+ return_description_section.style.bold('Response Structure')
+ return_description_section.style.new_paragraph()
+ ResponseParamsDocumenter(
+ service_name=operation_model.service_model.service_name,
+ operation_name=operation_model.name,
+ event_emitter=event_emitter,
+ context=context).document_params(
+ return_description_section, operation_model.output_shape,
+ include=include_output, exclude=exclude_output)
+ else:
+ return_section.write(':returns: None')
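
Editor's note, one portability caveat on document_custom_signature above: it relies on inspect.getargspec and inspect.formatargspec, which are deprecated and were removed in Python 3.11. A rough modern equivalent, offered as a sketch rather than what this tree ships:

    import inspect

    def custom_signature_params(method):
        # Mirror the getargspec/formatargspec path: drop 'self' and
        # strip the surrounding parentheses from the rendered signature.
        sig = inspect.signature(method)
        params = [p for n, p in sig.parameters.items() if n != 'self']
        return str(sig.replace(parameters=params)).lstrip('(').rstrip(')')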
diff --git a/contrib/python/botocore/botocore/docs/paginator.py b/contrib/python/botocore/botocore/docs/paginator.py
index 88a5730475..03c17d6309 100644
--- a/contrib/python/botocore/botocore/docs/paginator.py
+++ b/contrib/python/botocore/botocore/docs/paginator.py
@@ -1,177 +1,177 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore import xform_name
-from botocore.compat import OrderedDict
-from botocore.docs.utils import DocumentedShape
-from botocore.utils import get_service_module_name
-from botocore.docs.method import document_model_driven_method
-
-
-class PaginatorDocumenter(object):
- def __init__(self, client, service_paginator_model):
- self._client = client
- self._service_name = self._client.meta.service_model.service_name
- self._service_paginator_model = service_paginator_model
-
- def document_paginators(self, section):
- """Documents the various paginators for a service
-
-        :param section: The section to write to.
- """
- section.style.h2('Paginators')
- section.style.new_line()
- section.writeln('The available paginators are:')
-
- paginator_names = sorted(
- self._service_paginator_model._paginator_config)
-
- # List the available paginators and then document each paginator.
- for paginator_name in paginator_names:
- section.style.li(
- ':py:class:`%s.Paginator.%s`' % (
- self._client.__class__.__name__, paginator_name))
- self._add_paginator(section, paginator_name)
-
- def _add_paginator(self, section, paginator_name):
- section = section.add_new_section(paginator_name)
-
-        # Document the paginator class
- section.style.start_sphinx_py_class(
- class_name='%s.Paginator.%s' % (
- self._client.__class__.__name__, paginator_name))
- section.style.start_codeblock()
- section.style.new_line()
-
- # Document how to instantiate the paginator.
- section.write(
- 'paginator = client.get_paginator(\'%s\')' % xform_name(
- paginator_name)
- )
- section.style.end_codeblock()
- section.style.new_line()
- # Get the pagination model for the particular paginator.
- paginator_config = self._service_paginator_model.get_paginator(
- paginator_name)
- document_paginate_method(
- section=section,
- paginator_name=paginator_name,
- event_emitter=self._client.meta.events,
- service_model=self._client.meta.service_model,
- paginator_config=paginator_config
- )
-
-
-def document_paginate_method(section, paginator_name, event_emitter,
- service_model, paginator_config,
- include_signature=True):
- """Documents the paginate method of a paginator
-
- :param section: The section to write to
-
- :param paginator_name: The name of the paginator. It is snake cased.
-
- :param event_emitter: The event emitter to use to emit events
-
- :param service_model: The service model
-
-    :param paginator_config: The paginator config associated with a particular
- paginator.
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- # Retrieve the operation model of the underlying operation.
- operation_model = service_model.operation_model(
- paginator_name)
-
- # Add representations of the request and response parameters
- # we want to include in the description of the paginate method.
- # These are parameters we expose via the botocore interface.
- pagination_config_members = OrderedDict()
-
- pagination_config_members['MaxItems'] = DocumentedShape(
- name='MaxItems', type_name='integer',
- documentation=(
- '<p>The total number of items to return. If the total '
- 'number of items available is more than the value '
- 'specified in max-items then a <code>NextToken</code> '
- 'will be provided in the output that you can use to '
- 'resume pagination.</p>'))
-
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore import xform_name
+from botocore.compat import OrderedDict
+from botocore.docs.utils import DocumentedShape
+from botocore.utils import get_service_module_name
+from botocore.docs.method import document_model_driven_method
+
+
+class PaginatorDocumenter(object):
+ def __init__(self, client, service_paginator_model):
+ self._client = client
+ self._service_name = self._client.meta.service_model.service_name
+ self._service_paginator_model = service_paginator_model
+
+ def document_paginators(self, section):
+ """Documents the various paginators for a service
+
+        :param section: The section to write to.
+ """
+ section.style.h2('Paginators')
+ section.style.new_line()
+ section.writeln('The available paginators are:')
+
+ paginator_names = sorted(
+ self._service_paginator_model._paginator_config)
+
+ # List the available paginators and then document each paginator.
+ for paginator_name in paginator_names:
+ section.style.li(
+ ':py:class:`%s.Paginator.%s`' % (
+ self._client.__class__.__name__, paginator_name))
+ self._add_paginator(section, paginator_name)
+
+ def _add_paginator(self, section, paginator_name):
+ section = section.add_new_section(paginator_name)
+
+        # Document the paginator class
+ section.style.start_sphinx_py_class(
+ class_name='%s.Paginator.%s' % (
+ self._client.__class__.__name__, paginator_name))
+ section.style.start_codeblock()
+ section.style.new_line()
+
+ # Document how to instantiate the paginator.
+ section.write(
+ 'paginator = client.get_paginator(\'%s\')' % xform_name(
+ paginator_name)
+ )
+ section.style.end_codeblock()
+ section.style.new_line()
+ # Get the pagination model for the particular paginator.
+ paginator_config = self._service_paginator_model.get_paginator(
+ paginator_name)
+ document_paginate_method(
+ section=section,
+ paginator_name=paginator_name,
+ event_emitter=self._client.meta.events,
+ service_model=self._client.meta.service_model,
+ paginator_config=paginator_config
+ )
+
+
+def document_paginate_method(section, paginator_name, event_emitter,
+ service_model, paginator_config,
+ include_signature=True):
+ """Documents the paginate method of a paginator
+
+ :param section: The section to write to
+
+ :param paginator_name: The name of the paginator. It is snake cased.
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param service_model: The service model
+
+    :param paginator_config: The paginator config associated with a particular
+ paginator.
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ # Retrieve the operation model of the underlying operation.
+ operation_model = service_model.operation_model(
+ paginator_name)
+
+ # Add representations of the request and response parameters
+ # we want to include in the description of the paginate method.
+ # These are parameters we expose via the botocore interface.
+ pagination_config_members = OrderedDict()
+
+ pagination_config_members['MaxItems'] = DocumentedShape(
+ name='MaxItems', type_name='integer',
+ documentation=(
+ '<p>The total number of items to return. If the total '
+ 'number of items available is more than the value '
+ 'specified in max-items then a <code>NextToken</code> '
+ 'will be provided in the output that you can use to '
+ 'resume pagination.</p>'))
+
if paginator_config.get('limit_key', None):
pagination_config_members['PageSize'] = DocumentedShape(
name='PageSize', type_name='integer',
documentation='<p>The size of each page.<p>')
-
- pagination_config_members['StartingToken'] = DocumentedShape(
- name='StartingToken', type_name='string',
- documentation=(
- '<p>A token to specify where to start paginating. '
- 'This is the <code>NextToken</code> from a previous '
- 'response.</p>'))
-
- botocore_pagination_params = [
- DocumentedShape(
- name='PaginationConfig', type_name='structure',
- documentation=(
- '<p>A dictionary that provides parameters to control '
- 'pagination.</p>'),
- members=pagination_config_members)
- ]
-
- botocore_pagination_response_params = [
- DocumentedShape(
- name='NextToken', type_name='string',
- documentation=(
- '<p>A token to resume pagination.</p>'))
- ]
-
- service_pagination_params = []
-
- # Add the normal input token of the method to a list
-    # of input parameters that we wish to hide since we expose our own.
- if isinstance(paginator_config['input_token'], list):
- service_pagination_params += paginator_config['input_token']
- else:
- service_pagination_params.append(paginator_config['input_token'])
-
- # Hide the limit key in the documentation.
- if paginator_config.get('limit_key', None):
- service_pagination_params.append(paginator_config['limit_key'])
-
- # Hide the output tokens in the documentation.
- service_pagination_response_params = []
- if isinstance(paginator_config['output_token'], list):
- service_pagination_response_params += paginator_config[
- 'output_token']
- else:
- service_pagination_response_params.append(paginator_config[
- 'output_token'])
-
- paginate_description = (
- 'Creates an iterator that will paginate through responses '
- 'from :py:meth:`{0}.Client.{1}`.'.format(
- get_service_module_name(service_model), xform_name(paginator_name))
- )
-
- document_model_driven_method(
- section, 'paginate', operation_model,
- event_emitter=event_emitter,
- method_description=paginate_description,
- example_prefix='response_iterator = paginator.paginate',
- include_input=botocore_pagination_params,
- include_output=botocore_pagination_response_params,
- exclude_input=service_pagination_params,
- exclude_output=service_pagination_response_params,
- include_signature=include_signature
- )
+
+ pagination_config_members['StartingToken'] = DocumentedShape(
+ name='StartingToken', type_name='string',
+ documentation=(
+ '<p>A token to specify where to start paginating. '
+ 'This is the <code>NextToken</code> from a previous '
+ 'response.</p>'))
+
+ botocore_pagination_params = [
+ DocumentedShape(
+ name='PaginationConfig', type_name='structure',
+ documentation=(
+ '<p>A dictionary that provides parameters to control '
+ 'pagination.</p>'),
+ members=pagination_config_members)
+ ]
+
+ botocore_pagination_response_params = [
+ DocumentedShape(
+ name='NextToken', type_name='string',
+ documentation=(
+ '<p>A token to resume pagination.</p>'))
+ ]
+
+ service_pagination_params = []
+
+ # Add the normal input token of the method to a list
+    # of input parameters that we wish to hide since we expose our own.
+ if isinstance(paginator_config['input_token'], list):
+ service_pagination_params += paginator_config['input_token']
+ else:
+ service_pagination_params.append(paginator_config['input_token'])
+
+ # Hide the limit key in the documentation.
+ if paginator_config.get('limit_key', None):
+ service_pagination_params.append(paginator_config['limit_key'])
+
+ # Hide the output tokens in the documentation.
+ service_pagination_response_params = []
+ if isinstance(paginator_config['output_token'], list):
+ service_pagination_response_params += paginator_config[
+ 'output_token']
+ else:
+ service_pagination_response_params.append(paginator_config[
+ 'output_token'])
+
+ paginate_description = (
+ 'Creates an iterator that will paginate through responses '
+ 'from :py:meth:`{0}.Client.{1}`.'.format(
+ get_service_module_name(service_model), xform_name(paginator_name))
+ )
+
+ document_model_driven_method(
+ section, 'paginate', operation_model,
+ event_emitter=event_emitter,
+ method_description=paginate_description,
+ example_prefix='response_iterator = paginator.paginate',
+ include_input=botocore_pagination_params,
+ include_output=botocore_pagination_response_params,
+ exclude_input=service_pagination_params,
+ exclude_output=service_pagination_response_params,
+ include_signature=include_signature
+ )
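
Editor's note: as a usage illustration of the PaginationConfig members documented above (MaxItems, PageSize, StartingToken), a hedged sketch; the 's3' service, the 'list_objects_v2' paginator, and the bucket name are assumptions made for the example.

    import botocore.session

    session = botocore.session.get_session()
    client = session.create_client('s3')
    paginator = client.get_paginator('list_objects_v2')

    # MaxItems caps the total number of items returned; PageSize rides
    # on the service's limit key; a NextToken from an earlier run could
    # be passed as StartingToken to resume pagination.
    for page in paginator.paginate(
            Bucket='example-bucket',
            PaginationConfig={'MaxItems': 100, 'PageSize': 25}):
        print(page.get('KeyCount'))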
diff --git a/contrib/python/botocore/botocore/docs/params.py b/contrib/python/botocore/botocore/docs/params.py
index 410fa0301d..1ce4ddff58 100644
--- a/contrib/python/botocore/botocore/docs/params.py
+++ b/contrib/python/botocore/botocore/docs/params.py
@@ -1,220 +1,220 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.docs.shape import ShapeDocumenter
-from botocore.docs.utils import py_type_name
-
-
-class BaseParamsDocumenter(ShapeDocumenter):
- def document_params(self, section, shape, include=None, exclude=None):
- """Fills out the documentation for a section given a model shape.
-
- :param section: The section to write the documentation to.
-
- :param shape: The shape of the operation.
-
- :type include: Dictionary where keys are parameter names and
- values are the shapes of the parameter names.
- :param include: The parameter shapes to include in the documentation.
-
- :type exclude: List of the names of the parameters to exclude.
- :param exclude: The names of the parameters to exclude from
- documentation.
- """
- history = []
- self.traverse_and_document_shape(
- section=section, shape=shape, history=history,
- name=None, include=include, exclude=exclude)
-
- def document_recursive_shape(self, section, shape, **kwargs):
- self._add_member_documentation(section, shape, **kwargs)
-
- def document_shape_default(self, section, shape, history, include=None,
- exclude=None, **kwargs):
- self._add_member_documentation(section, shape, **kwargs)
-
- def document_shape_type_list(self, section, shape, history, include=None,
- exclude=None, **kwargs):
- self._add_member_documentation(section, shape, **kwargs)
- param_shape = shape.member
- param_section = section.add_new_section(
- param_shape.name, context={'shape': shape.member.name})
- self._start_nested_param(param_section)
- self.traverse_and_document_shape(
- section=param_section, shape=param_shape,
- history=history, name=None)
- section = section.add_new_section('end-list')
- self._end_nested_param(section)
-
- def document_shape_type_map(self, section, shape, history, include=None,
- exclude=None, **kwargs):
- self._add_member_documentation(section, shape, **kwargs)
-
- key_section = section.add_new_section(
- 'key', context={'shape': shape.key.name})
- self._start_nested_param(key_section)
- self._add_member_documentation(key_section, shape.key)
-
- param_section = section.add_new_section(
- shape.value.name, context={'shape': shape.value.name})
- param_section.style.indent()
- self._start_nested_param(param_section)
- self.traverse_and_document_shape(
- section=param_section, shape=shape.value,
- history=history, name=None)
-
- end_section = section.add_new_section('end-map')
- self._end_nested_param(end_section)
- self._end_nested_param(end_section)
-
- def document_shape_type_structure(self, section, shape, history,
- include=None, exclude=None,
- name=None, **kwargs):
- members = self._add_members_to_shape(shape.members, include)
- self._add_member_documentation(section, shape, name=name)
- for param in members:
- if exclude and param in exclude:
- continue
- param_shape = members[param]
- param_section = section.add_new_section(
- param, context={'shape': param_shape.name})
- self._start_nested_param(param_section)
- self.traverse_and_document_shape(
- section=param_section, shape=param_shape,
- history=history, name=param)
- section = section.add_new_section('end-structure')
- self._end_nested_param(section)
-
- def _add_member_documentation(self, section, shape, **kwargs):
- pass
-
- def _add_members_to_shape(self, members, include):
- if include:
- members = members.copy()
- for param in include:
- members[param.name] = param
- return members
-
- def _document_non_top_level_param_type(self, type_section, shape):
- special_py_type = self._get_special_py_type_name(shape)
- py_type = py_type_name(shape.type_name)
-
- type_format = '(%s) -- '
- if special_py_type is not None:
- # Special type can reference a linked class.
- # Italicizing it blows away the link.
- type_section.write(type_format % special_py_type)
- else:
- type_section.style.italics(type_format % py_type)
-
- def _start_nested_param(self, section):
- section.style.indent()
- section.style.new_line()
-
- def _end_nested_param(self, section):
- section.style.dedent()
- section.style.new_line()
-
-
-class ResponseParamsDocumenter(BaseParamsDocumenter):
- """Generates the description for the response parameters"""
-
- EVENT_NAME = 'response-params'
-
- def _add_member_documentation(self, section, shape, name=None, **kwargs):
- name_section = section.add_new_section('param-name')
- name_section.write('- ')
- if name is not None:
- name_section.style.bold('%s ' % name)
- type_section = section.add_new_section('param-type')
- self._document_non_top_level_param_type(type_section, shape)
-
- documentation_section = section.add_new_section('param-documentation')
- if shape.documentation:
- documentation_section.style.indent()
- documentation_section.include_doc_string(shape.documentation)
- section.style.new_paragraph()
-
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.shape import ShapeDocumenter
+from botocore.docs.utils import py_type_name
+
+
+class BaseParamsDocumenter(ShapeDocumenter):
+ def document_params(self, section, shape, include=None, exclude=None):
+ """Fills out the documentation for a section given a model shape.
+
+ :param section: The section to write the documentation to.
+
+ :param shape: The shape of the operation.
+
+ :type include: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+ :param include: The parameter shapes to include in the documentation.
+
+ :type exclude: List of the names of the parameters to exclude.
+ :param exclude: The names of the parameters to exclude from
+ documentation.
+ """
+ history = []
+ self.traverse_and_document_shape(
+ section=section, shape=shape, history=history,
+ name=None, include=include, exclude=exclude)
+
+ def document_recursive_shape(self, section, shape, **kwargs):
+ self._add_member_documentation(section, shape, **kwargs)
+
+ def document_shape_default(self, section, shape, history, include=None,
+ exclude=None, **kwargs):
+ self._add_member_documentation(section, shape, **kwargs)
+
+ def document_shape_type_list(self, section, shape, history, include=None,
+ exclude=None, **kwargs):
+ self._add_member_documentation(section, shape, **kwargs)
+ param_shape = shape.member
+ param_section = section.add_new_section(
+ param_shape.name, context={'shape': shape.member.name})
+ self._start_nested_param(param_section)
+ self.traverse_and_document_shape(
+ section=param_section, shape=param_shape,
+ history=history, name=None)
+ section = section.add_new_section('end-list')
+ self._end_nested_param(section)
+
+ def document_shape_type_map(self, section, shape, history, include=None,
+ exclude=None, **kwargs):
+ self._add_member_documentation(section, shape, **kwargs)
+
+ key_section = section.add_new_section(
+ 'key', context={'shape': shape.key.name})
+ self._start_nested_param(key_section)
+ self._add_member_documentation(key_section, shape.key)
+
+ param_section = section.add_new_section(
+ shape.value.name, context={'shape': shape.value.name})
+ param_section.style.indent()
+ self._start_nested_param(param_section)
+ self.traverse_and_document_shape(
+ section=param_section, shape=shape.value,
+ history=history, name=None)
+
+ end_section = section.add_new_section('end-map')
+ self._end_nested_param(end_section)
+ self._end_nested_param(end_section)
+
+ def document_shape_type_structure(self, section, shape, history,
+ include=None, exclude=None,
+ name=None, **kwargs):
+ members = self._add_members_to_shape(shape.members, include)
+ self._add_member_documentation(section, shape, name=name)
+ for param in members:
+ if exclude and param in exclude:
+ continue
+ param_shape = members[param]
+ param_section = section.add_new_section(
+ param, context={'shape': param_shape.name})
+ self._start_nested_param(param_section)
+ self.traverse_and_document_shape(
+ section=param_section, shape=param_shape,
+ history=history, name=param)
+ section = section.add_new_section('end-structure')
+ self._end_nested_param(section)
+
+ def _add_member_documentation(self, section, shape, **kwargs):
+ pass
+
+ def _add_members_to_shape(self, members, include):
+ if include:
+ members = members.copy()
+ for param in include:
+ members[param.name] = param
+ return members
+
+ def _document_non_top_level_param_type(self, type_section, shape):
+ special_py_type = self._get_special_py_type_name(shape)
+ py_type = py_type_name(shape.type_name)
+
+ type_format = '(%s) -- '
+ if special_py_type is not None:
+ # Special type can reference a linked class.
+ # Italicizing it blows away the link.
+ type_section.write(type_format % special_py_type)
+ else:
+ type_section.style.italics(type_format % py_type)
+
+ def _start_nested_param(self, section):
+ section.style.indent()
+ section.style.new_line()
+
+ def _end_nested_param(self, section):
+ section.style.dedent()
+ section.style.new_line()
+
+
+class ResponseParamsDocumenter(BaseParamsDocumenter):
+ """Generates the description for the response parameters"""
+
+ EVENT_NAME = 'response-params'
+
+ def _add_member_documentation(self, section, shape, name=None, **kwargs):
+ name_section = section.add_new_section('param-name')
+ name_section.write('- ')
+ if name is not None:
+ name_section.style.bold('%s ' % name)
+ type_section = section.add_new_section('param-type')
+ self._document_non_top_level_param_type(type_section, shape)
+
+ documentation_section = section.add_new_section('param-documentation')
+ if shape.documentation:
+ documentation_section.style.indent()
+ documentation_section.include_doc_string(shape.documentation)
+ section.style.new_paragraph()
+
def document_shape_type_event_stream(self, section, shape, history,
**kwargs):
self.document_shape_type_structure(section, shape, history, **kwargs)
-
-
-class RequestParamsDocumenter(BaseParamsDocumenter):
- """Generates the description for the request parameters"""
-
- EVENT_NAME = 'request-params'
-
- def document_shape_type_structure(self, section, shape, history,
- include=None, exclude=None, **kwargs):
- if len(history) > 1:
- self._add_member_documentation(section, shape, **kwargs)
- section.style.indent()
- members = self._add_members_to_shape(shape.members, include)
- for i, param in enumerate(members):
- if exclude and param in exclude:
- continue
- param_shape = members[param]
- param_section = section.add_new_section(
- param, context={'shape': param_shape.name})
- param_section.style.new_line()
- is_required = param in shape.required_members
- self.traverse_and_document_shape(
- section=param_section, shape=param_shape,
- history=history, name=param, is_required=is_required)
- section = section.add_new_section('end-structure')
- if len(history) > 1:
- section.style.dedent()
- section.style.new_line()
-
- def _add_member_documentation(self, section, shape, name=None,
- is_top_level_param=False, is_required=False,
- **kwargs):
- py_type = self._get_special_py_type_name(shape)
- if py_type is None:
- py_type = py_type_name(shape.type_name)
- if is_top_level_param:
- type_section = section.add_new_section('param-type')
- type_section.write(':type %s: %s' % (name, py_type))
- end_type_section = type_section.add_new_section('end-param-type')
- end_type_section.style.new_line()
- name_section = section.add_new_section('param-name')
- name_section.write(':param %s: ' % name)
-
- else:
- name_section = section.add_new_section('param-name')
- name_section.write('- ')
- if name is not None:
- name_section.style.bold('%s ' % name)
- type_section = section.add_new_section('param-type')
- self._document_non_top_level_param_type(type_section, shape)
-
- if is_required:
- is_required_section = section.add_new_section('is-required')
- is_required_section.style.indent()
- is_required_section.style.bold('[REQUIRED] ')
- if shape.documentation:
- documentation_section = section.add_new_section(
- 'param-documentation')
- documentation_section.style.indent()
- documentation_section.include_doc_string(shape.documentation)
- self._add_special_trait_documentation(documentation_section, shape)
- end_param_section = section.add_new_section('end-param')
- end_param_section.style.new_paragraph()
-
- def _add_special_trait_documentation(self, section, shape):
- if 'idempotencyToken' in shape.metadata:
- self._append_idempotency_documentation(section)
-
- def _append_idempotency_documentation(self, section):
- docstring = 'This field is autopopulated if not provided.'
- section.write(docstring)
+
+
+class RequestParamsDocumenter(BaseParamsDocumenter):
+ """Generates the description for the request parameters"""
+
+ EVENT_NAME = 'request-params'
+
+ def document_shape_type_structure(self, section, shape, history,
+ include=None, exclude=None, **kwargs):
+ if len(history) > 1:
+ self._add_member_documentation(section, shape, **kwargs)
+ section.style.indent()
+ members = self._add_members_to_shape(shape.members, include)
+ for i, param in enumerate(members):
+ if exclude and param in exclude:
+ continue
+ param_shape = members[param]
+ param_section = section.add_new_section(
+ param, context={'shape': param_shape.name})
+ param_section.style.new_line()
+ is_required = param in shape.required_members
+ self.traverse_and_document_shape(
+ section=param_section, shape=param_shape,
+ history=history, name=param, is_required=is_required)
+ section = section.add_new_section('end-structure')
+ if len(history) > 1:
+ section.style.dedent()
+ section.style.new_line()
+
+ def _add_member_documentation(self, section, shape, name=None,
+ is_top_level_param=False, is_required=False,
+ **kwargs):
+ py_type = self._get_special_py_type_name(shape)
+ if py_type is None:
+ py_type = py_type_name(shape.type_name)
+ if is_top_level_param:
+ type_section = section.add_new_section('param-type')
+ type_section.write(':type %s: %s' % (name, py_type))
+ end_type_section = type_section.add_new_section('end-param-type')
+ end_type_section.style.new_line()
+ name_section = section.add_new_section('param-name')
+ name_section.write(':param %s: ' % name)
+
+ else:
+ name_section = section.add_new_section('param-name')
+ name_section.write('- ')
+ if name is not None:
+ name_section.style.bold('%s ' % name)
+ type_section = section.add_new_section('param-type')
+ self._document_non_top_level_param_type(type_section, shape)
+
+ if is_required:
+ is_required_section = section.add_new_section('is-required')
+ is_required_section.style.indent()
+ is_required_section.style.bold('[REQUIRED] ')
+ if shape.documentation:
+ documentation_section = section.add_new_section(
+ 'param-documentation')
+ documentation_section.style.indent()
+ documentation_section.include_doc_string(shape.documentation)
+ self._add_special_trait_documentation(documentation_section, shape)
+ end_param_section = section.add_new_section('end-param')
+ end_param_section.style.new_paragraph()
+
+ def _add_special_trait_documentation(self, section, shape):
+ if 'idempotencyToken' in shape.metadata:
+ self._append_idempotency_documentation(section)
+
+ def _append_idempotency_documentation(self, section):
+ docstring = 'This field is autopopulated if not provided.'
+ section.write(docstring)
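
RequestParamsDocumenter above renders top-level parameters as Sphinx field lists (:type/:param) and nested members as bulleted entries. A standalone sketch of the top-level formatting, independent of botocore's section objects (render_top_level_param is a hypothetical helper):

    def render_top_level_param(name, py_type, documentation, is_required=False):
        # Mirrors the ':type name: pytype' / ':param name:' layout emitted by
        # RequestParamsDocumenter._add_member_documentation for top-level
        # parameters in the hunk above.
        lines = [':type %s: %s' % (name, py_type),
                 ':param %s: ' % name]
        if is_required:
            lines.append('  [REQUIRED]')
        lines.append('  %s' % documentation)
        return '\n'.join(lines)

    print(render_top_level_param('Bucket', 'string',
                                 'The name of the bucket.', is_required=True))
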
diff --git a/contrib/python/botocore/botocore/docs/service.py b/contrib/python/botocore/botocore/docs/service.py
index 6cec39edae..0d233e51d0 100644
--- a/contrib/python/botocore/botocore/docs/service.py
+++ b/contrib/python/botocore/botocore/docs/service.py
@@ -1,102 +1,102 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.exceptions import DataNotFoundError
-from botocore.docs.utils import get_official_service_name
-from botocore.docs.client import ClientDocumenter
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.exceptions import DataNotFoundError
+from botocore.docs.utils import get_official_service_name
+from botocore.docs.client import ClientDocumenter
from botocore.docs.client import ClientExceptionsDocumenter
-from botocore.docs.waiter import WaiterDocumenter
-from botocore.docs.paginator import PaginatorDocumenter
-from botocore.docs.bcdoc.restdoc import DocumentStructure
-
-
-class ServiceDocumenter(object):
- def __init__(self, service_name, session):
- self._session = session
- self._service_name = service_name
-
- self._client = self._session.create_client(
- service_name, region_name='us-east-1', aws_access_key_id='foo',
- aws_secret_access_key='bar')
+from botocore.docs.waiter import WaiterDocumenter
+from botocore.docs.paginator import PaginatorDocumenter
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+
+
+class ServiceDocumenter(object):
+ def __init__(self, service_name, session):
+ self._session = session
+ self._service_name = service_name
+
+ self._client = self._session.create_client(
+ service_name, region_name='us-east-1', aws_access_key_id='foo',
+ aws_secret_access_key='bar')
self._event_emitter = self._client.meta.events
-
- self.sections = [
- 'title',
- 'table-of-contents',
- 'client-api',
+
+ self.sections = [
+ 'title',
+ 'table-of-contents',
+ 'client-api',
'client-exceptions',
- 'paginator-api',
- 'waiter-api'
- ]
-
- def document_service(self):
- """Documents an entire service.
-
-        :returns: The reStructuredText of the documented service.
- """
- doc_structure = DocumentStructure(
- self._service_name, section_names=self.sections,
- target='html')
- self.title(doc_structure.get_section('title'))
- self.table_of_contents(doc_structure.get_section('table-of-contents'))
- self.client_api(doc_structure.get_section('client-api'))
+ 'paginator-api',
+ 'waiter-api'
+ ]
+
+ def document_service(self):
+ """Documents an entire service.
+
+        :returns: The reStructuredText of the documented service.
+ """
+ doc_structure = DocumentStructure(
+ self._service_name, section_names=self.sections,
+ target='html')
+ self.title(doc_structure.get_section('title'))
+ self.table_of_contents(doc_structure.get_section('table-of-contents'))
+ self.client_api(doc_structure.get_section('client-api'))
self.client_exceptions(doc_structure.get_section('client-exceptions'))
- self.paginator_api(doc_structure.get_section('paginator-api'))
- self.waiter_api(doc_structure.get_section('waiter-api'))
- return doc_structure.flush_structure()
-
- def title(self, section):
- section.style.h1(self._client.__class__.__name__)
+ self.paginator_api(doc_structure.get_section('paginator-api'))
+ self.waiter_api(doc_structure.get_section('waiter-api'))
+ return doc_structure.flush_structure()
+
+ def title(self, section):
+ section.style.h1(self._client.__class__.__name__)
self._event_emitter.emit(
'docs.%s.%s' % ('title',
self._service_name),
section=section
)
-
- def table_of_contents(self, section):
- section.style.table_of_contents(title='Table of Contents', depth=2)
-
- def client_api(self, section):
- examples = None
- try:
- examples = self.get_examples(self._service_name)
- except DataNotFoundError:
- pass
-
- ClientDocumenter(self._client, examples).document_client(section)
-
+
+ def table_of_contents(self, section):
+ section.style.table_of_contents(title='Table of Contents', depth=2)
+
+ def client_api(self, section):
+ examples = None
+ try:
+ examples = self.get_examples(self._service_name)
+ except DataNotFoundError:
+ pass
+
+ ClientDocumenter(self._client, examples).document_client(section)
+
def client_exceptions(self, section):
ClientExceptionsDocumenter(self._client).document_exceptions(section)
- def paginator_api(self, section):
- try:
- service_paginator_model = self._session.get_paginator_model(
- self._service_name)
- except DataNotFoundError:
- return
- paginator_documenter = PaginatorDocumenter(
- self._client, service_paginator_model)
- paginator_documenter.document_paginators(section)
-
- def waiter_api(self, section):
- if self._client.waiter_names:
- service_waiter_model = self._session.get_waiter_model(
- self._service_name)
- waiter_documenter = WaiterDocumenter(
- self._client, service_waiter_model)
- waiter_documenter.document_waiters(section)
-
- def get_examples(self, service_name, api_version=None):
- loader = self._session.get_component('data_loader')
- examples = loader.load_service_model(
- service_name, 'examples-1', api_version)
- return examples['examples']
+ def paginator_api(self, section):
+ try:
+ service_paginator_model = self._session.get_paginator_model(
+ self._service_name)
+ except DataNotFoundError:
+ return
+ paginator_documenter = PaginatorDocumenter(
+ self._client, service_paginator_model)
+ paginator_documenter.document_paginators(section)
+
+ def waiter_api(self, section):
+ if self._client.waiter_names:
+ service_waiter_model = self._session.get_waiter_model(
+ self._service_name)
+ waiter_documenter = WaiterDocumenter(
+ self._client, service_waiter_model)
+ waiter_documenter.document_waiters(section)
+
+ def get_examples(self, service_name, api_version=None):
+ loader = self._session.get_component('data_loader')
+ examples = loader.load_service_model(
+ service_name, 'examples-1', api_version)
+ return examples['examples']
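
ServiceDocumenter above assembles the title, table of contents, client, exceptions, paginator, and waiter sections into one document. A hedged usage sketch, assuming local model data for 's3' is available; note that the class internally creates a client with dummy credentials, as shown in the hunk:

    import botocore.session
    from botocore.docs.service import ServiceDocumenter

    session = botocore.session.get_session()
    documenter = ServiceDocumenter('s3', session)
    # flush_structure() returns the generated markup (bytes in this
    # vintage of botocore); missing paginator/waiter models are skipped.
    generated = documenter.document_service()
    print(generated[:200])
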
diff --git a/contrib/python/botocore/botocore/docs/shape.py b/contrib/python/botocore/botocore/docs/shape.py
index 71a41d3ddc..462da66000 100644
--- a/contrib/python/botocore/botocore/docs/shape.py
+++ b/contrib/python/botocore/botocore/docs/shape.py
@@ -1,123 +1,123 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-
-# NOTE: This class should not be instantiated directly, nor should its
-# ``traverse_and_document_shape`` method be called directly. It is meant
-# to be subclassed by a Documenter class that provides the appropriate
-# methods and attributes.
-from botocore.utils import is_json_value_header
-
-
-class ShapeDocumenter(object):
- EVENT_NAME = ''
-
- def __init__(self, service_name, operation_name, event_emitter,
- context=None):
- self._service_name = service_name
- self._operation_name = operation_name
- self._event_emitter = event_emitter
- self._context = context
- if context is None:
- self._context = {
- 'special_shape_types': {}
- }
-
- def traverse_and_document_shape(self, section, shape, history,
- include=None, exclude=None, name=None,
- is_required=False):
- """Traverses and documents a shape
-
- Will take a self class and call its appropriate methods as a shape
- is traversed.
-
- :param section: The section to document.
-
- :param history: A list of the names of the shapes that have been
- traversed.
-
- :type include: Dictionary where keys are parameter names and
- values are the shapes of the parameter names.
- :param include: The parameter shapes to include in the documentation.
-
- :type exclude: List of the names of the parameters to exclude.
- :param exclude: The names of the parameters to exclude from
- documentation.
-
- :param name: The name of the shape.
-
- :param is_required: If the shape is a required member.
- """
- param_type = shape.type_name
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+
+# NOTE: This class should not be instantiated directly, nor should its
+# ``traverse_and_document_shape`` method be called directly. It is meant
+# to be subclassed by a Documenter class that provides the appropriate
+# methods and attributes.
+from botocore.utils import is_json_value_header
+
+
+class ShapeDocumenter(object):
+ EVENT_NAME = ''
+
+ def __init__(self, service_name, operation_name, event_emitter,
+ context=None):
+ self._service_name = service_name
+ self._operation_name = operation_name
+ self._event_emitter = event_emitter
+ self._context = context
+ if context is None:
+ self._context = {
+ 'special_shape_types': {}
+ }
+
+ def traverse_and_document_shape(self, section, shape, history,
+ include=None, exclude=None, name=None,
+ is_required=False):
+ """Traverses and documents a shape
+
+ Will take a self class and call its appropriate methods as a shape
+ is traversed.
+
+ :param section: The section to document.
+
+ :param history: A list of the names of the shapes that have been
+ traversed.
+
+ :type include: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+ :param include: The parameter shapes to include in the documentation.
+
+ :type exclude: List of the names of the parameters to exclude.
+ :param exclude: The names of the parameters to exclude from
+ documentation.
+
+ :param name: The name of the shape.
+
+ :param is_required: If the shape is a required member.
+ """
+ param_type = shape.type_name
if getattr(shape, 'serialization', {}).get('eventstream'):
param_type = 'event_stream'
- if shape.name in history:
- self.document_recursive_shape(section, shape, name=name)
- else:
- history.append(shape.name)
- is_top_level_param = (len(history) == 2)
+ if shape.name in history:
+ self.document_recursive_shape(section, shape, name=name)
+ else:
+ history.append(shape.name)
+ is_top_level_param = (len(history) == 2)
if hasattr(shape, 'is_document_type') and shape.is_document_type:
param_type = 'document'
- getattr(self, 'document_shape_type_%s' % param_type,
- self.document_shape_default)(
- section, shape, history=history, name=name,
- include=include, exclude=exclude,
- is_top_level_param=is_top_level_param,
- is_required=is_required)
- if is_top_level_param:
- self._event_emitter.emit(
- 'docs.%s.%s.%s.%s' % (self.EVENT_NAME,
- self._service_name,
- self._operation_name,
- name),
- section=section)
- at_overlying_method_section = (len(history) == 1)
- if at_overlying_method_section:
- self._event_emitter.emit(
- 'docs.%s.%s.%s.complete-section' % (self.EVENT_NAME,
- self._service_name,
- self._operation_name),
- section=section)
- history.pop()
-
- def _get_special_py_default(self, shape):
- special_defaults = {
+ getattr(self, 'document_shape_type_%s' % param_type,
+ self.document_shape_default)(
+ section, shape, history=history, name=name,
+ include=include, exclude=exclude,
+ is_top_level_param=is_top_level_param,
+ is_required=is_required)
+ if is_top_level_param:
+ self._event_emitter.emit(
+ 'docs.%s.%s.%s.%s' % (self.EVENT_NAME,
+ self._service_name,
+ self._operation_name,
+ name),
+ section=section)
+ at_overlying_method_section = (len(history) == 1)
+ if at_overlying_method_section:
+ self._event_emitter.emit(
+ 'docs.%s.%s.%s.complete-section' % (self.EVENT_NAME,
+ self._service_name,
+ self._operation_name),
+ section=section)
+ history.pop()
+
+ def _get_special_py_default(self, shape):
+ special_defaults = {
'document_type': '{...}|[...]|123|123.4|\'string\'|True|None',
- 'jsonvalue_header': '{...}|[...]|123|123.4|\'string\'|True|None',
- 'streaming_input_shape': 'b\'bytes\'|file',
+ 'jsonvalue_header': '{...}|[...]|123|123.4|\'string\'|True|None',
+ 'streaming_input_shape': 'b\'bytes\'|file',
'streaming_output_shape': 'StreamingBody()',
'eventstream_output_shape': 'EventStream()',
- }
- return self._get_value_for_special_type(shape, special_defaults)
-
- def _get_special_py_type_name(self, shape):
- special_type_names = {
+ }
+ return self._get_value_for_special_type(shape, special_defaults)
+
+ def _get_special_py_type_name(self, shape):
+ special_type_names = {
'document_type': ':ref:`document<document>`',
- 'jsonvalue_header': 'JSON serializable',
- 'streaming_input_shape': 'bytes or seekable file-like object',
+ 'jsonvalue_header': 'JSON serializable',
+ 'streaming_input_shape': 'bytes or seekable file-like object',
'streaming_output_shape': ':class:`.StreamingBody`',
'eventstream_output_shape': ':class:`.EventStream`',
- }
- return self._get_value_for_special_type(shape, special_type_names)
-
- def _get_value_for_special_type(self, shape, special_type_map):
- if is_json_value_header(shape):
- return special_type_map['jsonvalue_header']
+ }
+ return self._get_value_for_special_type(shape, special_type_names)
+
+ def _get_value_for_special_type(self, shape, special_type_map):
+ if is_json_value_header(shape):
+ return special_type_map['jsonvalue_header']
if hasattr(shape, 'is_document_type') and shape.is_document_type:
return special_type_map['document_type']
- for special_type, marked_shape in self._context[
- 'special_shape_types'].items():
- if special_type in special_type_map:
- if shape == marked_shape:
- return special_type_map[special_type]
- return None
+ for special_type, marked_shape in self._context[
+ 'special_shape_types'].items():
+ if special_type in special_type_map:
+ if shape == marked_shape:
+ return special_type_map[special_type]
+ return None
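
traverse_and_document_shape above dispatches on the shape's type name via getattr and falls back to document_shape_default. A minimal standalone illustration of that dispatch pattern (TinyDocumenter and the dict-based shapes are made up for illustration):

    class TinyDocumenter(object):
        # Handler that exists for 'structure' shapes.
        def document_shape_type_structure(self, shape):
            return 'structure: %s' % shape['name']

        # Fallback, like document_shape_default above.
        def document_shape_default(self, shape):
            return 'default: %s' % shape['name']

        def document(self, shape):
            handler = getattr(
                self, 'document_shape_type_%s' % shape['type_name'],
                self.document_shape_default)
            return handler(shape)

    d = TinyDocumenter()
    print(d.document({'type_name': 'structure', 'name': 'Outer'}))  # structure: Outer
    print(d.document({'type_name': 'blob', 'name': 'Body'}))        # default: Body
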
diff --git a/contrib/python/botocore/botocore/docs/sharedexample.py b/contrib/python/botocore/botocore/docs/sharedexample.py
index a9ca828876..1a31b6e481 100644
--- a/contrib/python/botocore/botocore/docs/sharedexample.py
+++ b/contrib/python/botocore/botocore/docs/sharedexample.py
@@ -1,223 +1,223 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import re
-import numbers
-from botocore.utils import parse_timestamp
-from botocore.docs.utils import escape_controls
-from botocore.compat import six
-
-
-class SharedExampleDocumenter(object):
- def document_shared_example(self, example, prefix, section,
- operation_model):
- """Documents a single shared example based on its definition.
-
-        :param example: The model of the example.
-
- :param prefix: The prefix to use in the method example.
-
- :param section: The section to write to.
-
-        :param operation_model: The model of the operation used in the example.
- """
- section.style.new_paragraph()
- section.write(example.get('description'))
- section.style.new_line()
- self.document_input(section, example, prefix,
- operation_model.input_shape)
- self.document_output(section, example, operation_model.output_shape)
-
- def document_input(self, section, example, prefix, shape):
- input_section = section.add_new_section('input')
- input_section.style.start_codeblock()
- if prefix is not None:
- input_section.write(prefix)
- params = example.get('input', {})
- comments = example.get('comments')
- if comments:
- comments = comments.get('input')
- param_section = input_section.add_new_section('parameters')
- self._document_params(param_section, params, comments, [], shape)
- closing_section = input_section.add_new_section('input-close')
- closing_section.style.new_line()
- closing_section.style.new_line()
- closing_section.write('print(response)')
- closing_section.style.end_codeblock()
-
- def document_output(self, section, example, shape):
- output_section = section.add_new_section('output')
- output_section.style.new_line()
- output_section.write('Expected Output:')
- output_section.style.new_line()
- output_section.style.start_codeblock()
- params = example.get('output', {})
-
- # There might not be an output, but we will return metadata anyway
- params['ResponseMetadata'] = {"...": "..."}
- comments = example.get('comments')
- if comments:
- comments = comments.get('output')
- self._document_dict(output_section, params, comments, [], shape, True)
- closing_section = output_section.add_new_section('output-close')
- closing_section.style.end_codeblock()
-
- def _document(self, section, value, comments, path, shape):
- """
- :param section: The section to add the docs to.
-
- :param value: The input / output values representing the parameters that
- are included in the example.
-
- :param comments: The dictionary containing all the comments to be
- applied to the example.
-
- :param path: A list describing where the documenter is in traversing the
- parameters. This is used to find the equivalent location
- in the comments dictionary.
- """
- if isinstance(value, dict):
- self._document_dict(section, value, comments, path, shape)
- elif isinstance(value, list):
- self._document_list(section, value, comments, path, shape)
- elif isinstance(value, numbers.Number):
- self._document_number(section, value, path)
- elif shape and shape.type_name == 'timestamp':
- self._document_datetime(section, value, path)
- else:
- self._document_str(section, value, path)
-
- def _document_dict(self, section, value, comments, path, shape,
- top_level=False):
- dict_section = section.add_new_section('dict-value')
- self._start_nested_value(dict_section, '{')
- for key, val in value.items():
- path.append('.%s' % key)
- item_section = dict_section.add_new_section(key)
- item_section.style.new_line()
- item_comment = self._get_comment(path, comments)
- if item_comment:
- item_section.write(item_comment)
- item_section.style.new_line()
- item_section.write("'%s': " % key)
-
-            # Shape could be None if there is no output besides ResponseMetadata
- item_shape = None
- if shape:
- if shape.type_name == 'structure':
- item_shape = shape.members.get(key)
- elif shape.type_name == 'map':
- item_shape = shape.value
- self._document(item_section, val, comments, path, item_shape)
- path.pop()
- dict_section_end = dict_section.add_new_section('ending-brace')
- self._end_nested_value(dict_section_end, '}')
- if not top_level:
- dict_section_end.write(',')
-
- def _document_params(self, section, value, comments, path, shape):
- param_section = section.add_new_section('param-values')
- self._start_nested_value(param_section, '(')
- for key, val in value.items():
- path.append('.%s' % key)
- item_section = param_section.add_new_section(key)
- item_section.style.new_line()
- item_comment = self._get_comment(path, comments)
- if item_comment:
- item_section.write(item_comment)
- item_section.style.new_line()
- item_section.write(key + '=')
-
-            # Shape could be None if there are no input parameters
- item_shape = None
- if shape:
- item_shape = shape.members.get(key)
- self._document(item_section, val, comments, path, item_shape)
- path.pop()
- param_section_end = param_section.add_new_section('ending-parenthesis')
- self._end_nested_value(param_section_end, ')')
-
- def _document_list(self, section, value, comments, path, shape):
- list_section = section.add_new_section('list-section')
- self._start_nested_value(list_section, '[')
- item_shape = shape.member
- for index, val in enumerate(value):
- item_section = list_section.add_new_section(index)
- item_section.style.new_line()
- path.append('[%s]' % index)
- item_comment = self._get_comment(path, comments)
- if item_comment:
- item_section.write(item_comment)
- item_section.style.new_line()
- self._document(item_section, val, comments, path, item_shape)
- path.pop()
- list_section_end = list_section.add_new_section('ending-bracket')
- self._end_nested_value(list_section_end, '],')
-
- def _document_str(self, section, value, path):
- # We do the string conversion because this might accept a type that
- # we don't specifically address.
- safe_value = escape_controls(value)
- section.write(u"'%s'," % six.text_type(safe_value))
-
- def _document_number(self, section, value, path):
- section.write("%s," % str(value))
-
- def _document_datetime(self, section, value, path):
- datetime_tuple = parse_timestamp(value).timetuple()
- datetime_str = str(datetime_tuple[0])
- for i in range(1, len(datetime_tuple)):
- datetime_str += ", " + str(datetime_tuple[i])
- section.write("datetime(%s)," % datetime_str)
-
- def _get_comment(self, path, comments):
- key = re.sub(r'^\.', '', ''.join(path))
- if comments and key in comments:
- return '# ' + comments[key]
- else:
- return ''
-
- def _start_nested_value(self, section, start):
- section.write(start)
- section.style.indent()
- section.style.indent()
-
- def _end_nested_value(self, section, end):
- section.style.dedent()
- section.style.dedent()
- section.style.new_line()
- section.write(end)
-
-
-def document_shared_examples(section, operation_model, example_prefix,
- shared_examples):
- """Documents the shared examples
-
- :param section: The section to write to.
-
- :param operation_model: The model of the operation.
-
- :param example_prefix: The prefix to use in the method example.
-
- :param shared_examples: The shared JSON examples from the model.
- """
- container_section = section.add_new_section('shared-examples')
- container_section.style.new_paragraph()
- container_section.style.bold('Examples')
- documenter = SharedExampleDocumenter()
- for example in shared_examples:
- documenter.document_shared_example(
- example=example,
- section=container_section.add_new_section(example['id']),
- prefix=example_prefix,
- operation_model=operation_model
- )
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import re
+import numbers
+from botocore.utils import parse_timestamp
+from botocore.docs.utils import escape_controls
+from botocore.compat import six
+
+
+class SharedExampleDocumenter(object):
+ def document_shared_example(self, example, prefix, section,
+ operation_model):
+ """Documents a single shared example based on its definition.
+
+        :param example: The model of the example.
+
+ :param prefix: The prefix to use in the method example.
+
+ :param section: The section to write to.
+
+        :param operation_model: The model of the operation used in the example.
+ """
+ section.style.new_paragraph()
+ section.write(example.get('description'))
+ section.style.new_line()
+ self.document_input(section, example, prefix,
+ operation_model.input_shape)
+ self.document_output(section, example, operation_model.output_shape)
+
+ def document_input(self, section, example, prefix, shape):
+ input_section = section.add_new_section('input')
+ input_section.style.start_codeblock()
+ if prefix is not None:
+ input_section.write(prefix)
+ params = example.get('input', {})
+ comments = example.get('comments')
+ if comments:
+ comments = comments.get('input')
+ param_section = input_section.add_new_section('parameters')
+ self._document_params(param_section, params, comments, [], shape)
+ closing_section = input_section.add_new_section('input-close')
+ closing_section.style.new_line()
+ closing_section.style.new_line()
+ closing_section.write('print(response)')
+ closing_section.style.end_codeblock()
+
+ def document_output(self, section, example, shape):
+ output_section = section.add_new_section('output')
+ output_section.style.new_line()
+ output_section.write('Expected Output:')
+ output_section.style.new_line()
+ output_section.style.start_codeblock()
+ params = example.get('output', {})
+
+ # There might not be an output, but we will return metadata anyway
+ params['ResponseMetadata'] = {"...": "..."}
+ comments = example.get('comments')
+ if comments:
+ comments = comments.get('output')
+ self._document_dict(output_section, params, comments, [], shape, True)
+ closing_section = output_section.add_new_section('output-close')
+ closing_section.style.end_codeblock()
+
+ def _document(self, section, value, comments, path, shape):
+ """
+ :param section: The section to add the docs to.
+
+ :param value: The input / output values representing the parameters that
+ are included in the example.
+
+ :param comments: The dictionary containing all the comments to be
+ applied to the example.
+
+ :param path: A list describing where the documenter is in traversing the
+ parameters. This is used to find the equivalent location
+ in the comments dictionary.
+ """
+ if isinstance(value, dict):
+ self._document_dict(section, value, comments, path, shape)
+ elif isinstance(value, list):
+ self._document_list(section, value, comments, path, shape)
+ elif isinstance(value, numbers.Number):
+ self._document_number(section, value, path)
+ elif shape and shape.type_name == 'timestamp':
+ self._document_datetime(section, value, path)
+ else:
+ self._document_str(section, value, path)
+
+ def _document_dict(self, section, value, comments, path, shape,
+ top_level=False):
+ dict_section = section.add_new_section('dict-value')
+ self._start_nested_value(dict_section, '{')
+ for key, val in value.items():
+ path.append('.%s' % key)
+ item_section = dict_section.add_new_section(key)
+ item_section.style.new_line()
+ item_comment = self._get_comment(path, comments)
+ if item_comment:
+ item_section.write(item_comment)
+ item_section.style.new_line()
+ item_section.write("'%s': " % key)
+
+            # Shape could be None if there is no output besides ResponseMetadata
+ item_shape = None
+ if shape:
+ if shape.type_name == 'structure':
+ item_shape = shape.members.get(key)
+ elif shape.type_name == 'map':
+ item_shape = shape.value
+ self._document(item_section, val, comments, path, item_shape)
+ path.pop()
+ dict_section_end = dict_section.add_new_section('ending-brace')
+ self._end_nested_value(dict_section_end, '}')
+ if not top_level:
+ dict_section_end.write(',')
+
+ def _document_params(self, section, value, comments, path, shape):
+ param_section = section.add_new_section('param-values')
+ self._start_nested_value(param_section, '(')
+ for key, val in value.items():
+ path.append('.%s' % key)
+ item_section = param_section.add_new_section(key)
+ item_section.style.new_line()
+ item_comment = self._get_comment(path, comments)
+ if item_comment:
+ item_section.write(item_comment)
+ item_section.style.new_line()
+ item_section.write(key + '=')
+
+            # Shape could be None if there are no input parameters
+ item_shape = None
+ if shape:
+ item_shape = shape.members.get(key)
+ self._document(item_section, val, comments, path, item_shape)
+ path.pop()
+ param_section_end = param_section.add_new_section('ending-parenthesis')
+ self._end_nested_value(param_section_end, ')')
+
+ def _document_list(self, section, value, comments, path, shape):
+ list_section = section.add_new_section('list-section')
+ self._start_nested_value(list_section, '[')
+ item_shape = shape.member
+ for index, val in enumerate(value):
+ item_section = list_section.add_new_section(index)
+ item_section.style.new_line()
+ path.append('[%s]' % index)
+ item_comment = self._get_comment(path, comments)
+ if item_comment:
+ item_section.write(item_comment)
+ item_section.style.new_line()
+ self._document(item_section, val, comments, path, item_shape)
+ path.pop()
+ list_section_end = list_section.add_new_section('ending-bracket')
+ self._end_nested_value(list_section_end, '],')
+
+ def _document_str(self, section, value, path):
+ # We do the string conversion because this might accept a type that
+ # we don't specifically address.
+ safe_value = escape_controls(value)
+ section.write(u"'%s'," % six.text_type(safe_value))
+
+ def _document_number(self, section, value, path):
+ section.write("%s," % str(value))
+
+ def _document_datetime(self, section, value, path):
+ datetime_tuple = parse_timestamp(value).timetuple()
+ datetime_str = str(datetime_tuple[0])
+ for i in range(1, len(datetime_tuple)):
+ datetime_str += ", " + str(datetime_tuple[i])
+ section.write("datetime(%s)," % datetime_str)
+
+ def _get_comment(self, path, comments):
+ key = re.sub(r'^\.', '', ''.join(path))
+ if comments and key in comments:
+ return '# ' + comments[key]
+ else:
+ return ''
+
+ def _start_nested_value(self, section, start):
+ section.write(start)
+ section.style.indent()
+ section.style.indent()
+
+ def _end_nested_value(self, section, end):
+ section.style.dedent()
+ section.style.dedent()
+ section.style.new_line()
+ section.write(end)
+
+
+def document_shared_examples(section, operation_model, example_prefix,
+ shared_examples):
+ """Documents the shared examples
+
+ :param section: The section to write to.
+
+ :param operation_model: The model of the operation.
+
+ :param example_prefix: The prefix to use in the method example.
+
+ :param shared_examples: The shared JSON examples from the model.
+ """
+ container_section = section.add_new_section('shared-examples')
+ container_section.style.new_paragraph()
+ container_section.style.bold('Examples')
+ documenter = SharedExampleDocumenter()
+ for example in shared_examples:
+ documenter.document_shared_example(
+ example=example,
+ section=container_section.add_new_section(example['id']),
+ prefix=example_prefix,
+ operation_model=operation_model
+ )
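
SharedExampleDocumenter above keys example comments by a dotted and indexed path that is built up during traversal and then stripped of its leading dot. A small sketch of how those keys line up (the sample path and comments are hypothetical):

    import re

    # The traversal appends '.Key' for dict members and '[i]' for list
    # items, then strips the leading dot, exactly as _get_comment does above.
    path = []
    path.append('.Tags')    # entering the 'Tags' member
    path.append('[0]')      # first element of the list
    path.append('.Value')   # its 'Value' member

    key = re.sub(r'^\.', '', ''.join(path))
    print(key)  # Tags[0].Value

    comments = {'Tags[0].Value': 'The tag value to apply.'}  # hypothetical
    if key in comments:
        print('# ' + comments[key])
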
diff --git a/contrib/python/botocore/botocore/docs/utils.py b/contrib/python/botocore/botocore/docs/utils.py
index abb4872482..a0d2d6623f 100644
--- a/contrib/python/botocore/botocore/docs/utils.py
+++ b/contrib/python/botocore/botocore/docs/utils.py
@@ -1,197 +1,197 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import re
-from collections import namedtuple
-
-
-def py_type_name(type_name):
- """Get the Python type name for a given model type.
-
- >>> py_type_name('list')
- 'list'
- >>> py_type_name('structure')
- 'dict'
-
- :rtype: string
- """
- return {
- 'blob': 'bytes',
- 'character': 'string',
- 'double': 'float',
- 'long': 'integer',
- 'map': 'dict',
- 'structure': 'dict',
- 'timestamp': 'datetime',
- }.get(type_name, type_name)
-
-
-def py_default(type_name):
- """Get the Python default value for a given model type.
-
- >>> py_default('string')
- '\'string\''
- >>> py_default('list')
- '[...]'
- >>> py_default('unknown')
- '...'
-
- :rtype: string
- """
- return {
- 'double': '123.0',
- 'long': '123',
- 'integer': '123',
- 'string': "'string'",
- 'blob': "b'bytes'",
- 'boolean': 'True|False',
- 'list': '[...]',
- 'map': '{...}',
- 'structure': '{...}',
- 'timestamp': 'datetime(2015, 1, 1)',
- }.get(type_name, '...')
-
-
-def get_official_service_name(service_model):
- """Generate the official name of an AWS Service
-
- :param service_model: The service model representing the service
- """
- official_name = service_model.metadata.get('serviceFullName')
- short_name = service_model.metadata.get('serviceAbbreviation', '')
- if short_name.startswith('Amazon'):
- short_name = short_name[7:]
- if short_name.startswith('AWS'):
- short_name = short_name[4:]
- if short_name and short_name.lower() not in official_name.lower():
- official_name += ' ({0})'.format(short_name)
- return official_name
-
-
-_DocumentedShape = namedtuple(
- 'DocumentedShape', ['name', 'type_name', 'documentation', 'metadata',
- 'members', 'required_members'])
-
-
-class DocumentedShape(_DocumentedShape):
- """Use this class to inject new shapes into a model for documentation"""
- def __new__(cls, name, type_name, documentation, metadata=None,
- members=None, required_members=None):
- if metadata is None:
- metadata = []
- if members is None:
- members = []
- if required_members is None:
- required_members = []
- return super(DocumentedShape, cls).__new__(
- cls, name, type_name, documentation, metadata, members,
- required_members)
-
-
-class AutoPopulatedParam(object):
- def __init__(self, name, param_description=None):
- self.name = name
- self.param_description = param_description
- if param_description is None:
- self.param_description = (
- 'Please note that this parameter is automatically populated '
- 'if it is not provided. Including this parameter is not '
-                'required.\n')
-
- def document_auto_populated_param(self, event_name, section, **kwargs):
- """Documents auto populated parameters
-
- It will remove any required marks for the parameter, remove the
- parameter from the example, and add a snippet about the parameter
- being autopopulated in the description.
- """
- if event_name.startswith('docs.request-params'):
- if self.name in section.available_sections:
- section = section.get_section(self.name)
- if 'is-required' in section.available_sections:
- section.delete_section('is-required')
- description_section = section.get_section(
- 'param-documentation')
- description_section.writeln(self.param_description)
- elif event_name.startswith('docs.request-example'):
- section = section.get_section('structure-value')
- if self.name in section.available_sections:
- section.delete_section(self.name)
-
-
-class HideParamFromOperations(object):
- """Hides a single parameter from multiple operations.
-
-    This handler removes a parameter from documentation and from
-    examples. It is typically used for things that are
- automatically populated because a user would be unable to provide
- a value (e.g., a checksum of a serialized XML request body)."""
- def __init__(self, service_name, parameter_name, operation_names):
- """
- :type service_name: str
- :param service_name: Name of the service to modify.
-
- :type parameter_name: str
- :param parameter_name: Name of the parameter to modify.
-
- :type operation_names: list
- :param operation_names: Operation names to modify.
- """
- self._parameter_name = parameter_name
- self._params_events = set()
- self._example_events = set()
- # Build up the sets of relevant event names.
- param_template = 'docs.request-params.%s.%s.complete-section'
- example_template = 'docs.request-example.%s.%s.complete-section'
- for name in operation_names:
- self._params_events.add(param_template % (service_name, name))
- self._example_events.add(example_template % (service_name, name))
-
- def hide_param(self, event_name, section, **kwargs):
- if event_name in self._example_events:
- # Modify the structure value for example events.
- section = section.get_section('structure-value')
- elif event_name not in self._params_events:
- return
- if self._parameter_name in section.available_sections:
- section.delete_section(self._parameter_name)
-
-
-class AppendParamDocumentation(object):
- """Appends documentation to a specific parameter"""
- def __init__(self, parameter_name, doc_string):
- self._parameter_name = parameter_name
- self._doc_string = doc_string
-
- def append_documentation(self, event_name, section, **kwargs):
- if self._parameter_name in section.available_sections:
- section = section.get_section(self._parameter_name)
- description_section = section.get_section(
- 'param-documentation')
- description_section.writeln(self._doc_string)
-
-
-_CONTROLS = {
- '\n': '\\n',
- '\r': '\\r',
- '\t': '\\t',
- '\b': '\\b',
- '\f': '\\f',
-}
-# Combines all _CONTROLS keys into a single alternation ("or") regular expression
-_ESCAPE_CONTROLS_RE = re.compile('|'.join(map(re.escape, _CONTROLS)))
-# Looks up the appropriate replacement in _CONTROLS based on the match
-_CONTROLS_MATCH_HANDLER = lambda match: _CONTROLS[match.group(0)]
-
-
-def escape_controls(value):
- return _ESCAPE_CONTROLS_RE.sub(_CONTROLS_MATCH_HANDLER, value)
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import re
+from collections import namedtuple
+
+
+def py_type_name(type_name):
+ """Get the Python type name for a given model type.
+
+ >>> py_type_name('list')
+ 'list'
+ >>> py_type_name('structure')
+ 'dict'
+
+ :rtype: string
+ """
+ return {
+ 'blob': 'bytes',
+ 'character': 'string',
+ 'double': 'float',
+ 'long': 'integer',
+ 'map': 'dict',
+ 'structure': 'dict',
+ 'timestamp': 'datetime',
+ }.get(type_name, type_name)
+
+
+def py_default(type_name):
+ """Get the Python default value for a given model type.
+
+ >>> py_default('string')
+ '\'string\''
+ >>> py_default('list')
+ '[...]'
+ >>> py_default('unknown')
+ '...'
+
+ :rtype: string
+ """
+ return {
+ 'double': '123.0',
+ 'long': '123',
+ 'integer': '123',
+ 'string': "'string'",
+ 'blob': "b'bytes'",
+ 'boolean': 'True|False',
+ 'list': '[...]',
+ 'map': '{...}',
+ 'structure': '{...}',
+ 'timestamp': 'datetime(2015, 1, 1)',
+ }.get(type_name, '...')
+
+
+def get_official_service_name(service_model):
+ """Generate the official name of an AWS Service
+
+ :param service_model: The service model representing the service
+ """
+ official_name = service_model.metadata.get('serviceFullName')
+ short_name = service_model.metadata.get('serviceAbbreviation', '')
+ if short_name.startswith('Amazon'):
+ short_name = short_name[7:]
+ if short_name.startswith('AWS'):
+ short_name = short_name[4:]
+ if short_name and short_name.lower() not in official_name.lower():
+ official_name += ' ({0})'.format(short_name)
+ return official_name
+
+
+_DocumentedShape = namedtuple(
+ 'DocumentedShape', ['name', 'type_name', 'documentation', 'metadata',
+ 'members', 'required_members'])
+
+
+class DocumentedShape(_DocumentedShape):
+ """Use this class to inject new shapes into a model for documentation"""
+ def __new__(cls, name, type_name, documentation, metadata=None,
+ members=None, required_members=None):
+ if metadata is None:
+ metadata = []
+ if members is None:
+ members = []
+ if required_members is None:
+ required_members = []
+ return super(DocumentedShape, cls).__new__(
+ cls, name, type_name, documentation, metadata, members,
+ required_members)
+
+
+class AutoPopulatedParam(object):
+ def __init__(self, name, param_description=None):
+ self.name = name
+ self.param_description = param_description
+ if param_description is None:
+ self.param_description = (
+ 'Please note that this parameter is automatically populated '
+ 'if it is not provided. Including this parameter is not '
+                'required.\n')
+
+ def document_auto_populated_param(self, event_name, section, **kwargs):
+ """Documents auto populated parameters
+
+ It will remove any required marks for the parameter, remove the
+ parameter from the example, and add a snippet about the parameter
+ being autopopulated in the description.
+ """
+ if event_name.startswith('docs.request-params'):
+ if self.name in section.available_sections:
+ section = section.get_section(self.name)
+ if 'is-required' in section.available_sections:
+ section.delete_section('is-required')
+ description_section = section.get_section(
+ 'param-documentation')
+ description_section.writeln(self.param_description)
+ elif event_name.startswith('docs.request-example'):
+ section = section.get_section('structure-value')
+ if self.name in section.available_sections:
+ section.delete_section(self.name)
+
+
+class HideParamFromOperations(object):
+ """Hides a single parameter from multiple operations.
+
+    This handler removes a parameter from documentation and from
+    examples. It is typically used for things that are
+ automatically populated because a user would be unable to provide
+ a value (e.g., a checksum of a serialized XML request body)."""
+ def __init__(self, service_name, parameter_name, operation_names):
+ """
+ :type service_name: str
+ :param service_name: Name of the service to modify.
+
+ :type parameter_name: str
+ :param parameter_name: Name of the parameter to modify.
+
+ :type operation_names: list
+ :param operation_names: Operation names to modify.
+ """
+ self._parameter_name = parameter_name
+ self._params_events = set()
+ self._example_events = set()
+ # Build up the sets of relevant event names.
+ param_template = 'docs.request-params.%s.%s.complete-section'
+ example_template = 'docs.request-example.%s.%s.complete-section'
+ for name in operation_names:
+ self._params_events.add(param_template % (service_name, name))
+ self._example_events.add(example_template % (service_name, name))
+
+ def hide_param(self, event_name, section, **kwargs):
+ if event_name in self._example_events:
+ # Modify the structure value for example events.
+ section = section.get_section('structure-value')
+ elif event_name not in self._params_events:
+ return
+ if self._parameter_name in section.available_sections:
+ section.delete_section(self._parameter_name)
+
+
+class AppendParamDocumentation(object):
+ """Appends documentation to a specific parameter"""
+ def __init__(self, parameter_name, doc_string):
+ self._parameter_name = parameter_name
+ self._doc_string = doc_string
+
+ def append_documentation(self, event_name, section, **kwargs):
+ if self._parameter_name in section.available_sections:
+ section = section.get_section(self._parameter_name)
+ description_section = section.get_section(
+ 'param-documentation')
+ description_section.writeln(self._doc_string)
+
+
+_CONTROLS = {
+ '\n': '\\n',
+ '\r': '\\r',
+ '\t': '\\t',
+ '\b': '\\b',
+ '\f': '\\f',
+}
+# Combine all _CONTROLS keys into a single alternation ("or") regular expression
+_ESCAPE_CONTROLS_RE = re.compile('|'.join(map(re.escape, _CONTROLS)))
+# For each match, look up the appropriate replacement in _CONTROLS
+_CONTROLS_MATCH_HANDLER = lambda match: _CONTROLS[match.group(0)]
+
+
+def escape_controls(value):
+ return _ESCAPE_CONTROLS_RE.sub(_CONTROLS_MATCH_HANDLER, value)
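
A minimal usage sketch for escape_controls above (not part of the patch; it assumes botocore is importable):

    # escape_controls() rewrites literal control characters into their
    # backslash-escaped forms so documented values stay on one line.
    from botocore.docs.utils import escape_controls

    print(escape_controls('line one\nline two\ttabbed'))
    # prints: line one\nline two\ttabbed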
diff --git a/contrib/python/botocore/botocore/docs/waiter.py b/contrib/python/botocore/botocore/docs/waiter.py
index 04a340364e..a38049c2de 100644
--- a/contrib/python/botocore/botocore/docs/waiter.py
+++ b/contrib/python/botocore/botocore/docs/waiter.py
@@ -1,127 +1,127 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore import xform_name
-from botocore.compat import OrderedDict
-from botocore.docs.utils import DocumentedShape
-from botocore.utils import get_service_module_name
-from botocore.docs.method import document_model_driven_method
-
-
-class WaiterDocumenter(object):
- def __init__(self, client, service_waiter_model):
- self._client = client
- self._service_name = self._client.meta.service_model.service_name
- self._service_waiter_model = service_waiter_model
-
- def document_waiters(self, section):
- """Documents the various waiters for a service.
-
- :param section: The section to write to.
- """
- section.style.h2('Waiters')
- section.style.new_line()
- section.writeln('The available waiters are:')
- for waiter_name in self._service_waiter_model.waiter_names:
- section.style.li(
- ':py:class:`%s.Waiter.%s`' % (
- self._client.__class__.__name__, waiter_name))
- self._add_single_waiter(section, waiter_name)
-
- def _add_single_waiter(self, section, waiter_name):
- section = section.add_new_section(waiter_name)
- section.style.start_sphinx_py_class(
- class_name='%s.Waiter.%s' % (
- self._client.__class__.__name__, waiter_name))
-
- # Add example on how to instantiate waiter.
- section.style.start_codeblock()
- section.style.new_line()
- section.write(
- 'waiter = client.get_waiter(\'%s\')' % xform_name(waiter_name)
- )
- section.style.end_codeblock()
-
- # Add information on the wait() method
- section.style.new_line()
- document_wait_method(
- section=section,
- waiter_name=waiter_name,
- event_emitter=self._client.meta.events,
- service_model=self._client.meta.service_model,
- service_waiter_model=self._service_waiter_model
- )
-
-
-def document_wait_method(section, waiter_name, event_emitter,
- service_model, service_waiter_model,
- include_signature=True):
- """Documents a the wait method of a waiter
-
- :param section: The section to write to
-
- :param waiter_name: The name of the waiter
-
- :param event_emitter: The event emitter to use to emit events
-
- :param service_model: The service model
-
- :param service_waiter_model: The waiter model associated to the service
-
- :param include_signature: Whether or not to include the signature.
- It is useful for generating docstrings.
- """
- waiter_model = service_waiter_model.get_waiter(waiter_name)
- operation_model = service_model.operation_model(
- waiter_model.operation)
-
- waiter_config_members = OrderedDict()
-
- waiter_config_members['Delay'] = DocumentedShape(
- name='Delay', type_name='integer',
- documentation=(
- '<p>The amount of time in seconds to wait between '
- 'attempts. Default: {0}</p>'.format(waiter_model.delay)))
-
- waiter_config_members['MaxAttempts'] = DocumentedShape(
- name='MaxAttempts', type_name='integer',
- documentation=(
- '<p>The maximum number of attempts to be made. '
- 'Default: {0}</p>'.format(waiter_model.max_attempts)))
-
- botocore_waiter_params = [
- DocumentedShape(
- name='WaiterConfig', type_name='structure',
- documentation=(
- '<p>A dictionary that provides parameters to control '
- 'waiting behavior.</p>'),
- members=waiter_config_members)
- ]
-
- wait_description = (
- 'Polls :py:meth:`{0}.Client.{1}` every {2} '
- 'seconds until a successful state is reached. An error is '
- 'returned after {3} failed checks.'.format(
- get_service_module_name(service_model),
- xform_name(waiter_model.operation),
- waiter_model.delay, waiter_model.max_attempts)
- )
-
- document_model_driven_method(
- section, 'wait', operation_model,
- event_emitter=event_emitter,
- method_description=wait_description,
- example_prefix='waiter.wait',
- include_input=botocore_waiter_params,
- document_output=False,
- include_signature=include_signature
- )
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore import xform_name
+from botocore.compat import OrderedDict
+from botocore.docs.utils import DocumentedShape
+from botocore.utils import get_service_module_name
+from botocore.docs.method import document_model_driven_method
+
+
+class WaiterDocumenter(object):
+ def __init__(self, client, service_waiter_model):
+ self._client = client
+ self._service_name = self._client.meta.service_model.service_name
+ self._service_waiter_model = service_waiter_model
+
+ def document_waiters(self, section):
+ """Documents the various waiters for a service.
+
+ :param section: The section to write to.
+ """
+ section.style.h2('Waiters')
+ section.style.new_line()
+ section.writeln('The available waiters are:')
+ for waiter_name in self._service_waiter_model.waiter_names:
+ section.style.li(
+ ':py:class:`%s.Waiter.%s`' % (
+ self._client.__class__.__name__, waiter_name))
+ self._add_single_waiter(section, waiter_name)
+
+ def _add_single_waiter(self, section, waiter_name):
+ section = section.add_new_section(waiter_name)
+ section.style.start_sphinx_py_class(
+ class_name='%s.Waiter.%s' % (
+ self._client.__class__.__name__, waiter_name))
+
+ # Add example on how to instantiate waiter.
+ section.style.start_codeblock()
+ section.style.new_line()
+ section.write(
+ 'waiter = client.get_waiter(\'%s\')' % xform_name(waiter_name)
+ )
+ section.style.end_codeblock()
+
+ # Add information on the wait() method
+ section.style.new_line()
+ document_wait_method(
+ section=section,
+ waiter_name=waiter_name,
+ event_emitter=self._client.meta.events,
+ service_model=self._client.meta.service_model,
+ service_waiter_model=self._service_waiter_model
+ )
+
+
+def document_wait_method(section, waiter_name, event_emitter,
+ service_model, service_waiter_model,
+ include_signature=True):
+ """Documents a the wait method of a waiter
+
+ :param section: The section to write to
+
+ :param waiter_name: The name of the waiter
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param service_model: The service model
+
+    :param service_waiter_model: The waiter model associated with the service
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ waiter_model = service_waiter_model.get_waiter(waiter_name)
+ operation_model = service_model.operation_model(
+ waiter_model.operation)
+
+ waiter_config_members = OrderedDict()
+
+ waiter_config_members['Delay'] = DocumentedShape(
+ name='Delay', type_name='integer',
+ documentation=(
+ '<p>The amount of time in seconds to wait between '
+ 'attempts. Default: {0}</p>'.format(waiter_model.delay)))
+
+ waiter_config_members['MaxAttempts'] = DocumentedShape(
+ name='MaxAttempts', type_name='integer',
+ documentation=(
+ '<p>The maximum number of attempts to be made. '
+ 'Default: {0}</p>'.format(waiter_model.max_attempts)))
+
+ botocore_waiter_params = [
+ DocumentedShape(
+ name='WaiterConfig', type_name='structure',
+ documentation=(
+ '<p>A dictionary that provides parameters to control '
+ 'waiting behavior.</p>'),
+ members=waiter_config_members)
+ ]
+
+ wait_description = (
+ 'Polls :py:meth:`{0}.Client.{1}` every {2} '
+ 'seconds until a successful state is reached. An error is '
+ 'returned after {3} failed checks.'.format(
+ get_service_module_name(service_model),
+ xform_name(waiter_model.operation),
+ waiter_model.delay, waiter_model.max_attempts)
+ )
+
+ document_model_driven_method(
+ section, 'wait', operation_model,
+ event_emitter=event_emitter,
+ method_description=wait_description,
+ example_prefix='waiter.wait',
+ include_input=botocore_waiter_params,
+ document_output=False,
+ include_signature=include_signature
+ )
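
Taken together, the waiter documentation above implies the following client-side pattern; a hedged sketch (the service, waiter, and parameter names are illustrative only):

    import boto3

    client = boto3.client('s3')
    # Instantiate the waiter exactly as the generated example shows.
    waiter = client.get_waiter('bucket_exists')
    # WaiterConfig carries the Delay and MaxAttempts members injected above.
    waiter.wait(
        Bucket='example-bucket',
        WaiterConfig={'Delay': 5, 'MaxAttempts': 20},
    )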
diff --git a/contrib/python/botocore/botocore/endpoint.py b/contrib/python/botocore/botocore/endpoint.py
index 6efa438e73..ad87d2199d 100644
--- a/contrib/python/botocore/botocore/endpoint.py
+++ b/contrib/python/botocore/botocore/endpoint.py
@@ -1,109 +1,109 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
import os
import logging
import time
import threading
-
+
from botocore.compat import six
-
-from botocore.awsrequest import create_request_object
+
+from botocore.awsrequest import create_request_object
from botocore.exceptions import HTTPClientError
from botocore.httpsession import URLLib3Session
from botocore.utils import is_valid_endpoint_url, get_environ_proxies
-from botocore.hooks import first_non_none_response
+from botocore.hooks import first_non_none_response
from botocore.history import get_global_history_recorder
-from botocore.response import StreamingBody
-from botocore import parsers
-
-
-logger = logging.getLogger(__name__)
-history_recorder = get_global_history_recorder()
-DEFAULT_TIMEOUT = 60
-MAX_POOL_CONNECTIONS = 10
-
-
-def convert_to_response_dict(http_response, operation_model):
- """Convert an HTTP response object to a request dict.
-
- This converts the requests library's HTTP response object to
- a dictionary.
-
- :type http_response: botocore.vendored.requests.model.Response
- :param http_response: The HTTP response from an AWS service request.
-
- :rtype: dict
- :return: A response dictionary which will contain the following keys:
- * headers (dict)
- * status_code (int)
- * body (string or file-like object)
-
- """
- response_dict = {
- 'headers': http_response.headers,
- 'status_code': http_response.status_code,
+from botocore.response import StreamingBody
+from botocore import parsers
+
+
+logger = logging.getLogger(__name__)
+history_recorder = get_global_history_recorder()
+DEFAULT_TIMEOUT = 60
+MAX_POOL_CONNECTIONS = 10
+
+
+def convert_to_response_dict(http_response, operation_model):
+ """Convert an HTTP response object to a request dict.
+
+ This converts the requests library's HTTP response object to
+ a dictionary.
+
+ :type http_response: botocore.vendored.requests.model.Response
+ :param http_response: The HTTP response from an AWS service request.
+
+ :rtype: dict
+ :return: A response dictionary which will contain the following keys:
+ * headers (dict)
+ * status_code (int)
+ * body (string or file-like object)
+
+ """
+ response_dict = {
+ 'headers': http_response.headers,
+ 'status_code': http_response.status_code,
'context': {
'operation_name': operation_model.name,
}
- }
- if response_dict['status_code'] >= 300:
- response_dict['body'] = http_response.content
+ }
+ if response_dict['status_code'] >= 300:
+ response_dict['body'] = http_response.content
elif operation_model.has_event_stream_output:
response_dict['body'] = http_response.raw
- elif operation_model.has_streaming_output:
+ elif operation_model.has_streaming_output:
length = response_dict['headers'].get('content-length')
response_dict['body'] = StreamingBody(http_response.raw, length)
- else:
- response_dict['body'] = http_response.content
- return response_dict
-
-
-class Endpoint(object):
- """
- Represents an endpoint for a particular service in a specific
- region. Only an endpoint can make requests.
-
- :ivar service: The Service object that describes this endpoints
- service.
- :ivar host: The fully qualified endpoint hostname.
- :ivar session: The session object.
- """
+ else:
+ response_dict['body'] = http_response.content
+ return response_dict
+
+
+class Endpoint(object):
+ """
+ Represents an endpoint for a particular service in a specific
+ region. Only an endpoint can make requests.
+
+    :ivar service: The Service object that describes this endpoint's
+        service.
+ :ivar host: The fully qualified endpoint hostname.
+ :ivar session: The session object.
+ """
def __init__(self, host, endpoint_prefix, event_emitter,
response_parser_factory=None, http_session=None):
- self._endpoint_prefix = endpoint_prefix
- self._event_emitter = event_emitter
- self.host = host
- self._lock = threading.Lock()
- if response_parser_factory is None:
- response_parser_factory = parsers.ResponseParserFactory()
- self._response_parser_factory = response_parser_factory
+ self._endpoint_prefix = endpoint_prefix
+ self._event_emitter = event_emitter
+ self.host = host
+ self._lock = threading.Lock()
+ if response_parser_factory is None:
+ response_parser_factory = parsers.ResponseParserFactory()
+ self._response_parser_factory = response_parser_factory
self.http_session = http_session
if self.http_session is None:
self.http_session = URLLib3Session()
-
- def __repr__(self):
- return '%s(%s)' % (self._endpoint_prefix, self.host)
-
- def make_request(self, operation_model, request_dict):
+
+ def __repr__(self):
+ return '%s(%s)' % (self._endpoint_prefix, self.host)
+
+ def make_request(self, operation_model, request_dict):
logger.debug("Making request for %s with params: %s",
operation_model, request_dict)
- return self._send_request(request_dict, operation_model)
-
- def create_request(self, params, operation_model=None):
- request = create_request_object(params)
- if operation_model:
+ return self._send_request(request_dict, operation_model)
+
+ def create_request(self, params, operation_model=None):
+ request = create_request_object(params)
+ if operation_model:
request.stream_output = any([
operation_model.has_streaming_output,
operation_model.has_event_stream_output
@@ -111,58 +111,58 @@ class Endpoint(object):
service_id = operation_model.service_model.service_id.hyphenize()
event_name = 'request-created.{service_id}.{op_name}'.format(
service_id=service_id,
- op_name=operation_model.name)
- self._event_emitter.emit(event_name, request=request,
- operation_name=operation_model.name)
- prepared_request = self.prepare_request(request)
- return prepared_request
-
- def _encode_headers(self, headers):
- # In place encoding of headers to utf-8 if they are unicode.
- for key, value in headers.items():
- if isinstance(value, six.text_type):
- headers[key] = value.encode('utf-8')
-
- def prepare_request(self, request):
- self._encode_headers(request.headers)
- return request.prepare()
-
- def _send_request(self, request_dict, operation_model):
- attempts = 1
- request = self.create_request(request_dict, operation_model)
+ op_name=operation_model.name)
+ self._event_emitter.emit(event_name, request=request,
+ operation_name=operation_model.name)
+ prepared_request = self.prepare_request(request)
+ return prepared_request
+
+ def _encode_headers(self, headers):
+ # In place encoding of headers to utf-8 if they are unicode.
+ for key, value in headers.items():
+ if isinstance(value, six.text_type):
+ headers[key] = value.encode('utf-8')
+
+ def prepare_request(self, request):
+ self._encode_headers(request.headers)
+ return request.prepare()
+
+ def _send_request(self, request_dict, operation_model):
+ attempts = 1
+ request = self.create_request(request_dict, operation_model)
context = request_dict['context']
- success_response, exception = self._get_response(
+ success_response, exception = self._get_response(
request, operation_model, context)
- while self._needs_retry(attempts, operation_model, request_dict,
- success_response, exception):
- attempts += 1
- # If there is a stream associated with the request, we need
- # to reset it before attempting to send the request again.
- # This will ensure that we resend the entire contents of the
- # body.
- request.reset_stream()
- # Create a new request when retried (including a new signature).
- request = self.create_request(
- request_dict, operation_model)
- success_response, exception = self._get_response(
+ while self._needs_retry(attempts, operation_model, request_dict,
+ success_response, exception):
+ attempts += 1
+ # If there is a stream associated with the request, we need
+ # to reset it before attempting to send the request again.
+ # This will ensure that we resend the entire contents of the
+ # body.
+ request.reset_stream()
+ # Create a new request when retried (including a new signature).
+ request = self.create_request(
+ request_dict, operation_model)
+ success_response, exception = self._get_response(
request, operation_model, context)
- if success_response is not None and \
- 'ResponseMetadata' in success_response[1]:
- # We want to share num retries, not num attempts.
- total_retries = attempts - 1
- success_response[1]['ResponseMetadata']['RetryAttempts'] = \
- total_retries
- if exception is not None:
- raise exception
- else:
- return success_response
-
+ if success_response is not None and \
+ 'ResponseMetadata' in success_response[1]:
+ # We want to share num retries, not num attempts.
+ total_retries = attempts - 1
+ success_response[1]['ResponseMetadata']['RetryAttempts'] = \
+ total_retries
+ if exception is not None:
+ raise exception
+ else:
+ return success_response
+
def _get_response(self, request, operation_model, context):
- # This will return a tuple of (success_response, exception)
- # and success_response is itself a tuple of
- # (http_response, parsed_dict).
- # If an exception occurs then the success_response is None.
- # If no exception occurs then exception is None.
+ # This will return a tuple of (success_response, exception)
+ # and success_response is itself a tuple of
+ # (http_response, parsed_dict).
+ # If an exception occurs then the success_response is None.
+ # If no exception occurs then exception is None.
success_response, exception = self._do_get_response(
request, operation_model)
kwargs_to_emit = {
@@ -183,15 +183,15 @@ class Endpoint(object):
return success_response, exception
def _do_get_response(self, request, operation_model):
- try:
- logger.debug("Sending http request: %s", request)
- history_recorder.record('HTTP_REQUEST', {
- 'method': request.method,
- 'headers': request.headers,
- 'streaming': operation_model.has_streaming_input,
- 'url': request.url,
- 'body': request.body
- })
+ try:
+ logger.debug("Sending http request: %s", request)
+ history_recorder.record('HTTP_REQUEST', {
+ 'method': request.method,
+ 'headers': request.headers,
+ 'streaming': operation_model.has_streaming_input,
+ 'url': request.url,
+ 'body': request.body
+ })
service_id = operation_model.service_model.service_id.hyphenize()
event_name = 'before-send.%s.%s' % (service_id, operation_model.name)
responses = self._event_emitter.emit(event_name, request=request)
@@ -200,22 +200,22 @@ class Endpoint(object):
http_response = self._send(request)
except HTTPClientError as e:
return (None, e)
- except Exception as e:
- logger.debug("Exception received when sending HTTP request.",
- exc_info=True)
- return (None, e)
- # This returns the http_response and the parsed_data.
+ except Exception as e:
+ logger.debug("Exception received when sending HTTP request.",
+ exc_info=True)
+ return (None, e)
+ # This returns the http_response and the parsed_data.
response_dict = convert_to_response_dict(http_response, operation_model)
-
- http_response_record_dict = response_dict.copy()
- http_response_record_dict['streaming'] = \
- operation_model.has_streaming_output
- history_recorder.record('HTTP_RESPONSE', http_response_record_dict)
-
+
+ http_response_record_dict = response_dict.copy()
+ http_response_record_dict['streaming'] = \
+ operation_model.has_streaming_output
+ history_recorder.record('HTTP_RESPONSE', http_response_record_dict)
+
protocol = operation_model.metadata['protocol']
parser = self._response_parser_factory.create_parser(protocol)
- parsed_response = parser.parse(
- response_dict, operation_model.output_shape)
+ parsed_response = parser.parse(
+ response_dict, operation_model.output_shape)
# Do a second parsing pass to pick up on any modeled error fields
# NOTE: Ideally, we would push this down into the parser classes but
# they currently have no reference to the operation or service model
@@ -226,9 +226,9 @@ class Endpoint(object):
response_dict, parsed_response,
operation_model, parser,
)
- history_recorder.record('PARSED_RESPONSE', parsed_response)
- return (http_response, parsed_response), None
-
+ history_recorder.record('PARSED_RESPONSE', parsed_response)
+ return (http_response, parsed_response), None
+
def _add_modeled_error_fields(
self, response_dict, parsed_response,
operation_model, parser,
@@ -244,62 +244,62 @@ class Endpoint(object):
# TODO: avoid naming conflicts with ResponseMetadata and Error
parsed_response.update(modeled_parse)
- def _needs_retry(self, attempts, operation_model, request_dict,
- response=None, caught_exception=None):
+ def _needs_retry(self, attempts, operation_model, request_dict,
+ response=None, caught_exception=None):
service_id = operation_model.service_model.service_id.hyphenize()
event_name = 'needs-retry.%s.%s' % (
service_id,
operation_model.name)
- responses = self._event_emitter.emit(
- event_name, response=response, endpoint=self,
- operation=operation_model, attempts=attempts,
- caught_exception=caught_exception, request_dict=request_dict)
- handler_response = first_non_none_response(responses)
- if handler_response is None:
- return False
- else:
- # Request needs to be retried, and we need to sleep
- # for the specified number of times.
- logger.debug("Response received to retry, sleeping for "
- "%s seconds", handler_response)
- time.sleep(handler_response)
- return True
-
+ responses = self._event_emitter.emit(
+ event_name, response=response, endpoint=self,
+ operation=operation_model, attempts=attempts,
+ caught_exception=caught_exception, request_dict=request_dict)
+ handler_response = first_non_none_response(responses)
+ if handler_response is None:
+ return False
+ else:
+ # Request needs to be retried, and we need to sleep
+            # for the specified number of seconds.
+ logger.debug("Response received to retry, sleeping for "
+ "%s seconds", handler_response)
+ time.sleep(handler_response)
+ return True
+
def _send(self, request):
return self.http_session.send(request)
-
-
-class EndpointCreator(object):
- def __init__(self, event_emitter):
- self._event_emitter = event_emitter
-
- def create_endpoint(self, service_model, region_name, endpoint_url,
- verify=None, response_parser_factory=None,
- timeout=DEFAULT_TIMEOUT,
- max_pool_connections=MAX_POOL_CONNECTIONS,
+
+
+class EndpointCreator(object):
+ def __init__(self, event_emitter):
+ self._event_emitter = event_emitter
+
+ def create_endpoint(self, service_model, region_name, endpoint_url,
+ verify=None, response_parser_factory=None,
+ timeout=DEFAULT_TIMEOUT,
+ max_pool_connections=MAX_POOL_CONNECTIONS,
http_session_cls=URLLib3Session,
proxies=None,
socket_options=None,
client_cert=None,
proxies_config=None):
- if not is_valid_endpoint_url(endpoint_url):
-
- raise ValueError("Invalid endpoint: %s" % endpoint_url)
- if proxies is None:
- proxies = self._get_proxies(endpoint_url)
+ if not is_valid_endpoint_url(endpoint_url):
+
+ raise ValueError("Invalid endpoint: %s" % endpoint_url)
+ if proxies is None:
+ proxies = self._get_proxies(endpoint_url)
endpoint_prefix = service_model.endpoint_prefix
logger.debug('Setting %s timeout as %s', endpoint_prefix, timeout)
http_session = http_session_cls(
timeout=timeout,
- proxies=proxies,
- verify=self._get_verify_value(verify),
- max_pool_connections=max_pool_connections,
+ proxies=proxies,
+ verify=self._get_verify_value(verify),
+ max_pool_connections=max_pool_connections,
socket_options=socket_options,
client_cert=client_cert,
proxies_config=proxies_config
)
-
+
return Endpoint(
endpoint_url,
endpoint_prefix=endpoint_prefix,
@@ -308,20 +308,20 @@ class EndpointCreator(object):
http_session=http_session
)
- def _get_proxies(self, url):
- # We could also support getting proxies from a config file,
- # but for now proxy support is taken from the environment.
- return get_environ_proxies(url)
-
- def _get_verify_value(self, verify):
- # This is to account for:
- # https://github.com/kennethreitz/requests/issues/1436
- # where we need to honor REQUESTS_CA_BUNDLE because we're creating our
- # own request objects.
- # First, if verify is not None, then the user explicitly specified
- # a value so this automatically wins.
- if verify is not None:
- return verify
- # Otherwise use the value from REQUESTS_CA_BUNDLE, or default to
- # True if the env var does not exist.
- return os.environ.get('REQUESTS_CA_BUNDLE', True)
+ def _get_proxies(self, url):
+ # We could also support getting proxies from a config file,
+ # but for now proxy support is taken from the environment.
+ return get_environ_proxies(url)
+
+ def _get_verify_value(self, verify):
+ # This is to account for:
+ # https://github.com/kennethreitz/requests/issues/1436
+ # where we need to honor REQUESTS_CA_BUNDLE because we're creating our
+ # own request objects.
+ # First, if verify is not None, then the user explicitly specified
+ # a value so this automatically wins.
+ if verify is not None:
+ return verify
+ # Otherwise use the value from REQUESTS_CA_BUNDLE, or default to
+ # True if the env var does not exist.
+ return os.environ.get('REQUESTS_CA_BUNDLE', True)
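
The REQUESTS_CA_BUNDLE handling above reduces to a small decision rule; a standalone sketch that mirrors _get_verify_value (an assumption-level rewrite, not imported from botocore):

    import os

    def resolve_verify(verify=None):
        # An explicitly supplied value always wins.
        if verify is not None:
            return verify
        # Otherwise honor REQUESTS_CA_BUNDLE, defaulting to True.
        return os.environ.get('REQUESTS_CA_BUNDLE', True)

    assert resolve_verify(False) is False
    assert resolve_verify('/tmp/ca.pem') == '/tmp/ca.pem'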
diff --git a/contrib/python/botocore/botocore/errorfactory.py b/contrib/python/botocore/botocore/errorfactory.py
index bf314a37dc..b192a66ded 100644
--- a/contrib/python/botocore/botocore/errorfactory.py
+++ b/contrib/python/botocore/botocore/errorfactory.py
@@ -1,88 +1,88 @@
-# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from botocore.exceptions import ClientError
-from botocore.utils import get_service_module_name
-
-
-class BaseClientExceptions(object):
- ClientError = ClientError
-
- def __init__(self, code_to_exception):
- """Base class for exceptions object on a client
-
- :type code_to_exception: dict
- :param code_to_exception: Mapping of error codes (strings) to exception
- class that should be raised when encountering a particular
- error code.
- """
- self._code_to_exception = code_to_exception
-
- def from_code(self, error_code):
- """Retrieves the error class based on the error code
-
- This is helpful for identifying the exception class needing to be
- caught based on the ClientError.parsed_reponse['Error']['Code'] value
-
- :type error_code: string
- :param error_code: The error code associated to a ClientError exception
-
- :rtype: ClientError or a subclass of ClientError
- :returns: The appropriate modeled exception class for that error
- code. If the error code does not match any of the known
- modeled exceptions then return a generic ClientError.
- """
- return self._code_to_exception.get(error_code, self.ClientError)
-
- def __getattr__(self, name):
- exception_cls_names = [
- exception_cls.__name__ for exception_cls
- in self._code_to_exception.values()
- ]
- raise AttributeError(
- '%r object has no attribute %r. Valid exceptions are: %s' % (
- self, name, ', '.join(exception_cls_names)))
-
-
-class ClientExceptionsFactory(object):
- def __init__(self):
- self._client_exceptions_cache = {}
-
- def create_client_exceptions(self, service_model):
- """Creates a ClientExceptions object for the particular service client
-
- :type service_model: botocore.model.ServiceModel
- :param service_model: The service model for the client
-
- :rtype: object that subclasses from BaseClientExceptions
- :returns: The exceptions object of a client that can be used
- to grab the various different modeled exceptions.
- """
- service_name = service_model.service_name
- if service_name not in self._client_exceptions_cache:
- client_exceptions = self._create_client_exceptions(service_model)
- self._client_exceptions_cache[service_name] = client_exceptions
- return self._client_exceptions_cache[service_name]
-
- def _create_client_exceptions(self, service_model):
- cls_props = {}
- code_to_exception = {}
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.exceptions import ClientError
+from botocore.utils import get_service_module_name
+
+
+class BaseClientExceptions(object):
+ ClientError = ClientError
+
+ def __init__(self, code_to_exception):
+ """Base class for exceptions object on a client
+
+ :type code_to_exception: dict
+ :param code_to_exception: Mapping of error codes (strings) to exception
+ class that should be raised when encountering a particular
+ error code.
+ """
+ self._code_to_exception = code_to_exception
+
+ def from_code(self, error_code):
+ """Retrieves the error class based on the error code
+
+ This is helpful for identifying the exception class needing to be
+    caught based on the ClientError.response['Error']['Code'] value
+
+ :type error_code: string
+    :param error_code: The error code associated with a ClientError exception
+
+ :rtype: ClientError or a subclass of ClientError
+ :returns: The appropriate modeled exception class for that error
+ code. If the error code does not match any of the known
+ modeled exceptions then return a generic ClientError.
+ """
+ return self._code_to_exception.get(error_code, self.ClientError)
+
+ def __getattr__(self, name):
+ exception_cls_names = [
+ exception_cls.__name__ for exception_cls
+ in self._code_to_exception.values()
+ ]
+ raise AttributeError(
+ '%r object has no attribute %r. Valid exceptions are: %s' % (
+ self, name, ', '.join(exception_cls_names)))
+
+
+class ClientExceptionsFactory(object):
+ def __init__(self):
+ self._client_exceptions_cache = {}
+
+ def create_client_exceptions(self, service_model):
+ """Creates a ClientExceptions object for the particular service client
+
+ :type service_model: botocore.model.ServiceModel
+ :param service_model: The service model for the client
+
+ :rtype: object that subclasses from BaseClientExceptions
+    :returns: The exceptions object of a client that can be used
+        to grab the various modeled exceptions.
+ """
+ service_name = service_model.service_name
+ if service_name not in self._client_exceptions_cache:
+ client_exceptions = self._create_client_exceptions(service_model)
+ self._client_exceptions_cache[service_name] = client_exceptions
+ return self._client_exceptions_cache[service_name]
+
+ def _create_client_exceptions(self, service_model):
+ cls_props = {}
+ code_to_exception = {}
for error_shape in service_model.error_shapes:
exception_name = str(error_shape.name)
exception_cls = type(exception_name, (ClientError,), {})
cls_props[exception_name] = exception_cls
code = str(error_shape.error_code)
code_to_exception[code] = exception_cls
- cls_name = str(get_service_module_name(service_model) + 'Exceptions')
- client_exceptions_cls = type(
- cls_name, (BaseClientExceptions,), cls_props)
- return client_exceptions_cls(code_to_exception)
+ cls_name = str(get_service_module_name(service_model) + 'Exceptions')
+ client_exceptions_cls = type(
+ cls_name, (BaseClientExceptions,), cls_props)
+ return client_exceptions_cls(code_to_exception)
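
This factory backs the modeled exceptions exposed on generated clients; a hedged usage sketch (the service and operation names are illustrative):

    import boto3
    from botocore.exceptions import ClientError

    client = boto3.client('s3')
    try:
        client.head_bucket(Bucket='example-bucket')
    except ClientError as e:
        # from_code() maps an error code back to its modeled class,
        # falling back to the generic ClientError.
        cls = client.exceptions.from_code(e.response['Error']['Code'])
        assert issubclass(cls, ClientError)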
diff --git a/contrib/python/botocore/botocore/exceptions.py b/contrib/python/botocore/botocore/exceptions.py
index 40a8494c25..1a42a4bf1e 100644
--- a/contrib/python/botocore/botocore/exceptions.py
+++ b/contrib/python/botocore/botocore/exceptions.py
@@ -1,21 +1,21 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-from __future__ import unicode_literals
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from __future__ import unicode_literals
from botocore.vendored import requests
from botocore.vendored.requests.packages import urllib3
-
-
+
+
def _exception_from_packed_args(exception_cls, args=None, kwargs=None):
# This is helpful for reducing Exceptions that only accept kwargs as
# only positional arguments can be provided for __reduce__
@@ -28,65 +28,65 @@ def _exception_from_packed_args(exception_cls, args=None, kwargs=None):
return exception_cls(*args, **kwargs)
-class BotoCoreError(Exception):
- """
- The base exception class for BotoCore exceptions.
-
- :ivar msg: The descriptive message associated with the error.
- """
- fmt = 'An unspecified error occurred'
-
- def __init__(self, **kwargs):
- msg = self.fmt.format(**kwargs)
- Exception.__init__(self, msg)
- self.kwargs = kwargs
-
+class BotoCoreError(Exception):
+ """
+ The base exception class for BotoCore exceptions.
+
+ :ivar msg: The descriptive message associated with the error.
+ """
+ fmt = 'An unspecified error occurred'
+
+ def __init__(self, **kwargs):
+ msg = self.fmt.format(**kwargs)
+ Exception.__init__(self, msg)
+ self.kwargs = kwargs
+
def __reduce__(self):
return _exception_from_packed_args, (self.__class__, None, self.kwargs)
-
-class DataNotFoundError(BotoCoreError):
- """
- The data associated with a particular path could not be loaded.
-
+
+class DataNotFoundError(BotoCoreError):
+ """
+ The data associated with a particular path could not be loaded.
+
:ivar data_path: The data path that the user attempted to load.
- """
- fmt = 'Unable to load data for: {data_path}'
-
-
-class UnknownServiceError(DataNotFoundError):
- """Raised when trying to load data for an unknown service.
-
- :ivar service_name: The name of the unknown service.
-
- """
- fmt = (
- "Unknown service: '{service_name}'. Valid service names are: "
- "{known_service_names}")
-
-
-class ApiVersionNotFoundError(BotoCoreError):
- """
+ """
+ fmt = 'Unable to load data for: {data_path}'
+
+
+class UnknownServiceError(DataNotFoundError):
+ """Raised when trying to load data for an unknown service.
+
+ :ivar service_name: The name of the unknown service.
+
+ """
+ fmt = (
+ "Unknown service: '{service_name}'. Valid service names are: "
+ "{known_service_names}")
+
+
+class ApiVersionNotFoundError(BotoCoreError):
+ """
The data associated with either the API version or a compatible one
- could not be loaded.
-
+ could not be loaded.
+
:ivar data_path: The data path that the user attempted to load.
:ivar api_version: The API version that the user attempted to load.
- """
- fmt = 'Unable to load data {data_path} for: {api_version}'
-
-
+ """
+ fmt = 'Unable to load data {data_path} for: {api_version}'
+
+
class HTTPClientError(BotoCoreError):
fmt = 'An HTTP Client raised an unhandled exception: {error}'
def __init__(self, request=None, response=None, **kwargs):
self.request = request
self.response = response
super(HTTPClientError, self).__init__(**kwargs)
-
+
def __reduce__(self):
return _exception_from_packed_args, (
self.__class__, (self.request, self.response), self.kwargs)
-
+
class ConnectionError(BotoCoreError):
fmt = 'An HTTP Client failed to establish a connection: {error}'
@@ -105,15 +105,15 @@ class SSLError(ConnectionError, requests.exceptions.SSLError):
class ConnectionClosedError(HTTPClientError):
- fmt = (
- 'Connection was closed before we received a valid response '
- 'from endpoint URL: "{endpoint_url}".')
-
-
+ fmt = (
+ 'Connection was closed before we received a valid response '
+ 'from endpoint URL: "{endpoint_url}".')
+
+
class ReadTimeoutError(HTTPClientError, requests.exceptions.ReadTimeout,
urllib3.exceptions.ReadTimeoutError):
fmt = 'Read timeout on endpoint URL: "{endpoint_url}"'
-
+
class ConnectTimeoutError(ConnectionError, requests.exceptions.ConnectTimeout):
fmt = 'Connect timeout on endpoint URL: "{endpoint_url}"'
@@ -123,81 +123,81 @@ class ProxyConnectionError(ConnectionError, requests.exceptions.ProxyError):
fmt = 'Failed to connect to proxy URL: "{proxy_url}"'
-class NoCredentialsError(BotoCoreError):
- """
+class NoCredentialsError(BotoCoreError):
+ """
No credentials could be found.
- """
- fmt = 'Unable to locate credentials'
-
-
-class PartialCredentialsError(BotoCoreError):
- """
- Only partial credentials were found.
-
- :ivar cred_var: The missing credential variable name.
-
- """
- fmt = 'Partial credentials found in {provider}, missing: {cred_var}'
-
-
-class CredentialRetrievalError(BotoCoreError):
- """
- Error attempting to retrieve credentials from a remote source.
-
- :ivar provider: The name of the credential provider.
+ """
+ fmt = 'Unable to locate credentials'
+
+
+class PartialCredentialsError(BotoCoreError):
+ """
+ Only partial credentials were found.
+
+ :ivar cred_var: The missing credential variable name.
+
+ """
+ fmt = 'Partial credentials found in {provider}, missing: {cred_var}'
+
+
+class CredentialRetrievalError(BotoCoreError):
+ """
+ Error attempting to retrieve credentials from a remote source.
+
+ :ivar provider: The name of the credential provider.
+    :ivar error_msg: The message explaining why credentials could not be
- retrieved.
-
- """
- fmt = 'Error when retrieving credentials from {provider}: {error_msg}'
-
-
-class UnknownSignatureVersionError(BotoCoreError):
- """
- Requested Signature Version is not known.
-
- :ivar signature_version: The name of the requested signature version.
- """
- fmt = 'Unknown Signature Version: {signature_version}.'
-
-
-class ServiceNotInRegionError(BotoCoreError):
- """
- The service is not available in requested region.
-
- :ivar service_name: The name of the service.
- :ivar region_name: The name of the region.
- """
- fmt = 'Service {service_name} not available in region {region_name}'
-
-
-class BaseEndpointResolverError(BotoCoreError):
- """Base error for endpoint resolving errors.
-
- Should never be raised directly, but clients can catch
- this exception if they want to generically handle any errors
- during the endpoint resolution process.
-
- """
-
-
-class NoRegionError(BaseEndpointResolverError):
- """No region was specified."""
- fmt = 'You must specify a region.'
-
-
-class UnknownEndpointError(BaseEndpointResolverError, ValueError):
- """
- Could not construct an endpoint.
-
- :ivar service_name: The name of the service.
- :ivar region_name: The name of the region.
- """
- fmt = (
- 'Unable to construct an endpoint for '
- '{service_name} in region {region_name}')
-
-
+ retrieved.
+
+ """
+ fmt = 'Error when retrieving credentials from {provider}: {error_msg}'
+
+
+class UnknownSignatureVersionError(BotoCoreError):
+ """
+ Requested Signature Version is not known.
+
+ :ivar signature_version: The name of the requested signature version.
+ """
+ fmt = 'Unknown Signature Version: {signature_version}.'
+
+
+class ServiceNotInRegionError(BotoCoreError):
+ """
+    The service is not available in the requested region.
+
+ :ivar service_name: The name of the service.
+ :ivar region_name: The name of the region.
+ """
+ fmt = 'Service {service_name} not available in region {region_name}'
+
+
+class BaseEndpointResolverError(BotoCoreError):
+ """Base error for endpoint resolving errors.
+
+ Should never be raised directly, but clients can catch
+ this exception if they want to generically handle any errors
+ during the endpoint resolution process.
+
+ """
+
+
+class NoRegionError(BaseEndpointResolverError):
+ """No region was specified."""
+ fmt = 'You must specify a region.'
+
+
+class UnknownEndpointError(BaseEndpointResolverError, ValueError):
+ """
+ Could not construct an endpoint.
+
+ :ivar service_name: The name of the service.
+ :ivar region_name: The name of the region.
+ """
+ fmt = (
+ 'Unable to construct an endpoint for '
+ '{service_name} in region {region_name}')
+
+
class UnknownFIPSEndpointError(BaseEndpointResolverError):
"""
Could not construct a FIPS endpoint.
@@ -212,110 +212,110 @@ class UnknownFIPSEndpointError(BaseEndpointResolverError):
)
-class ProfileNotFound(BotoCoreError):
- """
- The specified configuration profile was not found in the
- configuration file.
-
- :ivar profile: The name of the profile the user attempted to load.
- """
- fmt = 'The config profile ({profile}) could not be found'
-
-
-class ConfigParseError(BotoCoreError):
- """
- The configuration file could not be parsed.
-
- :ivar path: The path to the configuration file.
- """
- fmt = 'Unable to parse config file: {path}'
-
-
-class ConfigNotFound(BotoCoreError):
- """
- The specified configuration file could not be found.
-
- :ivar path: The path to the configuration file.
- """
- fmt = 'The specified config file ({path}) could not be found.'
-
-
-class MissingParametersError(BotoCoreError):
- """
- One or more required parameters were not supplied.
-
- :ivar object: The object that has missing parameters.
- This can be an operation or a parameter (in the
- case of inner params). The str() of this object
- will be used so it doesn't need to implement anything
- other than str().
- :ivar missing: The names of the missing parameters.
- """
- fmt = ('The following required parameters are missing for '
- '{object_name}: {missing}')
-
-
-class ValidationError(BotoCoreError):
- """
- An exception occurred validating parameters.
-
- Subclasses must accept a ``value`` and ``param``
- argument in their ``__init__``.
-
- :ivar value: The value that was being validated.
- :ivar param: The parameter that failed validation.
- :ivar type_name: The name of the underlying type.
- """
- fmt = ("Invalid value ('{value}') for param {param} "
- "of type {type_name} ")
-
-
-class ParamValidationError(BotoCoreError):
- fmt = 'Parameter validation failed:\n{report}'
-
-
-# These exceptions subclass from ValidationError so that code
-# can just 'except ValidationError' to catch any possibly validation
-# error.
-class UnknownKeyError(ValidationError):
- """
+class ProfileNotFound(BotoCoreError):
+ """
+ The specified configuration profile was not found in the
+ configuration file.
+
+ :ivar profile: The name of the profile the user attempted to load.
+ """
+ fmt = 'The config profile ({profile}) could not be found'
+
+
+class ConfigParseError(BotoCoreError):
+ """
+ The configuration file could not be parsed.
+
+ :ivar path: The path to the configuration file.
+ """
+ fmt = 'Unable to parse config file: {path}'
+
+
+class ConfigNotFound(BotoCoreError):
+ """
+ The specified configuration file could not be found.
+
+ :ivar path: The path to the configuration file.
+ """
+ fmt = 'The specified config file ({path}) could not be found.'
+
+
+class MissingParametersError(BotoCoreError):
+ """
+ One or more required parameters were not supplied.
+
+ :ivar object: The object that has missing parameters.
+ This can be an operation or a parameter (in the
+ case of inner params). The str() of this object
+ will be used so it doesn't need to implement anything
+ other than str().
+ :ivar missing: The names of the missing parameters.
+ """
+ fmt = ('The following required parameters are missing for '
+ '{object_name}: {missing}')
+
+
+class ValidationError(BotoCoreError):
+ """
+ An exception occurred validating parameters.
+
+ Subclasses must accept a ``value`` and ``param``
+ argument in their ``__init__``.
+
+ :ivar value: The value that was being validated.
+ :ivar param: The parameter that failed validation.
+ :ivar type_name: The name of the underlying type.
+ """
+ fmt = ("Invalid value ('{value}') for param {param} "
+ "of type {type_name} ")
+
+
+class ParamValidationError(BotoCoreError):
+ fmt = 'Parameter validation failed:\n{report}'
+
+
+# These exceptions subclass from ValidationError so that code
+# can just 'except ValidationError' to catch any possible validation
+# error.
+class UnknownKeyError(ValidationError):
+ """
Unknown key in a struct parameter.
-
- :ivar value: The value that was being checked.
- :ivar param: The name of the parameter.
- :ivar choices: The valid choices the value can be.
- """
- fmt = ("Unknown key '{value}' for param '{param}'. Must be one "
- "of: {choices}")
-
-
-class RangeError(ValidationError):
- """
- A parameter value was out of the valid range.
-
- :ivar value: The value that was being checked.
- :ivar param: The parameter that failed validation.
- :ivar min_value: The specified minimum value.
- :ivar max_value: The specified maximum value.
- """
- fmt = ('Value out of range for param {param}: '
- '{min_value} <= {value} <= {max_value}')
-
-
-class UnknownParameterError(ValidationError):
- """
- Unknown top level parameter.
-
- :ivar name: The name of the unknown parameter.
- :ivar operation: The name of the operation.
- :ivar choices: The valid choices the parameter name can be.
- """
- fmt = (
- "Unknown parameter '{name}' for operation {operation}. Must be one "
- "of: {choices}"
- )
-
-
+
+ :ivar value: The value that was being checked.
+ :ivar param: The name of the parameter.
+ :ivar choices: The valid choices the value can be.
+ """
+ fmt = ("Unknown key '{value}' for param '{param}'. Must be one "
+ "of: {choices}")
+
+
+class RangeError(ValidationError):
+ """
+ A parameter value was out of the valid range.
+
+ :ivar value: The value that was being checked.
+ :ivar param: The parameter that failed validation.
+ :ivar min_value: The specified minimum value.
+ :ivar max_value: The specified maximum value.
+ """
+ fmt = ('Value out of range for param {param}: '
+ '{min_value} <= {value} <= {max_value}')
+
+
+class UnknownParameterError(ValidationError):
+ """
+ Unknown top level parameter.
+
+ :ivar name: The name of the unknown parameter.
+ :ivar operation: The name of the operation.
+ :ivar choices: The valid choices the parameter name can be.
+ """
+ fmt = (
+ "Unknown parameter '{name}' for operation {operation}. Must be one "
+ "of: {choices}"
+ )
+
+
class InvalidRegionError(ValidationError, ValueError):
"""
Invalid region_name provided to client or resource.
@@ -327,161 +327,161 @@ class InvalidRegionError(ValidationError, ValueError):
)
-class AliasConflictParameterError(ValidationError):
- """
- Error when an alias is provided for a parameter as well as the original.
-
- :ivar original: The name of the original parameter.
- :ivar alias: The name of the alias
- :ivar operation: The name of the operation.
- """
- fmt = (
- "Parameter '{original}' and its alias '{alias}' were provided "
- "for operation {operation}. Only one of them may be used."
- )
-
-
-class UnknownServiceStyle(BotoCoreError):
- """
- Unknown style of service invocation.
-
- :ivar service_style: The style requested.
- """
- fmt = 'The service style ({service_style}) is not understood.'
-
-
-class PaginationError(BotoCoreError):
- fmt = 'Error during pagination: {message}'
-
-
-class OperationNotPageableError(BotoCoreError):
- fmt = 'Operation cannot be paginated: {operation_name}'
-
-
-class ChecksumError(BotoCoreError):
- """The expected checksum did not match the calculated checksum.
-
- """
- fmt = ('Checksum {checksum_type} failed, expected checksum '
- '{expected_checksum} did not match calculated checksum '
- '{actual_checksum}.')
-
-
-class UnseekableStreamError(BotoCoreError):
- """Need to seek a stream, but stream does not support seeking.
-
- """
- fmt = ('Need to rewind the stream {stream_object}, but stream '
- 'is not seekable.')
-
-
-class WaiterError(BotoCoreError):
- """Waiter failed to reach desired state."""
- fmt = 'Waiter {name} failed: {reason}'
-
- def __init__(self, name, reason, last_response):
- super(WaiterError, self).__init__(name=name, reason=reason)
- self.last_response = last_response
-
-
-class IncompleteReadError(BotoCoreError):
- """HTTP response did not return expected number of bytes."""
- fmt = ('{actual_bytes} read, but total bytes '
- 'expected is {expected_bytes}.')
-
-
-class InvalidExpressionError(BotoCoreError):
- """Expression is either invalid or too complex."""
- fmt = 'Invalid expression {expression}: Only dotted lookups are supported.'
-
-
-class UnknownCredentialError(BotoCoreError):
- """Tried to insert before/after an unregistered credential type."""
- fmt = 'Credential named {name} not found.'
-
-
-class WaiterConfigError(BotoCoreError):
- """Error when processing waiter configuration."""
- fmt = 'Error processing waiter config: {error_msg}'
-
-
-class UnknownClientMethodError(BotoCoreError):
- """Error when trying to access a method on a client that does not exist."""
- fmt = 'Client does not have method: {method_name}'
-
-
-class UnsupportedSignatureVersionError(BotoCoreError):
+class AliasConflictParameterError(ValidationError):
+ """
+ Error when an alias is provided for a parameter as well as the original.
+
+ :ivar original: The name of the original parameter.
+ :ivar alias: The name of the alias
+ :ivar operation: The name of the operation.
+ """
+ fmt = (
+ "Parameter '{original}' and its alias '{alias}' were provided "
+ "for operation {operation}. Only one of them may be used."
+ )
+
+
+class UnknownServiceStyle(BotoCoreError):
+ """
+ Unknown style of service invocation.
+
+ :ivar service_style: The style requested.
+ """
+ fmt = 'The service style ({service_style}) is not understood.'
+
+
+class PaginationError(BotoCoreError):
+ fmt = 'Error during pagination: {message}'
+
+
+class OperationNotPageableError(BotoCoreError):
+ fmt = 'Operation cannot be paginated: {operation_name}'
+
+
+class ChecksumError(BotoCoreError):
+ """The expected checksum did not match the calculated checksum.
+
+ """
+ fmt = ('Checksum {checksum_type} failed, expected checksum '
+ '{expected_checksum} did not match calculated checksum '
+ '{actual_checksum}.')
+
+
+class UnseekableStreamError(BotoCoreError):
+ """Need to seek a stream, but stream does not support seeking.
+
+ """
+ fmt = ('Need to rewind the stream {stream_object}, but stream '
+ 'is not seekable.')
+
+
+class WaiterError(BotoCoreError):
+ """Waiter failed to reach desired state."""
+ fmt = 'Waiter {name} failed: {reason}'
+
+ def __init__(self, name, reason, last_response):
+ super(WaiterError, self).__init__(name=name, reason=reason)
+ self.last_response = last_response
+
+
+class IncompleteReadError(BotoCoreError):
+ """HTTP response did not return expected number of bytes."""
+ fmt = ('{actual_bytes} read, but total bytes '
+ 'expected is {expected_bytes}.')
+
+
+class InvalidExpressionError(BotoCoreError):
+ """Expression is either invalid or too complex."""
+ fmt = 'Invalid expression {expression}: Only dotted lookups are supported.'
+
+
+class UnknownCredentialError(BotoCoreError):
+ """Tried to insert before/after an unregistered credential type."""
+ fmt = 'Credential named {name} not found.'
+
+
+class WaiterConfigError(BotoCoreError):
+ """Error when processing waiter configuration."""
+ fmt = 'Error processing waiter config: {error_msg}'
+
+
+class UnknownClientMethodError(BotoCoreError):
+ """Error when trying to access a method on a client that does not exist."""
+ fmt = 'Client does not have method: {method_name}'
+
+
+class UnsupportedSignatureVersionError(BotoCoreError):
"""Error when trying to use an unsupported Signature Version."""
- fmt = 'Signature version is not supported: {signature_version}'
-
-
-class ClientError(Exception):
- MSG_TEMPLATE = (
- 'An error occurred ({error_code}) when calling the {operation_name} '
- 'operation{retry_info}: {error_message}')
-
- def __init__(self, error_response, operation_name):
- retry_info = self._get_retry_info(error_response)
- error = error_response.get('Error', {})
- msg = self.MSG_TEMPLATE.format(
- error_code=error.get('Code', 'Unknown'),
- error_message=error.get('Message', 'Unknown'),
- operation_name=operation_name,
- retry_info=retry_info,
- )
- super(ClientError, self).__init__(msg)
- self.response = error_response
- self.operation_name = operation_name
-
- def _get_retry_info(self, response):
- retry_info = ''
- if 'ResponseMetadata' in response:
- metadata = response['ResponseMetadata']
- if metadata.get('MaxAttemptsReached', False):
- if 'RetryAttempts' in metadata:
- retry_info = (' (reached max retries: %s)' %
- metadata['RetryAttempts'])
- return retry_info
-
+ fmt = 'Signature version is not supported: {signature_version}'
+
+
+class ClientError(Exception):
+ MSG_TEMPLATE = (
+ 'An error occurred ({error_code}) when calling the {operation_name} '
+ 'operation{retry_info}: {error_message}')
+
+ def __init__(self, error_response, operation_name):
+ retry_info = self._get_retry_info(error_response)
+ error = error_response.get('Error', {})
+ msg = self.MSG_TEMPLATE.format(
+ error_code=error.get('Code', 'Unknown'),
+ error_message=error.get('Message', 'Unknown'),
+ operation_name=operation_name,
+ retry_info=retry_info,
+ )
+ super(ClientError, self).__init__(msg)
+ self.response = error_response
+ self.operation_name = operation_name
+
+ def _get_retry_info(self, response):
+ retry_info = ''
+ if 'ResponseMetadata' in response:
+ metadata = response['ResponseMetadata']
+ if metadata.get('MaxAttemptsReached', False):
+ if 'RetryAttempts' in metadata:
+ retry_info = (' (reached max retries: %s)' %
+ metadata['RetryAttempts'])
+ return retry_info
+
def __reduce__(self):
# Subclasses of ClientError's are dynamically generated and
# cannot be pickled unless they are attributes of a
# module. So at the very least return a ClientError back.
return ClientError, (self.response, self.operation_name)
-
+
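+# A minimal sketch (illustrative, not part of botocore): the usual way to
+# consume ClientError is to catch it and inspect the parsed error response:
+#
+#   try:
+#       client.get_object(Bucket='my-bucket', Key='missing')  # hypothetical
+#   except ClientError as e:
+#       code = e.response['Error']['Code']   # e.g. 'NoSuchKey'
+#       op = e.operation_name                # 'GetObject'
+
+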
class EventStreamError(ClientError):
pass
-class UnsupportedTLSVersionWarning(Warning):
- """Warn when an openssl version that uses TLS 1.2 is required"""
- pass
-
-
-class ImminentRemovalWarning(Warning):
- pass
-
-
-class InvalidDNSNameError(BotoCoreError):
- """Error when virtual host path is forced on a non-DNS compatible bucket"""
- fmt = (
- 'Bucket named {bucket_name} is not DNS compatible. Virtual '
- 'hosted-style addressing cannot be used. The addressing style '
- 'can be configured by removing the addressing_style value '
- 'or setting that value to \'path\' or \'auto\' in the AWS Config '
- 'file or in the botocore.client.Config object.'
- )
-
-
-class InvalidS3AddressingStyleError(BotoCoreError):
- """Error when an invalid path style is specified"""
- fmt = (
+class UnsupportedTLSVersionWarning(Warning):
+ """Warn when an openssl version that uses TLS 1.2 is required"""
+ pass
+
+
+class ImminentRemovalWarning(Warning):
+ pass
+
+
+class InvalidDNSNameError(BotoCoreError):
+ """Error when virtual host path is forced on a non-DNS compatible bucket"""
+ fmt = (
+ 'Bucket named {bucket_name} is not DNS compatible. Virtual '
+ 'hosted-style addressing cannot be used. The addressing style '
+ 'can be configured by removing the addressing_style value '
+ 'or setting that value to \'path\' or \'auto\' in the AWS Config '
+ 'file or in the botocore.client.Config object.'
+ )
+
+
+class InvalidS3AddressingStyleError(BotoCoreError):
+ """Error when an invalid path style is specified"""
+ fmt = (
'S3 addressing style {s3_addressing_style} is invalid. Valid options '
- 'are: \'auto\', \'virtual\', and \'path\''
- )
-
-
+ 'are: \'auto\', \'virtual\', and \'path\''
+ )
+
+
class UnsupportedS3ArnError(BotoCoreError):
"""Error when S3 ARN provided to Bucket parameter is not supported"""
fmt = (
@@ -543,21 +543,21 @@ class UnsupportedS3ControlConfigurationError(BotoCoreError):
)
-class InvalidRetryConfigurationError(BotoCoreError):
- """Error when invalid retry configuration is specified"""
- fmt = (
- 'Cannot provide retry configuration for "{retry_config_option}". '
- 'Valid retry configuration options are: \'max_attempts\''
- )
-
-
-class InvalidMaxRetryAttemptsError(InvalidRetryConfigurationError):
- """Error when invalid retry configuration is specified"""
- fmt = (
- 'Value provided to "max_attempts": {provided_max_attempts} must '
+class InvalidRetryConfigurationError(BotoCoreError):
+ """Error when invalid retry configuration is specified"""
+ fmt = (
+ 'Cannot provide retry configuration for "{retry_config_option}". '
+ 'Valid retry configuration options are: \'max_attempts\''
+ )
+
+
+class InvalidMaxRetryAttemptsError(InvalidRetryConfigurationError):
+ """Error when invalid retry configuration is specified"""
+ fmt = (
+ 'Value provided to "max_attempts": {provided_max_attempts} must '
'be an integer greater than or equal to {min_value}.'
- )
-
+ )
+
class InvalidRetryModeError(InvalidRetryConfigurationError):
"""Error when invalid retry mode configuration is specified"""
@@ -584,40 +584,40 @@ class InvalidSTSRegionalEndpointsConfigError(BotoCoreError):
)
-class StubResponseError(BotoCoreError):
- fmt = 'Error getting response stub for operation {operation_name}: {reason}'
-
-
-class StubAssertionError(StubResponseError, AssertionError):
+class StubResponseError(BotoCoreError):
+ fmt = 'Error getting response stub for operation {operation_name}: {reason}'
+
+
+class StubAssertionError(StubResponseError, AssertionError):
pass
-
+
class UnStubbedResponseError(StubResponseError):
pass
-
-
-class InvalidConfigError(BotoCoreError):
- fmt = '{error_msg}'
-
-
-class InfiniteLoopConfigError(InvalidConfigError):
- fmt = (
- 'Infinite loop in credential configuration detected. Attempting to '
- 'load from profile {source_profile} which has already been visited. '
- 'Visited profiles: {visited_profiles}'
- )
-
-
-class RefreshWithMFAUnsupportedError(BotoCoreError):
- fmt = 'Cannot refresh credentials: MFA token required.'
-
-
-class MD5UnavailableError(BotoCoreError):
- fmt = "This system does not support MD5 generation."
-
-
-class MetadataRetrievalError(BotoCoreError):
- fmt = "Error retrieving metadata: {error_msg}"
+
+
+class InvalidConfigError(BotoCoreError):
+ fmt = '{error_msg}'
+
+
+class InfiniteLoopConfigError(InvalidConfigError):
+ fmt = (
+ 'Infinite loop in credential configuration detected. Attempting to '
+ 'load from profile {source_profile} which has already been visited. '
+ 'Visited profiles: {visited_profiles}'
+ )
+
+
+class RefreshWithMFAUnsupportedError(BotoCoreError):
+ fmt = 'Cannot refresh credentials: MFA token required.'
+
+
+class MD5UnavailableError(BotoCoreError):
+ fmt = "This system does not support MD5 generation."
+
+
+class MetadataRetrievalError(BotoCoreError):
+ fmt = "Error retrieving metadata: {error_msg}"
class UndefinedModelAttributeError(Exception):
diff --git a/contrib/python/botocore/botocore/handlers.py b/contrib/python/botocore/botocore/handlers.py
index 8a70563fcc..d5957c8f7f 100644
--- a/contrib/python/botocore/botocore/handlers.py
+++ b/contrib/python/botocore/botocore/handlers.py
@@ -1,65 +1,65 @@
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Builtin event handlers.
-
-This module contains builtin handlers for events emitted by botocore.
-"""
-
-import base64
-import logging
-import copy
-import re
-import warnings
-import uuid
-
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+"""Builtin event handlers.
+
+This module contains builtin handlers for events emitted by botocore.
+"""
+
+import base64
+import logging
+import copy
+import re
+import warnings
+import uuid
+
from botocore.compat import (
unquote, json, six, unquote_str, ensure_bytes, get_md5,
MD5_AVAILABLE, OrderedDict, urlsplit, urlunsplit, XMLParseError,
ETree,
)
-from botocore.docs.utils import AutoPopulatedParam
-from botocore.docs.utils import HideParamFromOperations
-from botocore.docs.utils import AppendParamDocumentation
-from botocore.signers import add_generate_presigned_url
-from botocore.signers import add_generate_presigned_post
-from botocore.signers import add_generate_db_auth_token
-from botocore.exceptions import ParamValidationError
-from botocore.exceptions import AliasConflictParameterError
-from botocore.exceptions import UnsupportedTLSVersionWarning
+from botocore.docs.utils import AutoPopulatedParam
+from botocore.docs.utils import HideParamFromOperations
+from botocore.docs.utils import AppendParamDocumentation
+from botocore.signers import add_generate_presigned_url
+from botocore.signers import add_generate_presigned_post
+from botocore.signers import add_generate_db_auth_token
+from botocore.exceptions import ParamValidationError
+from botocore.exceptions import AliasConflictParameterError
+from botocore.exceptions import UnsupportedTLSVersionWarning
from botocore.exceptions import MissingServiceIdError
-from botocore.utils import percent_encode, SAFE_CHARS
-from botocore.utils import switch_host_with_param
+from botocore.utils import percent_encode, SAFE_CHARS
+from botocore.utils import switch_host_with_param
from botocore.utils import hyphenize_service_id
from botocore.utils import conditionally_calculate_md5
-
-from botocore import retryhandler
-from botocore import utils
-from botocore import translate
-import botocore
-import botocore.auth
-
-
-logger = logging.getLogger(__name__)
-
-REGISTER_FIRST = object()
-REGISTER_LAST = object()
-# From the S3 docs:
-# The rules for bucket names in the US Standard region allow bucket names
-# to be as long as 255 characters, and bucket names can contain any
-# combination of uppercase letters, lowercase letters, numbers, periods
-# (.), hyphens (-), and underscores (_).
-VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$')
+
+from botocore import retryhandler
+from botocore import utils
+from botocore import translate
+import botocore
+import botocore.auth
+
+
+logger = logging.getLogger(__name__)
+
+REGISTER_FIRST = object()
+REGISTER_LAST = object()
+# From the S3 docs:
+# The rules for bucket names in the US Standard region allow bucket names
+# to be as long as 255 characters, and bucket names can contain any
+# combination of uppercase letters, lowercase letters, numbers, periods
+# (.), hyphens (-), and underscores (_).
+VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$')
_ACCESSPOINT_ARN = (
r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]+:[0-9]{12}:accesspoint[/:]'
r'[a-zA-Z0-9\-]{1,63}$'
@@ -69,12 +69,12 @@ _OUTPOST_ARN = (
r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$'
)
VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN]))
-VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$')
-
+VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$')
+
SERVICE_NAME_ALIASES = {
'runtime.sagemaker': 'sagemaker-runtime'
}
-
+
def handle_service_name_alias(service_name, **kwargs):
return SERVICE_NAME_ALIASES.get(service_name, service_name)
@@ -106,36 +106,36 @@ def escape_xml_payload(params, **kwargs):
conditionally_calculate_md5(params, **kwargs)
-def check_for_200_error(response, **kwargs):
- # From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html
- # There are two opportunities for a copy request to return an error. One
- # can occur when Amazon S3 receives the copy request and the other can
- # occur while Amazon S3 is copying the files. If the error occurs before
- # the copy operation starts, you receive a standard Amazon S3 error. If the
- # error occurs during the copy operation, the error response is embedded in
- # the 200 OK response. This means that a 200 OK response can contain either
- # a success or an error. Make sure to design your application to parse the
- # contents of the response and handle it appropriately.
- #
- # So this handler checks for this case. Even though the server sends a
- # 200 response, conceptually this should be handled exactly like a
- # 500 response (with respect to raising exceptions, retries, etc.)
- # We're connected *before* all the other retry logic handlers, so as long
- # as we switch the error code to 500, we'll retry the error as expected.
- if response is None:
- # A None response can happen if an exception is raised while
- # trying to retrieve the response. See Endpoint._get_response().
- return
- http_response, parsed = response
- if _looks_like_special_case_error(http_response):
- logger.debug("Error found for response with 200 status code, "
- "errors: %s, changing status code to "
- "500.", parsed)
- http_response.status_code = 500
-
-
-def _looks_like_special_case_error(http_response):
- if http_response.status_code == 200:
+def check_for_200_error(response, **kwargs):
+ # From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html
+ # There are two opportunities for a copy request to return an error. One
+ # can occur when Amazon S3 receives the copy request and the other can
+ # occur while Amazon S3 is copying the files. If the error occurs before
+ # the copy operation starts, you receive a standard Amazon S3 error. If the
+ # error occurs during the copy operation, the error response is embedded in
+ # the 200 OK response. This means that a 200 OK response can contain either
+ # a success or an error. Make sure to design your application to parse the
+ # contents of the response and handle it appropriately.
+ #
+ # So this handler checks for this case. Even though the server sends a
+ # 200 response, conceptually this should be handled exactly like a
+ # 500 response (with respect to raising exceptions, retries, etc.)
+ # We're connected *before* all the other retry logic handlers, so as long
+ # as we switch the error code to 500, we'll retry the error as expected.
+ if response is None:
+ # A None response can happen if an exception is raised while
+ # trying to retrieve the response. See Endpoint._get_response().
+ return
+ http_response, parsed = response
+ if _looks_like_special_case_error(http_response):
+ logger.debug("Error found for response with 200 status code, "
+ "errors: %s, changing status code to "
+ "500.", parsed)
+ http_response.status_code = 500
+
+
+def _looks_like_special_case_error(http_response):
+ if http_response.status_code == 200:
try:
parser = ETree.XMLParser(
target=ETree.TreeBuilder(),
@@ -147,156 +147,156 @@ def _looks_like_special_case_error(http_response):
# streamed response from S3. We need to treat these cases as
# 500 Service Errors and try again.
return True
- if root.tag == 'Error':
- return True
- return False
-
-
-def set_operation_specific_signer(context, signing_name, **kwargs):
- """ Choose the operation-specific signer.
-
- Individual operations may have a different auth type than the service as a
- whole. This will most often manifest as operations that should not be
- authenticated at all, but can include other auth modes such as sigv4
- without body signing.
- """
- auth_type = context.get('auth_type')
-
- # Auth type will be None if the operation doesn't have a configured auth
- # type.
- if not auth_type:
- return
-
- # Auth type will be the string value 'none' if the operation should not
- # be signed at all.
- if auth_type == 'none':
- return botocore.UNSIGNED
-
- if auth_type.startswith('v4'):
- signature_version = 'v4'
- if signing_name == 's3':
- signature_version = 's3v4'
-
- # If the operation needs an unsigned body, we set additional context
- # allowing the signer to be aware of this.
- if auth_type == 'v4-unsigned-body':
- context['payload_signing_enabled'] = False
-
- return signature_version
-
-
-def decode_console_output(parsed, **kwargs):
- if 'Output' in parsed:
- try:
- # We're using 'replace' for errors because it is
- # possible that console output contains non string
- # chars we can't utf-8 decode.
- value = base64.b64decode(six.b(parsed['Output'])).decode(
- 'utf-8', 'replace')
- parsed['Output'] = value
- except (ValueError, TypeError, AttributeError):
- logger.debug('Error decoding base64', exc_info=True)
-
-
-def generate_idempotent_uuid(params, model, **kwargs):
- for name in model.idempotent_members:
- if name not in params:
- params[name] = str(uuid.uuid4())
- logger.debug("injecting idempotency token (%s) into param '%s'." %
- (params[name], name))
-
-
-def decode_quoted_jsondoc(value):
- try:
- value = json.loads(unquote(value))
- except (ValueError, TypeError):
- logger.debug('Error loading quoted JSON', exc_info=True)
- return value
-
-
-def json_decode_template_body(parsed, **kwargs):
- if 'TemplateBody' in parsed:
- try:
+ if root.tag == 'Error':
+ return True
+ return False
+
+
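+# A minimal sketch (illustrative, not part of botocore): the special case the
+# handler above targets is an S3 copy response that is a 200 OK whose body is
+# an error document, e.g.:
+#
+#   HTTP/1.1 200 OK
+#   <Error><Code>InternalError</Code>...</Error>
+#
+# check_for_200_error() rewrites the status code to 500 so the normal retry
+# machinery treats it as a retryable server error.
+
+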
+def set_operation_specific_signer(context, signing_name, **kwargs):
+ """ Choose the operation-specific signer.
+
+ Individual operations may have a different auth type than the service as a
+ whole. This will most often manifest as operations that should not be
+ authenticated at all, but can include other auth modes such as sigv4
+ without body signing.
+ """
+ auth_type = context.get('auth_type')
+
+ # Auth type will be None if the operation doesn't have a configured auth
+ # type.
+ if not auth_type:
+ return
+
+ # Auth type will be the string value 'none' if the operation should not
+ # be signed at all.
+ if auth_type == 'none':
+ return botocore.UNSIGNED
+
+ if auth_type.startswith('v4'):
+ signature_version = 'v4'
+ if signing_name == 's3':
+ signature_version = 's3v4'
+
+ # If the operation needs an unsigned body, we set additional context
+ # allowing the signer to be aware of this.
+ if auth_type == 'v4-unsigned-body':
+ context['payload_signing_enabled'] = False
+
+ return signature_version
+
+
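+# A minimal sketch (illustrative, not part of botocore): for an operation
+# modeled with auth type 'v4-unsigned-body' on the 's3' signing name, the
+# handler above returns 's3v4' and flags the payload as unsigned:
+#
+#   ctx = {'auth_type': 'v4-unsigned-body'}
+#   assert set_operation_specific_signer(ctx, 's3') == 's3v4'
+#   assert ctx['payload_signing_enabled'] is False
+
+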
+def decode_console_output(parsed, **kwargs):
+ if 'Output' in parsed:
+ try:
+            # We're using 'replace' for errors because it is
+            # possible that console output contains non-UTF-8
+            # bytes that we can't decode.
+ value = base64.b64decode(six.b(parsed['Output'])).decode(
+ 'utf-8', 'replace')
+ parsed['Output'] = value
+ except (ValueError, TypeError, AttributeError):
+ logger.debug('Error decoding base64', exc_info=True)
+
+
+def generate_idempotent_uuid(params, model, **kwargs):
+ for name in model.idempotent_members:
+ if name not in params:
+ params[name] = str(uuid.uuid4())
+ logger.debug("injecting idempotency token (%s) into param '%s'." %
+ (params[name], name))
+
+
+def decode_quoted_jsondoc(value):
+ try:
+ value = json.loads(unquote(value))
+ except (ValueError, TypeError):
+ logger.debug('Error loading quoted JSON', exc_info=True)
+ return value
+
+
+def json_decode_template_body(parsed, **kwargs):
+ if 'TemplateBody' in parsed:
+ try:
value = json.loads(
parsed['TemplateBody'], object_pairs_hook=OrderedDict)
- parsed['TemplateBody'] = value
- except (ValueError, TypeError):
- logger.debug('error loading JSON', exc_info=True)
-
-
-def validate_bucket_name(params, **kwargs):
- if 'Bucket' not in params:
- return
- bucket = params['Bucket']
+ parsed['TemplateBody'] = value
+ except (ValueError, TypeError):
+ logger.debug('error loading JSON', exc_info=True)
+
+
+def validate_bucket_name(params, **kwargs):
+ if 'Bucket' not in params:
+ return
+ bucket = params['Bucket']
if not VALID_BUCKET.search(bucket) and not VALID_S3_ARN.search(bucket):
- error_msg = (
- 'Invalid bucket name "%s": Bucket name must match '
+ error_msg = (
+ 'Invalid bucket name "%s": Bucket name must match '
'the regex "%s" or be an ARN matching the regex "%s"' % (
bucket, VALID_BUCKET.pattern, VALID_S3_ARN.pattern))
- raise ParamValidationError(report=error_msg)
-
-
-def sse_md5(params, **kwargs):
- """
- S3 server-side encryption requires the encryption key to be sent to the
- server base64 encoded, as well as a base64-encoded MD5 hash of the
- encryption key. This handler does both if the MD5 has not been set by
- the caller.
- """
- _sse_md5(params, 'SSECustomer')
-
-
-def copy_source_sse_md5(params, **kwargs):
- """
- S3 server-side encryption requires the encryption key to be sent to the
- server base64 encoded, as well as a base64-encoded MD5 hash of the
- encryption key. This handler does both if the MD5 has not been set by
- the caller specifically if the parameter is for the copy-source sse-c key.
- """
- _sse_md5(params, 'CopySourceSSECustomer')
-
-
-def _sse_md5(params, sse_member_prefix='SSECustomer'):
- if not _needs_s3_sse_customization(params, sse_member_prefix):
- return
-
- sse_key_member = sse_member_prefix + 'Key'
- sse_md5_member = sse_member_prefix + 'KeyMD5'
- key_as_bytes = params[sse_key_member]
- if isinstance(key_as_bytes, six.text_type):
- key_as_bytes = key_as_bytes.encode('utf-8')
- key_md5_str = base64.b64encode(
- get_md5(key_as_bytes).digest()).decode('utf-8')
- key_b64_encoded = base64.b64encode(key_as_bytes).decode('utf-8')
- params[sse_key_member] = key_b64_encoded
- params[sse_md5_member] = key_md5_str
-
-
-def _needs_s3_sse_customization(params, sse_member_prefix):
- return (params.get(sse_member_prefix + 'Key') is not None and
- sse_member_prefix + 'KeyMD5' not in params)
-
-
-def disable_signing(**kwargs):
- """
- This handler disables request signing by setting the signer
- name to a special sentinel value.
- """
- return botocore.UNSIGNED
-
-
-def add_expect_header(model, params, **kwargs):
- if model.http.get('method', '') not in ['PUT', 'POST']:
- return
- if 'body' in params:
- body = params['body']
- if hasattr(body, 'read'):
- # Any file like object will use an expect 100-continue
- # header regardless of size.
- logger.debug("Adding expect 100 continue header to request.")
- params['headers']['Expect'] = '100-continue'
-
-
+ raise ParamValidationError(report=error_msg)
+
+
+def sse_md5(params, **kwargs):
+ """
+ S3 server-side encryption requires the encryption key to be sent to the
+ server base64 encoded, as well as a base64-encoded MD5 hash of the
+ encryption key. This handler does both if the MD5 has not been set by
+ the caller.
+ """
+ _sse_md5(params, 'SSECustomer')
+
+
+def copy_source_sse_md5(params, **kwargs):
+ """
+ S3 server-side encryption requires the encryption key to be sent to the
+ server base64 encoded, as well as a base64-encoded MD5 hash of the
+    encryption key. This handler does both if the MD5 has not been set by
+    the caller, specifically when the parameter is the copy-source SSE-C key.
+ """
+ _sse_md5(params, 'CopySourceSSECustomer')
+
+
+def _sse_md5(params, sse_member_prefix='SSECustomer'):
+ if not _needs_s3_sse_customization(params, sse_member_prefix):
+ return
+
+ sse_key_member = sse_member_prefix + 'Key'
+ sse_md5_member = sse_member_prefix + 'KeyMD5'
+ key_as_bytes = params[sse_key_member]
+ if isinstance(key_as_bytes, six.text_type):
+ key_as_bytes = key_as_bytes.encode('utf-8')
+ key_md5_str = base64.b64encode(
+ get_md5(key_as_bytes).digest()).decode('utf-8')
+ key_b64_encoded = base64.b64encode(key_as_bytes).decode('utf-8')
+ params[sse_key_member] = key_b64_encoded
+ params[sse_md5_member] = key_md5_str
+
+
+def _needs_s3_sse_customization(params, sse_member_prefix):
+ return (params.get(sse_member_prefix + 'Key') is not None and
+ sse_member_prefix + 'KeyMD5' not in params)
+
+
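+# A minimal sketch (illustrative, not part of botocore): for a raw 32-byte
+# SSE-C key, _sse_md5 replaces the key with its base64 form and adds the
+# base64-encoded MD5 of the raw key:
+#
+#   params = {'SSECustomerKey': b'k' * 32}
+#   _sse_md5(params)
+#   # params['SSECustomerKey']    -> base64-encoded key
+#   # params['SSECustomerKeyMD5'] -> base64-encoded MD5 digest
+
+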
+def disable_signing(**kwargs):
+ """
+ This handler disables request signing by setting the signer
+ name to a special sentinel value.
+ """
+ return botocore.UNSIGNED
+
+
+def add_expect_header(model, params, **kwargs):
+ if model.http.get('method', '') not in ['PUT', 'POST']:
+ return
+ if 'body' in params:
+ body = params['body']
+ if hasattr(body, 'read'):
+ # Any file like object will use an expect 100-continue
+ # header regardless of size.
+ logger.debug("Adding expect 100 continue header to request.")
+ params['headers']['Expect'] = '100-continue'
+
+
class DeprecatedServiceDocumenter(object):
def __init__(self, replacement_service_name):
self._replacement_service_name = replacement_service_name
@@ -312,410 +312,410 @@ class DeprecatedServiceDocumenter(object):
section.style.end_important()
-def document_copy_source_form(section, event_name, **kwargs):
- if 'request-example' in event_name:
- parent = section.get_section('structure-value')
- param_line = parent.get_section('CopySource')
- value_portion = param_line.get_section('member-value')
- value_portion.clear_text()
- value_portion.write("'string' or {'Bucket': 'string', "
- "'Key': 'string', 'VersionId': 'string'}")
- elif 'request-params' in event_name:
- param_section = section.get_section('CopySource')
- type_section = param_section.get_section('param-type')
- type_section.clear_text()
- type_section.write(':type CopySource: str or dict')
- doc_section = param_section.get_section('param-documentation')
- doc_section.clear_text()
- doc_section.write(
- "The name of the source bucket, key name of the source object, "
- "and optional version ID of the source object. You can either "
- "provide this value as a string or a dictionary. The "
- "string form is {bucket}/{key} or "
- "{bucket}/{key}?versionId={versionId} if you want to copy a "
- "specific version. You can also provide this value as a "
- "dictionary. The dictionary format is recommended over "
- "the string format because it is more explicit. The dictionary "
- "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}."
- " Note that the VersionId key is optional and may be omitted."
+def document_copy_source_form(section, event_name, **kwargs):
+ if 'request-example' in event_name:
+ parent = section.get_section('structure-value')
+ param_line = parent.get_section('CopySource')
+ value_portion = param_line.get_section('member-value')
+ value_portion.clear_text()
+ value_portion.write("'string' or {'Bucket': 'string', "
+ "'Key': 'string', 'VersionId': 'string'}")
+ elif 'request-params' in event_name:
+ param_section = section.get_section('CopySource')
+ type_section = param_section.get_section('param-type')
+ type_section.clear_text()
+ type_section.write(':type CopySource: str or dict')
+ doc_section = param_section.get_section('param-documentation')
+ doc_section.clear_text()
+ doc_section.write(
+ "The name of the source bucket, key name of the source object, "
+ "and optional version ID of the source object. You can either "
+ "provide this value as a string or a dictionary. The "
+ "string form is {bucket}/{key} or "
+ "{bucket}/{key}?versionId={versionId} if you want to copy a "
+ "specific version. You can also provide this value as a "
+ "dictionary. The dictionary format is recommended over "
+ "the string format because it is more explicit. The dictionary "
+ "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}."
+ " Note that the VersionId key is optional and may be omitted."
" To specify an S3 access point, provide the access point"
" ARN for the ``Bucket`` key in the copy source dictionary. If you"
" want to provide the copy source for an S3 access point as a"
" string instead of a dictionary, the ARN provided must be the"
" full S3 access point object ARN"
" (i.e. {accesspoint_arn}/object/{key})"
- )
-
-
-def handle_copy_source_param(params, **kwargs):
- """Convert CopySource param for CopyObject/UploadPartCopy.
-
- This handler will deal with two cases:
-
- * CopySource provided as a string. We'll make a best effort
- to URL encode the key name as required. This will require
- parsing the bucket and version id from the CopySource value
- and only encoding the key.
- * CopySource provided as a dict. In this case we're
- explicitly given the Bucket, Key, and VersionId so we're
- able to encode the key and ensure this value is serialized
- and correctly sent to S3.
-
- """
- source = params.get('CopySource')
- if source is None:
- # The call will eventually fail but we'll let the
- # param validator take care of this. It will
- # give a better error message.
- return
- if isinstance(source, six.string_types):
- params['CopySource'] = _quote_source_header(source)
- elif isinstance(source, dict):
- params['CopySource'] = _quote_source_header_from_dict(source)
-
-
-def _quote_source_header_from_dict(source_dict):
- try:
- bucket = source_dict['Bucket']
+ )
+
+
+def handle_copy_source_param(params, **kwargs):
+ """Convert CopySource param for CopyObject/UploadPartCopy.
+
+ This handler will deal with two cases:
+
+ * CopySource provided as a string. We'll make a best effort
+ to URL encode the key name as required. This will require
+ parsing the bucket and version id from the CopySource value
+ and only encoding the key.
+ * CopySource provided as a dict. In this case we're
+ explicitly given the Bucket, Key, and VersionId so we're
+ able to encode the key and ensure this value is serialized
+ and correctly sent to S3.
+
+ """
+ source = params.get('CopySource')
+ if source is None:
+ # The call will eventually fail but we'll let the
+ # param validator take care of this. It will
+ # give a better error message.
+ return
+ if isinstance(source, six.string_types):
+ params['CopySource'] = _quote_source_header(source)
+ elif isinstance(source, dict):
+ params['CopySource'] = _quote_source_header_from_dict(source)
+
+
+def _quote_source_header_from_dict(source_dict):
+ try:
+ bucket = source_dict['Bucket']
key = source_dict['Key']
- version_id = source_dict.get('VersionId')
+ version_id = source_dict.get('VersionId')
if VALID_S3_ARN.search(bucket):
final = '%s/object/%s' % (bucket, key)
else:
final = '%s/%s' % (bucket, key)
- except KeyError as e:
- raise ParamValidationError(
- report='Missing required parameter: %s' % str(e))
+ except KeyError as e:
+ raise ParamValidationError(
+ report='Missing required parameter: %s' % str(e))
final = percent_encode(final, safe=SAFE_CHARS + '/')
- if version_id is not None:
- final += '?versionId=%s' % version_id
- return final
-
-
-def _quote_source_header(value):
- result = VERSION_ID_SUFFIX.search(value)
- if result is None:
- return percent_encode(value, safe=SAFE_CHARS + '/')
- else:
- first, version_id = value[:result.start()], value[result.start():]
- return percent_encode(first, safe=SAFE_CHARS + '/') + version_id
-
-
-def _get_cross_region_presigned_url(request_signer, request_dict, model,
- source_region, destination_region):
- # The better way to do this is to actually get the
- # endpoint_resolver and get the endpoint_url given the
- # source region. In this specific case, we know that
- # we can safely replace the dest region with the source
- # region because of the supported EC2 regions, but in
- # general this is not a safe assumption to make.
- # I think eventually we should try to plumb through something
- # that allows us to resolve endpoints from regions.
- request_dict_copy = copy.deepcopy(request_dict)
- request_dict_copy['body']['DestinationRegion'] = destination_region
- request_dict_copy['url'] = request_dict['url'].replace(
- destination_region, source_region)
- request_dict_copy['method'] = 'GET'
- request_dict_copy['headers'] = {}
- return request_signer.generate_presigned_url(
- request_dict_copy, region_name=source_region,
- operation_name=model.name)
-
-
-def _get_presigned_url_source_and_destination_regions(request_signer, params):
- # Gets the source and destination regions to be used
- destination_region = request_signer._region_name
- source_region = params.get('SourceRegion')
- return source_region, destination_region
-
-
-def inject_presigned_url_ec2(params, request_signer, model, **kwargs):
- # The customer can still provide this, so we should pass if they do.
- if 'PresignedUrl' in params['body']:
- return
- src, dest = _get_presigned_url_source_and_destination_regions(
- request_signer, params['body'])
- url = _get_cross_region_presigned_url(
- request_signer, params, model, src, dest)
- params['body']['PresignedUrl'] = url
- # EC2 Requires that the destination region be sent over the wire in
- # addition to the source region.
- params['body']['DestinationRegion'] = dest
-
-
-def inject_presigned_url_rds(params, request_signer, model, **kwargs):
- # SourceRegion is not required for RDS operations, so it's possible that
- # it isn't set. In that case it's probably a local copy so we don't need
- # to do anything else.
- if 'SourceRegion' not in params['body']:
- return
-
- src, dest = _get_presigned_url_source_and_destination_regions(
- request_signer, params['body'])
-
- # Since SourceRegion isn't actually modeled for RDS, it needs to be
- # removed from the request params before we send the actual request.
- del params['body']['SourceRegion']
-
- if 'PreSignedUrl' in params['body']:
- return
-
- url = _get_cross_region_presigned_url(
- request_signer, params, model, src, dest)
- params['body']['PreSignedUrl'] = url
-
-
-def json_decode_policies(parsed, model, **kwargs):
- # Any time an IAM operation returns a policy document
- # it is a string that is json that has been urlencoded,
- # i.e urlencode(json.dumps(policy_document)).
- # To give users something more useful, we will urldecode
- # this value and json.loads() the result so that they have
- # the policy document as a dictionary.
- output_shape = model.output_shape
- if output_shape is not None:
- _decode_policy_types(parsed, model.output_shape)
-
-
-def _decode_policy_types(parsed, shape):
- # IAM consistently uses the policyDocumentType shape to indicate
- # strings that have policy documents.
- shape_name = 'policyDocumentType'
- if shape.type_name == 'structure':
- for member_name, member_shape in shape.members.items():
- if member_shape.type_name == 'string' and \
- member_shape.name == shape_name and \
- member_name in parsed:
- parsed[member_name] = decode_quoted_jsondoc(
- parsed[member_name])
- elif member_name in parsed:
- _decode_policy_types(parsed[member_name], member_shape)
- if shape.type_name == 'list':
- shape_member = shape.member
- for item in parsed:
- _decode_policy_types(item, shape_member)
-
-
-def parse_get_bucket_location(parsed, http_response, **kwargs):
- # s3.GetBucketLocation cannot be modeled properly. To
- # account for this we just manually parse the XML document.
- # The "parsed" passed in only has the ResponseMetadata
- # filled out. This handler will fill in the LocationConstraint
- # value.
+ if version_id is not None:
+ final += '?versionId=%s' % version_id
+ return final
+
+
+def _quote_source_header(value):
+ result = VERSION_ID_SUFFIX.search(value)
+ if result is None:
+ return percent_encode(value, safe=SAFE_CHARS + '/')
+ else:
+ first, version_id = value[:result.start()], value[result.start():]
+ return percent_encode(first, safe=SAFE_CHARS + '/') + version_id
+
+
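+# A minimal sketch (illustrative, not part of botocore): both CopySource
+# forms end up as a percent-encoded 'bucket/key[?versionId=...]' string:
+#
+#   params = {'CopySource': {'Bucket': 'b', 'Key': 'a b'}}
+#   handle_copy_source_param(params)
+#   # params['CopySource'] -> 'b/a%20b'
+#
+#   _quote_source_header('b/a b?versionId=3')
+#   # -> 'b/a%20b?versionId=3'
+
+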
+def _get_cross_region_presigned_url(request_signer, request_dict, model,
+ source_region, destination_region):
+ # The better way to do this is to actually get the
+ # endpoint_resolver and get the endpoint_url given the
+ # source region. In this specific case, we know that
+ # we can safely replace the dest region with the source
+ # region because of the supported EC2 regions, but in
+ # general this is not a safe assumption to make.
+ # I think eventually we should try to plumb through something
+ # that allows us to resolve endpoints from regions.
+ request_dict_copy = copy.deepcopy(request_dict)
+ request_dict_copy['body']['DestinationRegion'] = destination_region
+ request_dict_copy['url'] = request_dict['url'].replace(
+ destination_region, source_region)
+ request_dict_copy['method'] = 'GET'
+ request_dict_copy['headers'] = {}
+ return request_signer.generate_presigned_url(
+ request_dict_copy, region_name=source_region,
+ operation_name=model.name)
+
+
+def _get_presigned_url_source_and_destination_regions(request_signer, params):
+ # Gets the source and destination regions to be used
+ destination_region = request_signer._region_name
+ source_region = params.get('SourceRegion')
+ return source_region, destination_region
+
+
+def inject_presigned_url_ec2(params, request_signer, model, **kwargs):
+ # The customer can still provide this, so we should pass if they do.
+ if 'PresignedUrl' in params['body']:
+ return
+ src, dest = _get_presigned_url_source_and_destination_regions(
+ request_signer, params['body'])
+ url = _get_cross_region_presigned_url(
+ request_signer, params, model, src, dest)
+ params['body']['PresignedUrl'] = url
+    # EC2 requires that the destination region be sent over the wire in
+ # addition to the source region.
+ params['body']['DestinationRegion'] = dest
+
+
+def inject_presigned_url_rds(params, request_signer, model, **kwargs):
+ # SourceRegion is not required for RDS operations, so it's possible that
+ # it isn't set. In that case it's probably a local copy so we don't need
+ # to do anything else.
+ if 'SourceRegion' not in params['body']:
+ return
+
+ src, dest = _get_presigned_url_source_and_destination_regions(
+ request_signer, params['body'])
+
+ # Since SourceRegion isn't actually modeled for RDS, it needs to be
+ # removed from the request params before we send the actual request.
+ del params['body']['SourceRegion']
+
+ if 'PreSignedUrl' in params['body']:
+ return
+
+ url = _get_cross_region_presigned_url(
+ request_signer, params, model, src, dest)
+ params['body']['PreSignedUrl'] = url
+
+
+def json_decode_policies(parsed, model, **kwargs):
+    # Any time an IAM operation returns a policy document
+    # it is a JSON string that has been urlencoded,
+    # i.e. urlencode(json.dumps(policy_document)).
+ # To give users something more useful, we will urldecode
+ # this value and json.loads() the result so that they have
+ # the policy document as a dictionary.
+ output_shape = model.output_shape
+ if output_shape is not None:
+ _decode_policy_types(parsed, model.output_shape)
+
+
+def _decode_policy_types(parsed, shape):
+ # IAM consistently uses the policyDocumentType shape to indicate
+ # strings that have policy documents.
+ shape_name = 'policyDocumentType'
+ if shape.type_name == 'structure':
+ for member_name, member_shape in shape.members.items():
+ if member_shape.type_name == 'string' and \
+ member_shape.name == shape_name and \
+ member_name in parsed:
+ parsed[member_name] = decode_quoted_jsondoc(
+ parsed[member_name])
+ elif member_name in parsed:
+ _decode_policy_types(parsed[member_name], member_shape)
+ if shape.type_name == 'list':
+ shape_member = shape.member
+ for item in parsed:
+ _decode_policy_types(item, shape_member)
+
+
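+# A minimal sketch (illustrative, not part of botocore): decode_quoted_jsondoc
+# turns an urlencoded JSON policy string into a dictionary:
+#
+#   decode_quoted_jsondoc('%7B%22Version%22%3A%20%222012-10-17%22%7D')
+#   # -> {'Version': '2012-10-17'}
+
+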
+def parse_get_bucket_location(parsed, http_response, **kwargs):
+ # s3.GetBucketLocation cannot be modeled properly. To
+ # account for this we just manually parse the XML document.
+ # The "parsed" passed in only has the ResponseMetadata
+ # filled out. This handler will fill in the LocationConstraint
+ # value.
if http_response.raw is None:
- return
- response_body = http_response.content
+ return
+ response_body = http_response.content
parser = ETree.XMLParser(
target=ETree.TreeBuilder(),
- encoding='utf-8')
- parser.feed(response_body)
- root = parser.close()
- region = root.text
- parsed['LocationConstraint'] = region
-
-
-def base64_encode_user_data(params, **kwargs):
- if 'UserData' in params:
- if isinstance(params['UserData'], six.text_type):
- # Encode it to bytes if it is text.
- params['UserData'] = params['UserData'].encode('utf-8')
- params['UserData'] = base64.b64encode(
- params['UserData']).decode('utf-8')
-
-
-def document_base64_encoding(param):
- description = ('**This value will be base64 encoded automatically. Do '
- 'not base64 encode this value prior to performing the '
- 'operation.**')
- append = AppendParamDocumentation(param, description)
- return append.append_documentation
-
-
-def validate_ascii_metadata(params, **kwargs):
- """Verify S3 Metadata only contains ascii characters.
-
- From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
-
- "Amazon S3 stores user-defined metadata in lowercase. Each name, value pair
- must conform to US-ASCII when using REST and UTF-8 when using SOAP or
- browser-based uploads via POST."
-
- """
- metadata = params.get('Metadata')
- if not metadata or not isinstance(metadata, dict):
- # We have to at least type check the metadata as a dict type
- # because this handler is called before param validation.
- # We'll go ahead and return because the param validator will
- # give a descriptive error message for us.
- # We might need a post-param validation event.
- return
- for key, value in metadata.items():
- try:
- key.encode('ascii')
- value.encode('ascii')
- except UnicodeEncodeError as e:
- error_msg = (
- 'Non ascii characters found in S3 metadata '
- 'for key "%s", value: "%s". \nS3 metadata can only '
- 'contain ASCII characters. ' % (key, value)
- )
- raise ParamValidationError(
- report=error_msg)
-
-
-def fix_route53_ids(params, model, **kwargs):
- """
- Check for and split apart Route53 resource IDs, setting
- only the last piece. This allows the output of one operation
- (e.g. ``'foo/1234'``) to be used as input in another
- operation (e.g. it expects just ``'1234'``).
- """
- input_shape = model.input_shape
- if not input_shape or not hasattr(input_shape, 'members'):
- return
-
- members = [name for (name, shape) in input_shape.members.items()
- if shape.name in ['ResourceId', 'DelegationSetId']]
-
- for name in members:
- if name in params:
- orig_value = params[name]
- params[name] = orig_value.split('/')[-1]
- logger.debug('%s %s -> %s', name, orig_value, params[name])
-
-
-def inject_account_id(params, **kwargs):
- if params.get('accountId') is None:
- # Glacier requires accountId, but allows you
- # to specify '-' for the current owners account.
- # We add this default value if the user does not
- # provide the accountId as a convenience.
- params['accountId'] = '-'
-
-
-def add_glacier_version(model, params, **kwargs):
- request_dict = params
- request_dict['headers']['x-amz-glacier-version'] = model.metadata[
- 'apiVersion']
-
-
-def add_accept_header(model, params, **kwargs):
- if params['headers'].get('Accept', None) is None:
- request_dict = params
- request_dict['headers']['Accept'] = 'application/json'
-
-
-def add_glacier_checksums(params, **kwargs):
- """Add glacier checksums to the http request.
-
- This will add two headers to the http request:
-
- * x-amz-content-sha256
- * x-amz-sha256-tree-hash
-
- These values will only be added if they are not present
- in the HTTP request.
-
- """
- request_dict = params
- headers = request_dict['headers']
- body = request_dict['body']
- if isinstance(body, six.binary_type):
- # If the user provided a bytes type instead of a file
- # like object, we're temporarily create a BytesIO object
- # so we can use the util functions to calculate the
- # checksums which assume file like objects. Note that
- # we're not actually changing the body in the request_dict.
- body = six.BytesIO(body)
- starting_position = body.tell()
- if 'x-amz-content-sha256' not in headers:
- headers['x-amz-content-sha256'] = utils.calculate_sha256(
- body, as_hex=True)
- body.seek(starting_position)
- if 'x-amz-sha256-tree-hash' not in headers:
- headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
- body.seek(starting_position)
-
-
-def document_glacier_tree_hash_checksum():
- doc = '''
- This is a required field.
-
- Ideally you will want to compute this value with checksums from
- previous uploaded parts, using the algorithm described in
- `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_.
-
- But if you prefer, you can also use botocore.utils.calculate_tree_hash()
- to compute it from raw file by::
-
- checksum = calculate_tree_hash(open('your_file.txt', 'rb'))
-
- '''
- return AppendParamDocumentation('checksum', doc).append_documentation
-
-
-def document_cloudformation_get_template_return_type(section, event_name, **kwargs):
- if 'response-params' in event_name:
- template_body_section = section.get_section('TemplateBody')
- type_section = template_body_section.get_section('param-type')
- type_section.clear_text()
- type_section.write('(*dict*) --')
- elif 'response-example' in event_name:
- parent = section.get_section('structure-value')
- param_line = parent.get_section('TemplateBody')
- value_portion = param_line.get_section('member-value')
- value_portion.clear_text()
- value_portion.write('{}')
-
-
-def switch_host_machinelearning(request, **kwargs):
- switch_host_with_param(request, 'PredictEndpoint')
-
-
-def check_openssl_supports_tls_version_1_2(**kwargs):
- import ssl
- try:
- openssl_version_tuple = ssl.OPENSSL_VERSION_INFO
- if openssl_version_tuple < (1, 0, 1):
- warnings.warn(
- 'Currently installed openssl version: %s does not '
- 'support TLS 1.2, which is required for use of iot-data. '
- 'Please use python installed with openssl version 1.0.1 or '
- 'higher.' % (ssl.OPENSSL_VERSION),
- UnsupportedTLSVersionWarning
- )
- # We cannot check the openssl version on python2.6, so we should just
- # pass on this conveniency check.
- except AttributeError:
- pass
-
-
-def change_get_to_post(request, **kwargs):
- # This is useful when we need to change a potentially large GET request
- # into a POST with x-www-form-urlencoded encoding.
- if request.method == 'GET' and '?' in request.url:
- request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
- request.method = 'POST'
- request.url, request.data = request.url.split('?', 1)
-
-
-def set_list_objects_encoding_type_url(params, context, **kwargs):
- if 'EncodingType' not in params:
- # We set this context so that we know it wasn't the customer that
- # requested the encoding.
- context['encoding_type_auto_set'] = True
- params['EncodingType'] = 'url'
-
-
-def decode_list_object(parsed, context, **kwargs):
- # This is needed because we are passing url as the encoding type. Since the
- # paginator is based on the key, we need to handle it before it can be
- # round tripped.
- #
- # From the documentation: If you specify encoding-type request parameter,
- # Amazon S3 includes this element in the response, and returns encoded key
- # name values in the following response elements:
- # Delimiter, Marker, Prefix, NextMarker, Key.
+ encoding='utf-8')
+ parser.feed(response_body)
+ root = parser.close()
+ region = root.text
+ parsed['LocationConstraint'] = region
+
+
+def base64_encode_user_data(params, **kwargs):
+ if 'UserData' in params:
+ if isinstance(params['UserData'], six.text_type):
+ # Encode it to bytes if it is text.
+ params['UserData'] = params['UserData'].encode('utf-8')
+ params['UserData'] = base64.b64encode(
+ params['UserData']).decode('utf-8')
+
+
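+# A minimal sketch (illustrative, not part of botocore): text UserData is
+# encoded to UTF-8 bytes and then base64, so EC2 receives it ready to use:
+#
+#   params = {'UserData': '#!/bin/bash'}
+#   base64_encode_user_data(params)
+#   # params['UserData'] -> 'IyEvYmluL2Jhc2g='
+
+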
+def document_base64_encoding(param):
+ description = ('**This value will be base64 encoded automatically. Do '
+ 'not base64 encode this value prior to performing the '
+ 'operation.**')
+ append = AppendParamDocumentation(param, description)
+ return append.append_documentation
+
+
+def validate_ascii_metadata(params, **kwargs):
+ """Verify S3 Metadata only contains ascii characters.
+
+ From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
+
+ "Amazon S3 stores user-defined metadata in lowercase. Each name, value pair
+ must conform to US-ASCII when using REST and UTF-8 when using SOAP or
+ browser-based uploads via POST."
+
+ """
+ metadata = params.get('Metadata')
+ if not metadata or not isinstance(metadata, dict):
+ # We have to at least type check the metadata as a dict type
+ # because this handler is called before param validation.
+ # We'll go ahead and return because the param validator will
+ # give a descriptive error message for us.
+ # We might need a post-param validation event.
+ return
+ for key, value in metadata.items():
+ try:
+ key.encode('ascii')
+ value.encode('ascii')
+ except UnicodeEncodeError as e:
+ error_msg = (
+                    'Non-ASCII characters found in S3 metadata '
+ 'for key "%s", value: "%s". \nS3 metadata can only '
+ 'contain ASCII characters. ' % (key, value)
+ )
+ raise ParamValidationError(
+ report=error_msg)
+
+
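+# A minimal sketch (illustrative, not part of botocore): metadata containing
+# any non-ASCII key or value fails validation before the request is built:
+#
+#   validate_ascii_metadata({'Metadata': {'name': u'caf\xe9'}})
+#   # -> raises ParamValidationError
+
+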
+def fix_route53_ids(params, model, **kwargs):
+ """
+ Check for and split apart Route53 resource IDs, setting
+ only the last piece. This allows the output of one operation
+ (e.g. ``'foo/1234'``) to be used as input in another
+ operation (e.g. it expects just ``'1234'``).
+ """
+ input_shape = model.input_shape
+ if not input_shape or not hasattr(input_shape, 'members'):
+ return
+
+ members = [name for (name, shape) in input_shape.members.items()
+ if shape.name in ['ResourceId', 'DelegationSetId']]
+
+ for name in members:
+ if name in params:
+ orig_value = params[name]
+ params[name] = orig_value.split('/')[-1]
+ logger.debug('%s %s -> %s', name, orig_value, params[name])
+
+
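+# A minimal sketch (illustrative, not part of botocore): Route53 returns IDs
+# such as '/hostedzone/Z123' but expects bare IDs as input, so the handler
+# keeps only the last path segment:
+#
+#   '/hostedzone/Z123'.split('/')[-1]   # -> 'Z123'
+
+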
+def inject_account_id(params, **kwargs):
+ if params.get('accountId') is None:
+ # Glacier requires accountId, but allows you
+        # to specify '-' for the current owner's account.
+ # We add this default value if the user does not
+ # provide the accountId as a convenience.
+ params['accountId'] = '-'
+
+
+def add_glacier_version(model, params, **kwargs):
+ request_dict = params
+ request_dict['headers']['x-amz-glacier-version'] = model.metadata[
+ 'apiVersion']
+
+
+def add_accept_header(model, params, **kwargs):
+ if params['headers'].get('Accept', None) is None:
+ request_dict = params
+ request_dict['headers']['Accept'] = 'application/json'
+
+
+def add_glacier_checksums(params, **kwargs):
+ """Add glacier checksums to the http request.
+
+ This will add two headers to the http request:
+
+ * x-amz-content-sha256
+ * x-amz-sha256-tree-hash
+
+ These values will only be added if they are not present
+ in the HTTP request.
+
+ """
+ request_dict = params
+ headers = request_dict['headers']
+ body = request_dict['body']
+ if isinstance(body, six.binary_type):
+        # If the user provided a bytes type instead of a file-like
+        # object, we temporarily create a BytesIO object so we can
+        # use the util functions to calculate the checksums, which
+        # assume file-like objects. Note that we're not actually
+        # changing the body in the request_dict.
+ body = six.BytesIO(body)
+ starting_position = body.tell()
+ if 'x-amz-content-sha256' not in headers:
+ headers['x-amz-content-sha256'] = utils.calculate_sha256(
+ body, as_hex=True)
+ body.seek(starting_position)
+ if 'x-amz-sha256-tree-hash' not in headers:
+ headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
+ body.seek(starting_position)
+
+
+def document_glacier_tree_hash_checksum():
+ doc = '''
+ This is a required field.
+
+    Ideally you will want to compute this value with checksums from
+    previously uploaded parts, using the algorithm described in
+ `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_.
+
+ But if you prefer, you can also use botocore.utils.calculate_tree_hash()
+    to compute it from a raw file::
+
+ checksum = calculate_tree_hash(open('your_file.txt', 'rb'))
+
+ '''
+ return AppendParamDocumentation('checksum', doc).append_documentation
+
+
+def document_cloudformation_get_template_return_type(section, event_name, **kwargs):
+ if 'response-params' in event_name:
+ template_body_section = section.get_section('TemplateBody')
+ type_section = template_body_section.get_section('param-type')
+ type_section.clear_text()
+ type_section.write('(*dict*) --')
+ elif 'response-example' in event_name:
+ parent = section.get_section('structure-value')
+ param_line = parent.get_section('TemplateBody')
+ value_portion = param_line.get_section('member-value')
+ value_portion.clear_text()
+ value_portion.write('{}')
+
+
+def switch_host_machinelearning(request, **kwargs):
+ switch_host_with_param(request, 'PredictEndpoint')
+
+
+def check_openssl_supports_tls_version_1_2(**kwargs):
+ import ssl
+ try:
+ openssl_version_tuple = ssl.OPENSSL_VERSION_INFO
+ if openssl_version_tuple < (1, 0, 1):
+ warnings.warn(
+ 'Currently installed openssl version: %s does not '
+ 'support TLS 1.2, which is required for use of iot-data. '
+ 'Please use python installed with openssl version 1.0.1 or '
+ 'higher.' % (ssl.OPENSSL_VERSION),
+ UnsupportedTLSVersionWarning
+ )
+ # We cannot check the openssl version on python2.6, so we should just
+    # pass on this convenience check.
+ except AttributeError:
+ pass
+
+
+def change_get_to_post(request, **kwargs):
+ # This is useful when we need to change a potentially large GET request
+ # into a POST with x-www-form-urlencoded encoding.
+ if request.method == 'GET' and '?' in request.url:
+ request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
+ request.method = 'POST'
+ request.url, request.data = request.url.split('?', 1)
+
+
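+# A minimal sketch (illustrative, not part of botocore): the query string of
+# a large GET becomes the form-encoded body of a POST:
+#
+#   GET https://host/action?Param=1   ->   POST https://host/action
+#                                          body: 'Param=1'
+
+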
+def set_list_objects_encoding_type_url(params, context, **kwargs):
+ if 'EncodingType' not in params:
+ # We set this context so that we know it wasn't the customer that
+ # requested the encoding.
+ context['encoding_type_auto_set'] = True
+ params['EncodingType'] = 'url'
+
+
+def decode_list_object(parsed, context, **kwargs):
+ # This is needed because we are passing url as the encoding type. Since the
+ # paginator is based on the key, we need to handle it before it can be
+    # round-tripped.
+ #
+ # From the documentation: If you specify encoding-type request parameter,
+ # Amazon S3 includes this element in the response, and returns encoded key
+ # name values in the following response elements:
+ # Delimiter, Marker, Prefix, NextMarker, Key.
_decode_list_object(
top_level_keys=['Delimiter', 'Marker', 'NextMarker'],
nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
@@ -781,120 +781,120 @@ def decode_list_object_versions(parsed, context, **kwargs):
def _decode_list_object(top_level_keys, nested_keys, parsed, context):
- if parsed.get('EncodingType') == 'url' and \
- context.get('encoding_type_auto_set'):
- # URL decode top-level keys in the response if present.
- for key in top_level_keys:
- if key in parsed:
- parsed[key] = unquote_str(parsed[key])
- # URL decode nested keys from the response if present.
- for (top_key, child_key) in nested_keys:
- if top_key in parsed:
- for member in parsed[top_key]:
- member[child_key] = unquote_str(member[child_key])
-
-
-def convert_body_to_file_like_object(params, **kwargs):
- if 'Body' in params:
- if isinstance(params['Body'], six.string_types):
- params['Body'] = six.BytesIO(ensure_bytes(params['Body']))
- elif isinstance(params['Body'], six.binary_type):
- params['Body'] = six.BytesIO(params['Body'])
-
-
-def _add_parameter_aliases(handler_list):
- # Mapping of original parameter to parameter alias.
- # The key is <service>.<operation>.parameter
- # The first part of the key is used for event registration.
- # The last part is the original parameter name and the value is the
- # alias to expose in documentation.
- aliases = {
- 'ec2.*.Filter': 'Filters',
- 'logs.CreateExportTask.from': 'fromTime',
- 'cloudsearchdomain.Search.return': 'returnFields'
- }
-
- for original, new_name in aliases.items():
- event_portion, original_name = original.rsplit('.', 1)
- parameter_alias = ParameterAlias(original_name, new_name)
-
- # Add the handlers to the list of handlers.
- # One handler is to handle when users provide the alias.
- # The other handler is to update the documentation to show only
- # the alias.
- parameter_build_event_handler_tuple = (
- 'before-parameter-build.' + event_portion,
- parameter_alias.alias_parameter_in_call,
- REGISTER_FIRST
- )
- docs_event_handler_tuple = (
- 'docs.*.' + event_portion + '.complete-section',
- parameter_alias.alias_parameter_in_documentation)
- handler_list.append(parameter_build_event_handler_tuple)
- handler_list.append(docs_event_handler_tuple)
-
-
-class ParameterAlias(object):
- def __init__(self, original_name, alias_name):
- self._original_name = original_name
- self._alias_name = alias_name
-
- def alias_parameter_in_call(self, params, model, **kwargs):
- if model.input_shape:
- # Only consider accepting the alias if it is modeled in the
- # input shape.
- if self._original_name in model.input_shape.members:
- if self._alias_name in params:
- if self._original_name in params:
- raise AliasConflictParameterError(
- original=self._original_name,
- alias=self._alias_name,
- operation=model.name
- )
- # Remove the alias parameter value and use the old name
- # instead.
- params[self._original_name] = params.pop(self._alias_name)
-
- def alias_parameter_in_documentation(self, event_name, section, **kwargs):
- if event_name.startswith('docs.request-params'):
- if self._original_name not in section.available_sections:
- return
- # Replace the name for parameter type
- param_section = section.get_section(self._original_name)
- param_type_section = param_section.get_section('param-type')
- self._replace_content(param_type_section)
-
- # Replace the name for the parameter description
- param_name_section = param_section.get_section('param-name')
- self._replace_content(param_name_section)
- elif event_name.startswith('docs.request-example'):
- section = section.get_section('structure-value')
- if self._original_name not in section.available_sections:
- return
- # Replace the name for the example
- param_section = section.get_section(self._original_name)
- self._replace_content(param_section)
-
- def _replace_content(self, section):
- content = section.getvalue().decode('utf-8')
- updated_content = content.replace(
- self._original_name, self._alias_name)
- section.clear_text()
- section.write(updated_content)
-
-
-class ClientMethodAlias(object):
- def __init__(self, actual_name):
- """ Aliases a non-extant method to an existing method.
-
- :param actual_name: The name of the method that actually exists on
- the client.
- """
- self._actual = actual_name
-
- def __call__(self, client, **kwargs):
- return getattr(client, self._actual)
-
+ if parsed.get('EncodingType') == 'url' and \
+ context.get('encoding_type_auto_set'):
+ # URL decode top-level keys in the response if present.
+ for key in top_level_keys:
+ if key in parsed:
+ parsed[key] = unquote_str(parsed[key])
+ # URL decode nested keys from the response if present.
+ for (top_key, child_key) in nested_keys:
+ if top_key in parsed:
+ for member in parsed[top_key]:
+ member[child_key] = unquote_str(member[child_key])
+
+
+def convert_body_to_file_like_object(params, **kwargs):
+ if 'Body' in params:
+ if isinstance(params['Body'], six.string_types):
+ params['Body'] = six.BytesIO(ensure_bytes(params['Body']))
+ elif isinstance(params['Body'], six.binary_type):
+ params['Body'] = six.BytesIO(params['Body'])
+
+
+def _add_parameter_aliases(handler_list):
+ # Mapping of original parameter to parameter alias.
+ # The key is <service>.<operation>.parameter
+ # The first part of the key is used for event registration.
+ # The last part is the original parameter name and the value is the
+ # alias to expose in documentation.
+ aliases = {
+ 'ec2.*.Filter': 'Filters',
+ 'logs.CreateExportTask.from': 'fromTime',
+ 'cloudsearchdomain.Search.return': 'returnFields'
+ }
+
+ for original, new_name in aliases.items():
+ event_portion, original_name = original.rsplit('.', 1)
+ parameter_alias = ParameterAlias(original_name, new_name)
+
+ # Add the handlers to the list of handlers.
+ # One handler is to handle when users provide the alias.
+ # The other handler is to update the documentation to show only
+ # the alias.
+ parameter_build_event_handler_tuple = (
+ 'before-parameter-build.' + event_portion,
+ parameter_alias.alias_parameter_in_call,
+ REGISTER_FIRST
+ )
+ docs_event_handler_tuple = (
+ 'docs.*.' + event_portion + '.complete-section',
+ parameter_alias.alias_parameter_in_documentation)
+ handler_list.append(parameter_build_event_handler_tuple)
+ handler_list.append(docs_event_handler_tuple)
+
+
+class ParameterAlias(object):
+ def __init__(self, original_name, alias_name):
+ self._original_name = original_name
+ self._alias_name = alias_name
+
+ def alias_parameter_in_call(self, params, model, **kwargs):
+ if model.input_shape:
+ # Only consider accepting the alias if it is modeled in the
+ # input shape.
+ if self._original_name in model.input_shape.members:
+ if self._alias_name in params:
+ if self._original_name in params:
+ raise AliasConflictParameterError(
+ original=self._original_name,
+ alias=self._alias_name,
+ operation=model.name
+ )
+ # Remove the alias parameter value and use the old name
+ # instead.
+ params[self._original_name] = params.pop(self._alias_name)
+
+ def alias_parameter_in_documentation(self, event_name, section, **kwargs):
+ if event_name.startswith('docs.request-params'):
+ if self._original_name not in section.available_sections:
+ return
+ # Replace the name for parameter type
+ param_section = section.get_section(self._original_name)
+ param_type_section = param_section.get_section('param-type')
+ self._replace_content(param_type_section)
+
+ # Replace the name for the parameter description
+ param_name_section = param_section.get_section('param-name')
+ self._replace_content(param_name_section)
+ elif event_name.startswith('docs.request-example'):
+ section = section.get_section('structure-value')
+ if self._original_name not in section.available_sections:
+ return
+ # Replace the name for the example
+ param_section = section.get_section(self._original_name)
+ self._replace_content(param_section)
+
+ def _replace_content(self, section):
+ content = section.getvalue().decode('utf-8')
+ updated_content = content.replace(
+ self._original_name, self._alias_name)
+ section.clear_text()
+ section.write(updated_content)
+
+
+class ClientMethodAlias(object):
+ def __init__(self, actual_name):
+ """ Aliases a non-extant method to an existing method.
+
+ :param actual_name: The name of the method that actually exists on
+ the client.
+ """
+ self._actual = actual_name
+
+ def __call__(self, client, **kwargs):
+ return getattr(client, self._actual)
+
# TODO: Remove this class as it is no longer used
class HeaderToHostHoister(object):
@@ -956,138 +956,138 @@ def remove_lex_v2_start_conversation(class_attributes, **kwargs):
del class_attributes['start_conversation']
-# This is a list of (event_name, handler).
-# When a Session is created, everything in this list will be
-# automatically registered with that Session.
-
-BUILTIN_HANDLERS = [
+# This is a list of (event_name, handler).
+# When a Session is created, everything in this list will be
+# automatically registered with that Session.
+
+BUILTIN_HANDLERS = [
('choose-service-name', handle_service_name_alias),
- ('getattr.mturk.list_hi_ts_for_qualification_type',
- ClientMethodAlias('list_hits_for_qualification_type')),
- ('before-parameter-build.s3.UploadPart',
- convert_body_to_file_like_object, REGISTER_LAST),
- ('before-parameter-build.s3.PutObject',
- convert_body_to_file_like_object, REGISTER_LAST),
- ('creating-client-class', add_generate_presigned_url),
- ('creating-client-class.s3', add_generate_presigned_post),
- ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2),
+ ('getattr.mturk.list_hi_ts_for_qualification_type',
+ ClientMethodAlias('list_hits_for_qualification_type')),
+ ('before-parameter-build.s3.UploadPart',
+ convert_body_to_file_like_object, REGISTER_LAST),
+ ('before-parameter-build.s3.PutObject',
+ convert_body_to_file_like_object, REGISTER_LAST),
+ ('creating-client-class', add_generate_presigned_url),
+ ('creating-client-class.s3', add_generate_presigned_post),
+ ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2),
('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation),
- ('after-call.iam', json_decode_policies),
-
- ('after-call.ec2.GetConsoleOutput', decode_console_output),
- ('after-call.cloudformation.GetTemplate', json_decode_template_body),
- ('after-call.s3.GetBucketLocation', parse_get_bucket_location),
-
- ('before-parameter-build', generate_idempotent_uuid),
-
- ('before-parameter-build.s3', validate_bucket_name),
-
- ('before-parameter-build.s3.ListObjects',
- set_list_objects_encoding_type_url),
+ ('after-call.iam', json_decode_policies),
+
+ ('after-call.ec2.GetConsoleOutput', decode_console_output),
+ ('after-call.cloudformation.GetTemplate', json_decode_template_body),
+ ('after-call.s3.GetBucketLocation', parse_get_bucket_location),
+
+ ('before-parameter-build', generate_idempotent_uuid),
+
+ ('before-parameter-build.s3', validate_bucket_name),
+
+ ('before-parameter-build.s3.ListObjects',
+ set_list_objects_encoding_type_url),
('before-parameter-build.s3.ListObjectsV2',
set_list_objects_encoding_type_url),
('before-parameter-build.s3.ListObjectVersions',
set_list_objects_encoding_type_url),
- ('before-parameter-build.s3.CopyObject',
- handle_copy_source_param),
- ('before-parameter-build.s3.UploadPartCopy',
- handle_copy_source_param),
- ('before-parameter-build.s3.CopyObject', validate_ascii_metadata),
- ('before-parameter-build.s3.PutObject', validate_ascii_metadata),
- ('before-parameter-build.s3.CreateMultipartUpload',
- validate_ascii_metadata),
- ('docs.*.s3.CopyObject.complete-section', document_copy_source_form),
- ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form),
-
- ('before-call.s3', add_expect_header),
- ('before-call.glacier', add_glacier_version),
- ('before-call.apigateway', add_accept_header),
+ ('before-parameter-build.s3.CopyObject',
+ handle_copy_source_param),
+ ('before-parameter-build.s3.UploadPartCopy',
+ handle_copy_source_param),
+ ('before-parameter-build.s3.CopyObject', validate_ascii_metadata),
+ ('before-parameter-build.s3.PutObject', validate_ascii_metadata),
+ ('before-parameter-build.s3.CreateMultipartUpload',
+ validate_ascii_metadata),
+ ('docs.*.s3.CopyObject.complete-section', document_copy_source_form),
+ ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form),
+
+ ('before-call.s3', add_expect_header),
+ ('before-call.glacier', add_glacier_version),
+ ('before-call.apigateway', add_accept_header),
('before-call.s3.PutObject', conditionally_calculate_md5),
('before-call.s3.UploadPart', conditionally_calculate_md5),
('before-call.s3.DeleteObjects', escape_xml_payload),
('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload),
- ('before-call.glacier.UploadArchive', add_glacier_checksums),
- ('before-call.glacier.UploadMultipartPart', add_glacier_checksums),
- ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2),
- ('request-created.machinelearning.Predict', switch_host_machinelearning),
- ('needs-retry.s3.UploadPartCopy', check_for_200_error, REGISTER_FIRST),
- ('needs-retry.s3.CopyObject', check_for_200_error, REGISTER_FIRST),
- ('needs-retry.s3.CompleteMultipartUpload', check_for_200_error,
- REGISTER_FIRST),
- ('choose-signer.cognito-identity.GetId', disable_signing),
- ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
- ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
- ('choose-signer.cognito-identity.GetCredentialsForIdentity',
- disable_signing),
- ('choose-signer.sts.AssumeRoleWithSAML', disable_signing),
- ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing),
- ('choose-signer', set_operation_specific_signer),
- ('before-parameter-build.s3.HeadObject', sse_md5),
- ('before-parameter-build.s3.GetObject', sse_md5),
- ('before-parameter-build.s3.PutObject', sse_md5),
- ('before-parameter-build.s3.CopyObject', sse_md5),
- ('before-parameter-build.s3.CopyObject', copy_source_sse_md5),
- ('before-parameter-build.s3.CreateMultipartUpload', sse_md5),
- ('before-parameter-build.s3.UploadPart', sse_md5),
- ('before-parameter-build.s3.UploadPartCopy', sse_md5),
- ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5),
- ('before-parameter-build.ec2.RunInstances', base64_encode_user_data),
- ('before-parameter-build.autoscaling.CreateLaunchConfiguration',
- base64_encode_user_data),
- ('before-parameter-build.route53', fix_route53_ids),
- ('before-parameter-build.glacier', inject_account_id),
- ('after-call.s3.ListObjects', decode_list_object),
+ ('before-call.glacier.UploadArchive', add_glacier_checksums),
+ ('before-call.glacier.UploadMultipartPart', add_glacier_checksums),
+ ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2),
+ ('request-created.machinelearning.Predict', switch_host_machinelearning),
+ ('needs-retry.s3.UploadPartCopy', check_for_200_error, REGISTER_FIRST),
+ ('needs-retry.s3.CopyObject', check_for_200_error, REGISTER_FIRST),
+ ('needs-retry.s3.CompleteMultipartUpload', check_for_200_error,
+ REGISTER_FIRST),
+ ('choose-signer.cognito-identity.GetId', disable_signing),
+ ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
+ ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
+ ('choose-signer.cognito-identity.GetCredentialsForIdentity',
+ disable_signing),
+ ('choose-signer.sts.AssumeRoleWithSAML', disable_signing),
+ ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing),
+ ('choose-signer', set_operation_specific_signer),
+ ('before-parameter-build.s3.HeadObject', sse_md5),
+ ('before-parameter-build.s3.GetObject', sse_md5),
+ ('before-parameter-build.s3.PutObject', sse_md5),
+ ('before-parameter-build.s3.CopyObject', sse_md5),
+ ('before-parameter-build.s3.CopyObject', copy_source_sse_md5),
+ ('before-parameter-build.s3.CreateMultipartUpload', sse_md5),
+ ('before-parameter-build.s3.UploadPart', sse_md5),
+ ('before-parameter-build.s3.UploadPartCopy', sse_md5),
+ ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5),
+ ('before-parameter-build.ec2.RunInstances', base64_encode_user_data),
+ ('before-parameter-build.autoscaling.CreateLaunchConfiguration',
+ base64_encode_user_data),
+ ('before-parameter-build.route53', fix_route53_ids),
+ ('before-parameter-build.glacier', inject_account_id),
+ ('after-call.s3.ListObjects', decode_list_object),
('after-call.s3.ListObjectsV2', decode_list_object_v2),
('after-call.s3.ListObjectsV1Ext', decode_list_object_v1ext),
('after-call.s3.ListObjectVersions', decode_list_object_versions),
-
- # Cloudsearchdomain search operation will be sent by HTTP POST
- ('request-created.cloudsearchdomain.Search',
- change_get_to_post),
- # Glacier documentation customizations
- ('docs.*.glacier.*.complete-section',
- AutoPopulatedParam('accountId', 'Note: this parameter is set to "-" by '
- 'default if no value is specified.')
- .document_auto_populated_param),
- ('docs.*.glacier.UploadArchive.complete-section',
- AutoPopulatedParam('checksum').document_auto_populated_param),
- ('docs.*.glacier.UploadMultipartPart.complete-section',
- AutoPopulatedParam('checksum').document_auto_populated_param),
- ('docs.request-params.glacier.CompleteMultipartUpload.complete-section',
- document_glacier_tree_hash_checksum()),
- # Cloudformation documentation customizations
- ('docs.*.cloudformation.GetTemplate.complete-section',
- document_cloudformation_get_template_return_type),
-
- # UserData base64 encoding documentation customizations
- ('docs.*.ec2.RunInstances.complete-section',
- document_base64_encoding('UserData')),
- ('docs.*.autoscaling.CreateLaunchConfiguration.complete-section',
- document_base64_encoding('UserData')),
-
- # EC2 CopySnapshot documentation customizations
- ('docs.*.ec2.CopySnapshot.complete-section',
- AutoPopulatedParam('PresignedUrl').document_auto_populated_param),
- ('docs.*.ec2.CopySnapshot.complete-section',
- AutoPopulatedParam('DestinationRegion').document_auto_populated_param),
- # S3 SSE documentation modifications
- ('docs.*.s3.*.complete-section',
- AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param),
- # S3 SSE Copy Source documentation modifications
- ('docs.*.s3.*.complete-section',
- AutoPopulatedParam(
- 'CopySourceSSECustomerKeyMD5').document_auto_populated_param),
- # Add base64 information to Lambda
- ('docs.*.lambda.UpdateFunctionCode.complete-section',
- document_base64_encoding('ZipFile')),
- # The following S3 operations cannot actually accept a ContentMD5
- ('docs.*.s3.*.complete-section',
- HideParamFromOperations(
- 's3', 'ContentMD5',
- ['DeleteObjects', 'PutBucketAcl', 'PutBucketCors',
- 'PutBucketLifecycle', 'PutBucketLogging', 'PutBucketNotification',
- 'PutBucketPolicy', 'PutBucketReplication', 'PutBucketRequestPayment',
- 'PutBucketTagging', 'PutBucketVersioning', 'PutBucketWebsite',
+
+ # Cloudsearchdomain search operation will be sent by HTTP POST
+ ('request-created.cloudsearchdomain.Search',
+ change_get_to_post),
+ # Glacier documentation customizations
+ ('docs.*.glacier.*.complete-section',
+ AutoPopulatedParam('accountId', 'Note: this parameter is set to "-" by '
+ 'default if no value is specified.')
+ .document_auto_populated_param),
+ ('docs.*.glacier.UploadArchive.complete-section',
+ AutoPopulatedParam('checksum').document_auto_populated_param),
+ ('docs.*.glacier.UploadMultipartPart.complete-section',
+ AutoPopulatedParam('checksum').document_auto_populated_param),
+ ('docs.request-params.glacier.CompleteMultipartUpload.complete-section',
+ document_glacier_tree_hash_checksum()),
+ # Cloudformation documentation customizations
+ ('docs.*.cloudformation.GetTemplate.complete-section',
+ document_cloudformation_get_template_return_type),
+
+ # UserData base64 encoding documentation customizations
+ ('docs.*.ec2.RunInstances.complete-section',
+ document_base64_encoding('UserData')),
+ ('docs.*.autoscaling.CreateLaunchConfiguration.complete-section',
+ document_base64_encoding('UserData')),
+
+ # EC2 CopySnapshot documentation customizations
+ ('docs.*.ec2.CopySnapshot.complete-section',
+ AutoPopulatedParam('PresignedUrl').document_auto_populated_param),
+ ('docs.*.ec2.CopySnapshot.complete-section',
+ AutoPopulatedParam('DestinationRegion').document_auto_populated_param),
+ # S3 SSE documentation modifications
+ ('docs.*.s3.*.complete-section',
+ AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param),
+ # S3 SSE Copy Source documentation modifications
+ ('docs.*.s3.*.complete-section',
+ AutoPopulatedParam(
+ 'CopySourceSSECustomerKeyMD5').document_auto_populated_param),
+ # Add base64 information to Lambda
+ ('docs.*.lambda.UpdateFunctionCode.complete-section',
+ document_base64_encoding('ZipFile')),
+ # The following S3 operations cannot actually accept a ContentMD5
+ ('docs.*.s3.*.complete-section',
+ HideParamFromOperations(
+ 's3', 'ContentMD5',
+ ['DeleteObjects', 'PutBucketAcl', 'PutBucketCors',
+ 'PutBucketLifecycle', 'PutBucketLogging', 'PutBucketNotification',
+ 'PutBucketPolicy', 'PutBucketReplication', 'PutBucketRequestPayment',
+ 'PutBucketTagging', 'PutBucketVersioning', 'PutBucketWebsite',
'PutObjectAcl']).hide_param),
#############
@@ -1154,5 +1154,5 @@ BUILTIN_HANDLERS = [
'pinpoint-sms-voice').inject_deprecation_notice),
('before-call', inject_api_version_header_if_needed),
-]
-_add_parameter_aliases(BUILTIN_HANDLERS)
+]
+_add_parameter_aliases(BUILTIN_HANDLERS)
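
A self-contained sketch of the aliasing machinery above; ``_FakeShape`` and ``_FakeModel`` are hypothetical stand-ins for botocore's operation model, not real botocore classes::

    class _FakeShape(object):
        members = {'Filter': None}

    class _FakeModel(object):
        name = 'DescribeInstances'
        input_shape = _FakeShape()

    alias = ParameterAlias('Filter', 'Filters')
    params = {'Filters': [{'Name': 'instance-state-name',
                           'Values': ['running']}]}
    alias.alias_parameter_in_call(params, _FakeModel())
    assert 'Filter' in params and 'Filters' not in params

Callers thus use the documented plural name ``Filters`` while the request is built with the modeled name ``Filter``.
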
diff --git a/contrib/python/botocore/botocore/history.py b/contrib/python/botocore/botocore/history.py
index 991ba1ab6e..f4c72dbe46 100644
--- a/contrib/python/botocore/botocore/history.py
+++ b/contrib/python/botocore/botocore/history.py
@@ -1,43 +1,43 @@
-# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import logging
-
-
-HISTORY_RECORDER = None
-logger = logging.getLogger(__name__)
-
-
-class BaseHistoryHandler(object):
- def emit(self, event_type, payload, source):
- raise NotImplementedError('emit()')
-
-
-class HistoryRecorder(object):
- def __init__(self):
- self._enabled = False
- self._handlers = []
-
- def enable(self):
- self._enabled = True
-
- def disable(self):
- self._enabled = False
-
- def add_handler(self, handler):
- self._handlers.append(handler)
-
- def record(self, event_type, payload, source='BOTOCORE'):
- if self._enabled and self._handlers:
+# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import logging
+
+
+HISTORY_RECORDER = None
+logger = logging.getLogger(__name__)
+
+
+class BaseHistoryHandler(object):
+ def emit(self, event_type, payload, source):
+ raise NotImplementedError('emit()')
+
+
+class HistoryRecorder(object):
+ def __init__(self):
+ self._enabled = False
+ self._handlers = []
+
+ def enable(self):
+ self._enabled = True
+
+ def disable(self):
+ self._enabled = False
+
+ def add_handler(self, handler):
+ self._handlers.append(handler)
+
+ def record(self, event_type, payload, source='BOTOCORE'):
+ if self._enabled and self._handlers:
for handler in self._handlers:
try:
handler.emit(event_type, payload, source)
@@ -46,10 +46,10 @@ class HistoryRecorder(object):
# a record collection handler.
logger.debug("Exception raised in %s.", handler,
exc_info=True)
-
-
-def get_global_history_recorder():
- global HISTORY_RECORDER
- if HISTORY_RECORDER is None:
- HISTORY_RECORDER = HistoryRecorder()
- return HISTORY_RECORDER
+
+
+def get_global_history_recorder():
+ global HISTORY_RECORDER
+ if HISTORY_RECORDER is None:
+ HISTORY_RECORDER = HistoryRecorder()
+ return HISTORY_RECORDER
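
A short sketch of wiring a custom handler into the recorder defined above; ``PrintingHandler`` is illustrative, not part of botocore::

    class PrintingHandler(BaseHistoryHandler):
        def emit(self, event_type, payload, source):
            print(event_type, payload, source)

    recorder = get_global_history_recorder()
    recorder.add_handler(PrintingHandler())
    recorder.enable()
    recorder.record('API_CALL', {'operation': 'ListBuckets'})  # emitted
    recorder.disable()
    recorder.record('API_CALL', {'operation': 'ListBuckets'})  # dropped
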
diff --git a/contrib/python/botocore/botocore/hooks.py b/contrib/python/botocore/botocore/hooks.py
index 1df17d0a40..a5a297dfea 100644
--- a/contrib/python/botocore/botocore/hooks.py
+++ b/contrib/python/botocore/botocore/hooks.py
@@ -1,349 +1,349 @@
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import copy
-import logging
-from collections import defaultdict, deque, namedtuple
-from botocore.compat import accepts_kwargs, six
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy
+import logging
+from collections import defaultdict, deque, namedtuple
+from botocore.compat import accepts_kwargs, six
from botocore.utils import EVENT_ALIASES
-
-logger = logging.getLogger(__name__)
-
-
-_NodeList = namedtuple('NodeList', ['first', 'middle', 'last'])
-_FIRST = 0
-_MIDDLE = 1
-_LAST = 2
-
-
-class NodeList(_NodeList):
-
- def __copy__(self):
- first_copy = copy.copy(self.first)
- middle_copy = copy.copy(self.middle)
- last_copy = copy.copy(self.last)
- copied = NodeList(first_copy, middle_copy, last_copy)
- return copied
-
-
-def first_non_none_response(responses, default=None):
- """Find first non None response in a list of tuples.
-
- This function can be used to find the first non None response from
- handlers connected to an event. This is useful if you are interested
- in the returned responses from event handlers. Example usage::
-
- print(first_non_none_response([(func1, None), (func2, 'foo'),
- (func3, 'bar')]))
- # This will print 'foo'
-
- :type responses: list of tuples
- :param responses: The responses from the ``EventHooks.emit`` method.
- This is a list of tuples, and each tuple is
- (handler, handler_response).
-
- :param default: If no non-None responses are found, then this default
- value will be returned.
-
- :return: The first non-None response in the list of tuples.
-
- """
- for response in responses:
- if response[1] is not None:
- return response[1]
- return default
-
-
-class BaseEventHooks(object):
- def emit(self, event_name, **kwargs):
- """Call all handlers subscribed to an event.
-
- :type event_name: str
- :param event_name: The name of the event to emit.
-
- :type **kwargs: dict
- :param **kwargs: Arbitrary kwargs to pass through to the
- subscribed handlers. The ``event_name`` will be injected
- into the kwargs so it's not necessary to add this to **kwargs.
-
- :rtype: list of tuples
- :return: A list of ``(handler_func, handler_func_return_value)``
-
- """
- return []
-
- def register(self, event_name, handler, unique_id=None,
- unique_id_uses_count=False):
- """Register an event handler for a given event.
-
- If a ``unique_id`` is given, the handler will not be registered
- if a handler with the ``unique_id`` has already been registered.
-
- Handlers are called in the order they have been registered.
- Note handlers can also be registered with ``register_first()``
- and ``register_last()``. All handlers registered with
- ``register_first()`` are called before handlers registered
- with ``register()`` which are called before handlers registered
- with ``register_last()``.
-
- """
- self._verify_and_register(event_name, handler, unique_id,
- register_method=self._register,
- unique_id_uses_count=unique_id_uses_count)
-
- def register_first(self, event_name, handler, unique_id=None,
- unique_id_uses_count=False):
- """Register an event handler to be called first for an event.
-
- All event handlers registered with ``register_first()`` will
- be called before handlers registered with ``register()`` and
- ``register_last()``.
-
- """
- self._verify_and_register(event_name, handler, unique_id,
- register_method=self._register_first,
- unique_id_uses_count=unique_id_uses_count)
-
- def register_last(self, event_name, handler, unique_id=None,
- unique_id_uses_count=False):
- """Register an event handler to be called last for an event.
-
- All event handlers registered with ``register_last()`` will be called
- after handlers registered with ``register_first()`` and ``register()``.
-
- """
- self._verify_and_register(event_name, handler, unique_id,
- register_method=self._register_last,
- unique_id_uses_count=unique_id_uses_count)
-
- def _verify_and_register(self, event_name, handler, unique_id,
- register_method, unique_id_uses_count):
- self._verify_is_callable(handler)
- self._verify_accept_kwargs(handler)
- register_method(event_name, handler, unique_id, unique_id_uses_count)
-
- def unregister(self, event_name, handler=None, unique_id=None,
- unique_id_uses_count=False):
- """Unregister an event handler for a given event.
-
- If no ``unique_id`` was given during registration, then the
- first instance of the event handler is removed (if the event
- handler has been registered multiple times).
-
- """
- pass
-
- def _verify_is_callable(self, func):
- if not six.callable(func):
- raise ValueError("Event handler %s must be callable." % func)
-
- def _verify_accept_kwargs(self, func):
- """Verifies a callable accepts kwargs
-
- :type func: callable
- :param func: A callable object.
-
- :returns: True, if ``func`` accepts kwargs, otherwise False.
-
- """
- try:
- if not accepts_kwargs(func):
- raise ValueError("Event handler %s must accept keyword "
- "arguments (**kwargs)" % func)
- except TypeError:
- return False
-
-
-class HierarchicalEmitter(BaseEventHooks):
- def __init__(self):
- # We keep a reference to the handlers for quick
- # read only access (we never modify self._handlers).
- # A cache of event name to handler list.
- self._lookup_cache = {}
- self._handlers = _PrefixTrie()
- # This is used to ensure that unique_id's are only
- # registered once.
- self._unique_id_handlers = {}
-
- def _emit(self, event_name, kwargs, stop_on_response=False):
- """
- Emit an event with optional keyword arguments.
-
- :type event_name: string
- :param event_name: Name of the event
- :type kwargs: dict
- :param kwargs: Arguments to be passed to the handler functions.
- :type stop_on_response: boolean
- :param stop_on_response: Whether to stop on the first non-None
- response. If False, then all handlers
- will be called. This is especially useful
- to handlers which mutate data and then
- want to stop propagation of the event.
- :rtype: list
- :return: List of (handler, response) tuples from all processed
- handlers.
- """
- # Invoke the event handlers from most specific
- # to least specific, each time stripping off a dot.
- handlers_to_call = self._lookup_cache.get(event_name)
- if handlers_to_call is None:
- handlers_to_call = self._handlers.prefix_search(event_name)
- self._lookup_cache[event_name] = handlers_to_call
- elif not handlers_to_call:
- # Short circuit and return an empty response if we have
- # no handlers to call. This is the common case where
- # for the majority of signals, nothing is listening.
- return []
- kwargs['event_name'] = event_name
- responses = []
- for handler in handlers_to_call:
- logger.debug('Event %s: calling handler %s', event_name, handler)
- response = handler(**kwargs)
- responses.append((handler, response))
- if stop_on_response and response is not None:
- return responses
- return responses
-
- def emit(self, event_name, **kwargs):
- """
- Emit an event by name with arguments passed as keyword args.
-
- >>> responses = emitter.emit(
- ... 'my-event.service.operation', arg1='one', arg2='two')
-
- :rtype: list
- :return: List of (handler, response) tuples from all processed
- handlers.
- """
- return self._emit(event_name, kwargs)
-
- def emit_until_response(self, event_name, **kwargs):
- """
- Emit an event by name with arguments passed as keyword args,
- until the first non-``None`` response is received. This
- method prevents subsequent handlers from being invoked.
-
- >>> handler, response = emitter.emit_until_response(
- 'my-event.service.operation', arg1='one', arg2='two')
-
- :rtype: tuple
- :return: The first (handler, response) tuple where the response
- is not ``None``, otherwise (``None``, ``None``).
- """
- responses = self._emit(event_name, kwargs, stop_on_response=True)
- if responses:
- return responses[-1]
- else:
- return (None, None)
-
- def _register(self, event_name, handler, unique_id=None,
- unique_id_uses_count=False):
- self._register_section(event_name, handler, unique_id,
- unique_id_uses_count, section=_MIDDLE)
-
- def _register_first(self, event_name, handler, unique_id=None,
- unique_id_uses_count=False):
- self._register_section(event_name, handler, unique_id,
- unique_id_uses_count, section=_FIRST)
-
- def _register_last(self, event_name, handler, unique_id,
- unique_id_uses_count=False):
- self._register_section(event_name, handler, unique_id,
- unique_id_uses_count, section=_LAST)
-
- def _register_section(self, event_name, handler, unique_id,
- unique_id_uses_count, section):
- if unique_id is not None:
- if unique_id in self._unique_id_handlers:
- # We've already registered a handler using this unique_id
- # so we don't need to register it again.
- count = self._unique_id_handlers[unique_id].get('count', None)
- if unique_id_uses_count:
- if not count:
- raise ValueError(
- "Initial registration of unique id %s was "
- "specified to use a counter. Subsequent register "
- "calls to unique id must specify use of a counter "
- "as well." % unique_id)
- else:
- self._unique_id_handlers[unique_id]['count'] += 1
- else:
- if count:
- raise ValueError(
- "Initial registration of unique id %s was "
- "specified to not use a counter. Subsequent "
- "register calls to unique id must specify not to "
- "use a counter as well." % unique_id)
- return
- else:
- # Note that the trie knows nothing about the unique
- # id. We track uniqueness in this class via the
- # _unique_id_handlers.
- self._handlers.append_item(event_name, handler,
- section=section)
- unique_id_handler_item = {'handler': handler}
- if unique_id_uses_count:
- unique_id_handler_item['count'] = 1
- self._unique_id_handlers[unique_id] = unique_id_handler_item
- else:
- self._handlers.append_item(event_name, handler, section=section)
- # Super simple caching strategy for now: if the registrations change,
- # clear the cache. This leaves the opportunity for smarter invalidations.
- self._lookup_cache = {}
-
- def unregister(self, event_name, handler=None, unique_id=None,
- unique_id_uses_count=False):
- if unique_id is not None:
- try:
- count = self._unique_id_handlers[unique_id].get('count', None)
- except KeyError:
- # There's no handler matching that unique_id so we have
- # nothing to unregister.
- return
- if unique_id_uses_count:
- if count is None:
- raise ValueError(
- "Initial registration of unique id %s was specified to "
- "use a counter. Subsequent unregister calls to unique "
- "id must specify use of a counter as well." % unique_id)
- elif count == 1:
- handler = self._unique_id_handlers.pop(unique_id)['handler']
- else:
- self._unique_id_handlers[unique_id]['count'] -= 1
- return
- else:
- if count:
- raise ValueError(
- "Initial registration of unique id %s was specified "
- "to not use a counter. Subsequent unregister calls "
- "to unique id must specify not to use a counter as "
- "well." % unique_id)
- handler = self._unique_id_handlers.pop(unique_id)['handler']
- try:
- self._handlers.remove_item(event_name, handler)
- self._lookup_cache = {}
- except ValueError:
- pass
-
- def __copy__(self):
- new_instance = self.__class__()
- new_state = self.__dict__.copy()
- new_state['_handlers'] = copy.copy(self._handlers)
- new_state['_unique_id_handlers'] = copy.copy(self._unique_id_handlers)
- new_instance.__dict__ = new_state
- return new_instance
-
-
+
+logger = logging.getLogger(__name__)
+
+
+_NodeList = namedtuple('NodeList', ['first', 'middle', 'last'])
+_FIRST = 0
+_MIDDLE = 1
+_LAST = 2
+
+
+class NodeList(_NodeList):
+
+ def __copy__(self):
+ first_copy = copy.copy(self.first)
+ middle_copy = copy.copy(self.middle)
+ last_copy = copy.copy(self.last)
+ copied = NodeList(first_copy, middle_copy, last_copy)
+ return copied
+
+
+def first_non_none_response(responses, default=None):
+ """Find first non None response in a list of tuples.
+
+ This function can be used to find the first non None response from
+ handlers connected to an event. This is useful if you are interested
+ in the returned responses from event handlers. Example usage::
+
+ print(first_non_none_response([(func1, None), (func2, 'foo'),
+ (func3, 'bar')]))
+ # This will print 'foo'
+
+ :type responses: list of tuples
+ :param responses: The responses from the ``EventHooks.emit`` method.
+ This is a list of tuples, and each tuple is
+ (handler, handler_response).
+
+ :param default: If no non-None responses are found, then this default
+ value will be returned.
+
+ :return: The first non-None response in the list of tuples.
+
+ """
+ for response in responses:
+ if response[1] is not None:
+ return response[1]
+ return default
+
+
+class BaseEventHooks(object):
+ def emit(self, event_name, **kwargs):
+ """Call all handlers subscribed to an event.
+
+ :type event_name: str
+ :param event_name: The name of the event to emit.
+
+ :type **kwargs: dict
+ :param **kwargs: Arbitrary kwargs to pass through to the
+ subscribed handlers. The ``event_name`` will be injected
+ into the kwargs so it's not necessary to add this to **kwargs.
+
+ :rtype: list of tuples
+ :return: A list of ``(handler_func, handler_func_return_value)``
+
+ """
+ return []
+
+ def register(self, event_name, handler, unique_id=None,
+ unique_id_uses_count=False):
+ """Register an event handler for a given event.
+
+ If a ``unique_id`` is given, the handler will not be registered
+ if a handler with the ``unique_id`` has already been registered.
+
+ Handlers are called in the order they have been registered.
+ Note handlers can also be registered with ``register_first()``
+ and ``register_last()``. All handlers registered with
+ ``register_first()`` are called before handlers registered
+ with ``register()`` which are called before handlers registered
+ with ``register_last()``.
+
+ """
+ self._verify_and_register(event_name, handler, unique_id,
+ register_method=self._register,
+ unique_id_uses_count=unique_id_uses_count)
+
+ def register_first(self, event_name, handler, unique_id=None,
+ unique_id_uses_count=False):
+ """Register an event handler to be called first for an event.
+
+ All event handlers registered with ``register_first()`` will
+ be called before handlers registered with ``register()`` and
+ ``register_last()``.
+
+ """
+ self._verify_and_register(event_name, handler, unique_id,
+ register_method=self._register_first,
+ unique_id_uses_count=unique_id_uses_count)
+
+ def register_last(self, event_name, handler, unique_id=None,
+ unique_id_uses_count=False):
+ """Register an event handler to be called last for an event.
+
+ All event handlers registered with ``register_last()`` will be called
+ after handlers registered with ``register_first()`` and ``register()``.
+
+ """
+ self._verify_and_register(event_name, handler, unique_id,
+ register_method=self._register_last,
+ unique_id_uses_count=unique_id_uses_count)
+
+ def _verify_and_register(self, event_name, handler, unique_id,
+ register_method, unique_id_uses_count):
+ self._verify_is_callable(handler)
+ self._verify_accept_kwargs(handler)
+ register_method(event_name, handler, unique_id, unique_id_uses_count)
+
+ def unregister(self, event_name, handler=None, unique_id=None,
+ unique_id_uses_count=False):
+ """Unregister an event handler for a given event.
+
+ If no ``unique_id`` was given during registration, then the
+ first instance of the event handler is removed (if the event
+ handler has been registered multiple times).
+
+ """
+ pass
+
+ def _verify_is_callable(self, func):
+ if not six.callable(func):
+ raise ValueError("Event handler %s must be callable." % func)
+
+ def _verify_accept_kwargs(self, func):
+ """Verifies a callable accepts kwargs
+
+ :type func: callable
+ :param func: A callable object.
+
+ :returns: True, if ``func`` accepts kwargs, otherwise False.
+
+ """
+ try:
+ if not accepts_kwargs(func):
+ raise ValueError("Event handler %s must accept keyword "
+ "arguments (**kwargs)" % func)
+ except TypeError:
+ return False
+
+
+class HierarchicalEmitter(BaseEventHooks):
+ def __init__(self):
+ # We keep a reference to the handlers for quick
+ # read only access (we never modify self._handlers).
+ # A cache of event name to handler list.
+ self._lookup_cache = {}
+ self._handlers = _PrefixTrie()
+ # This is used to ensure that unique_id's are only
+ # registered once.
+ self._unique_id_handlers = {}
+
+ def _emit(self, event_name, kwargs, stop_on_response=False):
+ """
+ Emit an event with optional keyword arguments.
+
+ :type event_name: string
+ :param event_name: Name of the event
+ :type kwargs: dict
+ :param kwargs: Arguments to be passed to the handler functions.
+ :type stop_on_response: boolean
+ :param stop_on_response: Whether to stop on the first non-None
+ response. If False, then all handlers
+ will be called. This is especially useful
+ to handlers which mutate data and then
+ want to stop propagation of the event.
+ :rtype: list
+ :return: List of (handler, response) tuples from all processed
+ handlers.
+ """
+ # Invoke the event handlers from most specific
+ # to least specific, each time stripping off a dot.
+ handlers_to_call = self._lookup_cache.get(event_name)
+ if handlers_to_call is None:
+ handlers_to_call = self._handlers.prefix_search(event_name)
+ self._lookup_cache[event_name] = handlers_to_call
+ elif not handlers_to_call:
+ # Short circuit and return an empty response if we have
+ # no handlers to call. This is the common case where
+ # for the majority of signals, nothing is listening.
+ return []
+ kwargs['event_name'] = event_name
+ responses = []
+ for handler in handlers_to_call:
+ logger.debug('Event %s: calling handler %s', event_name, handler)
+ response = handler(**kwargs)
+ responses.append((handler, response))
+ if stop_on_response and response is not None:
+ return responses
+ return responses
+
+ def emit(self, event_name, **kwargs):
+ """
+ Emit an event by name with arguments passed as keyword args.
+
+ >>> responses = emitter.emit(
+ ... 'my-event.service.operation', arg1='one', arg2='two')
+
+ :rtype: list
+ :return: List of (handler, response) tuples from all processed
+ handlers.
+ """
+ return self._emit(event_name, kwargs)
+
+ def emit_until_response(self, event_name, **kwargs):
+ """
+ Emit an event by name with arguments passed as keyword args,
+ until the first non-``None`` response is received. This
+ method prevents subsequent handlers from being invoked.
+
+ >>> handler, response = emitter.emit_until_response(
+ 'my-event.service.operation', arg1='one', arg2='two')
+
+ :rtype: tuple
+ :return: The first (handler, response) tuple where the response
+ is not ``None``, otherwise (``None``, ``None``).
+ """
+ responses = self._emit(event_name, kwargs, stop_on_response=True)
+ if responses:
+ return responses[-1]
+ else:
+ return (None, None)
+
+ def _register(self, event_name, handler, unique_id=None,
+ unique_id_uses_count=False):
+ self._register_section(event_name, handler, unique_id,
+ unique_id_uses_count, section=_MIDDLE)
+
+ def _register_first(self, event_name, handler, unique_id=None,
+ unique_id_uses_count=False):
+ self._register_section(event_name, handler, unique_id,
+ unique_id_uses_count, section=_FIRST)
+
+ def _register_last(self, event_name, handler, unique_id,
+ unique_id_uses_count=False):
+ self._register_section(event_name, handler, unique_id,
+ unique_id_uses_count, section=_LAST)
+
+ def _register_section(self, event_name, handler, unique_id,
+ unique_id_uses_count, section):
+ if unique_id is not None:
+ if unique_id in self._unique_id_handlers:
+ # We've already registered a handler using this unique_id
+ # so we don't need to register it again.
+ count = self._unique_id_handlers[unique_id].get('count', None)
+ if unique_id_uses_count:
+ if not count:
+ raise ValueError(
+ "Initial registration of unique id %s was "
+ "specified to use a counter. Subsequent register "
+ "calls to unique id must specify use of a counter "
+ "as well." % unique_id)
+ else:
+ self._unique_id_handlers[unique_id]['count'] += 1
+ else:
+ if count:
+ raise ValueError(
+ "Initial registration of unique id %s was "
+ "specified to not use a counter. Subsequent "
+ "register calls to unique id must specify not to "
+ "use a counter as well." % unique_id)
+ return
+ else:
+ # Note that the trie knows nothing about the unique
+ # id. We track uniqueness in this class via the
+ # _unique_id_handlers.
+ self._handlers.append_item(event_name, handler,
+ section=section)
+ unique_id_handler_item = {'handler': handler}
+ if unique_id_uses_count:
+ unique_id_handler_item['count'] = 1
+ self._unique_id_handlers[unique_id] = unique_id_handler_item
+ else:
+ self._handlers.append_item(event_name, handler, section=section)
+ # Super simple caching strategy for now: if the registrations change,
+ # clear the cache. This leaves the opportunity for smarter invalidations.
+ self._lookup_cache = {}
+
+ def unregister(self, event_name, handler=None, unique_id=None,
+ unique_id_uses_count=False):
+ if unique_id is not None:
+ try:
+ count = self._unique_id_handlers[unique_id].get('count', None)
+ except KeyError:
+ # There's no handler matching that unique_id so we have
+ # nothing to unregister.
+ return
+ if unique_id_uses_count:
+ if count is None:
+ raise ValueError(
+ "Initial registration of unique id %s was specified to "
+ "use a counter. Subsequent unregister calls to unique "
+ "id must specify use of a counter as well." % unique_id)
+ elif count == 1:
+ handler = self._unique_id_handlers.pop(unique_id)['handler']
+ else:
+ self._unique_id_handlers[unique_id]['count'] -= 1
+ return
+ else:
+ if count:
+ raise ValueError(
+ "Initial registration of unique id %s was specified "
+ "to not use a counter. Subsequent unregister calls "
+ "to unique id must specify not to use a counter as "
+ "well." % unique_id)
+ handler = self._unique_id_handlers.pop(unique_id)['handler']
+ try:
+ self._handlers.remove_item(event_name, handler)
+ self._lookup_cache = {}
+ except ValueError:
+ pass
+
+ def __copy__(self):
+ new_instance = self.__class__()
+ new_state = self.__dict__.copy()
+ new_state['_handlers'] = copy.copy(self._handlers)
+ new_state['_unique_id_handlers'] = copy.copy(self._unique_id_handlers)
+ new_instance.__dict__ = new_state
+ return new_instance
+
+
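
To make the dispatch order concrete, a small usage sketch (the handler names are illustrative)::

    emitter = HierarchicalEmitter()

    def on_any_call(**kwargs):
        return None  # keep propagating

    def on_s3_call(**kwargs):
        return 'handled-s3'

    emitter.register('before-call', on_any_call)
    emitter.register('before-call.s3', on_s3_call)
    # Handlers fire from most to least specific, so the s3-specific
    # handler runs first for 'before-call.s3.PutObject'.
    handler, response = emitter.emit_until_response('before-call.s3.PutObject')
    assert response == 'handled-s3'
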
class EventAliaser(BaseEventHooks):
def __init__(self, event_emitter, event_aliases=None):
self._event_aliases = event_aliases
@@ -433,157 +433,157 @@ class EventAliaser(BaseEventHooks):
)
-class _PrefixTrie(object):
- """Specialized prefix trie that handles wildcards.
-
- The prefixes in this case are based on dot separated
- names so 'foo.bar.baz' is::
-
- foo -> bar -> baz
-
- Wildcard support just means that having a key such as 'foo.bar.*.baz' will
- be matched with a call to ``get_items(key='foo.bar.ANYTHING.baz')``.
-
- You can think of this prefix trie as the equivalent of defaultdict(list),
- except that it can do prefix searches:
-
- foo.bar.baz -> A
- foo.bar -> B
- foo -> C
-
- Calling ``get_items('foo.bar.baz')`` will return [A + B + C], from
- most specific to least specific.
-
- """
- def __init__(self):
- # Each dictionary can be thought of as a node, where a node
- # has values associated with the node, and children is a link
- # to more nodes. So 'foo.bar' would have a 'foo' node with
- # a 'bar' node as a child of foo.
- # {'foo': {'children': {'bar': {...}}}}.
- self._root = {'chunk': None, 'children': {}, 'values': None}
-
- def append_item(self, key, value, section=_MIDDLE):
- """Add an item to a key.
-
- If a value is already associated with that key, the new
- value is appended to the list for the key.
- """
- key_parts = key.split('.')
- current = self._root
- for part in key_parts:
- if part not in current['children']:
- new_child = {'chunk': part, 'values': None, 'children': {}}
- current['children'][part] = new_child
- current = new_child
- else:
- current = current['children'][part]
- if current['values'] is None:
- current['values'] = NodeList([], [], [])
- current['values'][section].append(value)
-
- def prefix_search(self, key):
- """Collect all items that are prefixes of key.
-
- Prefixes in this case are delineated by '.' characters, so
- 'foo.bar.baz' is a 3-chunk sequence of 3 "prefixes"
- ("foo", "bar", and "baz").
-
- """
- collected = deque()
- key_parts = key.split('.')
- current = self._root
- self._get_items(current, key_parts, collected, 0)
- return collected
-
- def _get_items(self, starting_node, key_parts, collected, starting_index):
- stack = [(starting_node, starting_index)]
- key_parts_len = len(key_parts)
- # Traverse down the nodes, where at each level we add the
- # next part from key_parts as well as the wildcard element '*'.
- # This means for each node we see we potentially add two more
- # elements to our stack.
- while stack:
- current_node, index = stack.pop()
- if current_node['values']:
- # We're using extendleft because we want
- # the values associated with the node furthest
- # from the root to come before nodes closer
- # to the root. extendleft() also adds its items
- # in right-left order so .extendleft([1, 2, 3])
- # will result in final_list = [3, 2, 1], which is
- # why we reverse the lists.
- node_list = current_node['values']
- complete_order = (node_list.first + node_list.middle +
- node_list.last)
- collected.extendleft(reversed(complete_order))
- if not index == key_parts_len:
- children = current_node['children']
- directs = children.get(key_parts[index])
- wildcard = children.get('*')
- next_index = index + 1
- if wildcard is not None:
- stack.append((wildcard, next_index))
- if directs is not None:
- stack.append((directs, next_index))
-
- def remove_item(self, key, value):
- """Remove an item associated with a key.
-
- If the value is not associated with the key a ``ValueError``
- will be raised. If the key does not exist in the trie, a
- ``ValueError`` will be raised.
-
- """
- key_parts = key.split('.')
- current = self._root
- self._remove_item(current, key_parts, value, index=0)
-
- def _remove_item(self, current_node, key_parts, value, index):
- if current_node is None:
- return
- elif index < len(key_parts):
- next_node = current_node['children'].get(key_parts[index])
- if next_node is not None:
- self._remove_item(next_node, key_parts, value, index + 1)
- if index == len(key_parts) - 1:
- node_list = next_node['values']
- if value in node_list.first:
- node_list.first.remove(value)
- elif value in node_list.middle:
- node_list.middle.remove(value)
- elif value in node_list.last:
- node_list.last.remove(value)
- if not next_node['children'] and not next_node['values']:
- # Then this is a leaf node with no values so
- # we can just delete this link from the parent node.
- # This makes subsequent search faster in the case
- # where a key does not exist.
- del current_node['children'][key_parts[index]]
- else:
- raise ValueError(
- "key is not in trie: %s" % '.'.join(key_parts))
-
- def __copy__(self):
- # The fact that we're using a nested dict under the covers
- # is an implementation detail, and the user shouldn't have
- # to know that they'd normally need a deepcopy so we expose
- # __copy__ instead of __deepcopy__.
- new_copy = self.__class__()
- copied_attrs = self._recursive_copy(self.__dict__)
- new_copy.__dict__ = copied_attrs
- return new_copy
-
- def _recursive_copy(self, node):
- # We can't use copy.deepcopy because we actually only want to copy
- # the structure of the trie, not the handlers themselves.
- # Each node has a chunk, children, and values.
- copied_node = {}
- for key, value in node.items():
- if isinstance(value, NodeList):
- copied_node[key] = copy.copy(value)
- elif isinstance(value, dict):
- copied_node[key] = self._recursive_copy(value)
- else:
- copied_node[key] = value
- return copied_node
+class _PrefixTrie(object):
+ """Specialized prefix trie that handles wildcards.
+
+ The prefixes in this case are based on dot separated
+ names so 'foo.bar.baz' is::
+
+ foo -> bar -> baz
+
+ Wildcard support just means that having a key such as 'foo.bar.*.baz' will
+ be matched with a call to ``get_items(key='foo.bar.ANYTHING.baz')``.
+
+ You can think of this prefix trie as the equivalent of defaultdict(list),
+ except that it can do prefix searches:
+
+ foo.bar.baz -> A
+ foo.bar -> B
+ foo -> C
+
+ Calling ``get_items('foo.bar.baz')`` will return [A + B + C], from
+ most specific to least specific.
+
+ """
+ def __init__(self):
+ # Each dictionary can be thought of as a node, where a node
+ # has values associated with the node, and children is a link
+ # to more nodes. So 'foo.bar' would have a 'foo' node with
+ # a 'bar' node as a child of foo.
+ # {'foo': {'children': {'bar': {...}}}}.
+ self._root = {'chunk': None, 'children': {}, 'values': None}
+
+ def append_item(self, key, value, section=_MIDDLE):
+ """Add an item to a key.
+
+ If a value is already associated with that key, the new
+ value is appended to the list for the key.
+ """
+ key_parts = key.split('.')
+ current = self._root
+ for part in key_parts:
+ if part not in current['children']:
+ new_child = {'chunk': part, 'values': None, 'children': {}}
+ current['children'][part] = new_child
+ current = new_child
+ else:
+ current = current['children'][part]
+ if current['values'] is None:
+ current['values'] = NodeList([], [], [])
+ current['values'][section].append(value)
+
+ def prefix_search(self, key):
+ """Collect all items that are prefixes of key.
+
+ Prefixes in this case are delineated by '.' characters, so
+ 'foo.bar.baz' is a 3-chunk sequence of 3 "prefixes"
+ ("foo", "bar", and "baz").
+
+ """
+ collected = deque()
+ key_parts = key.split('.')
+ current = self._root
+ self._get_items(current, key_parts, collected, 0)
+ return collected
+
+ def _get_items(self, starting_node, key_parts, collected, starting_index):
+ stack = [(starting_node, starting_index)]
+ key_parts_len = len(key_parts)
+ # Traverse down the nodes, where at each level we add the
+ # next part from key_parts as well as the wildcard element '*'.
+ # This means for each node we see we potentially add two more
+ # elements to our stack.
+ while stack:
+ current_node, index = stack.pop()
+ if current_node['values']:
+ # We're using extendleft because we want
+ # the values associated with the node furthest
+ # from the root to come before nodes closer
+ # to the root. extendleft() also adds its items
+ # in right-left order so .extendleft([1, 2, 3])
+ # will result in final_list = [3, 2, 1], which is
+ # why we reverse the lists.
+ node_list = current_node['values']
+ complete_order = (node_list.first + node_list.middle +
+ node_list.last)
+ collected.extendleft(reversed(complete_order))
+ if not index == key_parts_len:
+ children = current_node['children']
+ directs = children.get(key_parts[index])
+ wildcard = children.get('*')
+ next_index = index + 1
+ if wildcard is not None:
+ stack.append((wildcard, next_index))
+ if directs is not None:
+ stack.append((directs, next_index))
+
+ def remove_item(self, key, value):
+ """Remove an item associated with a key.
+
+ If the value is not associated with the key a ``ValueError``
+ will be raised. If the key does not exist in the trie, a
+ ``ValueError`` will be raised.
+
+ """
+ key_parts = key.split('.')
+ current = self._root
+ self._remove_item(current, key_parts, value, index=0)
+
+ def _remove_item(self, current_node, key_parts, value, index):
+ if current_node is None:
+ return
+ elif index < len(key_parts):
+ next_node = current_node['children'].get(key_parts[index])
+ if next_node is not None:
+ self._remove_item(next_node, key_parts, value, index + 1)
+ if index == len(key_parts) - 1:
+ node_list = next_node['values']
+ if value in node_list.first:
+ node_list.first.remove(value)
+ elif value in node_list.middle:
+ node_list.middle.remove(value)
+ elif value in node_list.last:
+ node_list.last.remove(value)
+ if not next_node['children'] and not next_node['values']:
+ # Then this is a leaf node with no values so
+ # we can just delete this link from the parent node.
+ # This makes subsequent search faster in the case
+ # where a key does not exist.
+ del current_node['children'][key_parts[index]]
+ else:
+ raise ValueError(
+ "key is not in trie: %s" % '.'.join(key_parts))
+
+ def __copy__(self):
+ # The fact that we're using a nested dict under the covers
+ # is an implementation detail, and the user shouldn't have
+ # to know that they'd normally need a deepcopy so we expose
+ # __copy__ instead of __deepcopy__.
+ new_copy = self.__class__()
+ copied_attrs = self._recursive_copy(self.__dict__)
+ new_copy.__dict__ = copied_attrs
+ return new_copy
+
+ def _recursive_copy(self, node):
+ # We can't use copy.deepcopy because we actually only want to copy
+ # the structure of the trie, not the handlers themselves.
+ # Each node has a chunk, children, and values.
+ copied_node = {}
+ for key, value in node.items():
+ if isinstance(value, NodeList):
+ copied_node[key] = copy.copy(value)
+ elif isinstance(value, dict):
+ copied_node[key] = self._recursive_copy(value)
+ else:
+ copied_node[key] = value
+ return copied_node
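
The wildcard semantics described in the ``_PrefixTrie`` docstring can be exercised directly; a brief sketch::

    trie = _PrefixTrie()
    trie.append_item('foo.bar.baz', 'A')
    trie.append_item('foo.bar', 'B')
    trie.append_item('foo.*.baz', 'C')
    matches = list(trie.prefix_search('foo.bar.baz'))
    # Deeper (more specific) matches come before shallower ones: the
    # three-chunk matches 'A' and 'C' both precede the two-chunk 'B'.
    assert matches[-1] == 'B' and set(matches[:2]) == {'A', 'C'}
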
diff --git a/contrib/python/botocore/botocore/loaders.py b/contrib/python/botocore/botocore/loaders.py
index 1f38988d39..8eaf58aab7 100644
--- a/contrib/python/botocore/botocore/loaders.py
+++ b/contrib/python/botocore/botocore/loaders.py
@@ -1,184 +1,184 @@
-# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""Module for loading various model files.
-
-This module provides the classes that are used to load models used
-by botocore. This can include:
-
- * Service models (e.g. the model for EC2, S3, DynamoDB, etc.)
- * Service model extras which customize the service models
- * Other models associated with a service (pagination, waiters)
- * Non service-specific config (Endpoint data, retry config)
-
-Loading a module is broken down into several steps:
-
- * Determining the path to load
- * Search the data_path for files to load
- * The mechanics of loading the file
- * Searching for extras and applying them to the loaded file
-
-The last item exists so that loading mechanisms faster than the
-default JSON loader can be used.
-
-The Search Path
-===============
-
-Similar to how the PATH environment variable is to finding executables
-and the PYTHONPATH environment variable is to finding python modules
-to import, the botocore loaders have the concept of a data path exposed
-through AWS_DATA_PATH.
-
-This enables end users to provide additional search paths where we
-will attempt to load models outside of the models we ship with
-botocore. When you create a ``Loader``, there are two paths
-automatically added to the model search path:
-
- * <botocore root>/data/
- * ~/.aws/models
-
-The first value is the path where all the model files shipped with
-botocore are located.
-
-The second path is so that users can just drop new model files in
-``~/.aws/models`` without having to mess around with the AWS_DATA_PATH.
-
-The AWS_DATA_PATH variable uses the platform-specific path separator to
-separate entries (typically ``:`` on Linux and ``;`` on Windows).
-
-
-Directory Layout
-================
-
-The Loader expects a particular directory layout. In order for any
-directory specified in AWS_DATA_PATH to be considered, it must have
-this structure for service models::
-
- <root>
- |
- |-- servicename1
- | |-- 2012-10-25
- | |-- service-2.json
- |-- ec2
- | |-- 2014-01-01
- | | |-- paginators-1.json
- | | |-- service-2.json
- | | |-- waiters-2.json
- | |-- 2015-03-01
- | |-- paginators-1.json
- | |-- service-2.json
- | |-- waiters-2.json
- | |-- service-2.sdk-extras.json
-
-
-That is:
-
-    * The root directory contains sub directories named after the
-      services.
- * Within each service directory, there's a sub directory for each
- available API version.
- * Within each API version, there are model specific files, including
- (but not limited to): service-2.json, waiters-2.json, paginators-1.json
-
-The ``-1`` and ``-2`` suffixes at the end of the model files denote which
-schema version is used within the model. Even though this information is available in
-the ``version`` key within the model, this version is also part of the filename
-so that code does not need to load the JSON model in order to determine which
-version to use.
-
-The ``sdk-extras`` and similar files represent extra data that needs to be
-applied to the model after it is loaded. Data in these files might represent
-information that doesn't quite fit in the original models, but is still needed
-for the SDK. For instance, additional operation parameters might be added here
-which don't represent the actual service API.
-"""
+# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Module for loading various model files.
+
+This module provides the classes that are used to load models used
+by botocore. This can include:
+
+ * Service models (e.g. the model for EC2, S3, DynamoDB, etc.)
+ * Service model extras which customize the service models
+ * Other models associated with a service (pagination, waiters)
+ * Non service-specific config (Endpoint data, retry config)
+
+Loading a model is broken down into several steps:
+
+ * Determining the path to load
+    * Searching the data_path for files to load
+ * The mechanics of loading the file
+ * Searching for extras and applying them to the loaded file
+
+The last item exists so that loading mechanisms faster than the
+default JSON loader can be used.
+
+The Search Path
+===============
+
+Similar to how the PATH environment variable is to finding executables
+and the PYTHONPATH environment variable is to finding python modules
+to import, the botocore loaders have the concept of a data path exposed
+through AWS_DATA_PATH.
+
+This enables end users to provide additional search paths where we
+will attempt to load models outside of the models we ship with
+botocore. When you create a ``Loader``, there are two paths
+automatically added to the model search path:
+
+ * <botocore root>/data/
+ * ~/.aws/models
+
+The first value is the path where all the model files shipped with
+botocore are located.
+
+The second path is so that users can just drop new model files in
+``~/.aws/models`` without having to mess around with the AWS_DATA_PATH.
+
+The AWS_DATA_PATH variable uses the platform-specific path separator to
+separate entries (typically ``:`` on Linux and ``;`` on Windows).
+
+
+Directory Layout
+================
+
+The Loader expects a particular directory layout. In order for any
+directory specified in AWS_DATA_PATH to be considered, it must have
+this structure for service models::
+
+ <root>
+ |
+ |-- servicename1
+ | |-- 2012-10-25
+ | |-- service-2.json
+ |-- ec2
+ | |-- 2014-01-01
+ | | |-- paginators-1.json
+ | | |-- service-2.json
+ | | |-- waiters-2.json
+ | |-- 2015-03-01
+ | |-- paginators-1.json
+ | |-- service-2.json
+ | |-- waiters-2.json
+ | |-- service-2.sdk-extras.json
+
+
+That is:
+
+    * The root directory contains sub directories named after the
+      services.
+ * Within each service directory, there's a sub directory for each
+ available API version.
+ * Within each API version, there are model specific files, including
+ (but not limited to): service-2.json, waiters-2.json, paginators-1.json
+
+The ``-1`` and ``-2`` suffixes at the end of the model files denote which
+schema version is used within the model. Even though this information is available in
+the ``version`` key within the model, this version is also part of the filename
+so that code does not need to load the JSON model in order to determine which
+version to use.
+
+The ``sdk-extras`` and similar files represent extra data that needs to be
+applied to the model after it is loaded. Data in these files might represent
+information that doesn't quite fit in the original models, but is still needed
+for the SDK. For instance, additional operation parameters might be added here
+which don't represent the actual service API.
+"""
import collections
import os
import logging
-
-from botocore import BOTOCORE_ROOT
-from botocore.compat import json
+
+from botocore import BOTOCORE_ROOT
+from botocore.compat import json
from botocore.compat import six
-from botocore.compat import OrderedDict
-from botocore.exceptions import DataNotFoundError, UnknownServiceError
-from botocore.utils import deep_merge
-
+from botocore.compat import OrderedDict
+from botocore.exceptions import DataNotFoundError, UnknownServiceError
+from botocore.utils import deep_merge
+
from library.python import resource
-
-
-logger = logging.getLogger(__name__)
-
-
-def instance_cache(func):
- """Cache the result of a method on a per instance basis.
-
- This is not a general purpose caching decorator. In order
-    for this to be used, it must be applied to methods on an
- instance, and that instance *must* provide a
- ``self._cache`` dictionary.
-
- """
- def _wrapper(self, *args, **kwargs):
- key = (func.__name__,) + args
- for pair in sorted(kwargs.items()):
- key += pair
- if key in self._cache:
- return self._cache[key]
- data = func(self, *args, **kwargs)
- self._cache[key] = data
- return data
- return _wrapper
-
-
-class JSONFileLoader(object):
-    """Loads JSON files.
-
- This class can load the default format of models, which is a JSON file.
-
- """
- def exists(self, file_path):
- """Checks if the file exists.
-
- :type file_path: str
- :param file_path: The full path to the file to load without
- the '.json' extension.
-
- :return: True if file path exists, False otherwise.
-
- """
- return os.path.isfile(file_path + '.json')
-
- def load_file(self, file_path):
- """Attempt to load the file path.
-
- :type file_path: str
- :param file_path: The full path to the file to load without
- the '.json' extension.
-
- :return: The loaded data if it exists, otherwise None.
-
- """
- full_path = file_path + '.json'
- if not os.path.isfile(full_path):
- return
-
- # By default the file will be opened with locale encoding on Python 3.
- # We specify "utf8" here to ensure the correct behavior.
- with open(full_path, 'rb') as fp:
- payload = fp.read().decode('utf-8')
- logger.debug("Loading JSON file: %s", full_path)
- return json.loads(payload, object_pairs_hook=OrderedDict)
-
-
+
+
+logger = logging.getLogger(__name__)
+
+
+def instance_cache(func):
+ """Cache the result of a method on a per instance basis.
+
+ This is not a general purpose caching decorator. In order
+    for this to be used, it must be applied to methods on an
+ instance, and that instance *must* provide a
+ ``self._cache`` dictionary.
+
+ """
+ def _wrapper(self, *args, **kwargs):
+ key = (func.__name__,) + args
+ for pair in sorted(kwargs.items()):
+ key += pair
+ if key in self._cache:
+ return self._cache[key]
+ data = func(self, *args, **kwargs)
+ self._cache[key] = data
+ return data
+ return _wrapper
+
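+
+# Minimal sketch (not part of botocore) of the contract expected by
+# ``instance_cache``: the decorated method's instance must provide a
+# ``self._cache`` dictionary.
+class _InstanceCacheExample(object):
+    def __init__(self):
+        self._cache = {}
+        self.calls = 0
+
+    @instance_cache
+    def lookup(self, name):
+        # Runs only on a cache miss; repeated calls with the same
+        # arguments return the memoized value.
+        self.calls += 1
+        return name.upper()
+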
+
+class JSONFileLoader(object):
+    """Loads JSON files.
+
+ This class can load the default format of models, which is a JSON file.
+
+ """
+ def exists(self, file_path):
+ """Checks if the file exists.
+
+ :type file_path: str
+ :param file_path: The full path to the file to load without
+ the '.json' extension.
+
+ :return: True if file path exists, False otherwise.
+
+ """
+ return os.path.isfile(file_path + '.json')
+
+ def load_file(self, file_path):
+ """Attempt to load the file path.
+
+ :type file_path: str
+ :param file_path: The full path to the file to load without
+ the '.json' extension.
+
+ :return: The loaded data if it exists, otherwise None.
+
+ """
+ full_path = file_path + '.json'
+ if not os.path.isfile(full_path):
+ return
+
+ # By default the file will be opened with locale encoding on Python 3.
+ # We specify "utf8" here to ensure the correct behavior.
+ with open(full_path, 'rb') as fp:
+ payload = fp.read().decode('utf-8')
+ logger.debug("Loading JSON file: %s", full_path)
+ return json.loads(payload, object_pairs_hook=OrderedDict)
+
+
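+# Illustrative usage (not part of botocore): ``JSONFileLoader`` takes the
+# path *without* the '.json' extension. The path below is hypothetical.
+def _example_json_file_loader():
+    loader = JSONFileLoader()
+    # Checks for and loads /tmp/models/ec2/2015-03-01/service-2.json.
+    path = '/tmp/models/ec2/2015-03-01/service-2'
+    if loader.exists(path):
+        return loader.load_file(path)
+    return None
+
+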
# SQS-119
class HybridJsonLoader(JSONFileLoader):
@@ -224,303 +224,303 @@ class HybridJsonLoader(JSONFileLoader):
return super(HybridJsonLoader, self).load_file(file_path)
-def create_loader(search_path_string=None):
-    """Create a Loader instance.
-
-    This factory function creates a loader given a search path string.
-
-    :type search_path_string: str
-    :param search_path_string: The AWS_DATA_PATH value. A string
- of data path values separated by the ``os.path.pathsep`` value,
- which is typically ``:`` on POSIX platforms and ``;`` on
- windows.
-
- :return: A ``Loader`` instance.
-
- """
- if search_path_string is None:
- return Loader()
- paths = []
- extra_paths = search_path_string.split(os.pathsep)
- for path in extra_paths:
- path = os.path.expanduser(os.path.expandvars(path))
- paths.append(path)
- return Loader(extra_search_paths=paths)
-
-
-class Loader(object):
- """Find and load data models.
-
- This class will handle searching for and loading data models.
-
- The main method used here is ``load_service_model``, which is a
- convenience method over ``load_data`` and ``determine_latest_version``.
-
- """
+def create_loader(search_path_string=None):
+    """Create a Loader instance.
+
+    This factory function creates a loader given a search path string.
+
+    :type search_path_string: str
+    :param search_path_string: The AWS_DATA_PATH value. A string
+ of data path values separated by the ``os.path.pathsep`` value,
+ which is typically ``:`` on POSIX platforms and ``;`` on
+ windows.
+
+ :return: A ``Loader`` instance.
+
+ """
+ if search_path_string is None:
+ return Loader()
+ paths = []
+ extra_paths = search_path_string.split(os.pathsep)
+ for path in extra_paths:
+ path = os.path.expanduser(os.path.expandvars(path))
+ paths.append(path)
+ return Loader(extra_search_paths=paths)
+
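+# Illustrative sketch (not part of botocore): ``create_loader`` splits the
+# string on ``os.pathsep`` and expands '~' and environment variables in
+# each entry. The paths below are hypothetical.
+def _example_create_loader():
+    search_path = os.pathsep.join(['~/custom-models', '/opt/models'])
+    loader = create_loader(search_path)
+    # The two extra paths now precede the default search paths.
+    return loader.search_paths
+
+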
+
+class Loader(object):
+ """Find and load data models.
+
+ This class will handle searching for and loading data models.
+
+ The main method used here is ``load_service_model``, which is a
+ convenience method over ``load_data`` and ``determine_latest_version``.
+
+ """
FILE_LOADER_CLASS = HybridJsonLoader
- # The included models in botocore/data/ that we ship with botocore.
- BUILTIN_DATA_PATH = os.path.join(BOTOCORE_ROOT, 'data')
- # For convenience we automatically add ~/.aws/models to the data path.
- CUSTOMER_DATA_PATH = os.path.join(os.path.expanduser('~'),
- '.aws', 'models')
- BUILTIN_EXTRAS_TYPES = ['sdk']
-
- def __init__(self, extra_search_paths=None, file_loader=None,
- cache=None, include_default_search_paths=True,
- include_default_extras=True):
- self._cache = {}
- if file_loader is None:
- file_loader = self.FILE_LOADER_CLASS()
- self.file_loader = file_loader
- if extra_search_paths is not None:
- self._search_paths = extra_search_paths
- else:
- self._search_paths = []
- if include_default_search_paths:
- self._search_paths.extend([self.CUSTOMER_DATA_PATH,
- self.BUILTIN_DATA_PATH])
-
- self._extras_types = []
- if include_default_extras:
- self._extras_types.extend(self.BUILTIN_EXTRAS_TYPES)
-
- self._extras_processor = ExtrasProcessor()
-
- @property
- def search_paths(self):
- return self._search_paths
-
- @property
- def extras_types(self):
- return self._extras_types
-
- @instance_cache
- def list_available_services(self, type_name):
- """List all known services.
-
- This will traverse the search path and look for all known
- services.
-
- :type type_name: str
- :param type_name: The type of the service (service-2,
- paginators-1, waiters-2, etc). This is needed because
- the list of available services depends on the service
- type. For example, the latest API version available for
- a resource-1.json file may not be the latest API version
-            available for a service-2.json file.
-
- :return: A list of all services. The list of services will
- be sorted.
-
- """
- services = set()
- for possible_path in self._potential_locations():
- # Any directory in the search path is potentially a service.
-            # We'll collect an initial list of potential services,
- # but we'll then need to further process these directories
- # by searching for the corresponding type_name in each
- # potential directory.
- possible_services = [
- d for d in os.listdir(possible_path)
- if os.path.isdir(os.path.join(possible_path, d))]
- for service_name in possible_services:
- full_dirname = os.path.join(possible_path, service_name)
- api_versions = os.listdir(full_dirname)
- for api_version in api_versions:
- full_load_path = os.path.join(full_dirname,
- api_version,
- type_name)
- if self.file_loader.exists(full_load_path):
- services.add(service_name)
- break
+ # The included models in botocore/data/ that we ship with botocore.
+ BUILTIN_DATA_PATH = os.path.join(BOTOCORE_ROOT, 'data')
+ # For convenience we automatically add ~/.aws/models to the data path.
+ CUSTOMER_DATA_PATH = os.path.join(os.path.expanduser('~'),
+ '.aws', 'models')
+ BUILTIN_EXTRAS_TYPES = ['sdk']
+
+ def __init__(self, extra_search_paths=None, file_loader=None,
+ cache=None, include_default_search_paths=True,
+ include_default_extras=True):
+ self._cache = {}
+ if file_loader is None:
+ file_loader = self.FILE_LOADER_CLASS()
+ self.file_loader = file_loader
+ if extra_search_paths is not None:
+ self._search_paths = extra_search_paths
+ else:
+ self._search_paths = []
+ if include_default_search_paths:
+ self._search_paths.extend([self.CUSTOMER_DATA_PATH,
+ self.BUILTIN_DATA_PATH])
+
+ self._extras_types = []
+ if include_default_extras:
+ self._extras_types.extend(self.BUILTIN_EXTRAS_TYPES)
+
+ self._extras_processor = ExtrasProcessor()
+
+ @property
+ def search_paths(self):
+ return self._search_paths
+
+ @property
+ def extras_types(self):
+ return self._extras_types
+
+ @instance_cache
+ def list_available_services(self, type_name):
+ """List all known services.
+
+ This will traverse the search path and look for all known
+ services.
+
+ :type type_name: str
+ :param type_name: The type of the service (service-2,
+ paginators-1, waiters-2, etc). This is needed because
+ the list of available services depends on the service
+ type. For example, the latest API version available for
+ a resource-1.json file may not be the latest API version
+            available for a service-2.json file.
+
+ :return: A list of all services. The list of services will
+ be sorted.
+
+ """
+ services = set()
+ for possible_path in self._potential_locations():
+ # Any directory in the search path is potentially a service.
+            # We'll collect an initial list of potential services,
+ # but we'll then need to further process these directories
+ # by searching for the corresponding type_name in each
+ # potential directory.
+ possible_services = [
+ d for d in os.listdir(possible_path)
+ if os.path.isdir(os.path.join(possible_path, d))]
+ for service_name in possible_services:
+ full_dirname = os.path.join(possible_path, service_name)
+ api_versions = os.listdir(full_dirname)
+ for api_version in api_versions:
+ full_load_path = os.path.join(full_dirname,
+ api_version,
+ type_name)
+ if self.file_loader.exists(full_load_path):
+ services.add(service_name)
+ break
# SQS-119
HybridJsonLoader.collect_service_data()
services = services.union(HybridJsonLoader.type_data_cache[type_name].keys())
- return sorted(services)
-
- @instance_cache
- def determine_latest_version(self, service_name, type_name):
- """Find the latest API version available for a service.
-
- :type service_name: str
- :param service_name: The name of the service.
-
- :type type_name: str
- :param type_name: The type of the service (service-2,
- paginators-1, waiters-2, etc). This is needed because
- the latest API version available can depend on the service
- type. For example, the latest API version available for
- a resource-1.json file may not be the latest API version
-            available for a service-2.json file.
-
- :rtype: str
- :return: The latest API version. If the service does not exist
- or does not have any available API data, then a
- ``DataNotFoundError`` exception will be raised.
-
- """
- return max(self.list_api_versions(service_name, type_name))
-
- @instance_cache
- def list_api_versions(self, service_name, type_name):
- """List all API versions available for a particular service type
-
- :type service_name: str
- :param service_name: The name of the service
-
- :type type_name: str
-        :param type_name: The type name for the service (e.g. service-2,
- paginators-1, etc.)
-
- :rtype: list
- :return: A list of API version strings in sorted order.
-
- """
- known_api_versions = set()
- for possible_path in self._potential_locations(service_name,
- must_exist=True,
- is_dir=True):
- for dirname in os.listdir(possible_path):
- full_path = os.path.join(possible_path, dirname, type_name)
- # Only add to the known_api_versions if the directory
- # contains a service-2, paginators-1, etc. file corresponding
- # to the type_name passed in.
- if self.file_loader.exists(full_path):
- known_api_versions.add(dirname)
+ return sorted(services)
+
+ @instance_cache
+ def determine_latest_version(self, service_name, type_name):
+ """Find the latest API version available for a service.
+
+ :type service_name: str
+ :param service_name: The name of the service.
+
+ :type type_name: str
+ :param type_name: The type of the service (service-2,
+ paginators-1, waiters-2, etc). This is needed because
+ the latest API version available can depend on the service
+ type. For example, the latest API version available for
+ a resource-1.json file may not be the latest API version
+            available for a service-2.json file.
+
+ :rtype: str
+ :return: The latest API version. If the service does not exist
+ or does not have any available API data, then a
+ ``DataNotFoundError`` exception will be raised.
+
+ """
+ return max(self.list_api_versions(service_name, type_name))
+
+ @instance_cache
+ def list_api_versions(self, service_name, type_name):
+ """List all API versions available for a particular service type
+
+ :type service_name: str
+ :param service_name: The name of the service
+
+ :type type_name: str
+        :param type_name: The type name for the service (e.g. service-2,
+ paginators-1, etc.)
+
+ :rtype: list
+ :return: A list of API version strings in sorted order.
+
+ """
+ known_api_versions = set()
+ for possible_path in self._potential_locations(service_name,
+ must_exist=True,
+ is_dir=True):
+ for dirname in os.listdir(possible_path):
+ full_path = os.path.join(possible_path, dirname, type_name)
+ # Only add to the known_api_versions if the directory
+ # contains a service-2, paginators-1, etc. file corresponding
+ # to the type_name passed in.
+ if self.file_loader.exists(full_path):
+ known_api_versions.add(dirname)
# SQS-119
HybridJsonLoader.collect_service_data()
known_api_versions = known_api_versions.union(HybridJsonLoader.type_data_cache[type_name][service_name])
- if not known_api_versions:
- raise DataNotFoundError(data_path=service_name)
- return sorted(known_api_versions)
-
- @instance_cache
- def load_service_model(self, service_name, type_name, api_version=None):
- """Load a botocore service model
-
- This is the main method for loading botocore models (e.g. a service
- model, pagination configs, waiter configs, etc.).
-
- :type service_name: str
- :param service_name: The name of the service (e.g ``ec2``, ``s3``).
-
- :type type_name: str
- :param type_name: The model type. Valid types include, but are not
- limited to: ``service-2``, ``paginators-1``, ``waiters-2``.
-
- :type api_version: str
- :param api_version: The API version to load. If this is not
- provided, then the latest API version will be used.
-
- :raises: UnknownServiceError if there is no known service with
- the provided service_name.
-
- :raises: DataNotFoundError if no data could be found for the
- service_name/type_name/api_version.
-
- :return: The loaded data, as a python type (e.g. dict, list, etc).
- """
- # Wrapper around the load_data. This will calculate the path
- # to call load_data with.
- known_services = self.list_available_services(type_name)
- if service_name not in known_services:
- raise UnknownServiceError(
- service_name=service_name,
- known_service_names=', '.join(sorted(known_services)))
- if api_version is None:
- api_version = self.determine_latest_version(
- service_name, type_name)
- full_path = os.path.join(service_name, api_version, type_name)
- model = self.load_data(full_path)
-
- # Load in all the extras
- extras_data = self._find_extras(service_name, type_name, api_version)
- self._extras_processor.process(model, extras_data)
-
- return model
-
- def _find_extras(self, service_name, type_name, api_version):
- """Creates an iterator over all the extras data."""
- for extras_type in self.extras_types:
- extras_name = '%s.%s-extras' % (type_name, extras_type)
- full_path = os.path.join(service_name, api_version, extras_name)
-
- try:
- yield self.load_data(full_path)
- except DataNotFoundError:
- pass
-
- @instance_cache
- def load_data(self, name):
- """Load data given a data path.
-
- This is a low level method that will search through the various
- search paths until it's able to load a value. This is typically
- only needed to load *non* model files (such as _endpoints and
- _retry). If you need to load model files, you should prefer
- ``load_service_model``.
-
- :type name: str
-        :param name: The data path, e.g. ``ec2/2015-03-01/service-2``.
-
- :return: The loaded data. If no data could be found then
- a DataNotFoundError is raised.
-
- """
- for possible_path in self._potential_locations(name):
- found = self.file_loader.load_file(possible_path)
- if found is not None:
- return found
+ if not known_api_versions:
+ raise DataNotFoundError(data_path=service_name)
+ return sorted(known_api_versions)
+
+ @instance_cache
+ def load_service_model(self, service_name, type_name, api_version=None):
+ """Load a botocore service model
+
+ This is the main method for loading botocore models (e.g. a service
+ model, pagination configs, waiter configs, etc.).
+
+ :type service_name: str
+ :param service_name: The name of the service (e.g ``ec2``, ``s3``).
+
+ :type type_name: str
+ :param type_name: The model type. Valid types include, but are not
+ limited to: ``service-2``, ``paginators-1``, ``waiters-2``.
+
+ :type api_version: str
+ :param api_version: The API version to load. If this is not
+ provided, then the latest API version will be used.
+
+ :raises: UnknownServiceError if there is no known service with
+ the provided service_name.
+
+ :raises: DataNotFoundError if no data could be found for the
+ service_name/type_name/api_version.
+
+ :return: The loaded data, as a python type (e.g. dict, list, etc).
+ """
+ # Wrapper around the load_data. This will calculate the path
+ # to call load_data with.
+ known_services = self.list_available_services(type_name)
+ if service_name not in known_services:
+ raise UnknownServiceError(
+ service_name=service_name,
+ known_service_names=', '.join(sorted(known_services)))
+ if api_version is None:
+ api_version = self.determine_latest_version(
+ service_name, type_name)
+ full_path = os.path.join(service_name, api_version, type_name)
+ model = self.load_data(full_path)
+
+ # Load in all the extras
+ extras_data = self._find_extras(service_name, type_name, api_version)
+ self._extras_processor.process(model, extras_data)
+
+ return model
+
+ def _find_extras(self, service_name, type_name, api_version):
+ """Creates an iterator over all the extras data."""
+ for extras_type in self.extras_types:
+ extras_name = '%s.%s-extras' % (type_name, extras_type)
+ full_path = os.path.join(service_name, api_version, extras_name)
+
+ try:
+ yield self.load_data(full_path)
+ except DataNotFoundError:
+ pass
+
+ @instance_cache
+ def load_data(self, name):
+ """Load data given a data path.
+
+ This is a low level method that will search through the various
+ search paths until it's able to load a value. This is typically
+ only needed to load *non* model files (such as _endpoints and
+ _retry). If you need to load model files, you should prefer
+ ``load_service_model``.
+
+ :type name: str
+        :param name: The data path, e.g. ``ec2/2015-03-01/service-2``.
+
+ :return: The loaded data. If no data could be found then
+ a DataNotFoundError is raised.
+
+ """
+ for possible_path in self._potential_locations(name):
+ found = self.file_loader.load_file(possible_path)
+ if found is not None:
+ return found
# SQS-119
found_by_arcadia_loader = self.file_loader.load_file(name)
if found_by_arcadia_loader is not None:
return found_by_arcadia_loader
- # We didn't find anything that matched on any path.
- raise DataNotFoundError(data_path=name)
-
- def _potential_locations(self, name=None, must_exist=False,
- is_dir=False):
- # Will give an iterator over the full path of potential locations
- # according to the search path.
- for path in self.search_paths:
- if os.path.isdir(path):
- full_path = path
- if name is not None:
- full_path = os.path.join(path, name)
- if not must_exist:
- yield full_path
- else:
- if is_dir and os.path.isdir(full_path):
- yield full_path
- elif os.path.exists(full_path):
- yield full_path
-
-
-class ExtrasProcessor(object):
- """Processes data from extras files into service models."""
- def process(self, original_model, extra_models):
- """Processes data from a list of loaded extras files into a model
-
- :type original_model: dict
- :param original_model: The service model to load all the extras into.
-
- :type extra_models: iterable of dict
- :param extra_models: A list of loaded extras models.
- """
- for extras in extra_models:
- self._process(original_model, extras)
-
- def _process(self, model, extra_model):
- """Process a single extras model into a service model."""
- if 'merge' in extra_model:
- deep_merge(model, extra_model['merge'])
+ # We didn't find anything that matched on any path.
+ raise DataNotFoundError(data_path=name)
+
+ def _potential_locations(self, name=None, must_exist=False,
+ is_dir=False):
+ # Will give an iterator over the full path of potential locations
+ # according to the search path.
+ for path in self.search_paths:
+ if os.path.isdir(path):
+ full_path = path
+ if name is not None:
+ full_path = os.path.join(path, name)
+ if not must_exist:
+ yield full_path
+ else:
+ if is_dir and os.path.isdir(full_path):
+ yield full_path
+ elif os.path.exists(full_path):
+ yield full_path
+
+
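+# Illustrative usage (not part of botocore): the typical call sequence on
+# a ``Loader``. The service and type names are examples.
+def _example_loader_usage():
+    loader = Loader()
+    services = loader.list_available_services('service-2')
+    api_version = loader.determine_latest_version('ec2', 'service-2')
+    model = loader.load_service_model('ec2', 'service-2', api_version)
+    return services, api_version, model
+
+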
+class ExtrasProcessor(object):
+ """Processes data from extras files into service models."""
+ def process(self, original_model, extra_models):
+ """Processes data from a list of loaded extras files into a model
+
+ :type original_model: dict
+ :param original_model: The service model to load all the extras into.
+
+ :type extra_models: iterable of dict
+ :param extra_models: A list of loaded extras models.
+ """
+ for extras in extra_models:
+ self._process(original_model, extras)
+
+ def _process(self, model, extra_model):
+ """Process a single extras model into a service model."""
+ if 'merge' in extra_model:
+ deep_merge(model, extra_model['merge'])
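+
+
+# Minimal sketch (not part of botocore's shipped data): an extras file
+# with a 'merge' key is deep-merged into the service model, as ``_process``
+# above shows.
+def _example_extras_merge():
+    model = {'metadata': {'apiVersion': '2015-03-01'}, 'shapes': {}}
+    extras = {'merge': {'metadata': {'protocol': 'json'}}}
+    ExtrasProcessor().process(model, [extras])
+    # model['metadata'] now contains both apiVersion and protocol.
+    return model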
diff --git a/contrib/python/botocore/botocore/model.py b/contrib/python/botocore/botocore/model.py
index 7b3f41e32a..34b7abb89a 100644
--- a/contrib/python/botocore/botocore/model.py
+++ b/contrib/python/botocore/botocore/model.py
@@ -1,201 +1,201 @@
-# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""Abstractions to interact with service models."""
-from collections import defaultdict
-
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Abstractions to interact with service models."""
+from collections import defaultdict
+
from botocore.utils import (CachedProperty, instance_cache,
hyphenize_service_id)
-from botocore.compat import OrderedDict
+from botocore.compat import OrderedDict
from botocore.exceptions import MissingServiceIdError
from botocore.exceptions import UndefinedModelAttributeError
-
-NOT_SET = object()
-
-
-class NoShapeFoundError(Exception):
- pass
-
-
-class InvalidShapeError(Exception):
- pass
-
-
-class OperationNotFoundError(Exception):
- pass
-
-
-class InvalidShapeReferenceError(Exception):
- pass
-
-
+
+NOT_SET = object()
+
+
+class NoShapeFoundError(Exception):
+ pass
+
+
+class InvalidShapeError(Exception):
+ pass
+
+
+class OperationNotFoundError(Exception):
+ pass
+
+
+class InvalidShapeReferenceError(Exception):
+ pass
+
+
class ServiceId(str):
def hyphenize(self):
return hyphenize_service_id(self)
-
-
-class Shape(object):
- """Object representing a shape from the service model."""
- # To simplify serialization logic, all shape params that are
- # related to serialization are moved from the top level hash into
- # a 'serialization' hash. This list below contains the names of all
- # the attributes that should be moved.
- SERIALIZED_ATTRS = ['locationName', 'queryName', 'flattened', 'location',
- 'payload', 'streaming', 'timestampFormat',
- 'xmlNamespace', 'resultWrapper', 'xmlAttribute',
+
+
+class Shape(object):
+ """Object representing a shape from the service model."""
+ # To simplify serialization logic, all shape params that are
+ # related to serialization are moved from the top level hash into
+ # a 'serialization' hash. This list below contains the names of all
+ # the attributes that should be moved.
+ SERIALIZED_ATTRS = ['locationName', 'queryName', 'flattened', 'location',
+ 'payload', 'streaming', 'timestampFormat',
+ 'xmlNamespace', 'resultWrapper', 'xmlAttribute',
'eventstream', 'event', 'eventheader', 'eventpayload',
'jsonvalue', 'timestampFormat', 'hostLabel']
- METADATA_ATTRS = ['required', 'min', 'max', 'sensitive', 'enum',
+ METADATA_ATTRS = ['required', 'min', 'max', 'sensitive', 'enum',
'idempotencyToken', 'error', 'exception',
'endpointdiscoveryid', 'retryable', 'document']
- MAP_TYPE = OrderedDict
-
- def __init__(self, shape_name, shape_model, shape_resolver=None):
- """
-
- :type shape_name: string
- :param shape_name: The name of the shape.
-
- :type shape_model: dict
- :param shape_model: The shape model. This would be the value
- associated with the key in the "shapes" dict of the
- service model (i.e ``model['shapes'][shape_name]``)
-
- :type shape_resolver: botocore.model.ShapeResolver
- :param shape_resolver: A shape resolver object. This is used to
- resolve references to other shapes. For scalar shape types
- (string, integer, boolean, etc.), this argument is not
- required. If a shape_resolver is not provided for a complex
- type, then a ``ValueError`` will be raised when an attempt
- to resolve a shape is made.
-
- """
- self.name = shape_name
- self.type_name = shape_model['type']
- self.documentation = shape_model.get('documentation', '')
- self._shape_model = shape_model
- if shape_resolver is None:
- # If a shape_resolver is not provided, we create an object
- # that will throw errors if you attempt to resolve
- # a shape. This is actually ok for scalar shapes
- # because they don't need to resolve shapes and shouldn't
- # be required to provide an object they won't use.
- shape_resolver = UnresolvableShapeMap()
- self._shape_resolver = shape_resolver
- self._cache = {}
-
- @CachedProperty
- def serialization(self):
- """Serialization information about the shape.
-
- This contains information that may be needed for input serialization
- or response parsing. This can include:
-
- * name
- * queryName
- * flattened
- * location
- * payload
- * streaming
- * xmlNamespace
- * resultWrapper
- * xmlAttribute
- * jsonvalue
+ MAP_TYPE = OrderedDict
+
+ def __init__(self, shape_name, shape_model, shape_resolver=None):
+ """
+
+ :type shape_name: string
+ :param shape_name: The name of the shape.
+
+ :type shape_model: dict
+ :param shape_model: The shape model. This would be the value
+ associated with the key in the "shapes" dict of the
+ service model (i.e ``model['shapes'][shape_name]``)
+
+ :type shape_resolver: botocore.model.ShapeResolver
+ :param shape_resolver: A shape resolver object. This is used to
+ resolve references to other shapes. For scalar shape types
+ (string, integer, boolean, etc.), this argument is not
+ required. If a shape_resolver is not provided for a complex
+ type, then a ``ValueError`` will be raised when an attempt
+ to resolve a shape is made.
+
+ """
+ self.name = shape_name
+ self.type_name = shape_model['type']
+ self.documentation = shape_model.get('documentation', '')
+ self._shape_model = shape_model
+ if shape_resolver is None:
+ # If a shape_resolver is not provided, we create an object
+ # that will throw errors if you attempt to resolve
+ # a shape. This is actually ok for scalar shapes
+ # because they don't need to resolve shapes and shouldn't
+ # be required to provide an object they won't use.
+ shape_resolver = UnresolvableShapeMap()
+ self._shape_resolver = shape_resolver
+ self._cache = {}
+
+ @CachedProperty
+ def serialization(self):
+ """Serialization information about the shape.
+
+ This contains information that may be needed for input serialization
+ or response parsing. This can include:
+
+ * name
+ * queryName
+ * flattened
+ * location
+ * payload
+ * streaming
+ * xmlNamespace
+ * resultWrapper
+ * xmlAttribute
+ * jsonvalue
* timestampFormat
-
- :rtype: dict
- :return: Serialization information about the shape.
-
- """
- model = self._shape_model
- serialization = {}
- for attr in self.SERIALIZED_ATTRS:
- if attr in self._shape_model:
- serialization[attr] = model[attr]
- # For consistency, locationName is renamed to just 'name'.
- if 'locationName' in serialization:
- serialization['name'] = serialization.pop('locationName')
- return serialization
-
- @CachedProperty
- def metadata(self):
- """Metadata about the shape.
-
-        This contains optional information about the shape, including:
-
- * min
- * max
- * enum
- * sensitive
- * required
- * idempotencyToken
+
+ :rtype: dict
+ :return: Serialization information about the shape.
+
+ """
+ model = self._shape_model
+ serialization = {}
+ for attr in self.SERIALIZED_ATTRS:
+ if attr in self._shape_model:
+ serialization[attr] = model[attr]
+ # For consistency, locationName is renamed to just 'name'.
+ if 'locationName' in serialization:
+ serialization['name'] = serialization.pop('locationName')
+ return serialization
+
+ @CachedProperty
+ def metadata(self):
+ """Metadata about the shape.
+
+        This contains optional information about the shape, including:
+
+ * min
+ * max
+ * enum
+ * sensitive
+ * required
+ * idempotencyToken
* document
-
- :rtype: dict
- :return: Metadata about the shape.
-
- """
- model = self._shape_model
- metadata = {}
- for attr in self.METADATA_ATTRS:
- if attr in self._shape_model:
- metadata[attr] = model[attr]
- return metadata
-
- @CachedProperty
- def required_members(self):
- """A list of members that are required.
-
- A structure shape can define members that are required.
- This value will return a list of required members. If there
- are no required members an empty list is returned.
-
- """
- return self.metadata.get('required', [])
-
- def _resolve_shape_ref(self, shape_ref):
- return self._shape_resolver.resolve_shape_ref(shape_ref)
-
- def __repr__(self):
- return "<%s(%s)>" % (self.__class__.__name__,
- self.name)
-
+
+ :rtype: dict
+ :return: Metadata about the shape.
+
+ """
+ model = self._shape_model
+ metadata = {}
+ for attr in self.METADATA_ATTRS:
+ if attr in self._shape_model:
+ metadata[attr] = model[attr]
+ return metadata
+
+ @CachedProperty
+ def required_members(self):
+ """A list of members that are required.
+
+ A structure shape can define members that are required.
+ This value will return a list of required members. If there
+ are no required members an empty list is returned.
+
+ """
+ return self.metadata.get('required', [])
+
+ def _resolve_shape_ref(self, shape_ref):
+ return self._shape_resolver.resolve_shape_ref(shape_ref)
+
+ def __repr__(self):
+ return "<%s(%s)>" % (self.__class__.__name__,
+ self.name)
+
@property
def event_stream_name(self):
return None
-
-class StructureShape(Shape):
- @CachedProperty
- def members(self):
+
+class StructureShape(Shape):
+ @CachedProperty
+ def members(self):
members = self._shape_model.get('members', self.MAP_TYPE())
- # The members dict looks like:
- # 'members': {
- # 'MemberName': {'shape': 'shapeName'},
- # 'MemberName2': {'shape': 'shapeName'},
- # }
- # We return a dict of member name to Shape object.
- shape_members = self.MAP_TYPE()
- for name, shape_ref in members.items():
- shape_members[name] = self._resolve_shape_ref(shape_ref)
- return shape_members
-
+ # The members dict looks like:
+ # 'members': {
+ # 'MemberName': {'shape': 'shapeName'},
+ # 'MemberName2': {'shape': 'shapeName'},
+ # }
+ # We return a dict of member name to Shape object.
+ shape_members = self.MAP_TYPE()
+ for name, shape_ref in members.items():
+ shape_members[name] = self._resolve_shape_ref(shape_ref)
+ return shape_members
+
@CachedProperty
def event_stream_name(self):
for member_name, member in self.members.items():
if member.serialization.get('eventstream'):
return member_name
return None
-
+
@CachedProperty
def error_code(self):
if not self.metadata.get('exception', False):
@@ -212,69 +212,69 @@ class StructureShape(Shape):
return self.metadata.get('document', False)
-class ListShape(Shape):
- @CachedProperty
- def member(self):
- return self._resolve_shape_ref(self._shape_model['member'])
-
-
-class MapShape(Shape):
- @CachedProperty
- def key(self):
- return self._resolve_shape_ref(self._shape_model['key'])
-
- @CachedProperty
- def value(self):
- return self._resolve_shape_ref(self._shape_model['value'])
-
-
-class StringShape(Shape):
- @CachedProperty
- def enum(self):
- return self.metadata.get('enum', [])
-
-
-class ServiceModel(object):
- """
-
- :ivar service_description: The parsed service description dictionary.
-
- """
-
- def __init__(self, service_description, service_name=None):
- """
-
- :type service_description: dict
- :param service_description: The service description model. This value
-            is obtained from a botocore.loaders.Loader, or from directly loading
- the file yourself::
-
- service_description = json.load(
- open('/path/to/service-description-model.json'))
- model = ServiceModel(service_description)
-
- :type service_name: str
- :param service_name: The name of the service. Normally this is
- the endpoint prefix defined in the service_description. However,
- you can override this value to provide a more convenient name.
- This is done in a few places in botocore (ses instead of email,
- emr instead of elasticmapreduce). If this value is not provided,
- it will default to the endpointPrefix defined in the model.
-
- """
- self._service_description = service_description
- # We want clients to be able to access metadata directly.
- self.metadata = service_description.get('metadata', {})
- self._shape_resolver = ShapeResolver(
- service_description.get('shapes', {}))
- self._signature_version = NOT_SET
- self._service_name = service_name
- self._instance_cache = {}
-
- def shape_for(self, shape_name, member_traits=None):
- return self._shape_resolver.get_shape_by_name(
- shape_name, member_traits)
-
+class ListShape(Shape):
+ @CachedProperty
+ def member(self):
+ return self._resolve_shape_ref(self._shape_model['member'])
+
+
+class MapShape(Shape):
+ @CachedProperty
+ def key(self):
+ return self._resolve_shape_ref(self._shape_model['key'])
+
+ @CachedProperty
+ def value(self):
+ return self._resolve_shape_ref(self._shape_model['value'])
+
+
+class StringShape(Shape):
+ @CachedProperty
+ def enum(self):
+ return self.metadata.get('enum', [])
+
+
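+# Illustrative sketch (not part of botocore): resolving a small
+# hand-written shape map. ``ShapeResolver`` is defined later in this
+# module, so the lookup is deferred into a function.
+def _example_shapes():
+    shapes = {
+        'Person': {
+            'type': 'structure',
+            'required': ['Name'],
+            'members': {'Name': {'shape': 'NameString'}},
+        },
+        'NameString': {'type': 'string', 'enum': ['alice', 'bob']},
+    }
+    resolver = ShapeResolver(shapes)
+    person = resolver.get_shape_by_name('Person')
+    # person is a StructureShape; its 'Name' member resolves to a
+    # StringShape carrying the enum metadata.
+    return person.required_members, person.members['Name'].enum
+
+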
+class ServiceModel(object):
+ """
+
+ :ivar service_description: The parsed service description dictionary.
+
+ """
+
+ def __init__(self, service_description, service_name=None):
+ """
+
+ :type service_description: dict
+ :param service_description: The service description model. This value
+            is obtained from a botocore.loaders.Loader, or from directly loading
+ the file yourself::
+
+ service_description = json.load(
+ open('/path/to/service-description-model.json'))
+ model = ServiceModel(service_description)
+
+ :type service_name: str
+ :param service_name: The name of the service. Normally this is
+ the endpoint prefix defined in the service_description. However,
+ you can override this value to provide a more convenient name.
+ This is done in a few places in botocore (ses instead of email,
+ emr instead of elasticmapreduce). If this value is not provided,
+ it will default to the endpointPrefix defined in the model.
+
+ """
+ self._service_description = service_description
+ # We want clients to be able to access metadata directly.
+ self.metadata = service_description.get('metadata', {})
+ self._shape_resolver = ShapeResolver(
+ service_description.get('shapes', {}))
+ self._signature_version = NOT_SET
+ self._service_name = service_name
+ self._instance_cache = {}
+
+ def shape_for(self, shape_name, member_traits=None):
+ return self._shape_resolver.get_shape_by_name(
+ shape_name, member_traits)
+
def shape_for_error_code(self, error_code):
return self._error_code_cache.get(error_code, None)
@@ -286,13 +286,13 @@ class ServiceModel(object):
error_code_cache[code] = error_shape
return error_code_cache
- def resolve_shape_ref(self, shape_ref):
- return self._shape_resolver.resolve_shape_ref(shape_ref)
-
- @CachedProperty
- def shape_names(self):
- return list(self._service_description.get('shapes', {}))
-
+ def resolve_shape_ref(self, shape_ref):
+ return self._shape_resolver.resolve_shape_ref(shape_ref)
+
+ @CachedProperty
+ def shape_names(self):
+ return list(self._service_description.get('shapes', {}))
+
@CachedProperty
def error_shapes(self):
error_shapes = []
@@ -302,40 +302,40 @@ class ServiceModel(object):
error_shapes.append(error_shape)
return error_shapes
- @instance_cache
- def operation_model(self, operation_name):
- try:
- model = self._service_description['operations'][operation_name]
- except KeyError:
- raise OperationNotFoundError(operation_name)
- return OperationModel(model, self, operation_name)
-
- @CachedProperty
- def documentation(self):
- return self._service_description.get('documentation', '')
-
- @CachedProperty
- def operation_names(self):
- return list(self._service_description.get('operations', []))
-
- @CachedProperty
- def service_name(self):
- """The name of the service.
-
- This defaults to the endpointPrefix defined in the service model.
-        However, this value can be overridden when a ``ServiceModel`` is
- created. If a service_name was not provided when the ``ServiceModel``
- was created and if there is no endpointPrefix defined in the
- service model, then an ``UndefinedModelAttributeError`` exception
- will be raised.
-
- """
- if self._service_name is not None:
- return self._service_name
- else:
- return self.endpoint_prefix
-
- @CachedProperty
+ @instance_cache
+ def operation_model(self, operation_name):
+ try:
+ model = self._service_description['operations'][operation_name]
+ except KeyError:
+ raise OperationNotFoundError(operation_name)
+ return OperationModel(model, self, operation_name)
+
+ @CachedProperty
+ def documentation(self):
+ return self._service_description.get('documentation', '')
+
+ @CachedProperty
+ def operation_names(self):
+ return list(self._service_description.get('operations', []))
+
+ @CachedProperty
+ def service_name(self):
+ """The name of the service.
+
+ This defaults to the endpointPrefix defined in the service model.
+        However, this value can be overridden when a ``ServiceModel`` is
+ created. If a service_name was not provided when the ``ServiceModel``
+ was created and if there is no endpointPrefix defined in the
+ service model, then an ``UndefinedModelAttributeError`` exception
+ will be raised.
+
+ """
+ if self._service_name is not None:
+ return self._service_name
+ else:
+ return self.endpoint_prefix
+
+ @CachedProperty
def service_id(self):
try:
return ServiceId(self._get_metadata_property('serviceId'))
@@ -345,29 +345,29 @@ class ServiceModel(object):
)
@CachedProperty
- def signing_name(self):
- """The name to use when computing signatures.
-
- If the model does not define a signing name, this
- value will be the endpoint prefix defined in the model.
- """
- signing_name = self.metadata.get('signingName')
- if signing_name is None:
- signing_name = self.endpoint_prefix
- return signing_name
-
- @CachedProperty
- def api_version(self):
- return self._get_metadata_property('apiVersion')
-
- @CachedProperty
- def protocol(self):
- return self._get_metadata_property('protocol')
-
- @CachedProperty
- def endpoint_prefix(self):
- return self._get_metadata_property('endpointPrefix')
-
+ def signing_name(self):
+ """The name to use when computing signatures.
+
+ If the model does not define a signing name, this
+ value will be the endpoint prefix defined in the model.
+ """
+ signing_name = self.metadata.get('signingName')
+ if signing_name is None:
+ signing_name = self.endpoint_prefix
+ return signing_name
+
+ @CachedProperty
+ def api_version(self):
+ return self._get_metadata_property('apiVersion')
+
+ @CachedProperty
+ def protocol(self):
+ return self._get_metadata_property('protocol')
+
+ @CachedProperty
+ def endpoint_prefix(self):
+ return self._get_metadata_property('endpointPrefix')
+
@CachedProperty
def endpoint_discovery_operation(self):
for operation in self.operation_names:
@@ -384,105 +384,105 @@ class ServiceModel(object):
return True
return False
- def _get_metadata_property(self, name):
- try:
- return self.metadata[name]
- except KeyError:
- raise UndefinedModelAttributeError(
+ def _get_metadata_property(self, name):
+ try:
+ return self.metadata[name]
+ except KeyError:
+ raise UndefinedModelAttributeError(
'"%s" not defined in the metadata of the model: %s' %
- (name, self))
-
- # Signature version is one of the rare properties
-    # that can be modified so a CachedProperty is not used here.
-
- @property
- def signature_version(self):
- if self._signature_version is NOT_SET:
- signature_version = self.metadata.get('signatureVersion')
- self._signature_version = signature_version
- return self._signature_version
-
- @signature_version.setter
- def signature_version(self, value):
- self._signature_version = value
-
+ (name, self))
+
+ # Signature version is one of the rare properties
+    # that can be modified so a CachedProperty is not used here.
+
+ @property
+ def signature_version(self):
+ if self._signature_version is NOT_SET:
+ signature_version = self.metadata.get('signatureVersion')
+ self._signature_version = signature_version
+ return self._signature_version
+
+ @signature_version.setter
+ def signature_version(self, value):
+ self._signature_version = value
+
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.service_name)
-
-
-
-class OperationModel(object):
- def __init__(self, operation_model, service_model, name=None):
- """
-
- :type operation_model: dict
- :param operation_model: The operation model. This comes from the
- service model, and is the value associated with the operation
- name in the service model (i.e ``model['operations'][op_name]``).
-
- :type service_model: botocore.model.ServiceModel
- :param service_model: The service model associated with the operation.
-
- :type name: string
- :param name: The operation name. This is the operation name exposed to
- the users of this model. This can potentially be different from
-            the "wire_name", which is the operation name that *must* be
- provided over the wire. For example, given::
-
- "CreateCloudFrontOriginAccessIdentity":{
- "name":"CreateCloudFrontOriginAccessIdentity2014_11_06",
- ...
- }
-
- The ``name`` would be ``CreateCloudFrontOriginAccessIdentity``,
- but the ``self.wire_name`` would be
- ``CreateCloudFrontOriginAccessIdentity2014_11_06``, which is the
- value we must send in the corresponding HTTP request.
-
- """
- self._operation_model = operation_model
- self._service_model = service_model
- self._api_name = name
- # Clients can access '.name' to get the operation name
-        # and '.metadata' to get the top level metadata of the service.
- self._wire_name = operation_model.get('name')
- self.metadata = service_model.metadata
- self.http = operation_model.get('http', {})
-
- @CachedProperty
- def name(self):
- if self._api_name is not None:
- return self._api_name
- else:
- return self.wire_name
-
- @property
- def wire_name(self):
- """The wire name of the operation.
-
- In many situations this is the same value as the
-        ``name`` value, but in some services, the operation name
-        exposed to the user is different from the operation name
-        we send across the wire (e.g. cloudfront).
-
- Any serialization code should use ``wire_name``.
-
- """
- return self._operation_model.get('name')
-
- @property
- def service_model(self):
- return self._service_model
-
- @CachedProperty
- def documentation(self):
- return self._operation_model.get('documentation', '')
-
- @CachedProperty
- def deprecated(self):
- return self._operation_model.get('deprecated', False)
-
- @CachedProperty
+
+
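+# Illustrative sketch (not part of botocore): constructing a ServiceModel
+# from a minimal hand-written description. Real descriptions normally come
+# from a Loader.
+def _example_service_model():
+    description = {
+        'metadata': {
+            'endpointPrefix': 'demo',
+            'apiVersion': '2015-03-01',
+            'protocol': 'json',
+            'serviceId': 'Demo',
+        },
+        'operations': {},
+        'shapes': {},
+    }
+    model = ServiceModel(description, service_name='demo')
+    return model.service_name, model.api_version, model.protocol
+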
+
+class OperationModel(object):
+ def __init__(self, operation_model, service_model, name=None):
+ """
+
+ :type operation_model: dict
+ :param operation_model: The operation model. This comes from the
+ service model, and is the value associated with the operation
+ name in the service model (i.e ``model['operations'][op_name]``).
+
+ :type service_model: botocore.model.ServiceModel
+ :param service_model: The service model associated with the operation.
+
+ :type name: string
+ :param name: The operation name. This is the operation name exposed to
+ the users of this model. This can potentially be different from
+            the "wire_name", which is the operation name that *must* be
+ provided over the wire. For example, given::
+
+ "CreateCloudFrontOriginAccessIdentity":{
+ "name":"CreateCloudFrontOriginAccessIdentity2014_11_06",
+ ...
+ }
+
+ The ``name`` would be ``CreateCloudFrontOriginAccessIdentity``,
+ but the ``self.wire_name`` would be
+ ``CreateCloudFrontOriginAccessIdentity2014_11_06``, which is the
+ value we must send in the corresponding HTTP request.
+
+ """
+ self._operation_model = operation_model
+ self._service_model = service_model
+ self._api_name = name
+ # Clients can access '.name' to get the operation name
+        # and '.metadata' to get the top level metadata of the service.
+ self._wire_name = operation_model.get('name')
+ self.metadata = service_model.metadata
+ self.http = operation_model.get('http', {})
+
+ @CachedProperty
+ def name(self):
+ if self._api_name is not None:
+ return self._api_name
+ else:
+ return self.wire_name
+
+ @property
+ def wire_name(self):
+ """The wire name of the operation.
+
+ In many situations this is the same value as the
+        ``name`` value, but in some services, the operation name
+        exposed to the user is different from the operation name
+        we send across the wire (e.g. cloudfront).
+
+ Any serialization code should use ``wire_name``.
+
+ """
+ return self._operation_model.get('name')
+
+ @property
+ def service_model(self):
+ return self._service_model
+
+ @CachedProperty
+ def documentation(self):
+ return self._operation_model.get('documentation', '')
+
+ @CachedProperty
+ def deprecated(self):
+ return self._operation_model.get('deprecated', False)
+
+ @CachedProperty
def endpoint_discovery(self):
# Explicit None default. An empty dictionary for this trait means it is
# enabled but not required to be used.
@@ -493,44 +493,44 @@ class OperationModel(object):
return self._operation_model.get('endpointoperation', False)
@CachedProperty
- def input_shape(self):
- if 'input' not in self._operation_model:
- # Some operations do not accept any input and do not define an
- # input shape.
- return None
- return self._service_model.resolve_shape_ref(
- self._operation_model['input'])
-
- @CachedProperty
- def output_shape(self):
- if 'output' not in self._operation_model:
- # Some operations do not define an output shape,
- # in which case we return None to indicate the
- # operation has no expected output.
- return None
- return self._service_model.resolve_shape_ref(
- self._operation_model['output'])
-
- @CachedProperty
- def idempotent_members(self):
- input_shape = self.input_shape
- if not input_shape:
- return []
-
- return [name for (name, shape) in input_shape.members.items()
- if 'idempotencyToken' in shape.metadata and
- shape.metadata['idempotencyToken']]
-
- @CachedProperty
- def auth_type(self):
- return self._operation_model.get('authtype')
-
- @CachedProperty
- def error_shapes(self):
- shapes = self._operation_model.get("errors", [])
- return list(self._service_model.resolve_shape_ref(s) for s in shapes)
-
- @CachedProperty
+ def input_shape(self):
+ if 'input' not in self._operation_model:
+ # Some operations do not accept any input and do not define an
+ # input shape.
+ return None
+ return self._service_model.resolve_shape_ref(
+ self._operation_model['input'])
+
+ @CachedProperty
+ def output_shape(self):
+ if 'output' not in self._operation_model:
+ # Some operations do not define an output shape,
+ # in which case we return None to indicate the
+ # operation has no expected output.
+ return None
+ return self._service_model.resolve_shape_ref(
+ self._operation_model['output'])
+
+ @CachedProperty
+ def idempotent_members(self):
+ input_shape = self.input_shape
+ if not input_shape:
+ return []
+
+ return [name for (name, shape) in input_shape.members.items()
+ if 'idempotencyToken' in shape.metadata and
+ shape.metadata['idempotencyToken']]
+
+ @CachedProperty
+ def auth_type(self):
+ return self._operation_model.get('authtype')
+
+ @CachedProperty
+ def error_shapes(self):
+ shapes = self._operation_model.get("errors", [])
+ return list(self._service_model.resolve_shape_ref(s) for s in shapes)
+
+ @CachedProperty
def endpoint(self):
return self._operation_model.get('endpoint')
@@ -562,269 +562,269 @@ class OperationModel(object):
return None
@CachedProperty
- def has_streaming_input(self):
- return self.get_streaming_input() is not None
-
- @CachedProperty
- def has_streaming_output(self):
- return self.get_streaming_output() is not None
-
- def get_streaming_input(self):
- return self._get_streaming_body(self.input_shape)
-
- def get_streaming_output(self):
- return self._get_streaming_body(self.output_shape)
-
- def _get_streaming_body(self, shape):
- """Returns the streaming member's shape if any; or None otherwise."""
- if shape is None:
- return None
- payload = shape.serialization.get('payload')
- if payload is not None:
- payload_shape = shape.members[payload]
- if payload_shape.type_name == 'blob':
- return payload_shape
- return None
-
- def __repr__(self):
- return '%s(name=%s)' % (self.__class__.__name__, self.name)
-
-
-class ShapeResolver(object):
- """Resolves shape references."""
-
- # Any type not in this mapping will default to the Shape class.
- SHAPE_CLASSES = {
- 'structure': StructureShape,
- 'list': ListShape,
- 'map': MapShape,
- 'string': StringShape
- }
-
- def __init__(self, shape_map):
- self._shape_map = shape_map
- self._shape_cache = {}
-
- def get_shape_by_name(self, shape_name, member_traits=None):
- try:
- shape_model = self._shape_map[shape_name]
- except KeyError:
- raise NoShapeFoundError(shape_name)
- try:
- shape_cls = self.SHAPE_CLASSES.get(shape_model['type'], Shape)
- except KeyError:
- raise InvalidShapeError("Shape is missing required key 'type': %s"
- % shape_model)
- if member_traits:
- shape_model = shape_model.copy()
- shape_model.update(member_traits)
- result = shape_cls(shape_name, shape_model, self)
- return result
-
- def resolve_shape_ref(self, shape_ref):
- # A shape_ref is a dict that has a 'shape' key that
- # refers to a shape name as well as any additional
- # member traits that are then merged over the shape
- # definition. For example:
- # {"shape": "StringType", "locationName": "Foobar"}
- if len(shape_ref) == 1 and 'shape' in shape_ref:
- # It's just a shape ref with no member traits, we can avoid
- # a .copy(). This is the common case so it's specifically
- # called out here.
- return self.get_shape_by_name(shape_ref['shape'])
- else:
- member_traits = shape_ref.copy()
- try:
- shape_name = member_traits.pop('shape')
- except KeyError:
- raise InvalidShapeReferenceError(
- "Invalid model, missing shape reference: %s" % shape_ref)
- return self.get_shape_by_name(shape_name, member_traits)
-
-
-class UnresolvableShapeMap(object):
- """A ShapeResolver that will throw ValueErrors when shapes are resolved.
- """
- def get_shape_by_name(self, shape_name, member_traits=None):
- raise ValueError("Attempted to lookup shape '%s', but no shape "
- "map was provided.")
-
- def resolve_shape_ref(self, shape_ref):
- raise ValueError("Attempted to resolve shape '%s', but no shape "
- "map was provided.")
-
-
-class DenormalizedStructureBuilder(object):
- """Build a StructureShape from a denormalized model.
-
- This is a convenience builder class that makes it easy to construct
- ``StructureShape``s based on a denormalized model.
-
- It will handle the details of creating unique shape names and creating
- the appropriate shape map needed by the ``StructureShape`` class.
-
- Example usage::
-
- builder = DenormalizedStructureBuilder()
- shape = builder.with_members({
- 'A': {
- 'type': 'structure',
- 'members': {
- 'B': {
- 'type': 'structure',
- 'members': {
- 'C': {
- 'type': 'string',
- }
- }
- }
- }
- }
- }).build_model()
- # ``shape`` is now an instance of botocore.model.StructureShape
-
- """
- def __init__(self, name=None):
- self._members = OrderedDict()
- self._name_generator = ShapeNameGenerator()
- if name is None:
- self.name = self._name_generator.new_shape_name('structure')
- else:
- self.name = name
-
- def with_members(self, members):
- """
-
- :type members: dict
- :param members: The denormalized members.
-
- :return: self
-
- """
- self._members = members
- return self
-
- def build_model(self):
- """Build the model based on the provided members.
-
- :rtype: botocore.model.StructureShape
- :return: The built StructureShape object.
-
- """
- shapes = OrderedDict()
- denormalized = {
- 'type': 'structure',
- 'members': self._members,
- }
- self._build_model(denormalized, shapes, self.name)
- resolver = ShapeResolver(shape_map=shapes)
- return StructureShape(shape_name=self.name,
- shape_model=shapes[self.name],
- shape_resolver=resolver)
-
- def _build_model(self, model, shapes, shape_name):
- if model['type'] == 'structure':
- shapes[shape_name] = self._build_structure(model, shapes)
- elif model['type'] == 'list':
- shapes[shape_name] = self._build_list(model, shapes)
- elif model['type'] == 'map':
- shapes[shape_name] = self._build_map(model, shapes)
- elif model['type'] in ['string', 'integer', 'boolean', 'blob', 'float',
- 'timestamp', 'long', 'double', 'char']:
- shapes[shape_name] = self._build_scalar(model)
- else:
- raise InvalidShapeError("Unknown shape type: %s" % model['type'])
-
- def _build_structure(self, model, shapes):
- members = OrderedDict()
- shape = self._build_initial_shape(model)
- shape['members'] = members
-
+ def has_streaming_input(self):
+ return self.get_streaming_input() is not None
+
+ @CachedProperty
+ def has_streaming_output(self):
+ return self.get_streaming_output() is not None
+
+ def get_streaming_input(self):
+ return self._get_streaming_body(self.input_shape)
+
+ def get_streaming_output(self):
+ return self._get_streaming_body(self.output_shape)
+
+ def _get_streaming_body(self, shape):
+ """Returns the streaming member's shape if any; or None otherwise."""
+ if shape is None:
+ return None
+ payload = shape.serialization.get('payload')
+ if payload is not None:
+ payload_shape = shape.members[payload]
+ if payload_shape.type_name == 'blob':
+ return payload_shape
+ return None
+
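A short sketch of the streaming check, assuming an S3-style ``GetObject`` operation whose output payload member ``Body`` is modeled as a ``blob``::

    op = service_model.operation_model('GetObject')
    op.has_streaming_input     # False; GetObject has no streaming input
    op.has_streaming_output    # True
    op.get_streaming_output()  # the blob shape backing the Body payload
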
+ def __repr__(self):
+ return '%s(name=%s)' % (self.__class__.__name__, self.name)
+
+
+class ShapeResolver(object):
+ """Resolves shape references."""
+
+ # Any type not in this mapping will default to the Shape class.
+ SHAPE_CLASSES = {
+ 'structure': StructureShape,
+ 'list': ListShape,
+ 'map': MapShape,
+ 'string': StringShape
+ }
+
+ def __init__(self, shape_map):
+ self._shape_map = shape_map
+ self._shape_cache = {}
+
+ def get_shape_by_name(self, shape_name, member_traits=None):
+ try:
+ shape_model = self._shape_map[shape_name]
+ except KeyError:
+ raise NoShapeFoundError(shape_name)
+ try:
+ shape_cls = self.SHAPE_CLASSES.get(shape_model['type'], Shape)
+ except KeyError:
+ raise InvalidShapeError("Shape is missing required key 'type': %s"
+ % shape_model)
+ if member_traits:
+ shape_model = shape_model.copy()
+ shape_model.update(member_traits)
+ result = shape_cls(shape_name, shape_model, self)
+ return result
+
+ def resolve_shape_ref(self, shape_ref):
+ # A shape_ref is a dict that has a 'shape' key that
+ # refers to a shape name as well as any additional
+ # member traits that are then merged over the shape
+ # definition. For example:
+ # {"shape": "StringType", "locationName": "Foobar"}
+ if len(shape_ref) == 1 and 'shape' in shape_ref:
+ # It's just a shape ref with no member traits, we can avoid
+ # a .copy(). This is the common case so it's specifically
+ # called out here.
+ return self.get_shape_by_name(shape_ref['shape'])
+ else:
+ member_traits = shape_ref.copy()
+ try:
+ shape_name = member_traits.pop('shape')
+ except KeyError:
+ raise InvalidShapeReferenceError(
+ "Invalid model, missing shape reference: %s" % shape_ref)
+ return self.get_shape_by_name(shape_name, member_traits)
+
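A small usage sketch with a hand-written shape map (shape names here are illustrative, not taken from any real service model)::

    from botocore.model import ShapeResolver

    shape_map = {
        'StringType': {'type': 'string'},
        'Config': {
            'type': 'structure',
            'members': {
                'Name': {'shape': 'StringType', 'locationName': 'name'},
            },
        },
    }
    resolver = ShapeResolver(shape_map)
    config = resolver.get_shape_by_name('Config')
    name_member = resolver.resolve_shape_ref(
        {'shape': 'StringType', 'locationName': 'name'})
    # name_member is a StringShape carrying the merged
    # locationName trait in its serialization.
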
+
+class UnresolvableShapeMap(object):
+ """A ShapeResolver that will throw ValueErrors when shapes are resolved.
+ """
+ def get_shape_by_name(self, shape_name, member_traits=None):
+ raise ValueError("Attempted to lookup shape '%s', but no shape "
+ "map was provided.")
+
+ def resolve_shape_ref(self, shape_ref):
+ raise ValueError("Attempted to resolve shape '%s', but no shape "
+ "map was provided.")
+
+
+class DenormalizedStructureBuilder(object):
+ """Build a StructureShape from a denormalized model.
+
+ This is a convenience builder class that makes it easy to construct
+ ``StructureShape``s based on a denormalized model.
+
+ It will handle the details of creating unique shape names and creating
+ the appropriate shape map needed by the ``StructureShape`` class.
+
+ Example usage::
+
+ builder = DenormalizedStructureBuilder()
+ shape = builder.with_members({
+ 'A': {
+ 'type': 'structure',
+ 'members': {
+ 'B': {
+ 'type': 'structure',
+ 'members': {
+ 'C': {
+ 'type': 'string',
+ }
+ }
+ }
+ }
+ }
+ }).build_model()
+ # ``shape`` is now an instance of botocore.model.StructureShape
+
+ """
+ def __init__(self, name=None):
+ self._members = OrderedDict()
+ self._name_generator = ShapeNameGenerator()
+ if name is None:
+ self.name = self._name_generator.new_shape_name('structure')
+ else:
+ self.name = name
+
+ def with_members(self, members):
+ """
+
+ :type members: dict
+ :param members: The denormalized members.
+
+ :return: self
+
+ """
+ self._members = members
+ return self
+
+ def build_model(self):
+ """Build the model based on the provided members.
+
+ :rtype: botocore.model.StructureShape
+ :return: The built StructureShape object.
+
+ """
+ shapes = OrderedDict()
+ denormalized = {
+ 'type': 'structure',
+ 'members': self._members,
+ }
+ self._build_model(denormalized, shapes, self.name)
+ resolver = ShapeResolver(shape_map=shapes)
+ return StructureShape(shape_name=self.name,
+ shape_model=shapes[self.name],
+ shape_resolver=resolver)
+
+ def _build_model(self, model, shapes, shape_name):
+ if model['type'] == 'structure':
+ shapes[shape_name] = self._build_structure(model, shapes)
+ elif model['type'] == 'list':
+ shapes[shape_name] = self._build_list(model, shapes)
+ elif model['type'] == 'map':
+ shapes[shape_name] = self._build_map(model, shapes)
+ elif model['type'] in ['string', 'integer', 'boolean', 'blob', 'float',
+ 'timestamp', 'long', 'double', 'char']:
+ shapes[shape_name] = self._build_scalar(model)
+ else:
+ raise InvalidShapeError("Unknown shape type: %s" % model['type'])
+
+ def _build_structure(self, model, shapes):
+ members = OrderedDict()
+ shape = self._build_initial_shape(model)
+ shape['members'] = members
+
for name, member_model in model.get('members', OrderedDict()).items():
- member_shape_name = self._get_shape_name(member_model)
- members[name] = {'shape': member_shape_name}
- self._build_model(member_model, shapes, member_shape_name)
- return shape
-
- def _build_list(self, model, shapes):
- member_shape_name = self._get_shape_name(model)
- shape = self._build_initial_shape(model)
- shape['member'] = {'shape': member_shape_name}
- self._build_model(model['member'], shapes, member_shape_name)
- return shape
-
- def _build_map(self, model, shapes):
- key_shape_name = self._get_shape_name(model['key'])
- value_shape_name = self._get_shape_name(model['value'])
- shape = self._build_initial_shape(model)
- shape['key'] = {'shape': key_shape_name}
- shape['value'] = {'shape': value_shape_name}
- self._build_model(model['key'], shapes, key_shape_name)
- self._build_model(model['value'], shapes, value_shape_name)
- return shape
-
- def _build_initial_shape(self, model):
- shape = {
- 'type': model['type'],
- }
- if 'documentation' in model:
- shape['documentation'] = model['documentation']
+ member_shape_name = self._get_shape_name(member_model)
+ members[name] = {'shape': member_shape_name}
+ self._build_model(member_model, shapes, member_shape_name)
+ return shape
+
+ def _build_list(self, model, shapes):
+ member_shape_name = self._get_shape_name(model)
+ shape = self._build_initial_shape(model)
+ shape['member'] = {'shape': member_shape_name}
+ self._build_model(model['member'], shapes, member_shape_name)
+ return shape
+
+ def _build_map(self, model, shapes):
+ key_shape_name = self._get_shape_name(model['key'])
+ value_shape_name = self._get_shape_name(model['value'])
+ shape = self._build_initial_shape(model)
+ shape['key'] = {'shape': key_shape_name}
+ shape['value'] = {'shape': value_shape_name}
+ self._build_model(model['key'], shapes, key_shape_name)
+ self._build_model(model['value'], shapes, value_shape_name)
+ return shape
+
+ def _build_initial_shape(self, model):
+ shape = {
+ 'type': model['type'],
+ }
+ if 'documentation' in model:
+ shape['documentation'] = model['documentation']
for attr in Shape.METADATA_ATTRS:
if attr in model:
shape[attr] = model[attr]
- return shape
-
- def _build_scalar(self, model):
- return self._build_initial_shape(model)
-
- def _get_shape_name(self, model):
- if 'shape_name' in model:
- return model['shape_name']
- else:
- return self._name_generator.new_shape_name(model['type'])
-
-
-class ShapeNameGenerator(object):
- """Generate unique shape names for a type.
-
- This class can be used in conjunction with the DenormalizedStructureBuilder
- to generate unique shape names for a given type.
-
- """
- def __init__(self):
- self._name_cache = defaultdict(int)
-
- def new_shape_name(self, type_name):
- """Generate a unique shape name.
-
- This method will guarantee a unique shape name each time it is
- called with the same type.
-
- ::
-
- >>> s = ShapeNameGenerator()
- >>> s.new_shape_name('structure')
- 'StructureType1'
- >>> s.new_shape_name('structure')
- 'StructureType2'
- >>> s.new_shape_name('list')
- 'ListType1'
- >>> s.new_shape_name('list')
- 'ListType2'
-
-
- :type type_name: string
- :param type_name: The type name (structure, list, map, string, etc.)
-
- :rtype: string
- :return: A unique shape name for the given type
-
- """
- self._name_cache[type_name] += 1
- current_index = self._name_cache[type_name]
- return '%sType%s' % (type_name.capitalize(),
- current_index)
+ return shape
+
+ def _build_scalar(self, model):
+ return self._build_initial_shape(model)
+
+ def _get_shape_name(self, model):
+ if 'shape_name' in model:
+ return model['shape_name']
+ else:
+ return self._name_generator.new_shape_name(model['type'])
+
+
+class ShapeNameGenerator(object):
+ """Generate unique shape names for a type.
+
+ This class can be used in conjunction with the DenormalizedStructureBuilder
+ to generate unique shape names for a given type.
+
+ """
+ def __init__(self):
+ self._name_cache = defaultdict(int)
+
+ def new_shape_name(self, type_name):
+ """Generate a unique shape name.
+
+ This method will guarantee a unique shape name each time it is
+ called with the same type.
+
+ ::
+
+ >>> s = ShapeNameGenerator()
+ >>> s.new_shape_name('structure')
+ 'StructureType1'
+ >>> s.new_shape_name('structure')
+ 'StructureType2'
+ >>> s.new_shape_name('list')
+ 'ListType1'
+ >>> s.new_shape_name('list')
+ 'ListType2'
+
+
+ :type type_name: string
+ :param type_name: The type name (structure, list, map, string, etc.)
+
+ :rtype: string
+ :return: A unique shape name for the given type
+
+ """
+ self._name_cache[type_name] += 1
+ current_index = self._name_cache[type_name]
+ return '%sType%s' % (type_name.capitalize(),
+ current_index)
diff --git a/contrib/python/botocore/botocore/paginate.py b/contrib/python/botocore/botocore/paginate.py
index 5275d1a93f..b08c7ed8b7 100644
--- a/contrib/python/botocore/botocore/paginate.py
+++ b/contrib/python/botocore/botocore/paginate.py
@@ -1,677 +1,677 @@
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-from itertools import tee
-
-from botocore.compat import six
-
-import jmespath
-import json
-import base64
-import logging
-from botocore.exceptions import PaginationError
-from botocore.compat import zip
-from botocore.utils import set_value_from_jmespath, merge_dicts
-
-
-log = logging.getLogger(__name__)
-
-
-class TokenEncoder(object):
- """Encodes dictionaries into opaque strings.
-
- This is, for the most part, json dumps + base64 encoding, but it also supports
- having bytes in the dictionary in addition to the types that json can
- handle by default.
-
- This is intended for use in encoding pagination tokens, which in some
- cases can be complex structures and / or contain bytes.
- """
-
- def encode(self, token):
- """Encodes a dictionary to an opaque string.
-
- :type token: dict
- :param token: A dictionary containing pagination information,
- particularly the service pagination token(s) but also other boto
- metadata.
-
- :rtype: str
- :returns: An opaque string
- """
- try:
- # Try just using json dumps first to avoid having to traverse
- # and encode the dict. In 99.9999% of cases this will work.
- json_string = json.dumps(token)
- except (TypeError, UnicodeDecodeError):
- # If normal dumping failed, go through and base64 encode all bytes.
- encoded_token, encoded_keys = self._encode(token, [])
-
- # Save the list of all the encoded key paths. We can safely
- # assume that no service will ever use this key.
- encoded_token['boto_encoded_keys'] = encoded_keys
-
- # Now that the bytes are all encoded, dump the json.
- json_string = json.dumps(encoded_token)
-
- # base64 encode the json string to produce an opaque token string.
- return base64.b64encode(json_string.encode('utf-8')).decode('utf-8')
-
- def _encode(self, data, path):
- """Encode bytes in given data, keeping track of the path traversed."""
- if isinstance(data, dict):
- return self._encode_dict(data, path)
- elif isinstance(data, list):
- return self._encode_list(data, path)
- elif isinstance(data, six.binary_type):
- return self._encode_bytes(data, path)
- else:
- return data, []
-
- def _encode_list(self, data, path):
- """Encode any bytes in a list, noting the index of what is encoded."""
- new_data = []
- encoded = []
- for i, value in enumerate(data):
- new_path = path + [i]
- new_value, new_encoded = self._encode(value, new_path)
- new_data.append(new_value)
- encoded.extend(new_encoded)
- return new_data, encoded
-
- def _encode_dict(self, data, path):
- """Encode any bytes in a dict, noting the index of what is encoded."""
- new_data = {}
- encoded = []
- for key, value in data.items():
- new_path = path + [key]
- new_value, new_encoded = self._encode(value, new_path)
- new_data[key] = new_value
- encoded.extend(new_encoded)
- return new_data, encoded
-
- def _encode_bytes(self, data, path):
- """Base64 encode a byte string."""
- return base64.b64encode(data).decode('utf-8'), [path]
-
-
-class TokenDecoder(object):
- """Decodes token strings back into dictionaries.
-
- This performs the inverse operation to the TokenEncoder, accepting
- opaque strings and decoding them into a usable form.
- """
-
- def decode(self, token):
- """Decodes an opaque string to a dictionary.
-
- :type token: str
- :param token: A token string given by the botocore pagination
- interface.
-
- :rtype: dict
- :returns: A dictionary containing pagination information,
- particularly the service pagination token(s) but also other boto
- metadata.
- """
- json_string = base64.b64decode(token.encode('utf-8')).decode('utf-8')
- decoded_token = json.loads(json_string)
-
- # Remove the encoding metadata as it is read since it will no longer
- # be needed.
- encoded_keys = decoded_token.pop('boto_encoded_keys', None)
- if encoded_keys is None:
- return decoded_token
- else:
- return self._decode(decoded_token, encoded_keys)
-
- def _decode(self, token, encoded_keys):
- """Find each encoded value and decode it."""
- for key in encoded_keys:
- encoded = self._path_get(token, key)
- decoded = base64.b64decode(encoded.encode('utf-8'))
- self._path_set(token, key, decoded)
- return token
-
- def _path_get(self, data, path):
- """Return the nested data at the given path.
-
- For instance:
- data = {'foo': ['bar', 'baz']}
- path = ['foo', 0]
- ==> 'bar'
- """
- # jmespath isn't used here because it would be difficult to actually
- # create the jmespath query when taking all of the unknowns of key
- # structure into account. Gross though this is, it is simple and not
- # very error prone.
- d = data
- for step in path:
- d = d[step]
- return d
-
- def _path_set(self, data, path, value):
- """Set the value of a key in the given data.
-
- Example:
- data = {'foo': ['bar', 'baz']}
- path = ['foo', 1]
- value = 'bin'
- ==> data = {'foo': ['bar', 'bin']}
- """
- container = self._path_get(data, path[:-1])
- container[path[-1]] = value
-
-
-class PaginatorModel(object):
- def __init__(self, paginator_config):
- self._paginator_config = paginator_config['pagination']
-
- def get_paginator(self, operation_name):
- try:
- single_paginator_config = self._paginator_config[operation_name]
- except KeyError:
- raise ValueError("Paginator for operation does not exist: %s"
- % operation_name)
- return single_paginator_config
-
-
-class PageIterator(object):
- def __init__(self, method, input_token, output_token, more_results,
- result_keys, non_aggregate_keys, limit_key, max_items,
- starting_token, page_size, op_kwargs):
- self._method = method
- self._input_token = input_token
- self._output_token = output_token
- self._more_results = more_results
- self._result_keys = result_keys
- self._max_items = max_items
- self._limit_key = limit_key
- self._starting_token = starting_token
- self._page_size = page_size
- self._op_kwargs = op_kwargs
- self._resume_token = None
- self._non_aggregate_key_exprs = non_aggregate_keys
- self._non_aggregate_part = {}
- self._token_encoder = TokenEncoder()
- self._token_decoder = TokenDecoder()
-
- @property
- def result_keys(self):
- return self._result_keys
-
- @property
- def resume_token(self):
- """Token to specify to resume pagination."""
- return self._resume_token
-
- @resume_token.setter
- def resume_token(self, value):
- if not isinstance(value, dict):
- raise ValueError("Bad starting token: %s" % value)
-
- if 'boto_truncate_amount' in value:
- token_keys = sorted(self._input_token + ['boto_truncate_amount'])
- else:
- token_keys = sorted(self._input_token)
- dict_keys = sorted(value.keys())
-
- if token_keys == dict_keys:
- self._resume_token = self._token_encoder.encode(value)
- else:
- raise ValueError("Bad starting token: %s" % value)
-
- @property
- def non_aggregate_part(self):
- return self._non_aggregate_part
-
- def __iter__(self):
- current_kwargs = self._op_kwargs
- previous_next_token = None
- next_token = dict((key, None) for key in self._input_token)
- if self._starting_token is not None:
- # If the starting token exists, populate the next_token with the
- # values inside it. This ensures that we have the service's
- # pagination token on hand if we need to truncate after the
- # first response.
- next_token = self._parse_starting_token()[0]
- # The number of items from result_key we've seen so far.
- total_items = 0
- first_request = True
- primary_result_key = self.result_keys[0]
- starting_truncation = 0
- self._inject_starting_params(current_kwargs)
- while True:
- response = self._make_request(current_kwargs)
- parsed = self._extract_parsed_response(response)
- if first_request:
- # The first request is handled differently. We could
- # possibly have a resume/starting token that tells us where
- # to index into the retrieved page.
- if self._starting_token is not None:
- starting_truncation = self._handle_first_request(
- parsed, primary_result_key, starting_truncation)
- first_request = False
- self._record_non_aggregate_key_values(parsed)
- else:
- # If this isn't the first request, we have already sliced into
- # the first request and had to make additional requests after.
- # We no longer need to add this to truncation.
- starting_truncation = 0
- current_response = primary_result_key.search(parsed)
- if current_response is None:
- current_response = []
- num_current_response = len(current_response)
- truncate_amount = 0
- if self._max_items is not None:
- truncate_amount = (total_items + num_current_response) \
- - self._max_items
- if truncate_amount > 0:
- self._truncate_response(parsed, primary_result_key,
- truncate_amount, starting_truncation,
- next_token)
- yield response
- break
- else:
- yield response
- total_items += num_current_response
- next_token = self._get_next_token(parsed)
- if all(t is None for t in next_token.values()):
- break
- if self._max_items is not None and \
- total_items == self._max_items:
- # We're on a page boundary so we can set the current
- # next token to be the resume token.
- self.resume_token = next_token
- break
- if previous_next_token is not None and \
- previous_next_token == next_token:
- message = ("The same next token was received "
- "twice: %s" % next_token)
- raise PaginationError(message=message)
- self._inject_token_into_kwargs(current_kwargs, next_token)
- previous_next_token = next_token
-
- def search(self, expression):
- """Applies a JMESPath expression to a paginator
-
- Each page of results is searched using the provided JMESPath
- expression. If the result is not a list, it is yielded
- directly. If the result is a list, each element in the result
- is yielded individually (essentially implementing a flatmap in
- which the JMESPath search is the mapping function).
-
- :type expression: str
- :param expression: JMESPath expression to apply to each page.
-
- :return: An iterator that yields the individual
- elements produced by applying the JMESPath expression to
- each page of results.
- """
- compiled = jmespath.compile(expression)
- for page in self:
- results = compiled.search(page)
- if isinstance(results, list):
- for element in results:
- yield element
- else:
- # Yield result directly if it is not a list.
- yield results
-
- def _make_request(self, current_kwargs):
- return self._method(**current_kwargs)
-
- def _extract_parsed_response(self, response):
- return response
-
- def _record_non_aggregate_key_values(self, response):
- non_aggregate_keys = {}
- for expression in self._non_aggregate_key_exprs:
- result = expression.search(response)
- set_value_from_jmespath(non_aggregate_keys,
- expression.expression,
- result)
- self._non_aggregate_part = non_aggregate_keys
-
- def _inject_starting_params(self, op_kwargs):
- # If the user has specified a starting token we need to
- # inject that into the operation's kwargs.
- if self._starting_token is not None:
- # Don't need to do anything special if there is no starting
- # token specified.
- next_token = self._parse_starting_token()[0]
- self._inject_token_into_kwargs(op_kwargs, next_token)
- if self._page_size is not None:
- # Pass the page size as the parameter name for limiting
- # page size, also known as the limit_key.
- op_kwargs[self._limit_key] = self._page_size
-
- def _inject_token_into_kwargs(self, op_kwargs, next_token):
- for name, token in next_token.items():
- if (token is not None) and (token != 'None'):
- op_kwargs[name] = token
- elif name in op_kwargs:
- del op_kwargs[name]
-
- def _handle_first_request(self, parsed, primary_result_key,
- starting_truncation):
- # If the payload is an array or string, we need to slice into it
- # and only return the truncated amount.
- starting_truncation = self._parse_starting_token()[1]
- all_data = primary_result_key.search(parsed)
- if isinstance(all_data, (list, six.string_types)):
- data = all_data[starting_truncation:]
- else:
- data = None
- set_value_from_jmespath(
- parsed,
- primary_result_key.expression,
- data
- )
- # We also need to truncate any secondary result keys
- # because they were not truncated in the previous last
- # response.
- for token in self.result_keys:
- if token == primary_result_key:
- continue
- sample = token.search(parsed)
- if isinstance(sample, list):
- empty_value = []
- elif isinstance(sample, six.string_types):
- empty_value = ''
- elif isinstance(sample, (int, float)):
- empty_value = 0
- else:
- empty_value = None
- set_value_from_jmespath(parsed, token.expression, empty_value)
- return starting_truncation
-
- def _truncate_response(self, parsed, primary_result_key, truncate_amount,
- starting_truncation, next_token):
- original = primary_result_key.search(parsed)
- if original is None:
- original = []
- amount_to_keep = len(original) - truncate_amount
- truncated = original[:amount_to_keep]
- set_value_from_jmespath(
- parsed,
- primary_result_key.expression,
- truncated
- )
- # The issue here is that even though we know how much we've truncated
- # we need to account for this globally including any starting
- # left truncation. For example:
- # Raw response: [0,1,2,3]
- # Starting index: 1
- # Max items: 1
- # Starting left truncation: [1, 2, 3]
- # End right truncation for max items: [1]
- # However, even though we only kept 1, this is post
- # left truncation so the next starting index should be 2, not 1
- # (left_truncation + amount_to_keep).
- next_token['boto_truncate_amount'] = \
- amount_to_keep + starting_truncation
- self.resume_token = next_token
-
- def _get_next_token(self, parsed):
- if self._more_results is not None:
- if not self._more_results.search(parsed):
- return {}
- next_tokens = {}
- for output_token, input_key in \
- zip(self._output_token, self._input_token):
- next_token = output_token.search(parsed)
- # We do not want to include any empty strings as actual tokens.
- # Treat them as None.
- if next_token:
- next_tokens[input_key] = next_token
- else:
- next_tokens[input_key] = None
- return next_tokens
-
- def result_key_iters(self):
- teed_results = tee(self, len(self.result_keys))
- return [ResultKeyIterator(i, result_key) for i, result_key
- in zip(teed_results, self.result_keys)]
-
- def build_full_result(self):
- complete_result = {}
- for response in self:
- page = response
- # We want to try to catch operation object pagination
- # and format correctly for those. They come in the form
- # of a tuple of two elements: (http_response, parsed_response).
- # We want the parsed_response as that is what the page iterator
- # uses. We can remove it though once operation objects are removed.
- if isinstance(response, tuple) and len(response) == 2:
- page = response[1]
- # We're incrementally building the full response page
- # by page. For each page in the response we need to
- # inject the necessary components from the page
- # into the complete_result.
- for result_expression in self.result_keys:
- # In order to incrementally update a result key
- # we need to search the existing value from complete_result,
- # then we need to search the _current_ page for the
- # current result key value. Then we append the current
- # value onto the existing value, and re-set that value
- # as the new value.
- result_value = result_expression.search(page)
- if result_value is None:
- continue
- existing_value = result_expression.search(complete_result)
- if existing_value is None:
- # Set the initial result
- set_value_from_jmespath(
- complete_result, result_expression.expression,
- result_value)
- continue
- # Now both result_value and existing_value contain something
- if isinstance(result_value, list):
- existing_value.extend(result_value)
- elif isinstance(result_value, (int, float, six.string_types)):
- # Modify the existing result with the sum or concatenation
- set_value_from_jmespath(
- complete_result, result_expression.expression,
- existing_value + result_value)
- merge_dicts(complete_result, self.non_aggregate_part)
- if self.resume_token is not None:
- complete_result['NextToken'] = self.resume_token
- return complete_result
-
- def _parse_starting_token(self):
- if self._starting_token is None:
- return None
-
- # The starting token is a dict passed as a base64 encoded string.
- next_token = self._starting_token
- try:
- next_token = self._token_decoder.decode(next_token)
- index = 0
- if 'boto_truncate_amount' in next_token:
- index = next_token.get('boto_truncate_amount')
- del next_token['boto_truncate_amount']
- except (ValueError, TypeError):
- next_token, index = self._parse_starting_token_deprecated()
- return next_token, index
-
- def _parse_starting_token_deprecated(self):
- """
- This handles parsing of old style starting tokens, and attempts to
- coerce them into the new style.
- """
- log.debug("Attempting to fall back to old starting token parser. For "
- "token: %s" % self._starting_token)
- if self._starting_token is None:
- return None
-
- parts = self._starting_token.split('___')
- next_token = []
- index = 0
- if len(parts) == len(self._input_token) + 1:
- try:
- index = int(parts.pop())
- except ValueError:
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+from itertools import tee
+
+from botocore.compat import six
+
+import jmespath
+import json
+import base64
+import logging
+from botocore.exceptions import PaginationError
+from botocore.compat import zip
+from botocore.utils import set_value_from_jmespath, merge_dicts
+
+
+log = logging.getLogger(__name__)
+
+
+class TokenEncoder(object):
+ """Encodes dictionaries into opaque strings.
+
+ This is, for the most part, json dumps + base64 encoding, but it also supports
+ having bytes in the dictionary in addition to the types that json can
+ handle by default.
+
+ This is intended for use in encoding pagination tokens, which in some
+ cases can be complex structures and / or contain bytes.
+ """
+
+ def encode(self, token):
+ """Encodes a dictionary to an opaque string.
+
+ :type token: dict
+ :param token: A dictionary containing pagination information,
+ particularly the service pagination token(s) but also other boto
+ metadata.
+
+ :rtype: str
+ :returns: An opaque string
+ """
+ try:
+ # Try just using json dumps first to avoid having to traverse
+ # and encode the dict. In 99.9999% of cases this will work.
+ json_string = json.dumps(token)
+ except (TypeError, UnicodeDecodeError):
+ # If normal dumping failed, go through and base64 encode all bytes.
+ encoded_token, encoded_keys = self._encode(token, [])
+
+ # Save the list of all the encoded key paths. We can safely
+ # assume that no service will ever use this key.
+ encoded_token['boto_encoded_keys'] = encoded_keys
+
+ # Now that the bytes are all encoded, dump the json.
+ json_string = json.dumps(encoded_token)
+
+ # base64 encode the json string to produce an opaque token string.
+ return base64.b64encode(json_string.encode('utf-8')).decode('utf-8')
+
+ def _encode(self, data, path):
+ """Encode bytes in given data, keeping track of the path traversed."""
+ if isinstance(data, dict):
+ return self._encode_dict(data, path)
+ elif isinstance(data, list):
+ return self._encode_list(data, path)
+ elif isinstance(data, six.binary_type):
+ return self._encode_bytes(data, path)
+ else:
+ return data, []
+
+ def _encode_list(self, data, path):
+ """Encode any bytes in a list, noting the index of what is encoded."""
+ new_data = []
+ encoded = []
+ for i, value in enumerate(data):
+ new_path = path + [i]
+ new_value, new_encoded = self._encode(value, new_path)
+ new_data.append(new_value)
+ encoded.extend(new_encoded)
+ return new_data, encoded
+
+ def _encode_dict(self, data, path):
+ """Encode any bytes in a dict, noting the index of what is encoded."""
+ new_data = {}
+ encoded = []
+ for key, value in data.items():
+ new_path = path + [key]
+ new_value, new_encoded = self._encode(value, new_path)
+ new_data[key] = new_value
+ encoded.extend(new_encoded)
+ return new_data, encoded
+
+ def _encode_bytes(self, data, path):
+ """Base64 encode a byte string."""
+ return base64.b64encode(data).decode('utf-8'), [path]
+
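A hedged sketch of the fallback path, using a made-up token that contains raw bytes::

    encoder = TokenEncoder()
    token = encoder.encode({'ExclusiveStartKey': {'k': b'\x00\x01'}})
    # json.dumps() cannot serialize the bytes value (TypeError under
    # Python 3), so the encoder base64 encodes it in place, records its
    # path under 'boto_encoded_keys', and only then produces the opaque
    # base64 token string.
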
+
+class TokenDecoder(object):
+ """Decodes token strings back into dictionaries.
+
+ This performs the inverse operation to the TokenEncoder, accepting
+ opaque strings and decoding them into a usable form.
+ """
+
+ def decode(self, token):
+ """Decodes an opaque string to a dictionary.
+
+ :type token: str
+ :param token: A token string given by the botocore pagination
+ interface.
+
+ :rtype: dict
+ :returns: A dictionary containing pagination information,
+ particularly the service pagination token(s) but also other boto
+ metadata.
+ """
+ json_string = base64.b64decode(token.encode('utf-8')).decode('utf-8')
+ decoded_token = json.loads(json_string)
+
+ # Remove the encoding metadata as it is read since it will no longer
+ # be needed.
+ encoded_keys = decoded_token.pop('boto_encoded_keys', None)
+ if encoded_keys is None:
+ return decoded_token
+ else:
+ return self._decode(decoded_token, encoded_keys)
+
+ def _decode(self, token, encoded_keys):
+ """Find each encoded value and decode it."""
+ for key in encoded_keys:
+ encoded = self._path_get(token, key)
+ decoded = base64.b64decode(encoded.encode('utf-8'))
+ self._path_set(token, key, decoded)
+ return token
+
+ def _path_get(self, data, path):
+ """Return the nested data at the given path.
+
+ For instance:
+ data = {'foo': ['bar', 'baz']}
+ path = ['foo', 0]
+ ==> 'bar'
+ """
+ # jmespath isn't used here because it would be difficult to actually
+ # create the jmespath query when taking all of the unknowns of key
+ # structure into account. Gross though this is, it is simple and not
+ # very error prone.
+ d = data
+ for step in path:
+ d = d[step]
+ return d
+
+ def _path_set(self, data, path, value):
+ """Set the value of a key in the given data.
+
+ Example:
+ data = {'foo': ['bar', 'baz']}
+ path = ['foo', 1]
+ value = 'bin'
+ ==> data = {'foo': ['bar', 'bin']}
+ """
+ container = self._path_get(data, path[:-1])
+ container[path[-1]] = value
+
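And the matching decode, which restores the bytes recorded by the encoder sketch above::

    decoder = TokenDecoder()
    decoder.decode(token)
    # {'ExclusiveStartKey': {'k': b'\x00\x01'}}
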
+
+class PaginatorModel(object):
+ def __init__(self, paginator_config):
+ self._paginator_config = paginator_config['pagination']
+
+ def get_paginator(self, operation_name):
+ try:
+ single_paginator_config = self._paginator_config[operation_name]
+ except KeyError:
+ raise ValueError("Paginator for operation does not exist: %s"
+ % operation_name)
+ return single_paginator_config
+
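For example, with a paginators-1.json style document (the ListObjects entry below is abbreviated and assumed, not copied from the real model)::

    model = PaginatorModel({
        'pagination': {
            'ListObjects': {
                'input_token': 'Marker',
                'output_token': 'NextMarker',
                'limit_key': 'MaxKeys',
                'result_key': 'Contents',
            },
        },
    })
    config = model.get_paginator('ListObjects')
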
+
+class PageIterator(object):
+ def __init__(self, method, input_token, output_token, more_results,
+ result_keys, non_aggregate_keys, limit_key, max_items,
+ starting_token, page_size, op_kwargs):
+ self._method = method
+ self._input_token = input_token
+ self._output_token = output_token
+ self._more_results = more_results
+ self._result_keys = result_keys
+ self._max_items = max_items
+ self._limit_key = limit_key
+ self._starting_token = starting_token
+ self._page_size = page_size
+ self._op_kwargs = op_kwargs
+ self._resume_token = None
+ self._non_aggregate_key_exprs = non_aggregate_keys
+ self._non_aggregate_part = {}
+ self._token_encoder = TokenEncoder()
+ self._token_decoder = TokenDecoder()
+
+ @property
+ def result_keys(self):
+ return self._result_keys
+
+ @property
+ def resume_token(self):
+ """Token to specify to resume pagination."""
+ return self._resume_token
+
+ @resume_token.setter
+ def resume_token(self, value):
+ if not isinstance(value, dict):
+ raise ValueError("Bad starting token: %s" % value)
+
+ if 'boto_truncate_amount' in value:
+ token_keys = sorted(self._input_token + ['boto_truncate_amount'])
+ else:
+ token_keys = sorted(self._input_token)
+ dict_keys = sorted(value.keys())
+
+ if token_keys == dict_keys:
+ self._resume_token = self._token_encoder.encode(value)
+ else:
+ raise ValueError("Bad starting token: %s" % value)
+
+ @property
+ def non_aggregate_part(self):
+ return self._non_aggregate_part
+
+ def __iter__(self):
+ current_kwargs = self._op_kwargs
+ previous_next_token = None
+ next_token = dict((key, None) for key in self._input_token)
+ if self._starting_token is not None:
+ # If the starting token exists, populate the next_token with the
+ # values inside it. This ensures that we have the service's
+ # pagination token on hand if we need to truncate after the
+ # first response.
+ next_token = self._parse_starting_token()[0]
+ # The number of items from result_key we've seen so far.
+ total_items = 0
+ first_request = True
+ primary_result_key = self.result_keys[0]
+ starting_truncation = 0
+ self._inject_starting_params(current_kwargs)
+ while True:
+ response = self._make_request(current_kwargs)
+ parsed = self._extract_parsed_response(response)
+ if first_request:
+ # The first request is handled differently. We could
+ # possibly have a resume/starting token that tells us where
+ # to index into the retrieved page.
+ if self._starting_token is not None:
+ starting_truncation = self._handle_first_request(
+ parsed, primary_result_key, starting_truncation)
+ first_request = False
+ self._record_non_aggregate_key_values(parsed)
+ else:
+ # If this isn't the first request, we have already sliced into
+ # the first request and had to make additional requests after.
+ # We no longer need to add this to truncation.
+ starting_truncation = 0
+ current_response = primary_result_key.search(parsed)
+ if current_response is None:
+ current_response = []
+ num_current_response = len(current_response)
+ truncate_amount = 0
+ if self._max_items is not None:
+ truncate_amount = (total_items + num_current_response) \
+ - self._max_items
+ if truncate_amount > 0:
+ self._truncate_response(parsed, primary_result_key,
+ truncate_amount, starting_truncation,
+ next_token)
+ yield response
+ break
+ else:
+ yield response
+ total_items += num_current_response
+ next_token = self._get_next_token(parsed)
+ if all(t is None for t in next_token.values()):
+ break
+ if self._max_items is not None and \
+ total_items == self._max_items:
+ # We're on a page boundary so we can set the current
+ # next token to be the resume token.
+ self.resume_token = next_token
+ break
+ if previous_next_token is not None and \
+ previous_next_token == next_token:
+ message = ("The same next token was received "
+ "twice: %s" % next_token)
+ raise PaginationError(message=message)
+ self._inject_token_into_kwargs(current_kwargs, next_token)
+ previous_next_token = next_token
+
+ def search(self, expression):
+ """Applies a JMESPath expression to a paginator
+
+ Each page of results is searched using the provided JMESPath
+ expression. If the result is not a list, it is yielded
+ directly. If the result is a list, each element in the result
+ is yielded individually (essentially implementing a flatmap in
+ which the JMESPath search is the mapping function).
+
+ :type expression: str
+ :param expression: JMESPath expression to apply to each page.
+
+ :return: An iterator that yields the individual
+ elements produced by applying the JMESPath expression to
+ each page of results.
+ """
+ compiled = jmespath.compile(expression)
+ for page in self:
+ results = compiled.search(page)
+ if isinstance(results, list):
+ for element in results:
+ yield element
+ else:
+ # Yield result directly if it is not a list.
+ yield results
+
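A usage sketch, assuming an S3-style client (the bucket name and expression are illustrative)::

    paginator = client.get_paginator('list_objects')
    page_iterator = paginator.paginate(Bucket='my-bucket')
    for key in page_iterator.search('Contents[].Key'):
        print(key)
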
+ def _make_request(self, current_kwargs):
+ return self._method(**current_kwargs)
+
+ def _extract_parsed_response(self, response):
+ return response
+
+ def _record_non_aggregate_key_values(self, response):
+ non_aggregate_keys = {}
+ for expression in self._non_aggregate_key_exprs:
+ result = expression.search(response)
+ set_value_from_jmespath(non_aggregate_keys,
+ expression.expression,
+ result)
+ self._non_aggregate_part = non_aggregate_keys
+
+ def _inject_starting_params(self, op_kwargs):
+ # If the user has specified a starting token we need to
+ # inject that into the operation's kwargs.
+ if self._starting_token is not None:
+ # Don't need to do anything special if there is no starting
+ # token specified.
+ next_token = self._parse_starting_token()[0]
+ self._inject_token_into_kwargs(op_kwargs, next_token)
+ if self._page_size is not None:
+ # Pass the page size as the parameter name for limiting
+ # page size, also known as the limit_key.
+ op_kwargs[self._limit_key] = self._page_size
+
+ def _inject_token_into_kwargs(self, op_kwargs, next_token):
+ for name, token in next_token.items():
+ if (token is not None) and (token != 'None'):
+ op_kwargs[name] = token
+ elif name in op_kwargs:
+ del op_kwargs[name]
+
+ def _handle_first_request(self, parsed, primary_result_key,
+ starting_truncation):
+ # If the payload is an array or string, we need to slice into it
+ # and only return the truncated amount.
+ starting_truncation = self._parse_starting_token()[1]
+ all_data = primary_result_key.search(parsed)
+ if isinstance(all_data, (list, six.string_types)):
+ data = all_data[starting_truncation:]
+ else:
+ data = None
+ set_value_from_jmespath(
+ parsed,
+ primary_result_key.expression,
+ data
+ )
+ # We also need to truncate any secondary result keys
+ # because they were not truncated in the previous last
+ # response.
+ for token in self.result_keys:
+ if token == primary_result_key:
+ continue
+ sample = token.search(parsed)
+ if isinstance(sample, list):
+ empty_value = []
+ elif isinstance(sample, six.string_types):
+ empty_value = ''
+ elif isinstance(sample, (int, float)):
+ empty_value = 0
+ else:
+ empty_value = None
+ set_value_from_jmespath(parsed, token.expression, empty_value)
+ return starting_truncation
+
+ def _truncate_response(self, parsed, primary_result_key, truncate_amount,
+ starting_truncation, next_token):
+ original = primary_result_key.search(parsed)
+ if original is None:
+ original = []
+ amount_to_keep = len(original) - truncate_amount
+ truncated = original[:amount_to_keep]
+ set_value_from_jmespath(
+ parsed,
+ primary_result_key.expression,
+ truncated
+ )
+ # The issue here is that even though we know how much we've truncated
+ # we need to account for this globally including any starting
+ # left truncation. For example:
+ # Raw response: [0,1,2,3]
+ # Starting index: 1
+ # Max items: 1
+ # Starting left truncation: [1, 2, 3]
+ # End right truncation for max items: [1]
+ # However, even though we only kept 1, this is post
+ # left truncation so the next starting index should be 2, not 1
+ # (left_truncation + amount_to_keep).
+ next_token['boto_truncate_amount'] = \
+ amount_to_keep + starting_truncation
+ self.resume_token = next_token
+
+ def _get_next_token(self, parsed):
+ if self._more_results is not None:
+ if not self._more_results.search(parsed):
+ return {}
+ next_tokens = {}
+ for output_token, input_key in \
+ zip(self._output_token, self._input_token):
+ next_token = output_token.search(parsed)
+ # We do not want to include any empty strings as actual tokens.
+ # Treat them as None.
+ if next_token:
+ next_tokens[input_key] = next_token
+ else:
+ next_tokens[input_key] = None
+ return next_tokens
+
+ def result_key_iters(self):
+ teed_results = tee(self, len(self.result_keys))
+ return [ResultKeyIterator(i, result_key) for i, result_key
+ in zip(teed_results, self.result_keys)]
+
+ def build_full_result(self):
+ complete_result = {}
+ for response in self:
+ page = response
+ # We want to try to catch operation object pagination
+ # and format correctly for those. They come in the form
+ # of a tuple of two elements: (http_response, parsed_response).
+ # We want the parsed_response as that is what the page iterator
+ # uses. We can remove it though once operation objects are removed.
+ if isinstance(response, tuple) and len(response) == 2:
+ page = response[1]
+ # We're incrementally building the full response page
+ # by page. For each page in the response we need to
+ # inject the necessary components from the page
+ # into the complete_result.
+ for result_expression in self.result_keys:
+ # In order to incrementally update a result key
+ # we need to search the existing value from complete_result,
+ # then we need to search the _current_ page for the
+ # current result key value. Then we append the current
+ # value onto the existing value, and re-set that value
+ # as the new value.
+ result_value = result_expression.search(page)
+ if result_value is None:
+ continue
+ existing_value = result_expression.search(complete_result)
+ if existing_value is None:
+ # Set the initial result
+ set_value_from_jmespath(
+ complete_result, result_expression.expression,
+ result_value)
+ continue
+ # Now both result_value and existing_value contain something
+ if isinstance(result_value, list):
+ existing_value.extend(result_value)
+ elif isinstance(result_value, (int, float, six.string_types)):
+ # Modify the existing result with the sum or concatenation
+ set_value_from_jmespath(
+ complete_result, result_expression.expression,
+ existing_value + result_value)
+ merge_dicts(complete_result, self.non_aggregate_part)
+ if self.resume_token is not None:
+ complete_result['NextToken'] = self.resume_token
+ return complete_result
+
+ def _parse_starting_token(self):
+ if self._starting_token is None:
+ return None
+
+ # The starting token is a dict passed as a base64 encoded string.
+ next_token = self._starting_token
+ try:
+ next_token = self._token_decoder.decode(next_token)
+ index = 0
+ if 'boto_truncate_amount' in next_token:
+ index = next_token.get('boto_truncate_amount')
+ del next_token['boto_truncate_amount']
+ except (ValueError, TypeError):
+ next_token, index = self._parse_starting_token_deprecated()
+ return next_token, index
+
+ def _parse_starting_token_deprecated(self):
+ """
+ This handles parsing of old style starting tokens, and attempts to
+ coerce them into the new style.
+ """
+ log.debug("Attempting to fall back to old starting token parser. For "
+ "token: %s" % self._starting_token)
+ if self._starting_token is None:
+ return None
+
+ parts = self._starting_token.split('___')
+ next_token = []
+ index = 0
+ if len(parts) == len(self._input_token) + 1:
+ try:
+ index = int(parts.pop())
+ except ValueError:
# This doesn't look like a valid old-style token, so we're
# passing it along as an opaque service token.
parts = [self._starting_token]
- for part in parts:
- if part == 'None':
- next_token.append(None)
- else:
- next_token.append(part)
- return self._convert_deprecated_starting_token(next_token), index
-
- def _convert_deprecated_starting_token(self, deprecated_token):
- """
- This attempts to convert a deprecated starting token into the new
- style.
- """
- len_deprecated_token = len(deprecated_token)
- len_input_token = len(self._input_token)
- if len_deprecated_token > len_input_token:
- raise ValueError("Bad starting token: %s" % self._starting_token)
- elif len_deprecated_token < len_input_token:
- log.debug("Old format starting token does not contain all input "
- "tokens. Setting the rest, in order, as None.")
- for i in range(len_input_token - len_deprecated_token):
- deprecated_token.append(None)
- return dict(zip(self._input_token, deprecated_token))
-
-
-class Paginator(object):
- PAGE_ITERATOR_CLS = PageIterator
-
- def __init__(self, method, pagination_config, model):
- self._model = model
- self._method = method
- self._pagination_cfg = pagination_config
- self._output_token = self._get_output_tokens(self._pagination_cfg)
- self._input_token = self._get_input_tokens(self._pagination_cfg)
- self._more_results = self._get_more_results_token(self._pagination_cfg)
- self._non_aggregate_keys = self._get_non_aggregate_keys(
- self._pagination_cfg)
- self._result_keys = self._get_result_keys(self._pagination_cfg)
- self._limit_key = self._get_limit_key(self._pagination_cfg)
-
- @property
- def result_keys(self):
- return self._result_keys
-
- def _get_non_aggregate_keys(self, config):
- keys = []
- for key in config.get('non_aggregate_keys', []):
- keys.append(jmespath.compile(key))
- return keys
-
- def _get_output_tokens(self, config):
- output = []
- output_token = config['output_token']
- if not isinstance(output_token, list):
- output_token = [output_token]
- for config in output_token:
- output.append(jmespath.compile(config))
- return output
-
- def _get_input_tokens(self, config):
- input_token = self._pagination_cfg['input_token']
- if not isinstance(input_token, list):
- input_token = [input_token]
- return input_token
-
- def _get_more_results_token(self, config):
- more_results = config.get('more_results')
- if more_results is not None:
- return jmespath.compile(more_results)
-
- def _get_result_keys(self, config):
- result_key = config.get('result_key')
- if result_key is not None:
- if not isinstance(result_key, list):
- result_key = [result_key]
- result_key = [jmespath.compile(rk) for rk in result_key]
- return result_key
-
- def _get_limit_key(self, config):
- return config.get('limit_key')
-
- def paginate(self, **kwargs):
- """Create paginator object for an operation.
-
- This returns an iterable object. Iterating over
- this object will yield a single page of a response
- at a time.
-
- """
- page_params = self._extract_paging_params(kwargs)
- return self.PAGE_ITERATOR_CLS(
- self._method, self._input_token,
- self._output_token, self._more_results,
- self._result_keys, self._non_aggregate_keys,
- self._limit_key,
- page_params['MaxItems'],
- page_params['StartingToken'],
- page_params['PageSize'],
- kwargs)
-
- def _extract_paging_params(self, kwargs):
- pagination_config = kwargs.pop('PaginationConfig', {})
- max_items = pagination_config.get('MaxItems', None)
- if max_items is not None:
- max_items = int(max_items)
- page_size = pagination_config.get('PageSize', None)
- if page_size is not None:
- if self._limit_key is None:
- raise PaginationError(
- message="PageSize parameter is not supported for the "
- "pagination interface for this operation.")
- input_members = self._model.input_shape.members
- limit_key_shape = input_members.get(self._limit_key)
- if limit_key_shape.type_name == 'string':
- if not isinstance(page_size, six.string_types):
- page_size = str(page_size)
- else:
- page_size = int(page_size)
- return {
- 'MaxItems': max_items,
- 'StartingToken': pagination_config.get('StartingToken', None),
- 'PageSize': page_size,
- }
-
-
-class ResultKeyIterator(object):
- """Iterates over the results of paginated responses.
-
- Each iterator is associated with a single result key.
- Iterating over this object will give you each element in
- the result key list.
-
- :param pages_iterator: An iterator that will give you
- pages of results (a ``PageIterator`` class).
- :param result_key: The JMESPath expression representing
- the result key.
-
- """
-
- def __init__(self, pages_iterator, result_key):
- self._pages_iterator = pages_iterator
- self.result_key = result_key
-
- def __iter__(self):
- for page in self._pages_iterator:
- results = self.result_key.search(page)
- if results is None:
- results = []
- for result in results:
- yield result
+ for part in parts:
+ if part == 'None':
+ next_token.append(None)
+ else:
+ next_token.append(part)
+ return self._convert_deprecated_starting_token(next_token), index
+
+ def _convert_deprecated_starting_token(self, deprecated_token):
+ """
+ This attempts to convert a deprecated starting token into the new
+ style.
+ """
+ len_deprecated_token = len(deprecated_token)
+ len_input_token = len(self._input_token)
+ if len_deprecated_token > len_input_token:
+ raise ValueError("Bad starting token: %s" % self._starting_token)
+ elif len_deprecated_token < len_input_token:
+ log.debug("Old format starting token does not contain all input "
+ "tokens. Setting the rest, in order, as None.")
+ for i in range(len_input_token - len_deprecated_token):
+ deprecated_token.append(None)
+ return dict(zip(self._input_token, deprecated_token))
+
+
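# A minimal sketch of the conversion above; the token names and value are
# hypothetical, not taken from any real paginator config.
input_tokens = ['Marker', 'SecondMarker']
deprecated_token = ['opaque-token-1']  # one value short of the input tokens
deprecated_token += [None] * (len(input_tokens) - len(deprecated_token))
converted = dict(zip(input_tokens, deprecated_token))
assert converted == {'Marker': 'opaque-token-1', 'SecondMarker': None}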
+class Paginator(object):
+ PAGE_ITERATOR_CLS = PageIterator
+
+ def __init__(self, method, pagination_config, model):
+ self._model = model
+ self._method = method
+ self._pagination_cfg = pagination_config
+ self._output_token = self._get_output_tokens(self._pagination_cfg)
+ self._input_token = self._get_input_tokens(self._pagination_cfg)
+ self._more_results = self._get_more_results_token(self._pagination_cfg)
+ self._non_aggregate_keys = self._get_non_aggregate_keys(
+ self._pagination_cfg)
+ self._result_keys = self._get_result_keys(self._pagination_cfg)
+ self._limit_key = self._get_limit_key(self._pagination_cfg)
+
+ @property
+ def result_keys(self):
+ return self._result_keys
+
+ def _get_non_aggregate_keys(self, config):
+ keys = []
+ for key in config.get('non_aggregate_keys', []):
+ keys.append(jmespath.compile(key))
+ return keys
+
+ def _get_output_tokens(self, config):
+ output = []
+ output_token = config['output_token']
+ if not isinstance(output_token, list):
+ output_token = [output_token]
+ for config in output_token:
+ output.append(jmespath.compile(config))
+ return output
+
+ def _get_input_tokens(self, config):
+ input_token = self._pagination_cfg['input_token']
+ if not isinstance(input_token, list):
+ input_token = [input_token]
+ return input_token
+
+ def _get_more_results_token(self, config):
+ more_results = config.get('more_results')
+ if more_results is not None:
+ return jmespath.compile(more_results)
+
+ def _get_result_keys(self, config):
+ result_key = config.get('result_key')
+ if result_key is not None:
+ if not isinstance(result_key, list):
+ result_key = [result_key]
+ result_key = [jmespath.compile(rk) for rk in result_key]
+ return result_key
+
+ def _get_limit_key(self, config):
+ return config.get('limit_key')
+
+ def paginate(self, **kwargs):
+ """Create paginator object for an operation.
+
+ This returns an iterable object. Iterating over
+ this object will yield a single page of a response
+ at a time.
+
+ """
+ page_params = self._extract_paging_params(kwargs)
+ return self.PAGE_ITERATOR_CLS(
+ self._method, self._input_token,
+ self._output_token, self._more_results,
+ self._result_keys, self._non_aggregate_keys,
+ self._limit_key,
+ page_params['MaxItems'],
+ page_params['StartingToken'],
+ page_params['PageSize'],
+ kwargs)
+
+ def _extract_paging_params(self, kwargs):
+ pagination_config = kwargs.pop('PaginationConfig', {})
+ max_items = pagination_config.get('MaxItems', None)
+ if max_items is not None:
+ max_items = int(max_items)
+ page_size = pagination_config.get('PageSize', None)
+ if page_size is not None:
+ if self._limit_key is None:
+ raise PaginationError(
+ message="PageSize parameter is not supported for the "
+ "pagination interface for this operation.")
+ input_members = self._model.input_shape.members
+ limit_key_shape = input_members.get(self._limit_key)
+ if limit_key_shape.type_name == 'string':
+ if not isinstance(page_size, six.string_types):
+ page_size = str(page_size)
+ else:
+ page_size = int(page_size)
+ return {
+ 'MaxItems': max_items,
+ 'StartingToken': pagination_config.get('StartingToken', None),
+ 'PageSize': page_size,
+ }
+
+
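# A minimal usage sketch, assuming botocore is installed and AWS
# credentials are configured; the bucket name is hypothetical. The
# PaginationConfig keys are the ones consumed by _extract_paging_params.
import botocore.session

client = botocore.session.get_session().create_client(
    's3', region_name='us-east-1')
paginator = client.get_paginator('list_objects')
pages = paginator.paginate(
    Bucket='example-bucket',
    PaginationConfig={'MaxItems': 100, 'PageSize': 25})
for page in pages:
    for obj in page.get('Contents', []):
        print(obj['Key'])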
+class ResultKeyIterator(object):
+ """Iterates over the results of paginated responses.
+
+ Each iterator is associated with a single result key.
+ Iterating over this object will give you each element in
+ the result key list.
+
+ :param pages_iterator: An iterator that will give you
+ pages of results (a ``PageIterator`` class).
+ :param result_key: The JMESPath expression representing
+ the result key.
+
+ """
+
+ def __init__(self, pages_iterator, result_key):
+ self._pages_iterator = pages_iterator
+ self.result_key = result_key
+
+ def __iter__(self):
+ for page in self._pages_iterator:
+ results = self.result_key.search(page)
+ if results is None:
+ results = []
+ for result in results:
+ yield result
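# A minimal sketch of what ResultKeyIterator does: flatten one result key
# across pages, treating a None search result as an empty page. The page
# contents here are made up.
import jmespath

pages = [{'Contents': [{'Key': 'a'}, {'Key': 'b'}]},
         {'Contents': None}]
result_key = jmespath.compile('Contents')
flattened = []
for page in pages:
    flattened.extend(result_key.search(page) or [])
assert [item['Key'] for item in flattened] == ['a', 'b']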
diff --git a/contrib/python/botocore/botocore/parsers.py b/contrib/python/botocore/botocore/parsers.py
index 82a27de28a..b6f4985613 100644
--- a/contrib/python/botocore/botocore/parsers.py
+++ b/contrib/python/botocore/botocore/parsers.py
@@ -1,70 +1,70 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""Response parsers for the various protocol types.
-
-The module contains classes that can take an HTTP response, and given
-an output shape, parse the response into a dict according to the
-rules in the output shape.
-
-There are many similarities amongst the different protocols with regard
-to response parsing, and the code is structured in a way to avoid
-code duplication when possible. The diagram below shows the
-inheritance hierarchy of the response classes.
-
-::
-
-
-
- +--------------+
- |ResponseParser|
- +--------------+
- ^ ^ ^
- +--------------------+ | +-------------------+
- | | |
- +----------+----------+ +------+-------+ +-------+------+
- |BaseXMLResponseParser| |BaseRestParser| |BaseJSONParser|
- +---------------------+ +--------------+ +--------------+
- ^ ^ ^ ^ ^ ^
- | | | | | |
- | | | | | |
- | ++----------+-+ +-+-----------++ |
- | |RestXMLParser| |RestJSONParser| |
- +-----+-----+ +-------------+ +--------------+ +----+-----+
- |QueryParser| |JSONParser|
- +-----------+ +----------+
-
-
-The diagram above shows that there is a base class, ``ResponseParser``, that
-contains logic that is similar amongst all the different protocols (``query``,
-``json``, ``rest-json``, ``rest-xml``). Amongst the various protocols there
-is shared logic that can be grouped several ways:
-
-* The ``query`` and ``rest-xml`` protocols both have XML bodies that are
- parsed in the same way.
-* The ``json`` and ``rest-json`` protocols both have JSON bodies that are
- parsed in the same way.
-* The ``rest-json`` and ``rest-xml`` protocols have additional attributes
- besides body parameters that are parsed the same (headers, query string,
- status code).
-
-This is reflected in the class diagram above. The ``BaseXMLResponseParser``
-and the ``BaseJSONParser`` contain logic for parsing the XML/JSON body,
-and the ``BaseRestParser`` contains logic for parsing out attributes that
-come from other parts of the HTTP response. Classes like the
-``RestXMLParser`` inherit from the ``BaseXMLResponseParser`` to get the
-XML body parsing logic and the ``BaseRestParser`` to get the HTTP
-header/status code/query string parsing.
-
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Response parsers for the various protocol types.
+
+The module contains classes that can take an HTTP response, and given
+an output shape, parse the response into a dict according to the
+rules in the output shape.
+
+There are many similarities amongst the different protocols with regard
+to response parsing, and the code is structured in a way to avoid
+code duplication when possible. The diagram below shows the
+inheritance hierarchy of the response classes.
+
+::
+
+
+
+ +--------------+
+ |ResponseParser|
+ +--------------+
+ ^ ^ ^
+ +--------------------+ | +-------------------+
+ | | |
+ +----------+----------+ +------+-------+ +-------+------+
+ |BaseXMLResponseParser| |BaseRestParser| |BaseJSONParser|
+ +---------------------+ +--------------+ +--------------+
+ ^ ^ ^ ^ ^ ^
+ | | | | | |
+ | | | | | |
+ | ++----------+-+ +-+-----------++ |
+ | |RestXMLParser| |RestJSONParser| |
+ +-----+-----+ +-------------+ +--------------+ +----+-----+
+ |QueryParser| |JSONParser|
+ +-----------+ +----------+
+
+
+The diagram above shows that there is a base class, ``ResponseParser``, that
+contains logic that is similar amongst all the different protocols (``query``,
+``json``, ``rest-json``, ``rest-xml``). Amongst the various protocols there
+is shared logic that can be grouped several ways:
+
+* The ``query`` and ``rest-xml`` protocols both have XML bodies that are
+ parsed in the same way.
+* The ``json`` and ``rest-json`` protocols both have JSON bodies that are
+ parsed in the same way.
+* The ``rest-json`` and ``rest-xml`` protocols have additional attributes
+ besides body parameters that are parsed the same (headers, query string,
+ status code).
+
+This is reflected in the class diagram above. The ``BaseXMLResponseParser``
+and the ``BaseJSONParser`` contain logic for parsing the XML/JSON body,
+and the ``BaseRestParser`` contains logic for parsing out attributes that
+come from other parts of the HTTP response. Classes like the
+``RestXMLParser`` inherit from the ``BaseXMLResponseParser`` to get the
+XML body parsing logic and the ``BaseRestParser`` to get the HTTP
+header/status code/query string parsing.
+
Additionally, there are event stream parsers that are used by the other parsers
to wrap streaming bodies that represent a stream of events. The
BaseEventStreamParser extends from ResponseParser and defines the logic for
@@ -89,315 +89,315 @@ encoded as JSON and XML through the following hierarchy.
|EventStreamXMLParser| |EventStreamJSONParser|
+--------------------+ +---------------------+
-Return Values
-=============
-
-Each call to ``parse()`` returns a dict that has this form::
-
- Standard Response
-
- {
- "ResponseMetadata": {"RequestId": <requestid>}
- <response keys>
- }
-
- Error response
-
- {
- "ResponseMetadata": {"RequestId": <requestid>}
- "Error": {
- "Code": <string>,
- "Message": <string>,
- "Type": <string>,
- <additional keys>
- }
- }
-
-"""
-import re
-import base64
-import json
-import logging
-
+Return Values
+=============
+
+Each call to ``parse()`` returns a dict that has this form::
+
+ Standard Response
+
+ {
+ "ResponseMetadata": {"RequestId": <requestid>}
+ <response keys>
+ }
+
+ Error response
+
+ {
+ "ResponseMetadata": {"RequestId": <requestid>}
+ "Error": {
+ "Code": <string>,
+ "Message": <string>,
+ "Type": <string>,
+ <additional keys>
+ }
+ }
+
+"""
+import re
+import base64
+import json
+import logging
+
from botocore.compat import six, ETree, XMLParseError
from botocore.eventstream import EventStream, NoInitialResponseError
-
-from botocore.utils import parse_timestamp, merge_dicts, \
+
+from botocore.utils import parse_timestamp, merge_dicts, \
is_json_value_header, lowercase_dict
-
-LOG = logging.getLogger(__name__)
-
-DEFAULT_TIMESTAMP_PARSER = parse_timestamp
-
-
-class ResponseParserFactory(object):
- def __init__(self):
- self._defaults = {}
-
- def set_parser_defaults(self, **kwargs):
- """Set default arguments when a parser instance is created.
-
- You can specify any kwargs that are allowed by a ResponseParser
- class. There are currently two arguments:
-
+
+LOG = logging.getLogger(__name__)
+
+DEFAULT_TIMESTAMP_PARSER = parse_timestamp
+
+
+class ResponseParserFactory(object):
+ def __init__(self):
+ self._defaults = {}
+
+ def set_parser_defaults(self, **kwargs):
+ """Set default arguments when a parser instance is created.
+
+ You can specify any kwargs that are allowed by a ResponseParser
+ class. There are currently two arguments:
+
* timestamp_parser - A callable that can parse a timestamp string
- * blob_parser - A callable that can parse a blob type
-
- """
- self._defaults.update(kwargs)
-
- def create_parser(self, protocol_name):
- parser_cls = PROTOCOL_PARSERS[protocol_name]
- return parser_cls(**self._defaults)
-
-
-def create_parser(protocol):
- return ResponseParserFactory().create_parser(protocol)
-
-
-def _text_content(func):
- # This decorator hides the difference between
- # an XML node with text or a plain string. It's used
- # to ensure that scalar processing operates only on text
- # strings, which allows the same scalar handlers to be used
- # for XML nodes from the body and HTTP headers.
- def _get_text_content(self, shape, node_or_string):
- if hasattr(node_or_string, 'text'):
- text = node_or_string.text
- if text is None:
- # If an XML node is empty <foo></foo>,
- # we want to parse that as an empty string,
- # not as a null/None value.
- text = ''
- else:
- text = node_or_string
- return func(self, shape, text)
- return _get_text_content
-
-
-class ResponseParserError(Exception):
- pass
-
-
-class ResponseParser(object):
- """Base class for response parsing.
-
- This class represents the interface that all ResponseParsers for the
- various protocols must implement.
-
- This class will take an HTTP response and a model shape and parse the
- HTTP response into a dictionary.
-
- There is a single public method exposed: ``parse``. See the ``parse``
- docstring for more info.
-
- """
- DEFAULT_ENCODING = 'utf-8'
+ * blob_parser - A callable that can parse a blob type
+
+ """
+ self._defaults.update(kwargs)
+
+ def create_parser(self, protocol_name):
+ parser_cls = PROTOCOL_PARSERS[protocol_name]
+ return parser_cls(**self._defaults)
+
+
+def create_parser(protocol):
+ return ResponseParserFactory().create_parser(protocol)
+
+
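# A minimal usage sketch, assuming botocore is importable: pick a parser
# by protocol name and hand it a canned response dict. Passing shape=None
# exercises the generic path, and parse() still injects ResponseMetadata.
from botocore.parsers import create_parser

parser = create_parser('query')
parsed = parser.parse(
    {'status_code': 200,
     'headers': {'x-amzn-requestid': 'example-request-id'},
     'body': b'<ExampleResponse/>'},
    None)
assert parsed['ResponseMetadata']['HTTPStatusCode'] == 200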
+def _text_content(func):
+ # This decorator hides the difference between
+ # an XML node with text or a plain string. It's used
+ # to ensure that scalar processing operates only on text
+ # strings, which allows the same scalar handlers to be used
+ # for XML nodes from the body and HTTP headers.
+ def _get_text_content(self, shape, node_or_string):
+ if hasattr(node_or_string, 'text'):
+ text = node_or_string.text
+ if text is None:
+ # If an XML node is empty <foo></foo>,
+ # we want to parse that as an empty string,
+ # not as a null/None value.
+ text = ''
+ else:
+ text = node_or_string
+ return func(self, shape, text)
+ return _get_text_content
+
+
+class ResponseParserError(Exception):
+ pass
+
+
+class ResponseParser(object):
+ """Base class for response parsing.
+
+ This class represents the interface that all ResponseParsers for the
+ various protocols must implement.
+
+ This class will take an HTTP response and a model shape and parse the
+ HTTP response into a dictionary.
+
+ There is a single public method exposed: ``parse``. See the ``parse``
+ docstring for more info.
+
+ """
+ DEFAULT_ENCODING = 'utf-8'
EVENT_STREAM_PARSER_CLS = None
-
- def __init__(self, timestamp_parser=None, blob_parser=None):
- if timestamp_parser is None:
- timestamp_parser = DEFAULT_TIMESTAMP_PARSER
- self._timestamp_parser = timestamp_parser
- if blob_parser is None:
- blob_parser = self._default_blob_parser
- self._blob_parser = blob_parser
+
+ def __init__(self, timestamp_parser=None, blob_parser=None):
+ if timestamp_parser is None:
+ timestamp_parser = DEFAULT_TIMESTAMP_PARSER
+ self._timestamp_parser = timestamp_parser
+ if blob_parser is None:
+ blob_parser = self._default_blob_parser
+ self._blob_parser = blob_parser
self._event_stream_parser = None
if self.EVENT_STREAM_PARSER_CLS is not None:
self._event_stream_parser = self.EVENT_STREAM_PARSER_CLS(
timestamp_parser, blob_parser)
-
- def _default_blob_parser(self, value):
- # Blobs are always returned as bytes type (this matters on python3).
- # We don't decode this to a str because it's entirely possible that the
- # blob contains binary data that actually can't be decoded.
- return base64.b64decode(value)
-
- def parse(self, response, shape):
- """Parse the HTTP response given a shape.
-
- :param response: The HTTP response dictionary. This is a dictionary
- that represents the HTTP response. The dictionary must have the
- following keys: ``body``, ``headers``, and ``status_code``.
-
- :param shape: The model shape describing the expected output.
- :return: Returns a dictionary representing the parsed response
- described by the model. In addition to the shape described by
- the model, each response will also have a ``ResponseMetadata``
- key which contains metadata about the response, including at
- least the keys ``RequestId`` and ``HTTPStatusCode``. Some
- responses may populate additional keys, but ``RequestId`` will
- always be present.
-
- """
- LOG.debug('Response headers: %s', response['headers'])
- LOG.debug('Response body:\n%s', response['body'])
- if response['status_code'] >= 301:
- if self._is_generic_error_response(response):
- parsed = self._do_generic_error_parse(response)
+
+ def _default_blob_parser(self, value):
+ # Blobs are always returned as bytes type (this matters on python3).
+ # We don't decode this to a str because it's entirely possible that the
+ # blob contains binary data that actually can't be decoded.
+ return base64.b64decode(value)
+
+ def parse(self, response, shape):
+ """Parse the HTTP response given a shape.
+
+ :param response: The HTTP response dictionary. This is a dictionary
+ that represents the HTTP response. The dictionary must have the
+ following keys: ``body``, ``headers``, and ``status_code``.
+
+ :param shape: The model shape describing the expected output.
+ :return: Returns a dictionary representing the parsed response
+ described by the model. In addition to the shape described by
+ the model, each response will also have a ``ResponseMetadata``
+ key which contains metadata about the response, including at
+ least the keys ``RequestId`` and ``HTTPStatusCode``. Some
+ responses may populate additional keys, but ``RequestId`` will
+ always be present.
+
+ """
+ LOG.debug('Response headers: %s', response['headers'])
+ LOG.debug('Response body:\n%s', response['body'])
+ if response['status_code'] >= 301:
+ if self._is_generic_error_response(response):
+ parsed = self._do_generic_error_parse(response)
elif self._is_modeled_error_shape(shape):
parsed = self._do_modeled_error_parse(response, shape)
# We don't want to decorate the modeled fields with metadata
return parsed
- else:
- parsed = self._do_error_parse(response, shape)
- else:
- parsed = self._do_parse(response, shape)
-
+ else:
+ parsed = self._do_error_parse(response, shape)
+ else:
+ parsed = self._do_parse(response, shape)
+
# We don't want to decorate event stream responses with metadata
if shape and shape.serialization.get('eventstream'):
return parsed
- # Add ResponseMetadata if it doesn't exist and inject the HTTP
- # status code and headers from the response.
- if isinstance(parsed, dict):
- response_metadata = parsed.get('ResponseMetadata', {})
- response_metadata['HTTPStatusCode'] = response['status_code']
+ # Add ResponseMetadata if it doesn't exist and inject the HTTP
+ # status code and headers from the response.
+ if isinstance(parsed, dict):
+ response_metadata = parsed.get('ResponseMetadata', {})
+ response_metadata['HTTPStatusCode'] = response['status_code']
# Ensure that the http header keys are all lower cased. Older
# versions of urllib3 (< 1.11) would unintentionally do this for us
# (see urllib3#633). We need to do this conversion manually now.
headers = response['headers']
response_metadata['HTTPHeaders'] = lowercase_dict(headers)
- parsed['ResponseMetadata'] = response_metadata
- return parsed
-
+ parsed['ResponseMetadata'] = response_metadata
+ return parsed
+
def _is_modeled_error_shape(self, shape):
return shape is not None and shape.metadata.get('exception', False)
- def _is_generic_error_response(self, response):
- # There are times when a service will respond with a generic
- # error response such as:
- # '<html><body><b>Http/1.1 Service Unavailable</b></body></html>'
- #
- # This can also happen if you're going through a proxy.
- # In this case the protocol-specific _do_error_parse will either
- # fail to parse the response (in the best case) or silently succeed
- # and treat the HTML above as an XML response and return
- # nonsensical parsed data.
- # To prevent this case from happening we first need to check
- # whether or not this response looks like the generic response.
- if response['status_code'] >= 500:
+ def _is_generic_error_response(self, response):
+ # There are times when a service will respond with a generic
+ # error response such as:
+ # '<html><body><b>Http/1.1 Service Unavailable</b></body></html>'
+ #
+ # This can also happen if you're going through a proxy.
+ # In this case the protocol-specific _do_error_parse will either
+ # fail to parse the response (in the best case) or silently succeed
+ # and treat the HTML above as an XML response and return
+ # nonsensical parsed data.
+ # To prevent this case from happening we first need to check
+ # whether or not this response looks like the generic response.
+ if response['status_code'] >= 500:
if 'body' not in response or response['body'] is None:
return True
- body = response['body'].strip()
- return body.startswith(b'<html>') or not body
-
- def _do_generic_error_parse(self, response):
- # There's not really much we can do when we get a generic
- # html response.
- LOG.debug("Received a non protocol specific error response from the "
- "service, unable to populate error code and message.")
- return {
- 'Error': {'Code': str(response['status_code']),
- 'Message': six.moves.http_client.responses.get(
- response['status_code'], '')},
- 'ResponseMetadata': {},
- }
-
- def _do_parse(self, response, shape):
- raise NotImplementedError("%s._do_parse" % self.__class__.__name__)
-
- def _do_error_parse(self, response, shape):
- raise NotImplementedError(
- "%s._do_error_parse" % self.__class__.__name__)
-
+ body = response['body'].strip()
+ return body.startswith(b'<html>') or not body
+
+ def _do_generic_error_parse(self, response):
+ # There's not really much we can do when we get a generic
+ # html response.
+ LOG.debug("Received a non protocol specific error response from the "
+ "service, unable to populate error code and message.")
+ return {
+ 'Error': {'Code': str(response['status_code']),
+ 'Message': six.moves.http_client.responses.get(
+ response['status_code'], '')},
+ 'ResponseMetadata': {},
+ }
+
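# Sketch of the fallback above: a bare HTML 503 response with empty
# headers would parse to roughly
#   {'Error': {'Code': '503', 'Message': 'Service Unavailable'},
#    'ResponseMetadata': {}}
# since six.moves.http_client.responses maps 503 to 'Service Unavailable'.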
+ def _do_parse(self, response, shape):
+ raise NotImplementedError("%s._do_parse" % self.__class__.__name__)
+
+ def _do_error_parse(self, response, shape):
+ raise NotImplementedError(
+ "%s._do_error_parse" % self.__class__.__name__)
+
def _do_modeled_error_parse(self, response, shape, parsed):
raise NotImplementedError(
"%s._do_modeled_error_parse" % self.__class__.__name__)
- def _parse_shape(self, shape, node):
- handler = getattr(self, '_handle_%s' % shape.type_name,
- self._default_handle)
- return handler(shape, node)
-
- def _handle_list(self, shape, node):
- # Enough implementations share list serialization that it's moved
- # up here in the base class.
- parsed = []
- member_shape = shape.member
- for item in node:
- parsed.append(self._parse_shape(member_shape, item))
- return parsed
-
- def _default_handle(self, shape, value):
- return value
-
+ def _parse_shape(self, shape, node):
+ handler = getattr(self, '_handle_%s' % shape.type_name,
+ self._default_handle)
+ return handler(shape, node)
+
+ def _handle_list(self, shape, node):
+ # Enough implementations share list serialization that it's moved
+ # up here in the base class.
+ parsed = []
+ member_shape = shape.member
+ for item in node:
+ parsed.append(self._parse_shape(member_shape, item))
+ return parsed
+
+ def _default_handle(self, shape, value):
+ return value
+
def _create_event_stream(self, response, shape):
parser = self._event_stream_parser
name = response['context'].get('operation_name')
return EventStream(response['body'], shape, parser, name)
-
-
-class BaseXMLResponseParser(ResponseParser):
- def __init__(self, timestamp_parser=None, blob_parser=None):
- super(BaseXMLResponseParser, self).__init__(timestamp_parser,
- blob_parser)
- self._namespace_re = re.compile('{.*}')
-
- def _handle_map(self, shape, node):
- parsed = {}
- key_shape = shape.key
- value_shape = shape.value
- key_location_name = key_shape.serialization.get('name') or 'key'
- value_location_name = value_shape.serialization.get('name') or 'value'
- if shape.serialization.get('flattened') and not isinstance(node, list):
- node = [node]
- for keyval_node in node:
- for single_pair in keyval_node:
- # Within each <entry> there's a <key> and a <value>
- tag_name = self._node_tag(single_pair)
- if tag_name == key_location_name:
- key_name = self._parse_shape(key_shape, single_pair)
- elif tag_name == value_location_name:
- val_name = self._parse_shape(value_shape, single_pair)
- else:
- raise ResponseParserError("Unknown tag: %s" % tag_name)
- parsed[key_name] = val_name
- return parsed
-
- def _node_tag(self, node):
- return self._namespace_re.sub('', node.tag)
-
- def _handle_list(self, shape, node):
- # When we use _build_name_to_xml_node, repeated elements are aggregated
- # into a list. However, we can't tell the difference between a scalar
- # value and a single-element flattened list. So before calling the
- # real _handle_list, if the shape is flattened and "node" is not
- # already a list, we wrap it in a one-element list.
- if shape.serialization.get('flattened') and not isinstance(node, list):
- node = [node]
- return super(BaseXMLResponseParser, self)._handle_list(shape, node)
-
- def _handle_structure(self, shape, node):
- parsed = {}
- members = shape.members
+
+
+class BaseXMLResponseParser(ResponseParser):
+ def __init__(self, timestamp_parser=None, blob_parser=None):
+ super(BaseXMLResponseParser, self).__init__(timestamp_parser,
+ blob_parser)
+ self._namespace_re = re.compile('{.*}')
+
+ def _handle_map(self, shape, node):
+ parsed = {}
+ key_shape = shape.key
+ value_shape = shape.value
+ key_location_name = key_shape.serialization.get('name') or 'key'
+ value_location_name = value_shape.serialization.get('name') or 'value'
+ if shape.serialization.get('flattened') and not isinstance(node, list):
+ node = [node]
+ for keyval_node in node:
+ for single_pair in keyval_node:
+ # Within each <entry> there's a <key> and a <value>
+ tag_name = self._node_tag(single_pair)
+ if tag_name == key_location_name:
+ key_name = self._parse_shape(key_shape, single_pair)
+ elif tag_name == value_location_name:
+ val_name = self._parse_shape(value_shape, single_pair)
+ else:
+ raise ResponseParserError("Unknown tag: %s" % tag_name)
+ parsed[key_name] = val_name
+ return parsed
+
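# Sketch of the shape handled above: for a non-flattened XML map such as
#   <Attributes>
#     <entry><key>color</key><value>blue</value></entry>
#   </Attributes>
# each <entry> yields one key/value pair, so the parsed result is
# {'color': 'blue'}.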
+ def _node_tag(self, node):
+ return self._namespace_re.sub('', node.tag)
+
+ def _handle_list(self, shape, node):
+ # When we use _build_name_to_xml_node, repeated elements are aggregated
+ # into a list. However, we can't tell the difference between a scalar
+ # value and a single-element flattened list. So before calling the
+ # real _handle_list, if the shape is flattened and "node" is not
+ # already a list, we wrap it in a one-element list.
+ if shape.serialization.get('flattened') and not isinstance(node, list):
+ node = [node]
+ return super(BaseXMLResponseParser, self)._handle_list(shape, node)
+
+ def _handle_structure(self, shape, node):
+ parsed = {}
+ members = shape.members
if shape.metadata.get('exception', False):
node = self._get_error_root(node)
- xml_dict = self._build_name_to_xml_node(node)
- for member_name in members:
- member_shape = members[member_name]
+ xml_dict = self._build_name_to_xml_node(node)
+ for member_name in members:
+ member_shape = members[member_name]
if 'location' in member_shape.serialization or \
member_shape.serialization.get('eventheader'):
- # All members with locations have already been handled,
- # so we don't need to parse these members.
- continue
- xml_name = self._member_key_name(member_shape, member_name)
- member_node = xml_dict.get(xml_name)
- if member_node is not None:
- parsed[member_name] = self._parse_shape(
- member_shape, member_node)
- elif member_shape.serialization.get('xmlAttribute'):
- attribs = {}
- location_name = member_shape.serialization['name']
- for key, value in node.attrib.items():
- new_key = self._namespace_re.sub(
- location_name.split(':')[0] + ':', key)
- attribs[new_key] = value
- if location_name in attribs:
- parsed[member_name] = attribs[location_name]
- return parsed
-
+ # All members with locations have already been handled,
+ # so we don't need to parse these members.
+ continue
+ xml_name = self._member_key_name(member_shape, member_name)
+ member_node = xml_dict.get(xml_name)
+ if member_node is not None:
+ parsed[member_name] = self._parse_shape(
+ member_shape, member_node)
+ elif member_shape.serialization.get('xmlAttribute'):
+ attribs = {}
+ location_name = member_shape.serialization['name']
+ for key, value in node.attrib.items():
+ new_key = self._namespace_re.sub(
+ location_name.split(':')[0] + ':', key)
+ attribs[new_key] = value
+ if location_name in attribs:
+ parsed[member_name] = attribs[location_name]
+ return parsed
+
def _get_error_root(self, original_root):
if self._node_tag(original_root) == 'ErrorResponse':
for child in original_root:
@@ -405,180 +405,180 @@ class BaseXMLResponseParser(ResponseParser):
return child
return original_root
- def _member_key_name(self, shape, member_name):
- # This method is needed because we have to special case flattened list
- # with a serialization name. If this is the case we use the
- # locationName from the list's member shape as the key name for the
- # surrounding structure.
- if shape.type_name == 'list' and shape.serialization.get('flattened'):
- list_member_serialized_name = shape.member.serialization.get(
- 'name')
- if list_member_serialized_name is not None:
- return list_member_serialized_name
- serialized_name = shape.serialization.get('name')
- if serialized_name is not None:
- return serialized_name
- return member_name
-
- def _build_name_to_xml_node(self, parent_node):
- # If the parent node is actually a list, we should not be trying
- # to serialize it to a dictionary. Instead, return the first element
- # in the list.
- if isinstance(parent_node, list):
- return self._build_name_to_xml_node(parent_node[0])
- xml_dict = {}
- for item in parent_node:
- key = self._node_tag(item)
- if key in xml_dict:
- # If the key already exists, the most natural
- # way to handle this is to aggregate repeated
- # keys into a single list.
- # <foo>1</foo><foo>2</foo> -> {'foo': [Node(1), Node(2)]}
- if isinstance(xml_dict[key], list):
- xml_dict[key].append(item)
- else:
- # Convert from a scalar to a list.
- xml_dict[key] = [xml_dict[key], item]
- else:
- xml_dict[key] = item
- return xml_dict
-
- def _parse_xml_string_to_dom(self, xml_string):
- try:
+ def _member_key_name(self, shape, member_name):
+ # This method is needed because we have to special case flattened list
+ # with a serialization name. If this is the case we use the
+ # locationName from the list's member shape as the key name for the
+ # surrounding structure.
+ if shape.type_name == 'list' and shape.serialization.get('flattened'):
+ list_member_serialized_name = shape.member.serialization.get(
+ 'name')
+ if list_member_serialized_name is not None:
+ return list_member_serialized_name
+ serialized_name = shape.serialization.get('name')
+ if serialized_name is not None:
+ return serialized_name
+ return member_name
+
+ def _build_name_to_xml_node(self, parent_node):
+ # If the parent node is actually a list, we should not be trying
+ # to serialize it to a dictionary. Instead, return the first element
+ # in the list.
+ if isinstance(parent_node, list):
+ return self._build_name_to_xml_node(parent_node[0])
+ xml_dict = {}
+ for item in parent_node:
+ key = self._node_tag(item)
+ if key in xml_dict:
+ # If the key already exists, the most natural
+ # way to handle this is to aggregate repeated
+ # keys into a single list.
+ # <foo>1</foo><foo>2</foo> -> {'foo': [Node(1), Node(2)]}
+ if isinstance(xml_dict[key], list):
+ xml_dict[key].append(item)
+ else:
+ # Convert from a scalar to a list.
+ xml_dict[key] = [xml_dict[key], item]
+ else:
+ xml_dict[key] = item
+ return xml_dict
+
+ def _parse_xml_string_to_dom(self, xml_string):
+ try:
parser = ETree.XMLParser(
target=ETree.TreeBuilder(),
- encoding=self.DEFAULT_ENCODING)
- parser.feed(xml_string)
- root = parser.close()
- except XMLParseError as e:
- raise ResponseParserError(
- "Unable to parse response (%s), "
+ encoding=self.DEFAULT_ENCODING)
+ parser.feed(xml_string)
+ root = parser.close()
+ except XMLParseError as e:
+ raise ResponseParserError(
+ "Unable to parse response (%s), "
"invalid XML received. Further retries may succeed:\n%s" %
(e, xml_string))
- return root
-
- def _replace_nodes(self, parsed):
- for key, value in parsed.items():
+ return root
+
+ def _replace_nodes(self, parsed):
+ for key, value in parsed.items():
if list(value):
- sub_dict = self._build_name_to_xml_node(value)
- parsed[key] = self._replace_nodes(sub_dict)
- else:
- parsed[key] = value.text
- return parsed
-
- @_text_content
- def _handle_boolean(self, shape, text):
- if text == 'true':
- return True
- else:
- return False
-
- @_text_content
- def _handle_float(self, shape, text):
- return float(text)
-
- @_text_content
- def _handle_timestamp(self, shape, text):
- return self._timestamp_parser(text)
-
- @_text_content
- def _handle_integer(self, shape, text):
- return int(text)
-
- @_text_content
- def _handle_string(self, shape, text):
- return text
-
- @_text_content
- def _handle_blob(self, shape, text):
- return self._blob_parser(text)
-
- _handle_character = _handle_string
- _handle_double = _handle_float
- _handle_long = _handle_integer
-
-
-class QueryParser(BaseXMLResponseParser):
-
- def _do_error_parse(self, response, shape):
- xml_contents = response['body']
- root = self._parse_xml_string_to_dom(xml_contents)
- parsed = self._build_name_to_xml_node(root)
- self._replace_nodes(parsed)
- # Once we've converted xml->dict, we need to make one or two
- # more adjustments to extract nested errors and to be consistent
- # with ResponseMetadata for non-error responses:
- # 1. {"Errors": {"Error": {...}}} -> {"Error": {...}}
- # 2. {"RequestId": "id"} -> {"ResponseMetadata": {"RequestId": "id"}}
- if 'Errors' in parsed:
- parsed.update(parsed.pop('Errors'))
- if 'RequestId' in parsed:
- parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')}
- return parsed
-
+ sub_dict = self._build_name_to_xml_node(value)
+ parsed[key] = self._replace_nodes(sub_dict)
+ else:
+ parsed[key] = value.text
+ return parsed
+
+ @_text_content
+ def _handle_boolean(self, shape, text):
+ if text == 'true':
+ return True
+ else:
+ return False
+
+ @_text_content
+ def _handle_float(self, shape, text):
+ return float(text)
+
+ @_text_content
+ def _handle_timestamp(self, shape, text):
+ return self._timestamp_parser(text)
+
+ @_text_content
+ def _handle_integer(self, shape, text):
+ return int(text)
+
+ @_text_content
+ def _handle_string(self, shape, text):
+ return text
+
+ @_text_content
+ def _handle_blob(self, shape, text):
+ return self._blob_parser(text)
+
+ _handle_character = _handle_string
+ _handle_double = _handle_float
+ _handle_long = _handle_integer
+
+
+class QueryParser(BaseXMLResponseParser):
+
+ def _do_error_parse(self, response, shape):
+ xml_contents = response['body']
+ root = self._parse_xml_string_to_dom(xml_contents)
+ parsed = self._build_name_to_xml_node(root)
+ self._replace_nodes(parsed)
+ # Once we've converted xml->dict, we need to make one or two
+ # more adjustments to extract nested errors and to be consistent
+ # with ResponseMetadata for non-error responses:
+ # 1. {"Errors": {"Error": {...}}} -> {"Error": {...}}
+ # 2. {"RequestId": "id"} -> {"ResponseMetadata": {"RequestId": "id"}}
+ if 'Errors' in parsed:
+ parsed.update(parsed.pop('Errors'))
+ if 'RequestId' in parsed:
+ parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')}
+ return parsed
+
def _do_modeled_error_parse(self, response, shape):
return self._parse_body_as_xml(response, shape, inject_metadata=False)
- def _do_parse(self, response, shape):
+ def _do_parse(self, response, shape):
return self._parse_body_as_xml(response, shape, inject_metadata=True)
def _parse_body_as_xml(self, response, shape, inject_metadata=True):
- xml_contents = response['body']
- root = self._parse_xml_string_to_dom(xml_contents)
- parsed = {}
- if shape is not None:
- start = root
- if 'resultWrapper' in shape.serialization:
- start = self._find_result_wrapped_shape(
- shape.serialization['resultWrapper'],
- root)
- parsed = self._parse_shape(shape, start)
+ xml_contents = response['body']
+ root = self._parse_xml_string_to_dom(xml_contents)
+ parsed = {}
+ if shape is not None:
+ start = root
+ if 'resultWrapper' in shape.serialization:
+ start = self._find_result_wrapped_shape(
+ shape.serialization['resultWrapper'],
+ root)
+ parsed = self._parse_shape(shape, start)
if inject_metadata:
self._inject_response_metadata(root, parsed)
- return parsed
-
- def _find_result_wrapped_shape(self, element_name, xml_root_node):
- mapping = self._build_name_to_xml_node(xml_root_node)
- return mapping[element_name]
-
- def _inject_response_metadata(self, node, inject_into):
- mapping = self._build_name_to_xml_node(node)
- child_node = mapping.get('ResponseMetadata')
- if child_node is not None:
- sub_mapping = self._build_name_to_xml_node(child_node)
- for key, value in sub_mapping.items():
- sub_mapping[key] = value.text
- inject_into['ResponseMetadata'] = sub_mapping
-
-
-class EC2QueryParser(QueryParser):
-
- def _inject_response_metadata(self, node, inject_into):
- mapping = self._build_name_to_xml_node(node)
- child_node = mapping.get('requestId')
- if child_node is not None:
- inject_into['ResponseMetadata'] = {'RequestId': child_node.text}
-
- def _do_error_parse(self, response, shape):
- # EC2 errors look like:
- # <Response>
- # <Errors>
- # <Error>
- # <Code>InvalidInstanceID.Malformed</Code>
- # <Message>Invalid id: "1343124"</Message>
- # </Error>
- # </Errors>
- # <RequestID>12345</RequestID>
- # </Response>
- # This is different from QueryParser in that it's RequestID,
- # not RequestId
- original = super(EC2QueryParser, self)._do_error_parse(response, shape)
+ return parsed
+
+ def _find_result_wrapped_shape(self, element_name, xml_root_node):
+ mapping = self._build_name_to_xml_node(xml_root_node)
+ return mapping[element_name]
+
+ def _inject_response_metadata(self, node, inject_into):
+ mapping = self._build_name_to_xml_node(node)
+ child_node = mapping.get('ResponseMetadata')
+ if child_node is not None:
+ sub_mapping = self._build_name_to_xml_node(child_node)
+ for key, value in sub_mapping.items():
+ sub_mapping[key] = value.text
+ inject_into['ResponseMetadata'] = sub_mapping
+
+
+class EC2QueryParser(QueryParser):
+
+ def _inject_response_metadata(self, node, inject_into):
+ mapping = self._build_name_to_xml_node(node)
+ child_node = mapping.get('requestId')
+ if child_node is not None:
+ inject_into['ResponseMetadata'] = {'RequestId': child_node.text}
+
+ def _do_error_parse(self, response, shape):
+ # EC2 errors look like:
+ # <Response>
+ # <Errors>
+ # <Error>
+ # <Code>InvalidInstanceID.Malformed</Code>
+ # <Message>Invalid id: "1343124"</Message>
+ # </Error>
+ # </Errors>
+ # <RequestID>12345</RequestID>
+ # </Response>
+ # This is different from QueryParser in that it's RequestID,
+ # not RequestId
+ original = super(EC2QueryParser, self)._do_error_parse(response, shape)
if 'RequestID' in original:
original['ResponseMetadata'] = {
'RequestId': original.pop('RequestID')
}
- return original
-
+ return original
+
def _get_error_root(self, original_root):
for child in original_root:
if self._node_tag(child) == 'Errors':
@@ -586,12 +586,12 @@ class EC2QueryParser(QueryParser):
if self._node_tag(errors_child) == 'Error':
return errors_child
return original_root
-
-class BaseJSONParser(ResponseParser):
-
- def _handle_structure(self, shape, value):
- final_parsed = {}
+
+class BaseJSONParser(ResponseParser):
+
+ def _handle_structure(self, shape, value):
+ final_parsed = {}
if shape.is_document_type:
final_parsed = value
else:
@@ -610,69 +610,69 @@ class BaseJSONParser(ResponseParser):
final_parsed[member_name] = self._parse_shape(
member_shapes[member_name],
raw_value)
- return final_parsed
-
- def _handle_map(self, shape, value):
- parsed = {}
- key_shape = shape.key
- value_shape = shape.value
- for key, value in value.items():
- actual_key = self._parse_shape(key_shape, key)
- actual_value = self._parse_shape(value_shape, value)
- parsed[actual_key] = actual_value
- return parsed
-
- def _handle_blob(self, shape, value):
- return self._blob_parser(value)
-
- def _handle_timestamp(self, shape, value):
- return self._timestamp_parser(value)
-
- def _do_error_parse(self, response, shape):
- body = self._parse_body_as_json(response['body'])
- error = {"Error": {"Message": '', "Code": ''}, "ResponseMetadata": {}}
- # Error responses can have slightly different structures for json.
- # The basic structure is:
- #
- # {"__type":"ConnectClientException",
- # "message":"The error message."}
-
- # The error message can either come in the 'message' or 'Message' key
- # so we need to check for both.
- error['Error']['Message'] = body.get('message',
- body.get('Message', ''))
- # if the message did not contain an error code
- # include the response status code
- response_code = response.get('status_code')
- code = body.get('__type', response_code and str(response_code))
- if code is not None:
- # code has a couple forms as well:
- # * "com.aws.dynamodb.vAPI#ProvisionedThroughputExceededException"
- # * "ResourceNotFoundException"
- if '#' in code:
- code = code.rsplit('#', 1)[1]
- error['Error']['Code'] = code
- self._inject_response_metadata(error, response['headers'])
- return error
-
- def _inject_response_metadata(self, parsed, headers):
- if 'x-amzn-requestid' in headers:
- parsed.setdefault('ResponseMetadata', {})['RequestId'] = (
- headers['x-amzn-requestid'])
-
- def _parse_body_as_json(self, body_contents):
- if not body_contents:
- return {}
- body = body_contents.decode(self.DEFAULT_ENCODING)
- try:
- original_parsed = json.loads(body)
- return original_parsed
- except ValueError:
- # if the body cannot be parsed, include
- # the literal string as the message
- return {'message': body}
-
-
+ return final_parsed
+
+ def _handle_map(self, shape, value):
+ parsed = {}
+ key_shape = shape.key
+ value_shape = shape.value
+ for key, value in value.items():
+ actual_key = self._parse_shape(key_shape, key)
+ actual_value = self._parse_shape(value_shape, value)
+ parsed[actual_key] = actual_value
+ return parsed
+
+ def _handle_blob(self, shape, value):
+ return self._blob_parser(value)
+
+ def _handle_timestamp(self, shape, value):
+ return self._timestamp_parser(value)
+
+ def _do_error_parse(self, response, shape):
+ body = self._parse_body_as_json(response['body'])
+ error = {"Error": {"Message": '', "Code": ''}, "ResponseMetadata": {}}
+ # Error responses can have slightly different structures for json.
+ # The basic structure is:
+ #
+ # {"__type":"ConnectClientException",
+ # "message":"The error message."}
+
+ # The error message can either come in the 'message' or 'Message' key
+ # so we need to check for both.
+ error['Error']['Message'] = body.get('message',
+ body.get('Message', ''))
+ # if the message did not contain an error code
+ # include the response status code
+ response_code = response.get('status_code')
+ code = body.get('__type', response_code and str(response_code))
+ if code is not None:
+ # code has a couple forms as well:
+ # * "com.aws.dynamodb.vAPI#ProvisionedThroughputExceededException"
+ # * "ResourceNotFoundException"
+ if '#' in code:
+ code = code.rsplit('#', 1)[1]
+ error['Error']['Code'] = code
+ self._inject_response_metadata(error, response['headers'])
+ return error
+
+ def _inject_response_metadata(self, parsed, headers):
+ if 'x-amzn-requestid' in headers:
+ parsed.setdefault('ResponseMetadata', {})['RequestId'] = (
+ headers['x-amzn-requestid'])
+
+ def _parse_body_as_json(self, body_contents):
+ if not body_contents:
+ return {}
+ body = body_contents.decode(self.DEFAULT_ENCODING)
+ try:
+ original_parsed = json.loads(body)
+ return original_parsed
+ except ValueError:
+ # if the body cannot be parsed, include
+ # the literal string as the message
+ return {'message': body}
+
+
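# A minimal sketch of the '#'-stripping applied to '__type' error codes in
# _do_error_parse above; the code string is hypothetical.
code = 'com.example.service#ResourceNotFoundException'
if '#' in code:
    code = code.rsplit('#', 1)[1]
assert code == 'ResourceNotFoundException'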
class BaseEventStreamParser(ResponseParser):
def _do_parse(self, response, shape):
@@ -765,22 +765,22 @@ class EventStreamXMLParser(BaseEventStreamParser, BaseXMLResponseParser):
return self._parse_xml_string_to_dom(xml_string)
-class JSONParser(BaseJSONParser):
+class JSONParser(BaseJSONParser):
EVENT_STREAM_PARSER_CLS = EventStreamJSONParser
"""Response parser for the "json" protocol."""
- def _do_parse(self, response, shape):
- parsed = {}
- if shape is not None:
+ def _do_parse(self, response, shape):
+ parsed = {}
+ if shape is not None:
event_name = shape.event_stream_name
if event_name:
parsed = self._handle_event_stream(response, shape, event_name)
else:
parsed = self._handle_json_body(response['body'], shape)
- self._inject_response_metadata(parsed, response['headers'])
- return parsed
-
+ self._inject_response_metadata(parsed, response['headers'])
+ return parsed
+
def _do_modeled_error_parse(self, response, shape):
return self._handle_json_body(response['body'], shape)
@@ -795,7 +795,7 @@ class JSONParser(BaseJSONParser):
parsed = self._handle_json_body(event.payload, shape)
parsed[event_name] = event_stream
return parsed
-
+
def _handle_json_body(self, raw_body, shape):
# The json.loads() gives us the primitive JSON types,
# but we need to traverse the parsed JSON data to convert
@@ -804,221 +804,221 @@ class JSONParser(BaseJSONParser):
return self._parse_shape(shape, parsed_json)
-class BaseRestParser(ResponseParser):
-
- def _do_parse(self, response, shape):
- final_parsed = {}
- final_parsed['ResponseMetadata'] = self._populate_response_metadata(
- response)
+class BaseRestParser(ResponseParser):
+
+ def _do_parse(self, response, shape):
+ final_parsed = {}
+ final_parsed['ResponseMetadata'] = self._populate_response_metadata(
+ response)
self._add_modeled_parse(response, shape, final_parsed)
return final_parsed
def _add_modeled_parse(self, response, shape, final_parsed):
- if shape is None:
- return final_parsed
- member_shapes = shape.members
- self._parse_non_payload_attrs(response, shape,
- member_shapes, final_parsed)
- self._parse_payload(response, shape, member_shapes, final_parsed)
+ if shape is None:
+ return final_parsed
+ member_shapes = shape.members
+ self._parse_non_payload_attrs(response, shape,
+ member_shapes, final_parsed)
+ self._parse_payload(response, shape, member_shapes, final_parsed)
def _do_modeled_error_parse(self, response, shape):
final_parsed = {}
self._add_modeled_parse(response, shape, final_parsed)
- return final_parsed
-
- def _populate_response_metadata(self, response):
- metadata = {}
- headers = response['headers']
- if 'x-amzn-requestid' in headers:
- metadata['RequestId'] = headers['x-amzn-requestid']
- elif 'x-amz-request-id' in headers:
- metadata['RequestId'] = headers['x-amz-request-id']
+ return final_parsed
+
+ def _populate_response_metadata(self, response):
+ metadata = {}
+ headers = response['headers']
+ if 'x-amzn-requestid' in headers:
+ metadata['RequestId'] = headers['x-amzn-requestid']
+ elif 'x-amz-request-id' in headers:
+ metadata['RequestId'] = headers['x-amz-request-id']
# HostId is what it's called whenever this value is returned
- # in an XML response body, so to be consistent, we'll always
- # call it HostId.
- metadata['HostId'] = headers.get('x-amz-id-2', '')
- return metadata
-
- def _parse_payload(self, response, shape, member_shapes, final_parsed):
- if 'payload' in shape.serialization:
- # If a payload is specified in the output shape, then only that
- # shape is used for the body payload.
- payload_member_name = shape.serialization['payload']
- body_shape = member_shapes[payload_member_name]
+ # in an XML response body, so to be consistent, we'll always
+ # call it HostId.
+ metadata['HostId'] = headers.get('x-amz-id-2', '')
+ return metadata
+
+ def _parse_payload(self, response, shape, member_shapes, final_parsed):
+ if 'payload' in shape.serialization:
+ # If a payload is specified in the output shape, then only that
+ # shape is used for the body payload.
+ payload_member_name = shape.serialization['payload']
+ body_shape = member_shapes[payload_member_name]
if body_shape.serialization.get('eventstream'):
body = self._create_event_stream(response, body_shape)
final_parsed[payload_member_name] = body
elif body_shape.type_name in ['string', 'blob']:
- # This is a stream
- body = response['body']
- if isinstance(body, bytes):
- body = body.decode(self.DEFAULT_ENCODING)
- final_parsed[payload_member_name] = body
- else:
- original_parsed = self._initial_body_parse(response['body'])
- final_parsed[payload_member_name] = self._parse_shape(
- body_shape, original_parsed)
- else:
- original_parsed = self._initial_body_parse(response['body'])
- body_parsed = self._parse_shape(shape, original_parsed)
- final_parsed.update(body_parsed)
-
- def _parse_non_payload_attrs(self, response, shape,
- member_shapes, final_parsed):
- headers = response['headers']
- for name in member_shapes:
- member_shape = member_shapes[name]
- location = member_shape.serialization.get('location')
- if location is None:
- continue
- elif location == 'statusCode':
- final_parsed[name] = self._parse_shape(
- member_shape, response['status_code'])
- elif location == 'headers':
- final_parsed[name] = self._parse_header_map(member_shape,
- headers)
- elif location == 'header':
- header_name = member_shape.serialization.get('name', name)
- if header_name in headers:
- final_parsed[name] = self._parse_shape(
- member_shape, headers[header_name])
-
- def _parse_header_map(self, shape, headers):
- # Note that headers are case insensitive, so we .lower()
- # all header names and header prefixes.
- parsed = {}
- prefix = shape.serialization.get('name', '').lower()
- for header_name in headers:
- if header_name.lower().startswith(prefix):
- # The key name inserted into the parsed hash
- # strips off the prefix.
- name = header_name[len(prefix):]
- parsed[name] = headers[header_name]
- return parsed
-
- def _initial_body_parse(self, body_contents):
- # This method should do the initial xml/json parsing of the
- # body. We still need to walk the parsed body in order
- # to convert types, but this method will do the first round
- # of parsing.
- raise NotImplementedError("_initial_body_parse")
-
- def _handle_string(self, shape, value):
- parsed = value
- if is_json_value_header(shape):
- decoded = base64.b64decode(value).decode(self.DEFAULT_ENCODING)
- parsed = json.loads(decoded)
- return parsed
-
-
-class RestJSONParser(BaseRestParser, BaseJSONParser):
-
+ # This is a stream
+ body = response['body']
+ if isinstance(body, bytes):
+ body = body.decode(self.DEFAULT_ENCODING)
+ final_parsed[payload_member_name] = body
+ else:
+ original_parsed = self._initial_body_parse(response['body'])
+ final_parsed[payload_member_name] = self._parse_shape(
+ body_shape, original_parsed)
+ else:
+ original_parsed = self._initial_body_parse(response['body'])
+ body_parsed = self._parse_shape(shape, original_parsed)
+ final_parsed.update(body_parsed)
+
+ def _parse_non_payload_attrs(self, response, shape,
+ member_shapes, final_parsed):
+ headers = response['headers']
+ for name in member_shapes:
+ member_shape = member_shapes[name]
+ location = member_shape.serialization.get('location')
+ if location is None:
+ continue
+ elif location == 'statusCode':
+ final_parsed[name] = self._parse_shape(
+ member_shape, response['status_code'])
+ elif location == 'headers':
+ final_parsed[name] = self._parse_header_map(member_shape,
+ headers)
+ elif location == 'header':
+ header_name = member_shape.serialization.get('name', name)
+ if header_name in headers:
+ final_parsed[name] = self._parse_shape(
+ member_shape, headers[header_name])
+
+ def _parse_header_map(self, shape, headers):
+ # Note that headers are case insensitive, so we .lower()
+ # all header names and header prefixes.
+ parsed = {}
+ prefix = shape.serialization.get('name', '').lower()
+ for header_name in headers:
+ if header_name.lower().startswith(prefix):
+ # The key name inserted into the parsed hash
+ # strips off the prefix.
+ name = header_name[len(prefix):]
+ parsed[name] = headers[header_name]
+ return parsed
+
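# Sketch of the prefix handling above: with a serialization name of
# 'x-amz-meta-', the header 'x-amz-meta-color: blue' parses to
# {'color': 'blue'}; the startswith() comparison is effectively
# case-insensitive because both prefix and header name are lowercased.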
+ def _initial_body_parse(self, body_contents):
+ # This method should do the initial xml/json parsing of the
+ # body. We still need to walk the parsed body in order
+ # to convert types, but this method will do the first round
+ # of parsing.
+ raise NotImplementedError("_initial_body_parse")
+
+ def _handle_string(self, shape, value):
+ parsed = value
+ if is_json_value_header(shape):
+ decoded = base64.b64decode(value).decode(self.DEFAULT_ENCODING)
+ parsed = json.loads(decoded)
+ return parsed
+
+
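# A minimal sketch of the json-value header round trip handled by
# _handle_string when is_json_value_header(shape) is true: the header
# value is base64-encoded JSON text.
import base64
import json

header_value = base64.b64encode(json.dumps({'a': 1}).encode('utf-8'))
decoded = base64.b64decode(header_value).decode('utf-8')
assert json.loads(decoded) == {'a': 1}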
+class RestJSONParser(BaseRestParser, BaseJSONParser):
+
EVENT_STREAM_PARSER_CLS = EventStreamJSONParser
- def _initial_body_parse(self, body_contents):
- return self._parse_body_as_json(body_contents)
-
- def _do_error_parse(self, response, shape):
- error = super(RestJSONParser, self)._do_error_parse(response, shape)
- self._inject_error_code(error, response)
- return error
-
- def _inject_error_code(self, error, response):
- # The "Code" value can come from either a response
- # header or a value in the JSON body.
- body = self._initial_body_parse(response['body'])
- if 'x-amzn-errortype' in response['headers']:
- code = response['headers']['x-amzn-errortype']
- # Could be:
- # x-amzn-errortype: ValidationException:
- code = code.split(':')[0]
- error['Error']['Code'] = code
- elif 'code' in body or 'Code' in body:
- error['Error']['Code'] = body.get(
- 'code', body.get('Code', ''))
-
-
-class RestXMLParser(BaseRestParser, BaseXMLResponseParser):
-
+ def _initial_body_parse(self, body_contents):
+ return self._parse_body_as_json(body_contents)
+
+ def _do_error_parse(self, response, shape):
+ error = super(RestJSONParser, self)._do_error_parse(response, shape)
+ self._inject_error_code(error, response)
+ return error
+
+ def _inject_error_code(self, error, response):
+ # The "Code" value can come from either a response
+ # header or a value in the JSON body.
+ body = self._initial_body_parse(response['body'])
+ if 'x-amzn-errortype' in response['headers']:
+ code = response['headers']['x-amzn-errortype']
+ # Could be:
+ # x-amzn-errortype: ValidationException:
+ code = code.split(':')[0]
+ error['Error']['Code'] = code
+ elif 'code' in body or 'Code' in body:
+ error['Error']['Code'] = body.get(
+ 'code', body.get('Code', ''))
+
+
+class RestXMLParser(BaseRestParser, BaseXMLResponseParser):
+
EVENT_STREAM_PARSER_CLS = EventStreamXMLParser
- def _initial_body_parse(self, xml_string):
- if not xml_string:
+ def _initial_body_parse(self, xml_string):
+ if not xml_string:
return ETree.Element('')
- return self._parse_xml_string_to_dom(xml_string)
-
- def _do_error_parse(self, response, shape):
- # We're trying to be service agnostic here, but S3 does have a slightly
- # different response structure for its errors compared to other
- # rest-xml services (route53/cloudfront). We handle this by just
- # trying to parse both forms.
- # First:
- # <ErrorResponse xmlns="...">
- # <Error>
- # <Type>Sender</Type>
- # <Code>InvalidInput</Code>
- # <Message>Invalid resource type: foo</Message>
- # </Error>
- # <RequestId>request-id</RequestId>
- # </ErrorResponse>
- if response['body']:
- # If the body ends up being invalid xml, the xml parser should not
- # blow up. It should at least try to pull information about the
- # error response from other sources like the HTTP status code.
- try:
- return self._parse_error_from_body(response)
- except ResponseParserError as e:
- LOG.debug(
- 'Exception caught when parsing error response body:',
- exc_info=True)
- return self._parse_error_from_http_status(response)
-
- def _parse_error_from_http_status(self, response):
- return {
- 'Error': {
- 'Code': str(response['status_code']),
- 'Message': six.moves.http_client.responses.get(
- response['status_code'], ''),
- },
- 'ResponseMetadata': {
- 'RequestId': response['headers'].get('x-amz-request-id', ''),
- 'HostId': response['headers'].get('x-amz-id-2', ''),
- }
- }
-
- def _parse_error_from_body(self, response):
- xml_contents = response['body']
- root = self._parse_xml_string_to_dom(xml_contents)
- parsed = self._build_name_to_xml_node(root)
- self._replace_nodes(parsed)
- if root.tag == 'Error':
- # This is an S3 error response. First we'll populate the
- # response metadata.
- metadata = self._populate_response_metadata(response)
- # The RequestId and the HostId are already in the
- # ResponseMetadata, but are also duplicated in the XML
- # body. We don't need these values in both places,
- # so we'll just remove them from the parsed XML body.
- parsed.pop('RequestId', '')
- parsed.pop('HostId', '')
- return {'Error': parsed, 'ResponseMetadata': metadata}
- elif 'RequestId' in parsed:
- # Other rest-xml services:
- parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')}
- default = {'Error': {'Message': '', 'Code': ''}}
- merge_dicts(default, parsed)
- return default
-
- @_text_content
- def _handle_string(self, shape, text):
- text = super(RestXMLParser, self)._handle_string(shape, text)
- return text
-
-
-PROTOCOL_PARSERS = {
- 'ec2': EC2QueryParser,
- 'query': QueryParser,
- 'json': JSONParser,
- 'rest-json': RestJSONParser,
- 'rest-xml': RestXMLParser,
-}
+ return self._parse_xml_string_to_dom(xml_string)
+
+ def _do_error_parse(self, response, shape):
+ # We're trying to be service agnostic here, but S3 does have a slightly
+ # different response structure for its errors compared to other
+ # rest-xml services (route53/cloudfront). We handle this by just
+ # trying to parse both forms.
+ # First:
+ # <ErrorResponse xmlns="...">
+ # <Error>
+ # <Type>Sender</Type>
+ # <Code>InvalidInput</Code>
+ # <Message>Invalid resource type: foo</Message>
+ # </Error>
+ # <RequestId>request-id</RequestId>
+ # </ErrorResponse>
+ if response['body']:
+ # If the body ends up being invalid xml, the xml parser should not
+ # blow up. It should at least try to pull information about the
+ # error response from other sources like the HTTP status code.
+ try:
+ return self._parse_error_from_body(response)
+ except ResponseParserError as e:
+ LOG.debug(
+ 'Exception caught when parsing error response body:',
+ exc_info=True)
+ return self._parse_error_from_http_status(response)
+
+ def _parse_error_from_http_status(self, response):
+ return {
+ 'Error': {
+ 'Code': str(response['status_code']),
+ 'Message': six.moves.http_client.responses.get(
+ response['status_code'], ''),
+ },
+ 'ResponseMetadata': {
+ 'RequestId': response['headers'].get('x-amz-request-id', ''),
+ 'HostId': response['headers'].get('x-amz-id-2', ''),
+ }
+ }
+
+ def _parse_error_from_body(self, response):
+ xml_contents = response['body']
+ root = self._parse_xml_string_to_dom(xml_contents)
+ parsed = self._build_name_to_xml_node(root)
+ self._replace_nodes(parsed)
+ if root.tag == 'Error':
+ # This is an S3 error response. First we'll populate the
+ # response metadata.
+ metadata = self._populate_response_metadata(response)
+ # The RequestId and the HostId are already in the
+ # ResponseMetadata, but are also duplicated in the XML
+ # body. We don't need these values in both places,
+ # so we'll just remove them from the parsed XML body.
+ parsed.pop('RequestId', '')
+ parsed.pop('HostId', '')
+ return {'Error': parsed, 'ResponseMetadata': metadata}
+ elif 'RequestId' in parsed:
+ # Other rest-xml services:
+ parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')}
+ default = {'Error': {'Message': '', 'Code': ''}}
+ merge_dicts(default, parsed)
+ return default
+
+ @_text_content
+ def _handle_string(self, shape, text):
+ text = super(RestXMLParser, self)._handle_string(shape, text)
+ return text
+
+
+PROTOCOL_PARSERS = {
+ 'ec2': EC2QueryParser,
+ 'query': QueryParser,
+ 'json': JSONParser,
+ 'rest-json': RestJSONParser,
+ 'rest-xml': RestXMLParser,
+}
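In the rest-json error path above, the x-amzn-errortype header takes precedence and anything after the first colon is stripped; only when the header is absent does the parser fall back to a "code"/"Code" key in the parsed body. A standalone sketch of that precedence rule (plain Python mirroring _inject_error_code, not a call into botocore):

    def error_code(headers, body):
        # The header wins; "ValidationException:" becomes "ValidationException".
        if 'x-amzn-errortype' in headers:
            return headers['x-amzn-errortype'].split(':')[0]
        # Otherwise accept either casing from the JSON body.
        return body.get('code', body.get('Code', ''))

    assert error_code({'x-amzn-errortype': 'ValidationException:'}, {}) == 'ValidationException'
    assert error_code({}, {'Code': 'Throttling'}) == 'Throttling'
    assert error_code({}, {}) == ''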
diff --git a/contrib/python/botocore/botocore/regions.py b/contrib/python/botocore/botocore/regions.py
index 3261c53ea7..bb866d4e08 100644
--- a/contrib/python/botocore/botocore/regions.py
+++ b/contrib/python/botocore/botocore/regions.py
@@ -1,120 +1,120 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""Resolves regions and endpoints.
-
-This module implements endpoint resolution, including resolving endpoints for a
-given service and region and resolving the available endpoints for a service
-in a specific AWS partition.
-"""
-import logging
-import re
-
-from botocore.exceptions import NoRegionError
-
-LOG = logging.getLogger(__name__)
-DEFAULT_URI_TEMPLATE = '{service}.{region}.{dnsSuffix}'
-DEFAULT_SERVICE_DATA = {'endpoints': {}}
-
-
-class BaseEndpointResolver(object):
- """Resolves regions and endpoints. Must be subclassed."""
- def construct_endpoint(self, service_name, region_name=None):
- """Resolves an endpoint for a service and region combination.
-
- :type service_name: string
- :param service_name: Name of the service to resolve an endpoint for
- (e.g., s3)
-
- :type region_name: string
- :param region_name: Region/endpoint name to resolve (e.g., us-east-1).
- If no region is provided, the first found partition-wide endpoint
- will be used if available.
-
- :rtype: dict
- :return: Returns a dict containing the following keys:
- - partition: (string, required) Resolved partition name
- - endpointName: (string, required) Resolved endpoint name
- - hostname: (string, required) Hostname to use for this endpoint
- - sslCommonName: (string) sslCommonName to use for this endpoint.
- - credentialScope: (dict) Signature version 4 credential scope
- - region: (string) region name override when signing.
- - service: (string) service name override when signing.
- - signatureVersions: (list<string>) A list of possible signature
- versions, including s3, v4, v2, and s3v4
- - protocols: (list<string>) A list of supported protocols
- (e.g., http, https)
- - ...: Other keys may be included as well based on the metadata
- """
- raise NotImplementedError
-
- def get_available_partitions(self):
- """Lists the partitions available to the endpoint resolver.
-
- :return: Returns a list of partition names (e.g., ["aws", "aws-cn"]).
- """
- raise NotImplementedError
-
- def get_available_endpoints(self, service_name, partition_name='aws',
- allow_non_regional=False):
- """Lists the endpoint names of a particular partition.
-
- :type service_name: string
- :param service_name: Name of a service to list endpoints for (e.g., s3)
-
- :type partition_name: string
- :param partition_name: Name of the partition to limit endpoints to.
- (e.g., aws for the public AWS endpoints, aws-cn for AWS China
- endpoints, aws-us-gov for AWS GovCloud (US) endpoints, etc.)
-
- :type allow_non_regional: bool
- :param allow_non_regional: Set to True to include endpoints that are
- not regional endpoints (e.g., s3-external-1,
- fips-us-gov-west-1, etc).
- :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
- """
- raise NotImplementedError
-
-
-class EndpointResolver(BaseEndpointResolver):
- """Resolves endpoints based on partition endpoint metadata"""
- def __init__(self, endpoint_data):
- """
- :param endpoint_data: A dict of partition data.
- """
- if 'partitions' not in endpoint_data:
- raise ValueError('Missing "partitions" in endpoint data')
- self._endpoint_data = endpoint_data
-
- def get_available_partitions(self):
- result = []
- for partition in self._endpoint_data['partitions']:
- result.append(partition['partition'])
- return result
-
- def get_available_endpoints(self, service_name, partition_name='aws',
- allow_non_regional=False):
- result = []
- for partition in self._endpoint_data['partitions']:
- if partition['partition'] != partition_name:
- continue
- services = partition['services']
- if service_name not in services:
- continue
- for endpoint_name in services[service_name]['endpoints']:
- if allow_non_regional or endpoint_name in partition['regions']:
- result.append(endpoint_name)
- return result
-
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Resolves regions and endpoints.
+
+This module implements endpoint resolution, including resolving endpoints for a
+given service and region and resolving the available endpoints for a service
+in a specific AWS partition.
+"""
+import logging
+import re
+
+from botocore.exceptions import NoRegionError
+
+LOG = logging.getLogger(__name__)
+DEFAULT_URI_TEMPLATE = '{service}.{region}.{dnsSuffix}'
+DEFAULT_SERVICE_DATA = {'endpoints': {}}
+
+
+class BaseEndpointResolver(object):
+ """Resolves regions and endpoints. Must be subclassed."""
+ def construct_endpoint(self, service_name, region_name=None):
+ """Resolves an endpoint for a service and region combination.
+
+ :type service_name: string
+ :param service_name: Name of the service to resolve an endpoint for
+ (e.g., s3)
+
+ :type region_name: string
+ :param region_name: Region/endpoint name to resolve (e.g., us-east-1).
+ If no region is provided, the first found partition-wide endpoint
+ will be used if available.
+
+ :rtype: dict
+ :return: Returns a dict containing the following keys:
+ - partition: (string, required) Resolved partition name
+ - endpointName: (string, required) Resolved endpoint name
+ - hostname: (string, required) Hostname to use for this endpoint
+ - sslCommonName: (string) sslCommonName to use for this endpoint.
+ - credentialScope: (dict) Signature version 4 credential scope
+ - region: (string) region name override when signing.
+ - service: (string) service name override when signing.
+ - signatureVersions: (list<string>) A list of possible signature
+ versions, including s3, v4, v2, and s3v4
+ - protocols: (list<string>) A list of supported protocols
+ (e.g., http, https)
+ - ...: Other keys may be included as well based on the metadata
+ """
+ raise NotImplementedError
+
+ def get_available_partitions(self):
+ """Lists the partitions available to the endpoint resolver.
+
+ :return: Returns a list of partition names (e.g., ["aws", "aws-cn"]).
+ """
+ raise NotImplementedError
+
+ def get_available_endpoints(self, service_name, partition_name='aws',
+ allow_non_regional=False):
+ """Lists the endpoint names of a particular partition.
+
+ :type service_name: string
+ :param service_name: Name of a service to list endpoints for (e.g., s3)
+
+ :type partition_name: string
+ :param partition_name: Name of the partition to limit endpoints to.
+ (e.g., aws for the public AWS endpoints, aws-cn for AWS China
+ endpoints, aws-us-gov for AWS GovCloud (US) endpoints, etc.)
+
+ :type allow_non_regional: bool
+ :param allow_non_regional: Set to True to include endpoints that are
+ not regional endpoints (e.g., s3-external-1,
+ fips-us-gov-west-1, etc).
+ :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
+ """
+ raise NotImplementedError
+
+
+class EndpointResolver(BaseEndpointResolver):
+ """Resolves endpoints based on partition endpoint metadata"""
+ def __init__(self, endpoint_data):
+ """
+ :param endpoint_data: A dict of partition data.
+ """
+ if 'partitions' not in endpoint_data:
+ raise ValueError('Missing "partitions" in endpoint data')
+ self._endpoint_data = endpoint_data
+
+ def get_available_partitions(self):
+ result = []
+ for partition in self._endpoint_data['partitions']:
+ result.append(partition['partition'])
+ return result
+
+ def get_available_endpoints(self, service_name, partition_name='aws',
+ allow_non_regional=False):
+ result = []
+ for partition in self._endpoint_data['partitions']:
+ if partition['partition'] != partition_name:
+ continue
+ services = partition['services']
+ if service_name not in services:
+ continue
+ for endpoint_name in services[service_name]['endpoints']:
+ if allow_non_regional or endpoint_name in partition['regions']:
+ result.append(endpoint_name)
+ return result
+
def construct_endpoint(self, service_name, region_name=None, partition_name=None):
if partition_name is not None:
valid_partition = None
@@ -128,74 +128,74 @@ class EndpointResolver(BaseEndpointResolver):
return result
return None
- # Iterate over each partition until a match is found.
- for partition in self._endpoint_data['partitions']:
- result = self._endpoint_for_partition(
- partition, service_name, region_name)
- if result:
- return result
-
+ # Iterate over each partition until a match is found.
+ for partition in self._endpoint_data['partitions']:
+ result = self._endpoint_for_partition(
+ partition, service_name, region_name)
+ if result:
+ return result
+
def _endpoint_for_partition(self, partition, service_name, region_name,
force_partition=False):
- # Get the service from the partition, or an empty template.
- service_data = partition['services'].get(
- service_name, DEFAULT_SERVICE_DATA)
- # Use the partition endpoint if no region is supplied.
- if region_name is None:
- if 'partitionEndpoint' in service_data:
- region_name = service_data['partitionEndpoint']
- else:
- raise NoRegionError()
- # Attempt to resolve the exact region for this partition.
- if region_name in service_data['endpoints']:
- return self._resolve(
- partition, service_name, service_data, region_name)
- # Check to see if the endpoint provided is valid for the partition.
+ # Get the service from the partition, or an empty template.
+ service_data = partition['services'].get(
+ service_name, DEFAULT_SERVICE_DATA)
+ # Use the partition endpoint if no region is supplied.
+ if region_name is None:
+ if 'partitionEndpoint' in service_data:
+ region_name = service_data['partitionEndpoint']
+ else:
+ raise NoRegionError()
+ # Attempt to resolve the exact region for this partition.
+ if region_name in service_data['endpoints']:
+ return self._resolve(
+ partition, service_name, service_data, region_name)
+ # Check to see if the endpoint provided is valid for the partition.
if self._region_match(partition, region_name) or force_partition:
- # Use the partition endpoint if set and not regionalized.
- partition_endpoint = service_data.get('partitionEndpoint')
- is_regionalized = service_data.get('isRegionalized', True)
- if partition_endpoint and not is_regionalized:
- LOG.debug('Using partition endpoint for %s, %s: %s',
- service_name, region_name, partition_endpoint)
- return self._resolve(
- partition, service_name, service_data, partition_endpoint)
- LOG.debug('Creating a regex based endpoint for %s, %s',
- service_name, region_name)
- return self._resolve(
- partition, service_name, service_data, region_name)
-
- def _region_match(self, partition, region_name):
- if region_name in partition['regions']:
- return True
- if 'regionRegex' in partition:
- return re.compile(partition['regionRegex']).match(region_name)
- return False
-
- def _resolve(self, partition, service_name, service_data, endpoint_name):
- result = service_data['endpoints'].get(endpoint_name, {})
- result['partition'] = partition['partition']
- result['endpointName'] = endpoint_name
- # Merge in the service defaults then the partition defaults.
- self._merge_keys(service_data.get('defaults', {}), result)
- self._merge_keys(partition.get('defaults', {}), result)
- hostname = result.get('hostname', DEFAULT_URI_TEMPLATE)
- result['hostname'] = self._expand_template(
- partition, hostname, service_name, endpoint_name)
- if 'sslCommonName' in result:
- result['sslCommonName'] = self._expand_template(
- partition, result['sslCommonName'], service_name,
- endpoint_name)
- result['dnsSuffix'] = partition['dnsSuffix']
- return result
-
- def _merge_keys(self, from_data, result):
- for key in from_data:
- if key not in result:
- result[key] = from_data[key]
-
- def _expand_template(self, partition, template, service_name,
- endpoint_name):
- return template.format(
- service=service_name, region=endpoint_name,
- dnsSuffix=partition['dnsSuffix'])
+ # Use the partition endpoint if set and not regionalized.
+ partition_endpoint = service_data.get('partitionEndpoint')
+ is_regionalized = service_data.get('isRegionalized', True)
+ if partition_endpoint and not is_regionalized:
+ LOG.debug('Using partition endpoint for %s, %s: %s',
+ service_name, region_name, partition_endpoint)
+ return self._resolve(
+ partition, service_name, service_data, partition_endpoint)
+ LOG.debug('Creating a regex based endpoint for %s, %s',
+ service_name, region_name)
+ return self._resolve(
+ partition, service_name, service_data, region_name)
+
+ def _region_match(self, partition, region_name):
+ if region_name in partition['regions']:
+ return True
+ if 'regionRegex' in partition:
+ return re.compile(partition['regionRegex']).match(region_name)
+ return False
+
+ def _resolve(self, partition, service_name, service_data, endpoint_name):
+ result = service_data['endpoints'].get(endpoint_name, {})
+ result['partition'] = partition['partition']
+ result['endpointName'] = endpoint_name
+ # Merge in the service defaults then the partition defaults.
+ self._merge_keys(service_data.get('defaults', {}), result)
+ self._merge_keys(partition.get('defaults', {}), result)
+ hostname = result.get('hostname', DEFAULT_URI_TEMPLATE)
+ result['hostname'] = self._expand_template(
+ partition, hostname, service_name, endpoint_name)
+ if 'sslCommonName' in result:
+ result['sslCommonName'] = self._expand_template(
+ partition, result['sslCommonName'], service_name,
+ endpoint_name)
+ result['dnsSuffix'] = partition['dnsSuffix']
+ return result
+
+ def _merge_keys(self, from_data, result):
+ for key in from_data:
+ if key not in result:
+ result[key] = from_data[key]
+
+ def _expand_template(self, partition, template, service_name,
+ endpoint_name):
+ return template.format(
+ service=service_name, region=endpoint_name,
+ dnsSuffix=partition['dnsSuffix'])
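Resolution, in short: walk the partitions, use an exact endpoint entry when the region is listed for the service, otherwise fall back to the regionRegex/partitionEndpoint handling, then expand the {service}.{region}.{dnsSuffix} template. A runnable sketch against hypothetical partition metadata (the dict mirrors only the keys the resolver reads; the values are made up):

    from botocore.regions import EndpointResolver

    endpoint_data = {
        'partitions': [{
            'partition': 'aws',
            'dnsSuffix': 'amazonaws.com',
            'regionRegex': r'^(us|eu)-\w+-\d+$',
            'regions': {'us-east-1': {}},
            'defaults': {'hostname': '{service}.{region}.{dnsSuffix}',
                         'protocols': ['https']},
            'services': {'s3': {'endpoints': {'us-east-1': {}}}},
        }]
    }
    resolver = EndpointResolver(endpoint_data)
    endpoint = resolver.construct_endpoint('s3', 'us-east-1')
    print(endpoint['hostname'])                 # s3.us-east-1.amazonaws.com
    print(resolver.get_available_partitions())  # ['aws']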
diff --git a/contrib/python/botocore/botocore/response.py b/contrib/python/botocore/botocore/response.py
index 7021f20aa6..0a6b326648 100644
--- a/contrib/python/botocore/botocore/response.py
+++ b/contrib/python/botocore/botocore/response.py
@@ -1,91 +1,91 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import sys
-import logging
-
-from botocore import ScalarTypes
-from botocore.hooks import first_non_none_response
-from botocore.compat import json, set_socket_timeout, XMLParseError
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import sys
+import logging
+
+from botocore import ScalarTypes
+from botocore.hooks import first_non_none_response
+from botocore.compat import json, set_socket_timeout, XMLParseError
from botocore.exceptions import IncompleteReadError, ReadTimeoutError
from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError
-from botocore import parsers
-
-
-logger = logging.getLogger(__name__)
-
-
-class StreamingBody(object):
- """Wrapper class for an http response body.
-
- This provides a few additional conveniences that do not exist
- in the urllib3 model:
-
- * Set the timeout on the socket (i.e. read() timeouts)
- * Auto validation of content length: if the number of bytes
- we read does not match the content length, an exception
- is raised.
-
- """
+from botocore import parsers
+
+
+logger = logging.getLogger(__name__)
+
+
+class StreamingBody(object):
+ """Wrapper class for an http response body.
+
+ This provides a few additional conveniences that do not exist
+ in the urllib3 model:
+
+ * Set the timeout on the socket (i.e. read() timeouts)
+ * Auto validation of content length: if the number of bytes
+ we read does not match the content length, an exception
+ is raised.
+
+ """
_DEFAULT_CHUNK_SIZE = 1024
- def __init__(self, raw_stream, content_length):
- self._raw_stream = raw_stream
- self._content_length = content_length
- self._amount_read = 0
-
- def set_socket_timeout(self, timeout):
- """Set the timeout seconds on the socket."""
- # The problem we're trying to solve is to prevent .read() calls from
- # hanging. This can happen in rare cases. What we'd like to ideally
- # do is set a timeout on the .read() call so that callers can retry
- # the request.
- # Unfortunately, this isn't currently possible in requests.
- # See: https://github.com/kennethreitz/requests/issues/1803
- # So what we're going to do is reach into the guts of the stream and
- # grab the socket object, which we can set the timeout on. We're
- # putting in a check here so in case this interface goes away, we'll
- # know.
- try:
- # To further complicate things, the way to grab the
- # underlying socket object from an HTTPResponse is different
- # in py2 and py3. So this code has been pushed to botocore.compat.
- set_socket_timeout(self._raw_stream, timeout)
- except AttributeError:
- logger.error("Cannot access the socket object of "
- "a streaming response. It's possible "
- "the interface has changed.", exc_info=True)
- raise
-
- def read(self, amt=None):
- """Read at most amt bytes from the stream.
-
- If the amt argument is omitted, read all data.
- """
+ def __init__(self, raw_stream, content_length):
+ self._raw_stream = raw_stream
+ self._content_length = content_length
+ self._amount_read = 0
+
+ def set_socket_timeout(self, timeout):
+ """Set the timeout seconds on the socket."""
+ # The problem we're trying to solve is to prevent .read() calls from
+ # hanging. This can happen in rare cases. What we'd like to ideally
+ # do is set a timeout on the .read() call so that callers can retry
+ # the request.
+ # Unfortunately, this isn't currently possible in requests.
+ # See: https://github.com/kennethreitz/requests/issues/1803
+ # So what we're going to do is reach into the guts of the stream and
+ # grab the socket object, which we can set the timeout on. We're
+ # putting in a check here so in case this interface goes away, we'll
+ # know.
+ try:
+ # To further complicate things, the way to grab the
+ # underlying socket object from an HTTPResponse is different
+ # in py2 and py3. So this code has been pushed to botocore.compat.
+ set_socket_timeout(self._raw_stream, timeout)
+ except AttributeError:
+ logger.error("Cannot access the socket object of "
+ "a streaming response. It's possible "
+ "the interface has changed.", exc_info=True)
+ raise
+
+ def read(self, amt=None):
+ """Read at most amt bytes from the stream.
+
+ If the amt argument is omitted, read all data.
+ """
try:
chunk = self._raw_stream.read(amt)
except URLLib3ReadTimeoutError as e:
# TODO: the url will be None as urllib3 isn't setting it yet
raise ReadTimeoutError(endpoint_url=e.url, error=e)
- self._amount_read += len(chunk)
- if amt is None or (not chunk and amt > 0):
- # If the server sends empty contents or
- # we ask to read all of the contents, then we know
- # we need to verify the content length.
- self._verify_content_length()
- return chunk
-
+ self._amount_read += len(chunk)
+ if amt is None or (not chunk and amt > 0):
+ # If the server sends empty contents or
+ # we ask to read all of the contents, then we know
+ # we need to verify the content length.
+ self._verify_content_length()
+ return chunk
+
def __iter__(self):
"""Return an iterator to yield 1k chunks from the raw stream.
"""
@@ -126,38 +126,38 @@ class StreamingBody(object):
break
yield current_chunk
- def _verify_content_length(self):
- # See: https://github.com/kennethreitz/requests/issues/1855
- # Basically, our http library doesn't do this for us, so we have
- # to do this ourselves.
- if self._content_length is not None and \
- self._amount_read != int(self._content_length):
- raise IncompleteReadError(
- actual_bytes=self._amount_read,
- expected_bytes=int(self._content_length))
-
- def close(self):
- """Close the underlying http response stream."""
- self._raw_stream.close()
-
-
-def get_response(operation_model, http_response):
- protocol = operation_model.metadata['protocol']
- response_dict = {
- 'headers': http_response.headers,
- 'status_code': http_response.status_code,
- }
- # TODO: Unfortunately, we have to have error logic here.
- # If it looks like an error, in the streaming response case we
- # need to actually grab the contents.
- if response_dict['status_code'] >= 300:
- response_dict['body'] = http_response.content
- elif operation_model.has_streaming_output:
- response_dict['body'] = StreamingBody(
- http_response.raw, response_dict['headers'].get('content-length'))
- else:
- response_dict['body'] = http_response.content
-
- parser = parsers.create_parser(protocol)
- return http_response, parser.parse(response_dict,
- operation_model.output_shape)
+ def _verify_content_length(self):
+ # See: https://github.com/kennethreitz/requests/issues/1855
+ # Basically, our http library doesn't do this for us, so we have
+ # to do this ourselves.
+ if self._content_length is not None and \
+ self._amount_read != int(self._content_length):
+ raise IncompleteReadError(
+ actual_bytes=self._amount_read,
+ expected_bytes=int(self._content_length))
+
+ def close(self):
+ """Close the underlying http response stream."""
+ self._raw_stream.close()
+
+
+def get_response(operation_model, http_response):
+ protocol = operation_model.metadata['protocol']
+ response_dict = {
+ 'headers': http_response.headers,
+ 'status_code': http_response.status_code,
+ }
+ # TODO: Unfortunately, we have to have error logic here.
+ # If it looks like an error, in the streaming response case we
+ # need to actually grab the contents.
+ if response_dict['status_code'] >= 300:
+ response_dict['body'] = http_response.content
+ elif operation_model.has_streaming_output:
+ response_dict['body'] = StreamingBody(
+ http_response.raw, response_dict['headers'].get('content-length'))
+ else:
+ response_dict['body'] = http_response.content
+
+ parser = parsers.create_parser(protocol)
+ return http_response, parser.parse(response_dict,
+ operation_model.output_shape)
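Note that the content-length check only runs on a full read (amt is None) or once the stream is exhausted, so partial reads of a truncated body do not raise until the caller drains it. A small sketch using an in-memory stream in place of a real urllib3 response (io.BytesIO provides the read() interface StreamingBody needs; set_socket_timeout would not work on it):

    import io
    from botocore.response import StreamingBody
    from botocore.exceptions import IncompleteReadError

    body = StreamingBody(io.BytesIO(b'hello'), content_length=5)
    assert body.read() == b'hello'   # lengths match, no error

    short = StreamingBody(io.BytesIO(b'hel'), content_length=5)
    try:
        short.read()                 # drains the stream, then verifies length
    except IncompleteReadError as e:
        print(e)                     # reports 3 of 5 expected bytes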
diff --git a/contrib/python/botocore/botocore/retryhandler.py b/contrib/python/botocore/botocore/retryhandler.py
index 7c1a69d779..d7385b20ff 100644
--- a/contrib/python/botocore/botocore/retryhandler.py
+++ b/contrib/python/botocore/botocore/retryhandler.py
@@ -1,359 +1,359 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-import random
-import functools
-import logging
-from binascii import crc32
-
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import random
+import functools
+import logging
+from binascii import crc32
+
from botocore.exceptions import (
ChecksumError, EndpointConnectionError, ReadTimeoutError,
ConnectionError, ConnectionClosedError,
)
-
-
-logger = logging.getLogger(__name__)
-# The only supported error for now is GENERAL_CONNECTION_ERROR
-# which maps to requests' generic ConnectionError. If we're able
-# to get more specific exceptions from requests we can update
-# this mapping with more specific exceptions.
-EXCEPTION_MAP = {
- 'GENERAL_CONNECTION_ERROR': [
+
+
+logger = logging.getLogger(__name__)
+# The only supported error for now is GENERAL_CONNECTION_ERROR
+# which maps to requests' generic ConnectionError. If we're able
+# to get more specific exceptions from requests we can update
+# this mapping with more specific exceptions.
+EXCEPTION_MAP = {
+ 'GENERAL_CONNECTION_ERROR': [
ConnectionError, ConnectionClosedError, ReadTimeoutError,
- EndpointConnectionError
- ],
-}
-
-
-def delay_exponential(base, growth_factor, attempts):
- """Calculate time to sleep based on exponential function.
-
- The format is::
-
- base * growth_factor ^ (attempts - 1)
-
- If ``base`` is set to 'rand' then a random number between
- 0 and 1 will be used as the base.
- Base must be greater than 0; otherwise a ValueError will be
- raised.
-
- """
- if base == 'rand':
- base = random.random()
- elif base <= 0:
- raise ValueError("The 'base' param must be greater than 0, "
- "got: %s" % base)
- time_to_sleep = base * (growth_factor ** (attempts - 1))
- return time_to_sleep
-
-
-def create_exponential_delay_function(base, growth_factor):
- """Create an exponential delay function based on the attempts.
-
- This is used so that you only have to pass it the attempts
- parameter to calculate the delay.
-
- """
- return functools.partial(
- delay_exponential, base=base, growth_factor=growth_factor)
-
-
-def create_retry_handler(config, operation_name=None):
- checker = create_checker_from_retry_config(
- config, operation_name=operation_name)
- action = create_retry_action_from_config(
- config, operation_name=operation_name)
- return RetryHandler(checker=checker, action=action)
-
-
-def create_retry_action_from_config(config, operation_name=None):
- # The spec has the possibility of supporting per policy
- # actions, but right now, we assume this comes from the
- # default section, which means that delay functions apply
- # for every policy in the retry config (per service).
- delay_config = config['__default__']['delay']
- if delay_config['type'] == 'exponential':
- return create_exponential_delay_function(
- base=delay_config['base'],
- growth_factor=delay_config['growth_factor'])
-
-
-def create_checker_from_retry_config(config, operation_name=None):
- checkers = []
- max_attempts = None
- retryable_exceptions = []
- if '__default__' in config:
- policies = config['__default__'].get('policies', [])
- max_attempts = config['__default__']['max_attempts']
- for key in policies:
- current_config = policies[key]
- checkers.append(_create_single_checker(current_config))
- retry_exception = _extract_retryable_exception(current_config)
- if retry_exception is not None:
- retryable_exceptions.extend(retry_exception)
- if operation_name is not None and config.get(operation_name) is not None:
- operation_policies = config[operation_name]['policies']
- for key in operation_policies:
- checkers.append(_create_single_checker(operation_policies[key]))
- retry_exception = _extract_retryable_exception(
- operation_policies[key])
- if retry_exception is not None:
- retryable_exceptions.extend(retry_exception)
- if len(checkers) == 1:
- # Don't need to use a MultiChecker
- return MaxAttemptsDecorator(checkers[0], max_attempts=max_attempts)
- else:
- multi_checker = MultiChecker(checkers)
- return MaxAttemptsDecorator(
- multi_checker, max_attempts=max_attempts,
- retryable_exceptions=tuple(retryable_exceptions))
-
-
-def _create_single_checker(config):
- if 'response' in config['applies_when']:
- return _create_single_response_checker(
- config['applies_when']['response'])
- elif 'socket_errors' in config['applies_when']:
- return ExceptionRaiser()
-
-
-def _create_single_response_checker(response):
- if 'service_error_code' in response:
- checker = ServiceErrorCodeChecker(
- status_code=response['http_status_code'],
- error_code=response['service_error_code'])
- elif 'http_status_code' in response:
- checker = HTTPStatusCodeChecker(
- status_code=response['http_status_code'])
- elif 'crc32body' in response:
- checker = CRC32Checker(header=response['crc32body'])
- else:
- # TODO: send a signal.
- raise ValueError("Unknown retry policy: %s" % config)
- return checker
-
-
-def _extract_retryable_exception(config):
- applies_when = config['applies_when']
- if 'crc32body' in applies_when.get('response', {}):
- return [ChecksumError]
- elif 'socket_errors' in applies_when:
- exceptions = []
- for name in applies_when['socket_errors']:
- exceptions.extend(EXCEPTION_MAP[name])
- return exceptions
-
-
-class RetryHandler(object):
- """Retry handler.
-
- The retry handler takes two params, ``checker`` object
- and an ``action`` object.
-
- The ``checker`` object must be a callable that, based on a response
- and an attempt number, determines whether or not sufficient criteria
- for a retry have been met. If this is the case then the ``action`` object
- (which also is a callable) determines what needs to happen in the event
- of a retry.
-
- """
-
- def __init__(self, checker, action):
- self._checker = checker
- self._action = action
-
- def __call__(self, attempts, response, caught_exception, **kwargs):
- """Handler for a retry.
-
- Intended to be hooked up to an event handler (hence the **kwargs),
- this will process retries appropriately.
-
- """
- if self._checker(attempts, response, caught_exception):
- result = self._action(attempts=attempts)
- logger.debug("Retry needed, action of: %s", result)
- return result
- logger.debug("No retry needed.")
-
-
-class BaseChecker(object):
- """Base class for retry checkers.
-
- Each class is responsible for checking a single criterion that determines
- whether or not a retry should happen.
-
- """
- def __call__(self, attempt_number, response, caught_exception):
- """Determine if retry criteria matches.
-
- Note that either ``response`` is not None and ``caught_exception`` is
- None or ``response`` is None and ``caught_exception`` is not None.
-
- :type attempt_number: int
- :param attempt_number: The total number of times we've attempted
- to send the request.
-
- :param response: The HTTP response (if one was received).
-
- :type caught_exception: Exception
- :param caught_exception: Any exception that was caught while trying to
- send the HTTP request.
-
- :return: True, if the retry criteria matches (and therefore a retry
- should occur). False if the criteria does not match.
-
- """
- # The default implementation allows subclasses to not have to check
- # whether or not response is None.
- if response is not None:
- return self._check_response(attempt_number, response)
- elif caught_exception is not None:
- return self._check_caught_exception(
- attempt_number, caught_exception)
- else:
- raise ValueError("Both response and caught_exception are None.")
-
- def _check_response(self, attempt_number, response):
- pass
-
- def _check_caught_exception(self, attempt_number, caught_exception):
- pass
-
-
-class MaxAttemptsDecorator(BaseChecker):
- """Allow retries up to a maximum number of attempts.
-
- This will pass through calls to the decorated retry checker, provided
- that the number of attempts does not exceed max_attempts. It will
- also catch any retryable_exceptions passed in. Once max_attempts has
- been exceeded, then False will be returned or any retryable exception
- that was previously being caught will be raised.
-
- """
- def __init__(self, checker, max_attempts, retryable_exceptions=None):
- self._checker = checker
- self._max_attempts = max_attempts
- self._retryable_exceptions = retryable_exceptions
-
- def __call__(self, attempt_number, response, caught_exception):
- should_retry = self._should_retry(attempt_number, response,
- caught_exception)
- if should_retry:
- if attempt_number >= self._max_attempts:
- # explicitly set MaxAttemptsReached
- if response is not None and 'ResponseMetadata' in response[1]:
- response[1]['ResponseMetadata']['MaxAttemptsReached'] = True
- logger.debug("Reached the maximum number of retry "
- "attempts: %s", attempt_number)
- return False
- else:
- return should_retry
- else:
- return False
-
- def _should_retry(self, attempt_number, response, caught_exception):
- if self._retryable_exceptions and \
- attempt_number < self._max_attempts:
- try:
- return self._checker(attempt_number, response, caught_exception)
- except self._retryable_exceptions as e:
- logger.debug("retry needed, retryable exception caught: %s",
- e, exc_info=True)
- return True
- else:
- # If we've exceeded the max attempts we just let the exception
- # propagate if one has occurred.
- return self._checker(attempt_number, response, caught_exception)
-
-
-class HTTPStatusCodeChecker(BaseChecker):
- def __init__(self, status_code):
- self._status_code = status_code
-
- def _check_response(self, attempt_number, response):
- if response[0].status_code == self._status_code:
- logger.debug(
- "retry needed: retryable HTTP status code received: %s",
- self._status_code)
- return True
- else:
- return False
-
-
-class ServiceErrorCodeChecker(BaseChecker):
- def __init__(self, status_code, error_code):
- self._status_code = status_code
- self._error_code = error_code
-
- def _check_response(self, attempt_number, response):
- if response[0].status_code == self._status_code:
- actual_error_code = response[1].get('Error', {}).get('Code')
- if actual_error_code == self._error_code:
- logger.debug(
- "retry needed: matching HTTP status and error code seen: "
- "%s, %s", self._status_code, self._error_code)
- return True
- return False
-
-
-class MultiChecker(BaseChecker):
- def __init__(self, checkers):
- self._checkers = checkers
-
- def __call__(self, attempt_number, response, caught_exception):
- for checker in self._checkers:
- checker_response = checker(attempt_number, response,
- caught_exception)
- if checker_response:
- return checker_response
- return False
-
-
-class CRC32Checker(BaseChecker):
- def __init__(self, header):
- # The header where the expected crc32 is located.
- self._header_name = header
-
- def _check_response(self, attempt_number, response):
- http_response = response[0]
- expected_crc = http_response.headers.get(self._header_name)
- if expected_crc is None:
- logger.debug("crc32 check skipped, the %s header is not "
- "in the http response.", self._header_name)
- else:
- actual_crc32 = crc32(response[0].content) & 0xffffffff
- if not actual_crc32 == int(expected_crc):
- logger.debug(
- "retry needed: crc32 check failed, expected != actual: "
- "%s != %s", int(expected_crc), actual_crc32)
- raise ChecksumError(checksum_type='crc32',
- expected_checksum=int(expected_crc),
- actual_checksum=actual_crc32)
-
-
-class ExceptionRaiser(BaseChecker):
- """Raise any caught exceptions.
-
- This class will raise any non None ``caught_exception``.
-
- """
- def _check_caught_exception(self, attempt_number, caught_exception):
- # This is implementation specific, but this class works by
- # coordinating with the MaxAttemptsDecorator.
- # The MaxAttemptsDecorator has a list of exceptions it should catch
- # and retry, but something needs to come along and actually raise the
- # caught_exception. That's what this class is being used for. If
- # the MaxAttemptsDecorator is not interested in retrying the exception
- # then this exception just propagates out past the retry code.
- raise caught_exception
+ EndpointConnectionError
+ ],
+}
+
+
+def delay_exponential(base, growth_factor, attempts):
+ """Calculate time to sleep based on exponential function.
+
+ The format is::
+
+ base * growth_factor ^ (attempts - 1)
+
+ If ``base`` is set to 'rand' then a random number between
+ 0 and 1 will be used as the base.
+ Base must be greater than 0; otherwise a ValueError will be
+ raised.
+
+ """
+ if base == 'rand':
+ base = random.random()
+ elif base <= 0:
+ raise ValueError("The 'base' param must be greater than 0, "
+ "got: %s" % base)
+ time_to_sleep = base * (growth_factor ** (attempts - 1))
+ return time_to_sleep
+
+
+def create_exponential_delay_function(base, growth_factor):
+ """Create an exponential delay function based on the attempts.
+
+ This is used so that you only have to pass it the attempts
+ parameter to calculate the delay.
+
+ """
+ return functools.partial(
+ delay_exponential, base=base, growth_factor=growth_factor)
+
+
+def create_retry_handler(config, operation_name=None):
+ checker = create_checker_from_retry_config(
+ config, operation_name=operation_name)
+ action = create_retry_action_from_config(
+ config, operation_name=operation_name)
+ return RetryHandler(checker=checker, action=action)
+
+
+def create_retry_action_from_config(config, operation_name=None):
+ # The spec has the possibility of supporting per policy
+ # actions, but right now, we assume this comes from the
+ # default section, which means that delay functions apply
+ # for every policy in the retry config (per service).
+ delay_config = config['__default__']['delay']
+ if delay_config['type'] == 'exponential':
+ return create_exponential_delay_function(
+ base=delay_config['base'],
+ growth_factor=delay_config['growth_factor'])
+
+
+def create_checker_from_retry_config(config, operation_name=None):
+ checkers = []
+ max_attempts = None
+ retryable_exceptions = []
+ if '__default__' in config:
+ policies = config['__default__'].get('policies', [])
+ max_attempts = config['__default__']['max_attempts']
+ for key in policies:
+ current_config = policies[key]
+ checkers.append(_create_single_checker(current_config))
+ retry_exception = _extract_retryable_exception(current_config)
+ if retry_exception is not None:
+ retryable_exceptions.extend(retry_exception)
+ if operation_name is not None and config.get(operation_name) is not None:
+ operation_policies = config[operation_name]['policies']
+ for key in operation_policies:
+ checkers.append(_create_single_checker(operation_policies[key]))
+ retry_exception = _extract_retryable_exception(
+ operation_policies[key])
+ if retry_exception is not None:
+ retryable_exceptions.extend(retry_exception)
+ if len(checkers) == 1:
+ # Don't need to use a MultiChecker
+ return MaxAttemptsDecorator(checkers[0], max_attempts=max_attempts)
+ else:
+ multi_checker = MultiChecker(checkers)
+ return MaxAttemptsDecorator(
+ multi_checker, max_attempts=max_attempts,
+ retryable_exceptions=tuple(retryable_exceptions))
+
+
+def _create_single_checker(config):
+ if 'response' in config['applies_when']:
+ return _create_single_response_checker(
+ config['applies_when']['response'])
+ elif 'socket_errors' in config['applies_when']:
+ return ExceptionRaiser()
+
+
+def _create_single_response_checker(response):
+ if 'service_error_code' in response:
+ checker = ServiceErrorCodeChecker(
+ status_code=response['http_status_code'],
+ error_code=response['service_error_code'])
+ elif 'http_status_code' in response:
+ checker = HTTPStatusCodeChecker(
+ status_code=response['http_status_code'])
+ elif 'crc32body' in response:
+ checker = CRC32Checker(header=response['crc32body'])
+ else:
+ # TODO: send a signal.
+ raise ValueError("Unknown retry policy: %s" % config)
+ return checker
+
+
+def _extract_retryable_exception(config):
+ applies_when = config['applies_when']
+ if 'crc32body' in applies_when.get('response', {}):
+ return [ChecksumError]
+ elif 'socket_errors' in applies_when:
+ exceptions = []
+ for name in applies_when['socket_errors']:
+ exceptions.extend(EXCEPTION_MAP[name])
+ return exceptions
+
+
+class RetryHandler(object):
+ """Retry handler.
+
+ The retry handler takes two params, ``checker`` object
+ and an ``action`` object.
+
+ The ``checker`` object must be a callable that, based on a response
+ and an attempt number, determines whether or not sufficient criteria
+ for a retry have been met. If this is the case then the ``action`` object
+ (which also is a callable) determines what needs to happen in the event
+ of a retry.
+
+ """
+
+ def __init__(self, checker, action):
+ self._checker = checker
+ self._action = action
+
+ def __call__(self, attempts, response, caught_exception, **kwargs):
+ """Handler for a retry.
+
+ Intended to be hooked up to an event handler (hence the **kwargs),
+ this will process retries appropriately.
+
+ """
+ if self._checker(attempts, response, caught_exception):
+ result = self._action(attempts=attempts)
+ logger.debug("Retry needed, action of: %s", result)
+ return result
+ logger.debug("No retry needed.")
+
+
+class BaseChecker(object):
+ """Base class for retry checkers.
+
+ Each class is responsible for checking a single criterion that determines
+ whether or not a retry should happen.
+
+ """
+ def __call__(self, attempt_number, response, caught_exception):
+ """Determine if retry criteria matches.
+
+ Note that either ``response`` is not None and ``caught_exception`` is
+ None or ``response`` is None and ``caught_exception`` is not None.
+
+ :type attempt_number: int
+ :param attempt_number: The total number of times we've attempted
+ to send the request.
+
+ :param response: The HTTP response (if one was received).
+
+ :type caught_exception: Exception
+ :param caught_exception: Any exception that was caught while trying to
+ send the HTTP request.
+
+ :return: True, if the retry criteria matches (and therefore a retry
+ should occur). False if the criteria does not match.
+
+ """
+ # The default implementation allows subclasses to not have to check
+ # whether or not response is None.
+ if response is not None:
+ return self._check_response(attempt_number, response)
+ elif caught_exception is not None:
+ return self._check_caught_exception(
+ attempt_number, caught_exception)
+ else:
+ raise ValueError("Both response and caught_exception are None.")
+
+ def _check_response(self, attempt_number, response):
+ pass
+
+ def _check_caught_exception(self, attempt_number, caught_exception):
+ pass
+
+
+class MaxAttemptsDecorator(BaseChecker):
+ """Allow retries up to a maximum number of attempts.
+
+ This will pass through calls to the decorated retry checker, provided
+ that the number of attempts does not exceed max_attempts. It will
+ also catch any retryable_exceptions passed in. Once max_attempts has
+ been exceeded, then False will be returned or any retryable exception
+ that was previously being caught will be raised.
+
+ """
+ def __init__(self, checker, max_attempts, retryable_exceptions=None):
+ self._checker = checker
+ self._max_attempts = max_attempts
+ self._retryable_exceptions = retryable_exceptions
+
+ def __call__(self, attempt_number, response, caught_exception):
+ should_retry = self._should_retry(attempt_number, response,
+ caught_exception)
+ if should_retry:
+ if attempt_number >= self._max_attempts:
+ # explicitly set MaxAttemptsReached
+ if response is not None and 'ResponseMetadata' in response[1]:
+ response[1]['ResponseMetadata']['MaxAttemptsReached'] = True
+ logger.debug("Reached the maximum number of retry "
+ "attempts: %s", attempt_number)
+ return False
+ else:
+ return should_retry
+ else:
+ return False
+
+ def _should_retry(self, attempt_number, response, caught_exception):
+ if self._retryable_exceptions and \
+ attempt_number < self._max_attempts:
+ try:
+ return self._checker(attempt_number, response, caught_exception)
+ except self._retryable_exceptions as e:
+ logger.debug("retry needed, retryable exception caught: %s",
+ e, exc_info=True)
+ return True
+ else:
+ # If we've exceeded the max attempts we just let the exception
+ # propagate if one has occurred.
+ return self._checker(attempt_number, response, caught_exception)
+
+
+class HTTPStatusCodeChecker(BaseChecker):
+ def __init__(self, status_code):
+ self._status_code = status_code
+
+ def _check_response(self, attempt_number, response):
+ if response[0].status_code == self._status_code:
+ logger.debug(
+ "retry needed: retryable HTTP status code received: %s",
+ self._status_code)
+ return True
+ else:
+ return False
+
+
+class ServiceErrorCodeChecker(BaseChecker):
+ def __init__(self, status_code, error_code):
+ self._status_code = status_code
+ self._error_code = error_code
+
+ def _check_response(self, attempt_number, response):
+ if response[0].status_code == self._status_code:
+ actual_error_code = response[1].get('Error', {}).get('Code')
+ if actual_error_code == self._error_code:
+ logger.debug(
+ "retry needed: matching HTTP status and error code seen: "
+ "%s, %s", self._status_code, self._error_code)
+ return True
+ return False
+
+
+class MultiChecker(BaseChecker):
+ def __init__(self, checkers):
+ self._checkers = checkers
+
+ def __call__(self, attempt_number, response, caught_exception):
+ for checker in self._checkers:
+ checker_response = checker(attempt_number, response,
+ caught_exception)
+ if checker_response:
+ return checker_response
+ return False
+
+
+class CRC32Checker(BaseChecker):
+ def __init__(self, header):
+ # The header where the expected crc32 is located.
+ self._header_name = header
+
+ def _check_response(self, attempt_number, response):
+ http_response = response[0]
+ expected_crc = http_response.headers.get(self._header_name)
+ if expected_crc is None:
+ logger.debug("crc32 check skipped, the %s header is not "
+ "in the http response.", self._header_name)
+ else:
+ actual_crc32 = crc32(response[0].content) & 0xffffffff
+ if not actual_crc32 == int(expected_crc):
+ logger.debug(
+ "retry needed: crc32 check failed, expected != actual: "
+ "%s != %s", int(expected_crc), actual_crc32)
+ raise ChecksumError(checksum_type='crc32',
+ expected_checksum=int(expected_crc),
+ actual_checksum=actual_crc32)
+
+
+class ExceptionRaiser(BaseChecker):
+ """Raise any caught exceptions.
+
+ This class will raise any non None ``caught_exception``.
+
+ """
+ def _check_caught_exception(self, attempt_number, caught_exception):
+ # This is implementation specific, but this class works by
+ # coordinating with the MaxAttemptsDecorator.
+ # The MaxAttemptsDecorator has a list of exceptions it should catch
+ # and retry, but something needs to come along and actually raise the
+ # caught_exception. That's what this class is being used for. If
+ # the MaxAttemptsDecorator is not interested in retrying the exception
+ # then this exception just propagates out past the retry code.
+ raise caught_exception
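For the delay function above, base * growth_factor ** (attempts - 1) yields 1, 2, 4 seconds over three attempts with base=1 and growth_factor=2. A small end-to-end sketch with a hypothetical minimal retry config (the __default__/policies layout follows the keys create_checker_from_retry_config reads):

    from botocore.retryhandler import create_retry_handler, delay_exponential

    print([delay_exponential(base=1, growth_factor=2, attempts=n)
           for n in (1, 2, 3)])  # [1, 2, 4]

    config = {
        '__default__': {
            'max_attempts': 3,
            'delay': {'type': 'exponential', 'base': 1, 'growth_factor': 2},
            'policies': {
                'general_server_error': {
                    'applies_when': {'response': {'http_status_code': 500}},
                },
            },
        },
    }
    handler = create_retry_handler(config)

    class FakeHTTPResponse(object):
        status_code = 500

    # Status matches and attempts are under the cap, so the handler returns
    # the sleep time for attempt 1 (1 * 2 ** 0).
    print(handler(attempts=1, response=(FakeHTTPResponse(), {}),
                  caught_exception=None))  # 1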
diff --git a/contrib/python/botocore/botocore/serialize.py b/contrib/python/botocore/botocore/serialize.py
index 3c87b8eccd..a228a47bf8 100644
--- a/contrib/python/botocore/botocore/serialize.py
+++ b/contrib/python/botocore/botocore/serialize.py
@@ -1,180 +1,180 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""Protocol input serializes.
-
-This module contains classes that implement input serialization
-for the various AWS protocol types.
-
-These classes essentially take user input and a model object that
-represents what the expected input should look like, and return
-a dictionary that contains the various parts of a request. A few
-high level design decisions:
-
-
-* Each protocol type maps to a separate class, all inherit from
- ``Serializer``.
-* ``serialize_to_request`` (the main entry point) returns a
- dictionary that represents a request. This
- will have keys like ``url_path``, ``query_string``, etc. This
- is done so that it's a) easy to test and b) not tied to a
- particular HTTP library. See the ``serialize_to_request`` docstring
- for more details.
-
-Unicode
--------
-
-The input to the serializers should be text (str/unicode), not bytes,
-with the exception of blob types. Those are assumed to be binary,
-and if a str/unicode type is passed in, it will be encoded as utf-8.
-"""
-import re
-import base64
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Protocol input serializes.
+
+This module contains classes that implement input serialization
+for the various AWS protocol types.
+
+These classes essentially take user input and a model object that
+represents what the expected input should look like, and return
+a dictionary that contains the various parts of a request. A few
+high level design decisions:
+
+
+* Each protocol type maps to a separate class, all inherit from
+ ``Serializer``.
+* ``serialize_to_request`` (the main entry point) returns a
+ dictionary that represents a request. This
+ will have keys like ``url_path``, ``query_string``, etc. This
+ is done so that it's a) easy to test and b) not tied to a
+ particular HTTP library. See the ``serialize_to_request`` docstring
+ for more details.
+
+Unicode
+-------
+
+The input to the serializers should be text (str/unicode), not bytes,
+with the exception of blob types. Those are assumed to be binary,
+and if a str/unicode type is passed in, it will be encoded as utf-8.
+"""
+import re
+import base64
import calendar
import datetime
-from xml.etree import ElementTree
-
-from botocore.compat import six
-
-from botocore.compat import json, formatdate
-from botocore.utils import parse_to_aware_datetime
-from botocore.utils import percent_encode
-from botocore.utils import is_json_value_header
+from xml.etree import ElementTree
+
+from botocore.compat import six
+
+from botocore.compat import json, formatdate
+from botocore.utils import parse_to_aware_datetime
+from botocore.utils import percent_encode
+from botocore.utils import is_json_value_header
from botocore.utils import conditionally_calculate_md5
-from botocore import validate
-
-
-# From the spec, the default timestamp format if not specified is iso8601.
-DEFAULT_TIMESTAMP_FORMAT = 'iso8601'
-ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
-# Same as ISO8601, but with microsecond precision.
-ISO8601_MICRO = '%Y-%m-%dT%H:%M:%S.%fZ'
-
-
-def create_serializer(protocol_name, include_validation=True):
- # TODO: Unknown protocols.
- serializer = SERIALIZERS[protocol_name]()
- if include_validation:
- validator = validate.ParamValidator()
- serializer = validate.ParamValidationDecorator(validator, serializer)
- return serializer
-
-
-class Serializer(object):
- DEFAULT_METHOD = 'POST'
- # Clients can change this to a different MutableMapping
- # (e.g. OrderedDict) if they want. This is used in the
- # compliance test to match the hash ordering used in the
- # tests.
- MAP_TYPE = dict
- DEFAULT_ENCODING = 'utf-8'
-
- def serialize_to_request(self, parameters, operation_model):
- """Serialize parameters into an HTTP request.
-
- This method takes user-provided parameters and a shape
- model and serializes the parameters to an HTTP request.
- More specifically, this method returns information about
- parts of the HTTP request; it does not enforce a particular
- interface or standard for an HTTP request. It instead returns
- a dictionary of:
-
- * 'url_path'
+from botocore import validate
+
+
+# From the spec, the default timestamp format is iso8601 when not specified.
+DEFAULT_TIMESTAMP_FORMAT = 'iso8601'
+ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
+# Same as ISO8601, but with microsecond precision.
+ISO8601_MICRO = '%Y-%m-%dT%H:%M:%S.%fZ'
+
+
+def create_serializer(protocol_name, include_validation=True):
+ # TODO: Unknown protocols.
+ serializer = SERIALIZERS[protocol_name]()
+ if include_validation:
+ validator = validate.ParamValidator()
+ serializer = validate.ParamValidationDecorator(validator, serializer)
+ return serializer
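+
+# Illustrative usage sketch (assumes a model loaded via botocore.session;
+# the service and operation names below are only examples):
+#
+# import botocore.session
+# session = botocore.session.get_session()
+# service_model = session.get_service_model('sqs')
+# operation_model = service_model.operation_model('ListQueues')
+# serializer = create_serializer(service_model.protocol)
+# request = serializer.serialize_to_request(
+#     {'QueueNamePrefix': 'test'}, operation_model)
+# request is then a plain dict with 'url_path', 'query_string',
+# 'headers', 'body', and 'method' keys.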
+
+
+class Serializer(object):
+ DEFAULT_METHOD = 'POST'
+ # Clients can change this to a different MutableMapping
+ # (e.g. OrderedDict) if they want. This is used in the
+ # compliance test to match the hash ordering used in the
+ # tests.
+ MAP_TYPE = dict
+ DEFAULT_ENCODING = 'utf-8'
+
+ def serialize_to_request(self, parameters, operation_model):
+ """Serialize parameters into an HTTP request.
+
+ This method takes user-provided parameters and a shape
+ model and serializes the parameters to an HTTP request.
+ More specifically, this method returns information about
+ parts of the HTTP request; it does not enforce a particular
+ interface or standard for an HTTP request. It instead returns
+ a dictionary of:
+
+ * 'url_path'
* 'host_prefix'
- * 'query_string'
- * 'headers'
- * 'body'
- * 'method'
-
- It is then up to consumers to decide how to map this to a Request
- object of their HTTP library of choice. Below is an example
- return value::
-
- {'body': {'Action': 'OperationName',
- 'Bar': 'val2',
- 'Foo': 'val1',
- 'Version': '2014-01-01'},
- 'headers': {},
- 'method': 'POST',
- 'query_string': '',
+ * 'query_string'
+ * 'headers'
+ * 'body'
+ * 'method'
+
+ It is then up to consumers to decide how to map this to a Request
+ object of their HTTP library of choice. Below is an example
+ return value::
+
+ {'body': {'Action': 'OperationName',
+ 'Bar': 'val2',
+ 'Foo': 'val1',
+ 'Version': '2014-01-01'},
+ 'headers': {},
+ 'method': 'POST',
+ 'query_string': '',
'host_prefix': 'value.',
- 'url_path': '/'}
-
- :param parameters: The dictionary input parameters for the
- operation (i.e. the user input).
- :param operation_model: The OperationModel object that describes
- the operation.
- """
- raise NotImplementedError("serialize_to_request")
-
- def _create_default_request(self):
- # Creates a boilerplate default request dict that subclasses
- # can use as a starting point.
- serialized = {
- 'url_path': '/',
- 'query_string': '',
- 'method': self.DEFAULT_METHOD,
- 'headers': {},
- # An empty body is represented as an empty byte string.
- 'body': b''
- }
- return serialized
-
- # Some extra utility methods subclasses can use.
-
- def _timestamp_iso8601(self, value):
- if value.microsecond > 0:
- timestamp_format = ISO8601_MICRO
- else:
- timestamp_format = ISO8601
- return value.strftime(timestamp_format)
-
- def _timestamp_unixtimestamp(self, value):
- return int(calendar.timegm(value.timetuple()))
-
- def _timestamp_rfc822(self, value):
+ 'url_path': '/'}
+
+ :param parameters: The dictionary input parameters for the
+ operation (i.e. the user input).
+ :param operation_model: The OperationModel object that describes
+ the operation.
+ """
+ raise NotImplementedError("serialize_to_request")
+
+ def _create_default_request(self):
+ # Creates a boilerplate default request dict that subclasses
+ # can use as a starting point.
+ serialized = {
+ 'url_path': '/',
+ 'query_string': '',
+ 'method': self.DEFAULT_METHOD,
+ 'headers': {},
+ # An empty body is represented as an empty byte string.
+ 'body': b''
+ }
+ return serialized
+
+ # Some extra utility methods subclasses can use.
+
+ def _timestamp_iso8601(self, value):
+ if value.microsecond > 0:
+ timestamp_format = ISO8601_MICRO
+ else:
+ timestamp_format = ISO8601
+ return value.strftime(timestamp_format)
+
+ def _timestamp_unixtimestamp(self, value):
+ return int(calendar.timegm(value.timetuple()))
+
+ def _timestamp_rfc822(self, value):
if isinstance(value, datetime.datetime):
value = self._timestamp_unixtimestamp(value)
- return formatdate(value, usegmt=True)
-
+ return formatdate(value, usegmt=True)
+
def _convert_timestamp_to_str(self, value, timestamp_format=None):
if timestamp_format is None:
timestamp_format = self.TIMESTAMP_FORMAT
timestamp_format = timestamp_format.lower()
- datetime_obj = parse_to_aware_datetime(value)
- converter = getattr(
+ datetime_obj = parse_to_aware_datetime(value)
+ converter = getattr(
self, '_timestamp_%s' % timestamp_format)
- final_value = converter(datetime_obj)
- return final_value
-
- def _get_serialized_name(self, shape, default_name):
- # Returns the serialized name for the shape if it exists.
- # Otherwise it will return the passed in default_name.
- return shape.serialization.get('name', default_name)
-
- def _get_base64(self, value):
- # Returns the base64-encoded version of value, handling
- # both strings and bytes. The returned value is a string
- # decoded using the default encoding.
- if isinstance(value, six.text_type):
- value = value.encode(self.DEFAULT_ENCODING)
- return base64.b64encode(value).strip().decode(
- self.DEFAULT_ENCODING)
-
+ final_value = converter(datetime_obj)
+ return final_value
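+ # Sketch of the dispatch above (values worked out by hand, for
+ # illustration): with TIMESTAMP_FORMAT = 'iso8601', passing
+ # '2014-01-01T12:00:00Z' returns '2014-01-01T12:00:00Z', while a
+ # 'unixtimestamp' format resolves to _timestamp_unixtimestamp and
+ # returns 1388577600.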
+
+ def _get_serialized_name(self, shape, default_name):
+ # Returns the serialized name for the shape if it exists.
+ # Otherwise it will return the passed in default_name.
+ return shape.serialization.get('name', default_name)
+
+ def _get_base64(self, value):
+ # Returns the base64-encoded version of value, handling
+ # both strings and bytes. The returned value is a string
+ # decoded using the default encoding.
+ if isinstance(value, six.text_type):
+ value = value.encode(self.DEFAULT_ENCODING)
+ return base64.b64encode(value).strip().decode(
+ self.DEFAULT_ENCODING)
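+ # Illustrative sketch: _get_base64('foo') and _get_base64(b'foo') both
+ # return 'Zm9v', since text input is utf-8 encoded first.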
+
def _expand_host_prefix(self, parameters, operation_model):
operation_endpoint = operation_model.endpoint
if operation_endpoint is None:
return None
-
+
host_prefix_expression = operation_endpoint['hostPrefix']
input_members = operation_model.input_shape.members
host_labels = [
@@ -192,26 +192,26 @@ class Serializer(object):
return request
-class QuerySerializer(Serializer):
-
- TIMESTAMP_FORMAT = 'iso8601'
-
- def serialize_to_request(self, parameters, operation_model):
- shape = operation_model.input_shape
- serialized = self._create_default_request()
- serialized['method'] = operation_model.http.get('method',
- self.DEFAULT_METHOD)
- serialized['headers'] = {
- 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8'
- }
- # The query serializer only deals with body params so
- # that's what we hand off the _serialize_* methods.
- body_params = self.MAP_TYPE()
- body_params['Action'] = operation_model.name
- body_params['Version'] = operation_model.metadata['apiVersion']
- if shape is not None:
- self._serialize(body_params, parameters, shape)
- serialized['body'] = body_params
+class QuerySerializer(Serializer):
+
+ TIMESTAMP_FORMAT = 'iso8601'
+
+ def serialize_to_request(self, parameters, operation_model):
+ shape = operation_model.input_shape
+ serialized = self._create_default_request()
+ serialized['method'] = operation_model.http.get('method',
+ self.DEFAULT_METHOD)
+ serialized['headers'] = {
+ 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8'
+ }
+ # The query serializer only deals with body params so
+ # that's what we hand off the _serialize_* methods.
+ body_params = self.MAP_TYPE()
+ body_params['Action'] = operation_model.name
+ body_params['Version'] = operation_model.metadata['apiVersion']
+ if shape is not None:
+ self._serialize(body_params, parameters, shape)
+ serialized['body'] = body_params
host_prefix = self._expand_host_prefix(parameters, operation_model)
if host_prefix is not None:
@@ -219,134 +219,134 @@ class QuerySerializer(Serializer):
serialized = self._prepare_additional_traits(serialized,
operation_model)
- return serialized
-
- def _serialize(self, serialized, value, shape, prefix=''):
- # serialized: The dict that is incrementally added to with the
- # final serialized parameters.
- # value: The current user input value.
- # shape: The shape object that describes the structure of the
- # input.
- # prefix: The incrementally built up prefix for the serialized
- # key (e.g. Foo.bar.members.1).
- method = getattr(self, '_serialize_type_%s' % shape.type_name,
- self._default_serialize)
- method(serialized, value, shape, prefix=prefix)
-
- def _serialize_type_structure(self, serialized, value, shape, prefix=''):
- members = shape.members
- for key, value in value.items():
- member_shape = members[key]
- member_prefix = self._get_serialized_name(member_shape, key)
- if prefix:
- member_prefix = '%s.%s' % (prefix, member_prefix)
- self._serialize(serialized, value, member_shape, member_prefix)
-
- def _serialize_type_list(self, serialized, value, shape, prefix=''):
- if not value:
- # The query protocol serializes empty lists.
- serialized[prefix] = ''
- return
- if self._is_shape_flattened(shape):
- list_prefix = prefix
- if shape.member.serialization.get('name'):
- name = self._get_serialized_name(shape.member, default_name='')
- # Replace '.Original' with '.{name}'.
- list_prefix = '.'.join(prefix.split('.')[:-1] + [name])
- else:
- list_name = shape.member.serialization.get('name', 'member')
- list_prefix = '%s.%s' % (prefix, list_name)
- for i, element in enumerate(value, 1):
- element_prefix = '%s.%s' % (list_prefix, i)
- element_shape = shape.member
- self._serialize(serialized, element, element_shape, element_prefix)
-
- def _serialize_type_map(self, serialized, value, shape, prefix=''):
- if self._is_shape_flattened(shape):
- full_prefix = prefix
- else:
- full_prefix = '%s.entry' % prefix
- template = full_prefix + '.{i}.{suffix}'
- key_shape = shape.key
- value_shape = shape.value
- key_suffix = self._get_serialized_name(key_shape, default_name='key')
- value_suffix = self._get_serialized_name(value_shape, 'value')
- for i, key in enumerate(value, 1):
- key_prefix = template.format(i=i, suffix=key_suffix)
- value_prefix = template.format(i=i, suffix=value_suffix)
- self._serialize(serialized, key, key_shape, key_prefix)
- self._serialize(serialized, value[key], value_shape, value_prefix)
-
- def _serialize_type_blob(self, serialized, value, shape, prefix=''):
- # Blob args must be base64 encoded.
- serialized[prefix] = self._get_base64(value)
-
- def _serialize_type_timestamp(self, serialized, value, shape, prefix=''):
+ return serialized
+
+ def _serialize(self, serialized, value, shape, prefix=''):
+ # serialized: The dict that is incrementally added to with the
+ # final serialized parameters.
+ # value: The current user input value.
+ # shape: The shape object that describes the structure of the
+ # input.
+ # prefix: The incrementally built up prefix for the serialized
+ # key (e.g. Foo.bar.members.1).
+ method = getattr(self, '_serialize_type_%s' % shape.type_name,
+ self._default_serialize)
+ method(serialized, value, shape, prefix=prefix)
+
+ def _serialize_type_structure(self, serialized, value, shape, prefix=''):
+ members = shape.members
+ for key, value in value.items():
+ member_shape = members[key]
+ member_prefix = self._get_serialized_name(member_shape, key)
+ if prefix:
+ member_prefix = '%s.%s' % (prefix, member_prefix)
+ self._serialize(serialized, value, member_shape, member_prefix)
+
+ def _serialize_type_list(self, serialized, value, shape, prefix=''):
+ if not value:
+ # The query protocol serializes empty lists.
+ serialized[prefix] = ''
+ return
+ if self._is_shape_flattened(shape):
+ list_prefix = prefix
+ if shape.member.serialization.get('name'):
+ name = self._get_serialized_name(shape.member, default_name='')
+ # Replace '.Original' with '.{name}'.
+ list_prefix = '.'.join(prefix.split('.')[:-1] + [name])
+ else:
+ list_name = shape.member.serialization.get('name', 'member')
+ list_prefix = '%s.%s' % (prefix, list_name)
+ for i, element in enumerate(value, 1):
+ element_prefix = '%s.%s' % (list_prefix, i)
+ element_shape = shape.member
+ self._serialize(serialized, element, element_shape, element_prefix)
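+ # Illustrative sketch, using a hypothetical member named 'Values':
+ # a non-flattened list ['a', 'b'] serializes as
+ #     Values.member.1 = 'a'
+ #     Values.member.2 = 'b'
+ # while a flattened list drops the '.member' part:
+ #     Values.1 = 'a'
+ #     Values.2 = 'b'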
+
+ def _serialize_type_map(self, serialized, value, shape, prefix=''):
+ if self._is_shape_flattened(shape):
+ full_prefix = prefix
+ else:
+ full_prefix = '%s.entry' % prefix
+ template = full_prefix + '.{i}.{suffix}'
+ key_shape = shape.key
+ value_shape = shape.value
+ key_suffix = self._get_serialized_name(key_shape, default_name='key')
+ value_suffix = self._get_serialized_name(value_shape, 'value')
+ for i, key in enumerate(value, 1):
+ key_prefix = template.format(i=i, suffix=key_suffix)
+ value_prefix = template.format(i=i, suffix=value_suffix)
+ self._serialize(serialized, key, key_shape, key_prefix)
+ self._serialize(serialized, value[key], value_shape, value_prefix)
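+ # Illustrative sketch, using a hypothetical member named 'Attributes':
+ # a non-flattened map {'Color': 'red'} serializes as
+ #     Attributes.entry.1.key = 'Color'
+ #     Attributes.entry.1.value = 'red'
+ # while a flattened map drops the '.entry' segment:
+ #     Attributes.1.key = 'Color'
+ #     Attributes.1.value = 'red'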
+
+ def _serialize_type_blob(self, serialized, value, shape, prefix=''):
+ # Blob args must be base64 encoded.
+ serialized[prefix] = self._get_base64(value)
+
+ def _serialize_type_timestamp(self, serialized, value, shape, prefix=''):
serialized[prefix] = self._convert_timestamp_to_str(
value, shape.serialization.get('timestampFormat'))
-
- def _serialize_type_boolean(self, serialized, value, shape, prefix=''):
- if value:
- serialized[prefix] = 'true'
- else:
- serialized[prefix] = 'false'
-
- def _default_serialize(self, serialized, value, shape, prefix=''):
- serialized[prefix] = value
-
- def _is_shape_flattened(self, shape):
- return shape.serialization.get('flattened')
-
-
-class EC2Serializer(QuerySerializer):
- """EC2 specific customizations to the query protocol serializers.
-
- The EC2 model is almost, but not exactly, similar to the query protocol
- serializer. This class encapsulates those differences. The model
- will have be marked with a ``protocol`` of ``ec2``, so you don't need
- to worry about wiring this class up correctly.
-
- """
-
- def _get_serialized_name(self, shape, default_name):
- # Returns the serialized name for the shape if it exists.
- # Otherwise it will return the passed in default_name.
- if 'queryName' in shape.serialization:
- return shape.serialization['queryName']
- elif 'name' in shape.serialization:
- # A locationName is always capitalized
- # on input for the ec2 protocol.
- name = shape.serialization['name']
- return name[0].upper() + name[1:]
- else:
- return default_name
-
- def _serialize_type_list(self, serialized, value, shape, prefix=''):
- for i, element in enumerate(value, 1):
- element_prefix = '%s.%s' % (prefix, i)
- element_shape = shape.member
- self._serialize(serialized, element, element_shape, element_prefix)
-
-
-class JSONSerializer(Serializer):
- TIMESTAMP_FORMAT = 'unixtimestamp'
-
- def serialize_to_request(self, parameters, operation_model):
- target = '%s.%s' % (operation_model.metadata['targetPrefix'],
- operation_model.name)
- json_version = operation_model.metadata['jsonVersion']
- serialized = self._create_default_request()
- serialized['method'] = operation_model.http.get('method',
- self.DEFAULT_METHOD)
- serialized['headers'] = {
- 'X-Amz-Target': target,
- 'Content-Type': 'application/x-amz-json-%s' % json_version,
- }
+
+ def _serialize_type_boolean(self, serialized, value, shape, prefix=''):
+ if value:
+ serialized[prefix] = 'true'
+ else:
+ serialized[prefix] = 'false'
+
+ def _default_serialize(self, serialized, value, shape, prefix=''):
+ serialized[prefix] = value
+
+ def _is_shape_flattened(self, shape):
+ return shape.serialization.get('flattened')
+
+
+class EC2Serializer(QuerySerializer):
+ """EC2 specific customizations to the query protocol serializers.
+
+ The EC2 model is almost, but not exactly, similar to the query protocol
+ serializer. This class encapsulates those differences. The model
+ will have be marked with a ``protocol`` of ``ec2``, so you don't need
+ to worry about wiring this class up correctly.
+
+ """
+
+ def _get_serialized_name(self, shape, default_name):
+ # Returns the serialized name for the shape if it exists.
+ # Otherwise it will return the passed in default_name.
+ if 'queryName' in shape.serialization:
+ return shape.serialization['queryName']
+ elif 'name' in shape.serialization:
+ # A locationName is always capitalized
+ # on input for the ec2 protocol.
+ name = shape.serialization['name']
+ return name[0].upper() + name[1:]
+ else:
+ return default_name
+
+ def _serialize_type_list(self, serialized, value, shape, prefix=''):
+ for i, element in enumerate(value, 1):
+ element_prefix = '%s.%s' % (prefix, i)
+ element_shape = shape.member
+ self._serialize(serialized, element, element_shape, element_prefix)
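+ # Illustrative sketch: for a hypothetical list member 'InstanceId',
+ # the ec2 protocol always serializes positionally, with no '.member'
+ # part and no flattened/non-flattened distinction:
+ #     InstanceId.1 = 'i-123'
+ #     InstanceId.2 = 'i-456'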
+
+
+class JSONSerializer(Serializer):
+ TIMESTAMP_FORMAT = 'unixtimestamp'
+
+ def serialize_to_request(self, parameters, operation_model):
+ target = '%s.%s' % (operation_model.metadata['targetPrefix'],
+ operation_model.name)
+ json_version = operation_model.metadata['jsonVersion']
+ serialized = self._create_default_request()
+ serialized['method'] = operation_model.http.get('method',
+ self.DEFAULT_METHOD)
+ serialized['headers'] = {
+ 'X-Amz-Target': target,
+ 'Content-Type': 'application/x-amz-json-%s' % json_version,
+ }
body = self.MAP_TYPE()
- input_shape = operation_model.input_shape
- if input_shape is not None:
- self._serialize(body, parameters, input_shape)
- serialized['body'] = json.dumps(body).encode(self.DEFAULT_ENCODING)
+ input_shape = operation_model.input_shape
+ if input_shape is not None:
+ self._serialize(body, parameters, input_shape)
+ serialized['body'] = json.dumps(body).encode(self.DEFAULT_ENCODING)
host_prefix = self._expand_host_prefix(parameters, operation_model)
if host_prefix is not None:
@@ -354,14 +354,14 @@ class JSONSerializer(Serializer):
serialized = self._prepare_additional_traits(serialized,
operation_model)
- return serialized
-
- def _serialize(self, serialized, value, shape, key=None):
- method = getattr(self, '_serialize_type_%s' % shape.type_name,
- self._default_serialize)
- method(serialized, value, shape, key)
-
- def _serialize_type_structure(self, serialized, value, shape, key):
+ return serialized
+
+ def _serialize(self, serialized, value, shape, key=None):
+ method = getattr(self, '_serialize_type_%s' % shape.type_name,
+ self._default_serialize)
+ method(serialized, value, shape, key)
+
+ def _serialize_type_structure(self, serialized, value, shape, key):
if shape.is_document_type:
serialized[key] = value
else:
@@ -380,95 +380,95 @@ class JSONSerializer(Serializer):
if 'name' in member_shape.serialization:
member_key = member_shape.serialization['name']
self._serialize(serialized, member_value, member_shape, member_key)
-
- def _serialize_type_map(self, serialized, value, shape, key):
- map_obj = self.MAP_TYPE()
- serialized[key] = map_obj
- for sub_key, sub_value in value.items():
- self._serialize(map_obj, sub_value, shape.value, sub_key)
-
- def _serialize_type_list(self, serialized, value, shape, key):
- list_obj = []
- serialized[key] = list_obj
- for list_item in value:
- wrapper = {}
- # The JSON list serialization is the only case where we aren't
- # setting a key on a dict. We handle this by using
- # a __current__ key on a wrapper dict to serialize each
- # list item before appending it to the serialized list.
- self._serialize(wrapper, list_item, shape.member, "__current__")
- list_obj.append(wrapper["__current__"])
-
- def _default_serialize(self, serialized, value, shape, key):
- serialized[key] = value
-
- def _serialize_type_timestamp(self, serialized, value, shape, key):
+
+ def _serialize_type_map(self, serialized, value, shape, key):
+ map_obj = self.MAP_TYPE()
+ serialized[key] = map_obj
+ for sub_key, sub_value in value.items():
+ self._serialize(map_obj, sub_value, shape.value, sub_key)
+
+ def _serialize_type_list(self, serialized, value, shape, key):
+ list_obj = []
+ serialized[key] = list_obj
+ for list_item in value:
+ wrapper = {}
+ # The JSON list serialization is the only case where we aren't
+ # setting a key on a dict. We handle this by using
+ # a __current__ key on a wrapper dict to serialize each
+ # list item before appending it to the serialized list.
+ self._serialize(wrapper, list_item, shape.member, "__current__")
+ list_obj.append(wrapper["__current__"])
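+ # Illustrative sketch: serializing ['a', 'b'] for a hypothetical key
+ # 'Names' passes each item through the '__current__' wrapper and
+ # yields {'Names': ['a', 'b']} in the final body.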
+
+ def _default_serialize(self, serialized, value, shape, key):
+ serialized[key] = value
+
+ def _serialize_type_timestamp(self, serialized, value, shape, key):
serialized[key] = self._convert_timestamp_to_str(
value, shape.serialization.get('timestampFormat'))
-
- def _serialize_type_blob(self, serialized, value, shape, key):
- serialized[key] = self._get_base64(value)
-
-
-class BaseRestSerializer(Serializer):
- """Base class for rest protocols.
-
- The only difference between the various rest protocols is the
- way that the body is serialized. All other aspects (headers, uri, etc.)
- are the same and logic for serializing those aspects lives here.
-
- Subclasses must implement the ``_serialize_body_params`` method.
-
- """
+
+ def _serialize_type_blob(self, serialized, value, shape, key):
+ serialized[key] = self._get_base64(value)
+
+
+class BaseRestSerializer(Serializer):
+ """Base class for rest protocols.
+
+ The only difference between the various rest protocols is the
+ way that the body is serialized. All other aspects (headers, uri, etc.)
+ are the same and logic for serializing those aspects lives here.
+
+ Subclasses must implement the ``_serialize_body_params`` method.
+
+ """
QUERY_STRING_TIMESTAMP_FORMAT = 'iso8601'
HEADER_TIMESTAMP_FORMAT = 'rfc822'
- # This is a list of known values for the "location" key in the
- # serialization dict. The location key tells us where on the request
- # to put the serialized value.
- KNOWN_LOCATIONS = ['uri', 'querystring', 'header', 'headers']
-
- def serialize_to_request(self, parameters, operation_model):
- serialized = self._create_default_request()
- serialized['method'] = operation_model.http.get('method',
- self.DEFAULT_METHOD)
- shape = operation_model.input_shape
- if shape is None:
- serialized['url_path'] = operation_model.http['requestUri']
- return serialized
- shape_members = shape.members
- # While the ``serialized`` dict holds the final serialized request
- # data, we need interim dicts for the various locations of the
- # request. We need this for the uri_path_kwargs and the
- # query_string_kwargs because they are templated, so we need
- # to gather all the needed data for the string template,
- # then we render the template. The body_kwargs is needed
- # because once we've collected them all, we run them through
- # _serialize_body_params, which creates JSON for rest-json
- # and XML for rest-xml. This is what the
- # ``partitioned`` dict below is for.
- partitioned = {
- 'uri_path_kwargs': self.MAP_TYPE(),
- 'query_string_kwargs': self.MAP_TYPE(),
- 'body_kwargs': self.MAP_TYPE(),
- 'headers': self.MAP_TYPE(),
- }
- for param_name, param_value in parameters.items():
- if param_value is None:
- # Don't serialize any parameter with a None value.
- continue
- self._partition_parameters(partitioned, param_name, param_value,
- shape_members)
- serialized['url_path'] = self._render_uri_template(
- operation_model.http['requestUri'],
- partitioned['uri_path_kwargs'])
- # Note that we lean on the http implementation to handle the case
- # where the requestUri path already has query parameters.
- # The bundled http client, requests, already supports this.
- serialized['query_string'] = partitioned['query_string_kwargs']
- if partitioned['headers']:
- serialized['headers'] = partitioned['headers']
- self._serialize_payload(partitioned, parameters,
- serialized, shape, shape_members)
+ # This is a list of known values for the "location" key in the
+ # serialization dict. The location key tells us where on the request
+ # to put the serialized value.
+ KNOWN_LOCATIONS = ['uri', 'querystring', 'header', 'headers']
+
+ def serialize_to_request(self, parameters, operation_model):
+ serialized = self._create_default_request()
+ serialized['method'] = operation_model.http.get('method',
+ self.DEFAULT_METHOD)
+ shape = operation_model.input_shape
+ if shape is None:
+ serialized['url_path'] = operation_model.http['requestUri']
+ return serialized
+ shape_members = shape.members
+ # While the ``serialized`` dict holds the final serialized request
+ # data, we need interim dicts for the various locations of the
+ # request. We need this for the uri_path_kwargs and the
+ # query_string_kwargs because they are templated, so we need
+ # to gather all the needed data for the string template,
+ # then we render the template. The body_kwargs is needed
+ # because once we've collected them all, we run them through
+ # _serialize_body_params, which creates JSON for rest-json
+ # and XML for rest-xml. This is what the
+ # ``partitioned`` dict below is for.
+ partitioned = {
+ 'uri_path_kwargs': self.MAP_TYPE(),
+ 'query_string_kwargs': self.MAP_TYPE(),
+ 'body_kwargs': self.MAP_TYPE(),
+ 'headers': self.MAP_TYPE(),
+ }
+ for param_name, param_value in parameters.items():
+ if param_value is None:
+ # Don't serialize any parameter with a None value.
+ continue
+ self._partition_parameters(partitioned, param_name, param_value,
+ shape_members)
+ serialized['url_path'] = self._render_uri_template(
+ operation_model.http['requestUri'],
+ partitioned['uri_path_kwargs'])
+ # Note that we lean on the http implementation to handle the case
+ # where the requestUri path already has query parameters.
+ # The bundled http client, requests, already supports this.
+ serialized['query_string'] = partitioned['query_string_kwargs']
+ if partitioned['headers']:
+ serialized['headers'] = partitioned['headers']
+ self._serialize_payload(partitioned, parameters,
+ serialized, shape, shape_members)
host_prefix = self._expand_host_prefix(parameters, operation_model)
if host_prefix is not None:
@@ -476,74 +476,74 @@ class BaseRestSerializer(Serializer):
serialized = self._prepare_additional_traits(serialized,
operation_model)
- return serialized
-
- def _render_uri_template(self, uri_template, params):
- # We need to handle two cases::
- #
- # /{Bucket}/foo
- # /{Key+}/bar
- # A label ending with '+' is greedy. There can only
- # be one greedy key.
- encoded_params = {}
- for template_param in re.findall(r'{(.*?)}', uri_template):
- if template_param.endswith('+'):
- encoded_params[template_param] = percent_encode(
- params[template_param[:-1]], safe='/~')
- else:
- encoded_params[template_param] = percent_encode(
- params[template_param])
- return uri_template.format(**encoded_params)
-
- def _serialize_payload(self, partitioned, parameters,
- serialized, shape, shape_members):
- # partitioned - The user input params partitioned by location.
- # parameters - The user input params.
- # serialized - The final serialized request dict.
- # shape - Describes the expected input shape
- # shape_members - The members of the input struct shape
- payload_member = shape.serialization.get('payload')
- if payload_member is not None and \
- shape_members[payload_member].type_name in ['blob', 'string']:
- # If it's streaming, then the body is just the
- # value of the payload.
- body_payload = parameters.get(payload_member, b'')
- body_payload = self._encode_payload(body_payload)
- serialized['body'] = body_payload
- elif payload_member is not None:
- # If there's a payload member, we serialize that
- # member to the body.
- body_params = parameters.get(payload_member)
- if body_params is not None:
- serialized['body'] = self._serialize_body_params(
- body_params,
- shape_members[payload_member])
- elif partitioned['body_kwargs']:
- serialized['body'] = self._serialize_body_params(
- partitioned['body_kwargs'], shape)
-
- def _encode_payload(self, body):
- if isinstance(body, six.text_type):
- return body.encode(self.DEFAULT_ENCODING)
- return body
-
- def _partition_parameters(self, partitioned, param_name,
- param_value, shape_members):
- # This takes a single user-provided input parameter and figures
- # out where it goes in the request dict.
- # Some params are HTTP headers, some are used in the URI, some
- # are in the request body. This method deals with this.
- member = shape_members[param_name]
- location = member.serialization.get('location')
- key_name = member.serialization.get('name', param_name)
- if location == 'uri':
- partitioned['uri_path_kwargs'][key_name] = param_value
- elif location == 'querystring':
- if isinstance(param_value, dict):
- partitioned['query_string_kwargs'].update(param_value)
- elif isinstance(param_value, bool):
- partitioned['query_string_kwargs'][
- key_name] = str(param_value).lower()
+ return serialized
+
+ def _render_uri_template(self, uri_template, params):
+ # We need to handle two cases::
+ #
+ # /{Bucket}/foo
+ # /{Key+}/bar
+ # A label ending with '+' is greedy. There can only
+ # be one greedy key.
+ encoded_params = {}
+ for template_param in re.findall(r'{(.*?)}', uri_template):
+ if template_param.endswith('+'):
+ encoded_params[template_param] = percent_encode(
+ params[template_param[:-1]], safe='/~')
+ else:
+ encoded_params[template_param] = percent_encode(
+ params[template_param])
+ return uri_template.format(**encoded_params)
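+ # Illustrative sketch (hypothetical params):
+ #     _render_uri_template('/{Bucket}/{Key+}',
+ #                          {'Bucket': 'my bucket', 'Key': 'a/b c'})
+ # returns '/my%20bucket/a/b%20c'; the greedy '{Key+}' label keeps '/'
+ # unescaped (safe='/~'), while '{Bucket}' would encode it.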
+
+ def _serialize_payload(self, partitioned, parameters,
+ serialized, shape, shape_members):
+ # partitioned - The user input params partitioned by location.
+ # parameters - The user input params.
+ # serialized - The final serialized request dict.
+ # shape - Describes the expected input shape
+ # shape_members - The members of the input struct shape
+ payload_member = shape.serialization.get('payload')
+ if payload_member is not None and \
+ shape_members[payload_member].type_name in ['blob', 'string']:
+ # If it's streaming, then the body is just the
+ # value of the payload.
+ body_payload = parameters.get(payload_member, b'')
+ body_payload = self._encode_payload(body_payload)
+ serialized['body'] = body_payload
+ elif payload_member is not None:
+ # If there's a payload member, we serialize that
+ # member to the body.
+ body_params = parameters.get(payload_member)
+ if body_params is not None:
+ serialized['body'] = self._serialize_body_params(
+ body_params,
+ shape_members[payload_member])
+ elif partitioned['body_kwargs']:
+ serialized['body'] = self._serialize_body_params(
+ partitioned['body_kwargs'], shape)
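+ # Illustrative sketch: for S3's PutObject, the payload member 'Body'
+ # is a blob, so the user's bytes pass through _encode_payload
+ # unchanged; a structure payload member is instead rendered via
+ # _serialize_body_params.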
+
+ def _encode_payload(self, body):
+ if isinstance(body, six.text_type):
+ return body.encode(self.DEFAULT_ENCODING)
+ return body
+
+ def _partition_parameters(self, partitioned, param_name,
+ param_value, shape_members):
+ # This takes a single user-provided input parameter and figures
+ # out where it goes in the request dict.
+ # Some params are HTTP headers, some are used in the URI, some
+ # are in the request body. This method deals with this.
+ member = shape_members[param_name]
+ location = member.serialization.get('location')
+ key_name = member.serialization.get('name', param_name)
+ if location == 'uri':
+ partitioned['uri_path_kwargs'][key_name] = param_value
+ elif location == 'querystring':
+ if isinstance(param_value, dict):
+ partitioned['query_string_kwargs'].update(param_value)
+ elif isinstance(param_value, bool):
+ partitioned['query_string_kwargs'][
+ key_name] = str(param_value).lower()
elif member.type_name == 'timestamp':
timestamp_format = member.serialization.get(
'timestampFormat', self.QUERY_STRING_TIMESTAMP_FORMAT)
@@ -551,157 +551,157 @@ class BaseRestSerializer(Serializer):
key_name] = self._convert_timestamp_to_str(
param_value, timestamp_format
)
- else:
- partitioned['query_string_kwargs'][key_name] = param_value
- elif location == 'header':
- shape = shape_members[param_name]
- value = self._convert_header_value(shape, param_value)
- partitioned['headers'][key_name] = str(value)
- elif location == 'headers':
- # 'headers' is a bit of an oddball. The ``key_name``
- # is really a prefix for the header names:
- header_prefix = key_name
- # The value provided by the user is a dict so we'll be
- # creating multiple header key/val pairs. The key
- # name to use for each header is the header_prefix (``key_name``)
- # plus the key provided by the user.
- self._do_serialize_header_map(header_prefix,
- partitioned['headers'],
- param_value)
- else:
- partitioned['body_kwargs'][param_name] = param_value
-
- def _do_serialize_header_map(self, header_prefix, headers, user_input):
- for key, val in user_input.items():
- full_key = header_prefix + key
- headers[full_key] = val
-
- def _serialize_body_params(self, params, shape):
- raise NotImplementedError('_serialize_body_params')
-
- def _convert_header_value(self, shape, value):
- if shape.type_name == 'timestamp':
- datetime_obj = parse_to_aware_datetime(value)
- timestamp = calendar.timegm(datetime_obj.utctimetuple())
+ else:
+ partitioned['query_string_kwargs'][key_name] = param_value
+ elif location == 'header':
+ shape = shape_members[param_name]
+ value = self._convert_header_value(shape, param_value)
+ partitioned['headers'][key_name] = str(value)
+ elif location == 'headers':
+ # 'headers' is a bit of an oddball. The ``key_name``
+ # is really a prefix for the header names:
+ header_prefix = key_name
+ # The value provided by the user is a dict so we'll be
+ # creating multiple header key/val pairs. The key
+ # name to use for each header is the header_prefix (``key_name``)
+ # plus the key provided by the user.
+ self._do_serialize_header_map(header_prefix,
+ partitioned['headers'],
+ param_value)
+ else:
+ partitioned['body_kwargs'][param_name] = param_value
+
+ def _do_serialize_header_map(self, header_prefix, headers, user_input):
+ for key, val in user_input.items():
+ full_key = header_prefix + key
+ headers[full_key] = val
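+ # Illustrative sketch: with a hypothetical header_prefix of
+ # 'x-amz-meta-' and user_input {'color': 'red'}, this emits a single
+ # header 'x-amz-meta-color: red' (the pattern S3 uses for user
+ # metadata).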
+
+ def _serialize_body_params(self, params, shape):
+ raise NotImplementedError('_serialize_body_params')
+
+ def _convert_header_value(self, shape, value):
+ if shape.type_name == 'timestamp':
+ datetime_obj = parse_to_aware_datetime(value)
+ timestamp = calendar.timegm(datetime_obj.utctimetuple())
timestamp_format = shape.serialization.get(
'timestampFormat', self.HEADER_TIMESTAMP_FORMAT)
return self._convert_timestamp_to_str(timestamp, timestamp_format)
- elif is_json_value_header(shape):
- # Serialize with no spaces after separators to save space in
- # the header.
- return self._get_base64(json.dumps(value, separators=(',', ':')))
- else:
- return value
-
-
-class RestJSONSerializer(BaseRestSerializer, JSONSerializer):
-
- def _serialize_body_params(self, params, shape):
- serialized_body = self.MAP_TYPE()
- self._serialize(serialized_body, params, shape)
- return json.dumps(serialized_body).encode(self.DEFAULT_ENCODING)
-
-
-class RestXMLSerializer(BaseRestSerializer):
- TIMESTAMP_FORMAT = 'iso8601'
-
- def _serialize_body_params(self, params, shape):
- root_name = shape.serialization['name']
- pseudo_root = ElementTree.Element('')
- self._serialize(shape, params, pseudo_root, root_name)
- real_root = list(pseudo_root)[0]
- return ElementTree.tostring(real_root, encoding=self.DEFAULT_ENCODING)
-
- def _serialize(self, shape, params, xmlnode, name):
- method = getattr(self, '_serialize_type_%s' % shape.type_name,
- self._default_serialize)
- method(xmlnode, params, shape, name)
-
- def _serialize_type_structure(self, xmlnode, params, shape, name):
- structure_node = ElementTree.SubElement(xmlnode, name)
-
- if 'xmlNamespace' in shape.serialization:
- namespace_metadata = shape.serialization['xmlNamespace']
- attribute_name = 'xmlns'
- if namespace_metadata.get('prefix'):
- attribute_name += ':%s' % namespace_metadata['prefix']
- structure_node.attrib[attribute_name] = namespace_metadata['uri']
- for key, value in params.items():
- member_shape = shape.members[key]
- member_name = member_shape.serialization.get('name', key)
- # We need to special case member shapes that are marked as an
- # xmlAttribute. Rather than serializing into an XML child node,
- # we instead serialize the shape to an XML attribute of the
- # *current* node.
- if value is None:
- # Don't serialize any param whose value is None.
- return
- if member_shape.serialization.get('xmlAttribute'):
- # xmlAttributes must have a serialization name.
- xml_attribute_name = member_shape.serialization['name']
- structure_node.attrib[xml_attribute_name] = value
- continue
- self._serialize(member_shape, value, structure_node, member_name)
-
- def _serialize_type_list(self, xmlnode, params, shape, name):
- member_shape = shape.member
- if shape.serialization.get('flattened'):
- element_name = name
- list_node = xmlnode
- else:
- element_name = member_shape.serialization.get('name', 'member')
- list_node = ElementTree.SubElement(xmlnode, name)
- for item in params:
- self._serialize(member_shape, item, list_node, element_name)
-
- def _serialize_type_map(self, xmlnode, params, shape, name):
- # Given the ``name`` of MyMap, and input of {"key1": "val1"}
- # we serialize this as:
- # <MyMap>
- # <entry>
- # <key>key1</key>
- # <value>val1</value>
- # </entry>
- # </MyMap>
- node = ElementTree.SubElement(xmlnode, name)
- # TODO: handle flattened maps.
- for key, value in params.items():
- entry_node = ElementTree.SubElement(node, 'entry')
- key_name = self._get_serialized_name(shape.key, default_name='key')
- val_name = self._get_serialized_name(shape.value,
- default_name='value')
- self._serialize(shape.key, key, entry_node, key_name)
- self._serialize(shape.value, value, entry_node, val_name)
-
- def _serialize_type_boolean(self, xmlnode, params, shape, name):
- # For scalar types, the 'params' arg is actually just a scalar
- # value representing the data we need to serialize as a boolean.
- # It is serialized as either 'true' or 'false'.
- node = ElementTree.SubElement(xmlnode, name)
- if params:
- str_value = 'true'
- else:
- str_value = 'false'
- node.text = str_value
-
- def _serialize_type_blob(self, xmlnode, params, shape, name):
- node = ElementTree.SubElement(xmlnode, name)
- node.text = self._get_base64(params)
-
- def _serialize_type_timestamp(self, xmlnode, params, shape, name):
- node = ElementTree.SubElement(xmlnode, name)
+ elif is_json_value_header(shape):
+ # Serialize with no spaces after separators to save space in
+ # the header.
+ return self._get_base64(json.dumps(value, separators=(',', ':')))
+ else:
+ return value
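+ # Illustrative sketch: a json-value header carrying {'a': 1} is sent
+ # as base64 of '{"a":1}', i.e. 'eyJhIjoxfQ=='.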
+
+
+class RestJSONSerializer(BaseRestSerializer, JSONSerializer):
+
+ def _serialize_body_params(self, params, shape):
+ serialized_body = self.MAP_TYPE()
+ self._serialize(serialized_body, params, shape)
+ return json.dumps(serialized_body).encode(self.DEFAULT_ENCODING)
+
+
+class RestXMLSerializer(BaseRestSerializer):
+ TIMESTAMP_FORMAT = 'iso8601'
+
+ def _serialize_body_params(self, params, shape):
+ root_name = shape.serialization['name']
+ pseudo_root = ElementTree.Element('')
+ self._serialize(shape, params, pseudo_root, root_name)
+ real_root = list(pseudo_root)[0]
+ return ElementTree.tostring(real_root, encoding=self.DEFAULT_ENCODING)
+
+ def _serialize(self, shape, params, xmlnode, name):
+ method = getattr(self, '_serialize_type_%s' % shape.type_name,
+ self._default_serialize)
+ method(xmlnode, params, shape, name)
+
+ def _serialize_type_structure(self, xmlnode, params, shape, name):
+ structure_node = ElementTree.SubElement(xmlnode, name)
+
+ if 'xmlNamespace' in shape.serialization:
+ namespace_metadata = shape.serialization['xmlNamespace']
+ attribute_name = 'xmlns'
+ if namespace_metadata.get('prefix'):
+ attribute_name += ':%s' % namespace_metadata['prefix']
+ structure_node.attrib[attribute_name] = namespace_metadata['uri']
+ for key, value in params.items():
+ member_shape = shape.members[key]
+ member_name = member_shape.serialization.get('name', key)
+ # We need to special case member shapes that are marked as an
+ # xmlAttribute. Rather than serializing into an XML child node,
+ # we instead serialize the shape to an XML attribute of the
+ # *current* node.
+ if value is None:
+ # Don't serialize any param whose value is None.
+ return
+ if member_shape.serialization.get('xmlAttribute'):
+ # xmlAttributes must have a serialization name.
+ xml_attribute_name = member_shape.serialization['name']
+ structure_node.attrib[xml_attribute_name] = value
+ continue
+ self._serialize(member_shape, value, structure_node, member_name)
+
+ def _serialize_type_list(self, xmlnode, params, shape, name):
+ member_shape = shape.member
+ if shape.serialization.get('flattened'):
+ element_name = name
+ list_node = xmlnode
+ else:
+ element_name = member_shape.serialization.get('name', 'member')
+ list_node = ElementTree.SubElement(xmlnode, name)
+ for item in params:
+ self._serialize(member_shape, item, list_node, element_name)
+
+ def _serialize_type_map(self, xmlnode, params, shape, name):
+ # Given the ``name`` of MyMap, and input of {"key1": "val1"}
+ # we serialize this as:
+ # <MyMap>
+ # <entry>
+ # <key>key1</key>
+ # <value>val1</value>
+ # </entry>
+ # </MyMap>
+ node = ElementTree.SubElement(xmlnode, name)
+ # TODO: handle flattened maps.
+ for key, value in params.items():
+ entry_node = ElementTree.SubElement(node, 'entry')
+ key_name = self._get_serialized_name(shape.key, default_name='key')
+ val_name = self._get_serialized_name(shape.value,
+ default_name='value')
+ self._serialize(shape.key, key, entry_node, key_name)
+ self._serialize(shape.value, value, entry_node, val_name)
+
+ def _serialize_type_boolean(self, xmlnode, params, shape, name):
+ # For scalar types, the 'params' arg is actually just a scalar
+ # value representing the data we need to serialize as a boolean.
+ # It is serialized as either 'true' or 'false'.
+ node = ElementTree.SubElement(xmlnode, name)
+ if params:
+ str_value = 'true'
+ else:
+ str_value = 'false'
+ node.text = str_value
+
+ def _serialize_type_blob(self, xmlnode, params, shape, name):
+ node = ElementTree.SubElement(xmlnode, name)
+ node.text = self._get_base64(params)
+
+ def _serialize_type_timestamp(self, xmlnode, params, shape, name):
+ node = ElementTree.SubElement(xmlnode, name)
node.text = self._convert_timestamp_to_str(
params, shape.serialization.get('timestampFormat'))
-
- def _default_serialize(self, xmlnode, params, shape, name):
- node = ElementTree.SubElement(xmlnode, name)
- node.text = six.text_type(params)
-
-
-SERIALIZERS = {
- 'ec2': EC2Serializer,
- 'query': QuerySerializer,
- 'json': JSONSerializer,
- 'rest-json': RestJSONSerializer,
- 'rest-xml': RestXMLSerializer,
-}
+
+ def _default_serialize(self, xmlnode, params, shape, name):
+ node = ElementTree.SubElement(xmlnode, name)
+ node.text = six.text_type(params)
+
+
+SERIALIZERS = {
+ 'ec2': EC2Serializer,
+ 'query': QuerySerializer,
+ 'json': JSONSerializer,
+ 'rest-json': RestJSONSerializer,
+ 'rest-xml': RestXMLSerializer,
+}
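+# Illustrative note: create_serializer() above indexes this table by the
+# model's protocol metadata, so e.g. create_serializer('rest-xml') wraps
+# a RestXMLSerializer (used by services such as S3) in a
+# ParamValidationDecorator.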
diff --git a/contrib/python/botocore/botocore/session.py b/contrib/python/botocore/botocore/session.py
index 5c35d50449..f8ed57a392 100644
--- a/contrib/python/botocore/botocore/session.py
+++ b/contrib/python/botocore/botocore/session.py
@@ -1,34 +1,34 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-"""
-This module contains the main interface to the botocore package, the
-Session object.
-"""
-
-import copy
-import logging
-import os
-import platform
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""
+This module contains the main interface to the botocore package, the
+Session object.
+"""
+
+import copy
+import logging
+import os
+import platform
import re
import socket
import warnings
-
-from botocore import __version__
+
+from botocore import __version__
from botocore import UNSIGNED
-import botocore.configloader
-import botocore.credentials
-import botocore.client
+import botocore.configloader
+import botocore.credentials
+import botocore.client
from botocore.configprovider import ConfigValueStore
from botocore.configprovider import ConfigChainFactory
from botocore.configprovider import create_botocore_default_config_mapping
@@ -37,154 +37,154 @@ from botocore.exceptions import (
ConfigNotFound, ProfileNotFound, UnknownServiceError,
PartialCredentialsError,
)
-from botocore.errorfactory import ClientExceptionsFactory
-from botocore import handlers
-from botocore.hooks import HierarchicalEmitter, first_non_none_response
+from botocore.errorfactory import ClientExceptionsFactory
+from botocore import handlers
+from botocore.hooks import HierarchicalEmitter, first_non_none_response
from botocore.hooks import EventAliaser
-from botocore.loaders import create_loader
-from botocore.parsers import ResponseParserFactory
-from botocore.regions import EndpointResolver
-from botocore.model import ServiceModel
+from botocore.loaders import create_loader
+from botocore.parsers import ResponseParserFactory
+from botocore.regions import EndpointResolver
+from botocore.model import ServiceModel
from botocore import monitoring
-from botocore import paginate
-from botocore import waiter
-from botocore import retryhandler, translate
+from botocore import paginate
+from botocore import waiter
+from botocore import retryhandler, translate
from botocore import utils
from botocore.utils import EVENT_ALIASES, validate_region_name
from botocore.compat import MutableMapping, HAS_CRT
-
-
-logger = logging.getLogger(__name__)
-
-
-class Session(object):
- """
- The Session object collects useful functionality
- from `botocore` as well as important data such as configuration
- information and credentials into a single, easy-to-use object.
-
- :ivar available_profiles: A list of profiles defined in the config
- file associated with this session.
- :ivar profile: The current profile.
- """
-
+
+
+logger = logging.getLogger(__name__)
+
+
+class Session(object):
+ """
+ The Session object collects useful functionality
+ from `botocore` as well as important data such as configuration
+ information and credentials into a single, easy-to-use object.
+
+ :ivar available_profiles: A list of profiles defined in the config
+ file associated with this session.
+ :ivar profile: The current profile.
+ """
+
SESSION_VARIABLES = copy.copy(BOTOCORE_DEFAUT_SESSION_VARIABLES)
-
- #: The default format string to use when configuring the botocore logger.
- LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
-
- def __init__(self, session_vars=None, event_hooks=None,
- include_builtin_handlers=True, profile=None):
- """
- Create a new Session object.
-
- :type session_vars: dict
- :param session_vars: A dictionary that is used to override some or all
- of the environment variables associated with this session. The
- key/value pairs defined in this dictionary will override the
- corresponding variables defined in ``SESSION_VARIABLES``.
-
- :type event_hooks: BaseEventHooks
- :param event_hooks: The event hooks object to use. If one is not
- provided, an event hooks object will be automatically created
- for you.
-
- :type include_builtin_handlers: bool
- :param include_builtin_handlers: Indicates whether or not to
- automatically register builtin handlers.
-
- :type profile: str
- :param profile: The name of the profile to use for this
- session. Note that the profile can only be set when
- the session is created.
-
- """
- if event_hooks is None:
+
+ #: The default format string to use when configuring the botocore logger.
+ LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+
+ def __init__(self, session_vars=None, event_hooks=None,
+ include_builtin_handlers=True, profile=None):
+ """
+ Create a new Session object.
+
+ :type session_vars: dict
+ :param session_vars: A dictionary that is used to override some or all
+ of the environment variables associated with this session. The
+ key/value pairs defined in this dictionary will override the
+ corresponding variables defined in ``SESSION_VARIABLES``.
+
+ :type event_hooks: BaseEventHooks
+ :param event_hooks: The event hooks object to use. If one is not
+ provided, an event hooks object will be automatically created
+ for you.
+
+ :type include_builtin_handlers: bool
+ :param include_builtin_handlers: Indicates whether or not to
+ automatically register builtin handlers.
+
+ :type profile: str
+ :param profile: The name of the profile to use for this
+ session. Note that the profile can only be set when
+ the session is created.
+
+ """
+ if event_hooks is None:
self._original_handler = HierarchicalEmitter()
- else:
+ else:
self._original_handler = event_hooks
self._events = EventAliaser(self._original_handler)
- if include_builtin_handlers:
- self._register_builtin_handlers(self._events)
- self.user_agent_name = 'Botocore'
- self.user_agent_version = __version__
- self.user_agent_extra = ''
- # The _profile attribute is just used to cache the value
- # of the current profile to avoid going through the normal
- # config lookup process each access time.
- self._profile = None
- self._config = None
- self._credentials = None
- self._profile_map = None
- # This is a dict that stores per session specific config variable
- # overrides via set_config_variable().
- self._session_instance_vars = {}
- if profile is not None:
- self._session_instance_vars['profile'] = profile
- self._client_config = None
+ if include_builtin_handlers:
+ self._register_builtin_handlers(self._events)
+ self.user_agent_name = 'Botocore'
+ self.user_agent_version = __version__
+ self.user_agent_extra = ''
+ # The _profile attribute is just used to cache the value
+ # of the current profile to avoid going through the normal
+ # config lookup process each access time.
+ self._profile = None
+ self._config = None
+ self._credentials = None
+ self._profile_map = None
+ # This is a dict that stores per session specific config variable
+ # overrides via set_config_variable().
+ self._session_instance_vars = {}
+ if profile is not None:
+ self._session_instance_vars['profile'] = profile
+ self._client_config = None
self._last_client_region_used = None
- self._components = ComponentLocator()
+ self._components = ComponentLocator()
self._internal_components = ComponentLocator()
- self._register_components()
+ self._register_components()
self.session_var_map = SessionVarDict(self, self.SESSION_VARIABLES)
if session_vars is not None:
self.session_var_map.update(session_vars)
-
- def _register_components(self):
- self._register_credential_provider()
- self._register_data_loader()
- self._register_endpoint_resolver()
- self._register_event_emitter()
- self._register_response_parser_factory()
- self._register_exceptions_factory()
+
+ def _register_components(self):
+ self._register_credential_provider()
+ self._register_data_loader()
+ self._register_endpoint_resolver()
+ self._register_event_emitter()
+ self._register_response_parser_factory()
+ self._register_exceptions_factory()
self._register_config_store()
self._register_monitor()
-
- def _register_event_emitter(self):
- self._components.register_component('event_emitter', self._events)
-
- def _register_credential_provider(self):
- self._components.lazy_register_component(
+
+ def _register_event_emitter(self):
+ self._components.register_component('event_emitter', self._events)
+
+ def _register_credential_provider(self):
+ self._components.lazy_register_component(
'credential_provider', self._create_credential_resolver)
-
+
def _create_credential_resolver(self):
return botocore.credentials.create_credential_resolver(
self, region_name=self._last_client_region_used
)
- def _register_data_loader(self):
- self._components.lazy_register_component(
- 'data_loader',
- lambda: create_loader(self.get_config_variable('data_path')))
-
- def _register_endpoint_resolver(self):
- def create_default_resolver():
- loader = self.get_component('data_loader')
- endpoints = loader.load_data('endpoints')
- return EndpointResolver(endpoints)
+ def _register_data_loader(self):
+ self._components.lazy_register_component(
+ 'data_loader',
+ lambda: create_loader(self.get_config_variable('data_path')))
+
+ def _register_endpoint_resolver(self):
+ def create_default_resolver():
+ loader = self.get_component('data_loader')
+ endpoints = loader.load_data('endpoints')
+ return EndpointResolver(endpoints)
self._internal_components.lazy_register_component(
- 'endpoint_resolver', create_default_resolver)
-
- def _register_response_parser_factory(self):
- self._components.register_component('response_parser_factory',
- ResponseParserFactory())
-
- def _register_exceptions_factory(self):
+ 'endpoint_resolver', create_default_resolver)
+
+ def _register_response_parser_factory(self):
+ self._components.register_component('response_parser_factory',
+ ResponseParserFactory())
+
+ def _register_exceptions_factory(self):
self._internal_components.register_component(
- 'exceptions_factory', ClientExceptionsFactory())
-
- def _register_builtin_handlers(self, events):
- for spec in handlers.BUILTIN_HANDLERS:
- if len(spec) == 2:
- event_name, handler = spec
- self.register(event_name, handler)
- else:
- event_name, handler, register_type = spec
- if register_type is handlers.REGISTER_FIRST:
- self._events.register_first(event_name, handler)
- elif register_type is handlers.REGISTER_LAST:
- self._events.register_last(event_name, handler)
-
+ 'exceptions_factory', ClientExceptionsFactory())
+
+ def _register_builtin_handlers(self, events):
+ for spec in handlers.BUILTIN_HANDLERS:
+ if len(spec) == 2:
+ event_name, handler = spec
+ self.register(event_name, handler)
+ else:
+ event_name, handler, register_type = spec
+ if register_type is handlers.REGISTER_FIRST:
+ self._events.register_first(event_name, handler)
+ elif register_type is handlers.REGISTER_LAST:
+ self._events.register_last(event_name, handler)
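+ # Illustrative sketch: entries in handlers.BUILTIN_HANDLERS are either
+ # 2-tuples ('event-name', handler) or 3-tuples whose third element
+ # pins ordering, e.g. a hypothetical
+ # ('before-call.s3', fix_headers, handlers.REGISTER_FIRST).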
+
def _register_config_store(self):
config_store_component = ConfigValueStore(
mapping=create_botocore_default_config_mapping(self)
@@ -225,32 +225,32 @@ class Session(object):
# dependency on the package being present.
return "Unknown"
- @property
- def available_profiles(self):
- return list(self._build_profile_map().keys())
-
- def _build_profile_map(self):
- # This will build the profile map if it has not been created;
- # otherwise it will return the cached value. The profile map
- # maps profile names to the config values for each profile.
- if self._profile_map is None:
- self._profile_map = self.full_config['profiles']
- return self._profile_map
-
- @property
- def profile(self):
- if self._profile is None:
- profile = self.get_config_variable('profile')
- self._profile = profile
- return self._profile
-
+ @property
+ def available_profiles(self):
+ return list(self._build_profile_map().keys())
+
+ def _build_profile_map(self):
+ # This will build the profile map if it has not been created;
+ # otherwise it will return the cached value. The profile map
+ # maps profile names to the config values for each profile.
+ if self._profile_map is None:
+ self._profile_map = self.full_config['profiles']
+ return self._profile_map
+
+ @property
+ def profile(self):
+ if self._profile is None:
+ profile = self.get_config_variable('profile')
+ self._profile = profile
+ return self._profile
+
def get_config_variable(self, logical_name, methods=None):
if methods is not None:
return self._get_config_variable_with_custom_methods(
logical_name, methods)
return self.get_component('config_store').get_config_variable(
logical_name)
-
+
def _get_config_variable_with_custom_methods(self, logical_name, methods):
# If a custom list of methods was supplied we need to preserve the
# behavior with the new system. To do so a new chain that is a copy of
@@ -274,426 +274,426 @@ class Session(object):
build_chain_config_args['config_property_name'] = config_name
mapping[name] = chain_builder.create_config_chain(
**build_chain_config_args
- )
+ )
config_store_component = ConfigValueStore(
mapping=mapping
)
value = config_store_component.get_config_variable(logical_name)
- return value
-
- def set_config_variable(self, logical_name, value):
- """Set a configuration variable to a specific value.
-
- By using this method, you can override the normal lookup
- process used in ``get_config_variable`` by explicitly setting
- a value. Subsequent calls to ``get_config_variable`` will
- use the ``value``. This gives you per-session specific
- configuration values.
-
- ::
- >>> # Assume logical name 'foo' maps to env var 'FOO'
- >>> os.environ['FOO'] = 'myvalue'
- >>> s.get_config_variable('foo')
- 'myvalue'
- >>> s.set_config_variable('foo', 'othervalue')
- >>> s.get_config_variable('foo')
- 'othervalue'
-
- :type logical_name: str
- :param logical_name: The logical name of the session variable
- you want to set. These are the keys in ``SESSION_VARIABLES``.
- :param value: The value to associate with the config variable.
-
- """
- logger.debug(
- "Setting config variable for %s to %r",
- logical_name,
- value,
- )
- self._session_instance_vars[logical_name] = value
-
+ return value
+
+ def set_config_variable(self, logical_name, value):
+ """Set a configuration variable to a specific value.
+
+ By using this method, you can override the normal lookup
+ process used in ``get_config_variable`` by explicitly setting
+ a value. Subsequent calls to ``get_config_variable`` will
+ use the ``value``. This gives you per-session specific
+ configuration values.
+
+ ::
+ >>> # Assume logical name 'foo' maps to env var 'FOO'
+ >>> os.environ['FOO'] = 'myvalue'
+ >>> s.get_config_variable('foo')
+ 'myvalue'
+ >>> s.set_config_variable('foo', 'othervalue')
+ >>> s.get_config_variable('foo')
+ 'othervalue'
+
+ :type logical_name: str
+ :param logical_name: The logical name of the session variable
+ you want to set. These are the keys in ``SESSION_VARIABLES``.
+ :param value: The value to associate with the config variable.
+
+ """
+ logger.debug(
+ "Setting config variable for %s to %r",
+ logical_name,
+ value,
+ )
+ self._session_instance_vars[logical_name] = value
+
def instance_variables(self):
return copy.copy(self._session_instance_vars)
- def get_scoped_config(self):
- """
- Returns the config values from the config file scoped to the current
- profile.
-
- The configuration data is loaded **only** from the config file.
- It does not resolve variables based on different locations
- (e.g. first from the session instance, then from environment
- variables, then from the config file). If you want this lookup
- behavior, use the ``get_config_variable`` method instead.
-
- Note that this configuration is specific to a single profile (the
- ``profile`` session variable).
-
- If the ``profile`` session variable is set and the profile does
- not exist in the config file, a ``ProfileNotFound`` exception
- will be raised.
-
- :raises: ConfigNotFound, ConfigParseError, ProfileNotFound
- :rtype: dict
-
- """
- profile_name = self.get_config_variable('profile')
- profile_map = self._build_profile_map()
- # If a profile is not explicitly set return the default
- # profile config or an empty config dict if we don't have
- # a default profile.
- if profile_name is None:
- return profile_map.get('default', {})
- elif profile_name not in profile_map:
- # Otherwise if they specified a profile, it has to
- # exist (even if it's the default profile) otherwise
- # we complain.
- raise ProfileNotFound(profile=profile_name)
- else:
- return profile_map[profile_name]
-
- @property
- def full_config(self):
- """Return the parsed config file.
-
-        The ``get_scoped_config`` method returns the config associated with the
- specified profile. This property returns the contents of the
- **entire** config file.
-
- :rtype: dict
- """
- if self._config is None:
- try:
- config_file = self.get_config_variable('config_file')
- self._config = botocore.configloader.load_config(config_file)
- except ConfigNotFound:
- self._config = {'profiles': {}}
- try:
- # Now we need to inject the profiles from the
- # credentials file. We don't actually need the values
- # in the creds file, only the profile names so that we
- # can validate the user is not referring to a nonexistent
- # profile.
- cred_file = self.get_config_variable('credentials_file')
- cred_profiles = botocore.configloader.raw_config_parse(
- cred_file)
- for profile in cred_profiles:
- cred_vars = cred_profiles[profile]
- if profile not in self._config['profiles']:
- self._config['profiles'][profile] = cred_vars
- else:
- self._config['profiles'][profile].update(cred_vars)
- except ConfigNotFound:
- pass
- return self._config
-
- def get_default_client_config(self):
- """Retrieves the default config for creating clients
-
- :rtype: botocore.client.Config
- :returns: The default client config object when creating clients. If
- the value is ``None`` then there is no default config object
- attached to the session.
- """
- return self._client_config
-
- def set_default_client_config(self, client_config):
- """Sets the default config for creating clients
-
- :type client_config: botocore.client.Config
- :param client_config: The default client config object when creating
- clients. If the value is ``None`` then there is no default config
- object attached to the session.
- """
- self._client_config = client_config
-
- def set_credentials(self, access_key, secret_key, token=None):
- """
- Manually create credentials for this session. If you would
- prefer to use botocore without a config file, environment variables,
- or IAM roles, you can pass explicit credentials into this
- method to establish credentials for this session.
-
- :type access_key: str
- :param access_key: The access key part of the credentials.
-
- :type secret_key: str
- :param secret_key: The secret key part of the credentials.
-
- :type token: str
-        :param token: An optional session token used by STS session
- credentials.
- """
- self._credentials = botocore.credentials.Credentials(access_key,
- secret_key,
- token)
-
- def get_credentials(self):
- """
-        Return the :class:`botocore.credentials.Credentials` object
- associated with this session. If the credentials have not
- yet been loaded, this will attempt to load them. If they
- have already been loaded, this will return the cached
- credentials.
-
- """
- if self._credentials is None:
- self._credentials = self._components.get_component(
- 'credential_provider').load_credentials()
- return self._credentials
-
- def user_agent(self):
- """
- Return a string suitable for use as a User-Agent header.
- The string will be of the form:
-
- <agent_name>/<agent_version> Python/<py_ver> <plat_name>/<plat_ver> <exec_env>
-
- Where:
-
- - agent_name is the value of the `user_agent_name` attribute
+ def get_scoped_config(self):
+ """
+ Returns the config values from the config file scoped to the current
+ profile.
+
+ The configuration data is loaded **only** from the config file.
+ It does not resolve variables based on different locations
+ (e.g. first from the session instance, then from environment
+ variables, then from the config file). If you want this lookup
+ behavior, use the ``get_config_variable`` method instead.
+
+ Note that this configuration is specific to a single profile (the
+ ``profile`` session variable).
+
+ If the ``profile`` session variable is set and the profile does
+ not exist in the config file, a ``ProfileNotFound`` exception
+ will be raised.
+
+ :raises: ConfigNotFound, ConfigParseError, ProfileNotFound
+ :rtype: dict
+
+ """
+ profile_name = self.get_config_variable('profile')
+ profile_map = self._build_profile_map()
+ # If a profile is not explicitly set return the default
+ # profile config or an empty config dict if we don't have
+ # a default profile.
+ if profile_name is None:
+ return profile_map.get('default', {})
+ elif profile_name not in profile_map:
+ # Otherwise if they specified a profile, it has to
+ # exist (even if it's the default profile) otherwise
+ # we complain.
+ raise ProfileNotFound(profile=profile_name)
+ else:
+ return profile_map[profile_name]
+
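A minimal usage sketch for the scoped lookup above; the ``dev`` profile name and ``region`` key are placeholders:

    from botocore.session import Session

    session = Session(profile='dev')
    scoped = session.get_scoped_config()  # ProfileNotFound if 'dev' is absent
    print(scoped.get('region'))           # values come from the config file only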
+ @property
+ def full_config(self):
+ """Return the parsed config file.
+
+        The ``get_scoped_config`` method returns the config associated with the
+ specified profile. This property returns the contents of the
+ **entire** config file.
+
+ :rtype: dict
+ """
+ if self._config is None:
+ try:
+ config_file = self.get_config_variable('config_file')
+ self._config = botocore.configloader.load_config(config_file)
+ except ConfigNotFound:
+ self._config = {'profiles': {}}
+ try:
+ # Now we need to inject the profiles from the
+ # credentials file. We don't actually need the values
+ # in the creds file, only the profile names so that we
+ # can validate the user is not referring to a nonexistent
+ # profile.
+ cred_file = self.get_config_variable('credentials_file')
+ cred_profiles = botocore.configloader.raw_config_parse(
+ cred_file)
+ for profile in cred_profiles:
+ cred_vars = cred_profiles[profile]
+ if profile not in self._config['profiles']:
+ self._config['profiles'][profile] = cred_vars
+ else:
+ self._config['profiles'][profile].update(cred_vars)
+ except ConfigNotFound:
+ pass
+ return self._config
+
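By contrast, ``full_config`` exposes every profile at once; a short sketch:

    session = Session()
    cfg = session.full_config        # config file merged with credentials profiles
    print(sorted(cfg['profiles']))   # same names as session.available_profiles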
+ def get_default_client_config(self):
+ """Retrieves the default config for creating clients
+
+ :rtype: botocore.client.Config
+ :returns: The default client config object when creating clients. If
+ the value is ``None`` then there is no default config object
+ attached to the session.
+ """
+ return self._client_config
+
+ def set_default_client_config(self, client_config):
+ """Sets the default config for creating clients
+
+ :type client_config: botocore.client.Config
+ :param client_config: The default client config object when creating
+ clients. If the value is ``None`` then there is no default config
+ object attached to the session.
+ """
+ self._client_config = client_config
+
+ def set_credentials(self, access_key, secret_key, token=None):
+ """
+ Manually create credentials for this session. If you would
+ prefer to use botocore without a config file, environment variables,
+ or IAM roles, you can pass explicit credentials into this
+ method to establish credentials for this session.
+
+ :type access_key: str
+ :param access_key: The access key part of the credentials.
+
+ :type secret_key: str
+ :param secret_key: The secret key part of the credentials.
+
+ :type token: str
+        :param token: An optional session token used by STS session
+ credentials.
+ """
+ self._credentials = botocore.credentials.Credentials(access_key,
+ secret_key,
+ token)
+
+ def get_credentials(self):
+ """
+        Return the :class:`botocore.credentials.Credentials` object
+ associated with this session. If the credentials have not
+ yet been loaded, this will attempt to load them. If they
+ have already been loaded, this will return the cached
+ credentials.
+
+ """
+ if self._credentials is None:
+ self._credentials = self._components.get_component(
+ 'credential_provider').load_credentials()
+ return self._credentials
+
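A sketch of the explicit-credentials path above; the key values are placeholders:

    session = Session()
    session.set_credentials('AKIDEXAMPLE', 'secret-key-example')
    creds = session.get_credentials()      # returns the cached Credentials object
    print(creds.access_key, creds.method)  # method is 'explicit' for manual creds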
+ def user_agent(self):
+ """
+ Return a string suitable for use as a User-Agent header.
+ The string will be of the form:
+
+ <agent_name>/<agent_version> Python/<py_ver> <plat_name>/<plat_ver> <exec_env>
+
+ Where:
+
+ - agent_name is the value of the `user_agent_name` attribute
of the session object (`Botocore` by default).
-        - agent_version is the value of the `user_agent_version`
-          attribute of the session object (the botocore version by default).
-        - py_ver is the version of the Python interpreter being used.
- - plat_name is the name of the platform (e.g. Darwin)
- - plat_ver is the version of the platform
- - exec_env is exec-env/$AWS_EXECUTION_ENV
-
- If ``user_agent_extra`` is not empty, then this value will be
- appended to the end of the user agent string.
-
- """
- base = '%s/%s Python/%s %s/%s' % (self.user_agent_name,
- self.user_agent_version,
- platform.python_version(),
- platform.system(),
- platform.release())
+        - agent_version is the value of the `user_agent_version`
+          attribute of the session object (the botocore version by default).
+        - py_ver is the version of the Python interpreter being used.
+ - plat_name is the name of the platform (e.g. Darwin)
+ - plat_ver is the version of the platform
+ - exec_env is exec-env/$AWS_EXECUTION_ENV
+
+ If ``user_agent_extra`` is not empty, then this value will be
+ appended to the end of the user agent string.
+
+ """
+ base = '%s/%s Python/%s %s/%s' % (self.user_agent_name,
+ self.user_agent_version,
+ platform.python_version(),
+ platform.system(),
+ platform.release())
if HAS_CRT:
base += ' awscrt/%s' % self._get_crt_version()
- if os.environ.get('AWS_EXECUTION_ENV') is not None:
- base += ' exec-env/%s' % os.environ.get('AWS_EXECUTION_ENV')
- if self.user_agent_extra:
- base += ' %s' % self.user_agent_extra
-
- return base
-
- def get_data(self, data_path):
- """
- Retrieve the data associated with `data_path`.
-
- :type data_path: str
- :param data_path: The path to the data you wish to retrieve.
- """
- return self.get_component('data_loader').load_data(data_path)
-
- def get_service_model(self, service_name, api_version=None):
- """Get the service model object.
-
- :type service_name: string
- :param service_name: The service name
-
- :type api_version: string
- :param api_version: The API version of the service. If none is
- provided, then the latest API version will be used.
-
- :rtype: L{botocore.model.ServiceModel}
- :return: The botocore service model for the service.
-
- """
- service_description = self.get_service_data(service_name, api_version)
- return ServiceModel(service_description, service_name=service_name)
-
- def get_waiter_model(self, service_name, api_version=None):
- loader = self.get_component('data_loader')
- waiter_config = loader.load_service_model(
- service_name, 'waiters-2', api_version)
- return waiter.WaiterModel(waiter_config)
-
- def get_paginator_model(self, service_name, api_version=None):
- loader = self.get_component('data_loader')
- paginator_config = loader.load_service_model(
- service_name, 'paginators-1', api_version)
- return paginate.PaginatorModel(paginator_config)
-
- def get_service_data(self, service_name, api_version=None):
- """
- Retrieve the fully merged data associated with a service.
- """
- data_path = service_name
- service_data = self.get_component('data_loader').load_service_model(
- data_path,
- type_name='service-2',
- api_version=api_version
- )
+ if os.environ.get('AWS_EXECUTION_ENV') is not None:
+ base += ' exec-env/%s' % os.environ.get('AWS_EXECUTION_ENV')
+ if self.user_agent_extra:
+ base += ' %s' % self.user_agent_extra
+
+ return base
+
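For example (version numbers are illustrative):

    session = Session()
    session.user_agent_extra = 'myapp/1.0'
    print(session.user_agent())
    # e.g. 'Botocore/1.20.102 Python/3.8.10 Linux/5.4.0 myapp/1.0'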
+ def get_data(self, data_path):
+ """
+ Retrieve the data associated with `data_path`.
+
+ :type data_path: str
+ :param data_path: The path to the data you wish to retrieve.
+ """
+ return self.get_component('data_loader').load_data(data_path)
+
+ def get_service_model(self, service_name, api_version=None):
+ """Get the service model object.
+
+ :type service_name: string
+ :param service_name: The service name
+
+ :type api_version: string
+ :param api_version: The API version of the service. If none is
+ provided, then the latest API version will be used.
+
+ :rtype: L{botocore.model.ServiceModel}
+ :return: The botocore service model for the service.
+
+ """
+ service_description = self.get_service_data(service_name, api_version)
+ return ServiceModel(service_description, service_name=service_name)
+
+ def get_waiter_model(self, service_name, api_version=None):
+ loader = self.get_component('data_loader')
+ waiter_config = loader.load_service_model(
+ service_name, 'waiters-2', api_version)
+ return waiter.WaiterModel(waiter_config)
+
+ def get_paginator_model(self, service_name, api_version=None):
+ loader = self.get_component('data_loader')
+ paginator_config = loader.load_service_model(
+ service_name, 'paginators-1', api_version)
+ return paginate.PaginatorModel(paginator_config)
+
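A brief sketch of loading the model objects described above, assuming the bundled ``s3`` service data:

    session = Session()
    model = session.get_service_model('s3')
    print(model.service_name, len(model.operation_names))

    paginators = session.get_paginator_model('s3')
    print(paginators.get_paginator('ListObjectsV2'))  # raw paginator config dict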
+ def get_service_data(self, service_name, api_version=None):
+ """
+ Retrieve the fully merged data associated with a service.
+ """
+ data_path = service_name
+ service_data = self.get_component('data_loader').load_service_model(
+ data_path,
+ type_name='service-2',
+ api_version=api_version
+ )
service_id = EVENT_ALIASES.get(service_name, service_name)
self._events.emit('service-data-loaded.%s' % service_id,
- service_data=service_data,
- service_name=service_name, session=self)
- return service_data
-
- def get_available_services(self):
- """
- Return a list of names of available services.
- """
- return self.get_component('data_loader')\
- .list_available_services(type_name='service-2')
-
- def set_debug_logger(self, logger_name='botocore'):
- """
- Convenience function to quickly configure full debug output
- to go to the console.
- """
- self.set_stream_logger(logger_name, logging.DEBUG)
-
- def set_stream_logger(self, logger_name, log_level, stream=None,
- format_string=None):
- """
- Convenience method to configure a stream logger.
-
- :type logger_name: str
- :param logger_name: The name of the logger to configure
-
- :type log_level: str
- :param log_level: The log level to set for the logger. This
- is any param supported by the ``.setLevel()`` method of
- a ``Log`` object.
-
- :type stream: file
-        :param stream: A file-like object to log to. If none is provided
- then sys.stderr will be used.
-
- :type format_string: str
- :param format_string: The format string to use for the log
- formatter. If none is provided this will default to
- ``self.LOG_FORMAT``.
-
- """
- log = logging.getLogger(logger_name)
- log.setLevel(logging.DEBUG)
-
- ch = logging.StreamHandler(stream)
- ch.setLevel(log_level)
-
- # create formatter
- if format_string is None:
- format_string = self.LOG_FORMAT
- formatter = logging.Formatter(format_string)
-
- # add formatter to ch
- ch.setFormatter(formatter)
-
- # add ch to logger
- log.addHandler(ch)
-
- def set_file_logger(self, log_level, path, logger_name='botocore'):
- """
- Convenience function to quickly configure any level of logging
- to a file.
-
- :type log_level: int
- :param log_level: A log level as specified in the `logging` module
-
- :type path: string
- :param path: Path to the log file. The file will be created
- if it doesn't already exist.
- """
- log = logging.getLogger(logger_name)
- log.setLevel(logging.DEBUG)
-
- # create console handler and set level to debug
- ch = logging.FileHandler(path)
- ch.setLevel(log_level)
-
- # create formatter
- formatter = logging.Formatter(self.LOG_FORMAT)
-
- # add formatter to ch
- ch.setFormatter(formatter)
-
- # add ch to logger
- log.addHandler(ch)
-
- def register(self, event_name, handler, unique_id=None,
- unique_id_uses_count=False):
- """Register a handler with an event.
-
- :type event_name: str
- :param event_name: The name of the event.
-
- :type handler: callable
- :param handler: The callback to invoke when the event
- is emitted. This object must be callable, and must
- accept ``**kwargs``. If either of these preconditions are
- not met, a ``ValueError`` will be raised.
-
- :type unique_id: str
- :param unique_id: An optional identifier to associate with the
- registration. A unique_id can only be used once for
- the entire session registration (unless it is unregistered).
- This can be used to prevent an event handler from being
- registered twice.
-
-        :type unique_id_uses_count: boolean
-        :param unique_id_uses_count: Specifies if the event should maintain
-            a count when a ``unique_id`` is registered and unregistered. The
- event can only be completely unregistered once every register call
- using the unique id has been matched by an ``unregister`` call.
- If ``unique_id`` is specified, subsequent ``register``
- calls must use the same value for ``unique_id_uses_count``
- as the ``register`` call that first registered the event.
-
- :raises ValueError: If the call to ``register`` uses ``unique_id``
- but the value for ``unique_id_uses_count`` differs from the
- ``unique_id_uses_count`` value declared by the very first
- ``register`` call for that ``unique_id``.
- """
- self._events.register(event_name, handler, unique_id,
- unique_id_uses_count=unique_id_uses_count)
-
- def unregister(self, event_name, handler=None, unique_id=None,
- unique_id_uses_count=False):
- """Unregister a handler with an event.
-
- :type event_name: str
- :param event_name: The name of the event.
-
- :type handler: callable
- :param handler: The callback to unregister.
-
- :type unique_id: str
-        :param unique_id: A unique identifier for the callback
- to unregister. You can provide either the handler or the
- unique_id, you do not have to provide both.
-
-        :type unique_id_uses_count: boolean
-        :param unique_id_uses_count: Specifies if the event should maintain
-            a count when a ``unique_id`` is registered and unregistered. The
- event can only be completely unregistered once every ``register``
- call using the ``unique_id`` has been matched by an ``unregister``
- call. If the ``unique_id`` is specified, subsequent
- ``unregister`` calls must use the same value for
- ``unique_id_uses_count`` as the ``register`` call that first
- registered the event.
-
- :raises ValueError: If the call to ``unregister`` uses ``unique_id``
- but the value for ``unique_id_uses_count`` differs from the
- ``unique_id_uses_count`` value declared by the very first
- ``register`` call for that ``unique_id``.
- """
- self._events.unregister(event_name, handler=handler,
- unique_id=unique_id,
- unique_id_uses_count=unique_id_uses_count)
-
- def emit(self, event_name, **kwargs):
- return self._events.emit(event_name, **kwargs)
-
- def emit_first_non_none_response(self, event_name, **kwargs):
- responses = self._events.emit(event_name, **kwargs)
- return first_non_none_response(responses)
-
- def get_component(self, name):
+ service_data=service_data,
+ service_name=service_name, session=self)
+ return service_data
+
+ def get_available_services(self):
+ """
+ Return a list of names of available services.
+ """
+ return self.get_component('data_loader')\
+ .list_available_services(type_name='service-2')
+
+ def set_debug_logger(self, logger_name='botocore'):
+ """
+ Convenience function to quickly configure full debug output
+ to go to the console.
+ """
+ self.set_stream_logger(logger_name, logging.DEBUG)
+
+ def set_stream_logger(self, logger_name, log_level, stream=None,
+ format_string=None):
+ """
+ Convenience method to configure a stream logger.
+
+ :type logger_name: str
+ :param logger_name: The name of the logger to configure
+
+ :type log_level: str
+ :param log_level: The log level to set for the logger. This
+ is any param supported by the ``.setLevel()`` method of
+ a ``Log`` object.
+
+ :type stream: file
+        :param stream: A file-like object to log to. If none is provided
+ then sys.stderr will be used.
+
+ :type format_string: str
+ :param format_string: The format string to use for the log
+ formatter. If none is provided this will default to
+ ``self.LOG_FORMAT``.
+
+ """
+ log = logging.getLogger(logger_name)
+ log.setLevel(logging.DEBUG)
+
+ ch = logging.StreamHandler(stream)
+ ch.setLevel(log_level)
+
+ # create formatter
+ if format_string is None:
+ format_string = self.LOG_FORMAT
+ formatter = logging.Formatter(format_string)
+
+ # add formatter to ch
+ ch.setFormatter(formatter)
+
+ # add ch to logger
+ log.addHandler(ch)
+
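For instance, to mirror botocore debug output to stdout with a custom format:

    import logging
    import sys

    session = Session()
    session.set_stream_logger('botocore', logging.DEBUG, stream=sys.stdout,
                              format_string='%(name)s: %(message)s')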
+ def set_file_logger(self, log_level, path, logger_name='botocore'):
+ """
+ Convenience function to quickly configure any level of logging
+ to a file.
+
+ :type log_level: int
+ :param log_level: A log level as specified in the `logging` module
+
+ :type path: string
+ :param path: Path to the log file. The file will be created
+ if it doesn't already exist.
+ """
+ log = logging.getLogger(logger_name)
+ log.setLevel(logging.DEBUG)
+
+ # create console handler and set level to debug
+ ch = logging.FileHandler(path)
+ ch.setLevel(log_level)
+
+ # create formatter
+ formatter = logging.Formatter(self.LOG_FORMAT)
+
+ # add formatter to ch
+ ch.setFormatter(formatter)
+
+ # add ch to logger
+ log.addHandler(ch)
+
+ def register(self, event_name, handler, unique_id=None,
+ unique_id_uses_count=False):
+ """Register a handler with an event.
+
+ :type event_name: str
+ :param event_name: The name of the event.
+
+ :type handler: callable
+ :param handler: The callback to invoke when the event
+ is emitted. This object must be callable, and must
+            accept ``**kwargs``.  If either of these preconditions is
+ not met, a ``ValueError`` will be raised.
+
+ :type unique_id: str
+ :param unique_id: An optional identifier to associate with the
+ registration. A unique_id can only be used once for
+ the entire session registration (unless it is unregistered).
+ This can be used to prevent an event handler from being
+ registered twice.
+
+        :type unique_id_uses_count: boolean
+        :param unique_id_uses_count: Specifies if the event should maintain
+            a count when a ``unique_id`` is registered and unregistered. The
+ event can only be completely unregistered once every register call
+ using the unique id has been matched by an ``unregister`` call.
+ If ``unique_id`` is specified, subsequent ``register``
+ calls must use the same value for ``unique_id_uses_count``
+ as the ``register`` call that first registered the event.
+
+ :raises ValueError: If the call to ``register`` uses ``unique_id``
+ but the value for ``unique_id_uses_count`` differs from the
+ ``unique_id_uses_count`` value declared by the very first
+ ``register`` call for that ``unique_id``.
+ """
+ self._events.register(event_name, handler, unique_id,
+ unique_id_uses_count=unique_id_uses_count)
+
+ def unregister(self, event_name, handler=None, unique_id=None,
+ unique_id_uses_count=False):
+ """Unregister a handler with an event.
+
+ :type event_name: str
+ :param event_name: The name of the event.
+
+ :type handler: callable
+ :param handler: The callback to unregister.
+
+ :type unique_id: str
+        :param unique_id: A unique identifier for the callback
+ to unregister. You can provide either the handler or the
+ unique_id, you do not have to provide both.
+
+        :type unique_id_uses_count: boolean
+        :param unique_id_uses_count: Specifies if the event should maintain
+            a count when a ``unique_id`` is registered and unregistered. The
+ event can only be completely unregistered once every ``register``
+ call using the ``unique_id`` has been matched by an ``unregister``
+ call. If the ``unique_id`` is specified, subsequent
+ ``unregister`` calls must use the same value for
+ ``unique_id_uses_count`` as the ``register`` call that first
+ registered the event.
+
+ :raises ValueError: If the call to ``unregister`` uses ``unique_id``
+ but the value for ``unique_id_uses_count`` differs from the
+ ``unique_id_uses_count`` value declared by the very first
+ ``register`` call for that ``unique_id``.
+ """
+ self._events.unregister(event_name, handler=handler,
+ unique_id=unique_id,
+ unique_id_uses_count=unique_id_uses_count)
+
+ def emit(self, event_name, **kwargs):
+ return self._events.emit(event_name, **kwargs)
+
+ def emit_first_non_none_response(self, event_name, **kwargs):
+ responses = self._events.emit(event_name, **kwargs)
+ return first_non_none_response(responses)
+
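A small end-to-end sketch of the event API above; the header name is a placeholder and the event targets all S3 requests:

    def add_header(request, **kwargs):  # handlers must accept **kwargs
        request.headers['x-trace-id'] = 'abc123'

    session = Session()
    session.register('request-created.s3', add_header, unique_id='trace-header')
    # Re-registering the same unique_id (without counting) is silently ignored.
    session.register('request-created.s3', add_header, unique_id='trace-header')
    session.unregister('request-created.s3', unique_id='trace-header')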
+ def get_component(self, name):
try:
return self._components.get_component(name)
except ValueError:
@@ -705,7 +705,7 @@ class Session(object):
DeprecationWarning)
return self._internal_components.get_component(name)
raise
-
+
def _get_internal_component(self, name):
# While this method may be called by botocore classes outside of the
# Session, this method should **never** be used by a class that lives
@@ -718,142 +718,142 @@ class Session(object):
# outside of botocore.
return self._internal_components.register_component(name, component)
- def register_component(self, name, component):
- self._components.register_component(name, component)
-
- def lazy_register_component(self, name, component):
- self._components.lazy_register_component(name, component)
-
- def create_client(self, service_name, region_name=None, api_version=None,
- use_ssl=True, verify=None, endpoint_url=None,
- aws_access_key_id=None, aws_secret_access_key=None,
- aws_session_token=None, config=None):
- """Create a botocore client.
-
- :type service_name: string
- :param service_name: The name of the service for which a client will
-            be created.  You can use the ``Session.get_available_services()``
- method to get a list of all available service names.
-
- :type region_name: string
- :param region_name: The name of the region associated with the client.
- A client is associated with a single region.
-
- :type api_version: string
- :param api_version: The API version to use. By default, botocore will
- use the latest API version when creating a client. You only need
- to specify this parameter if you want to use a previous API version
- of the client.
-
- :type use_ssl: boolean
- :param use_ssl: Whether or not to use SSL. By default, SSL is used.
- Note that not all services support non-ssl connections.
-
- :type verify: boolean/string
- :param verify: Whether or not to verify SSL certificates.
- By default SSL certificates are verified. You can provide the
- following values:
-
- * False - do not validate SSL certificates. SSL will still be
- used (unless use_ssl is False), but SSL certificates
- will not be verified.
- * path/to/cert/bundle.pem - A filename of the CA cert bundle to
-              use.  You can specify this argument if you want to use a
- different CA cert bundle than the one used by botocore.
-
- :type endpoint_url: string
- :param endpoint_url: The complete URL to use for the constructed
- client. Normally, botocore will automatically construct the
- appropriate URL to use when communicating with a service. You can
- specify a complete URL (including the "http/https" scheme) to
- override this behavior. If this value is provided, then
- ``use_ssl`` is ignored.
-
- :type aws_access_key_id: string
- :param aws_access_key_id: The access key to use when creating
- the client. This is entirely optional, and if not provided,
- the credentials configured for the session will automatically
- be used. You only need to provide this argument if you want
- to override the credentials used for this specific client.
-
- :type aws_secret_access_key: string
- :param aws_secret_access_key: The secret key to use when creating
- the client. Same semantics as aws_access_key_id above.
-
- :type aws_session_token: string
- :param aws_session_token: The session token to use when creating
- the client. Same semantics as aws_access_key_id above.
-
- :type config: botocore.client.Config
- :param config: Advanced client configuration options. If a value
- is specified in the client config, its value will take precedence
- over environment variables and configuration values, but not over
- a value passed explicitly to the method. If a default config
- object is set on the session, the config object used when creating
- the client will be the result of calling ``merge()`` on the
- default config with the config provided to this call.
-
- :rtype: botocore.client.BaseClient
- :return: A botocore client instance
-
- """
- default_client_config = self.get_default_client_config()
- # If a config is provided and a default config is set, then
- # use the config resulting from merging the two.
- if config is not None and default_client_config is not None:
- config = default_client_config.merge(config)
- # If a config was not provided then use the default
- # client config from the session
- elif default_client_config is not None:
- config = default_client_config
-
+ def register_component(self, name, component):
+ self._components.register_component(name, component)
+
+ def lazy_register_component(self, name, component):
+ self._components.lazy_register_component(name, component)
+
+ def create_client(self, service_name, region_name=None, api_version=None,
+ use_ssl=True, verify=None, endpoint_url=None,
+ aws_access_key_id=None, aws_secret_access_key=None,
+ aws_session_token=None, config=None):
+ """Create a botocore client.
+
+ :type service_name: string
+ :param service_name: The name of the service for which a client will
+            be created.  You can use the ``Session.get_available_services()``
+ method to get a list of all available service names.
+
+ :type region_name: string
+ :param region_name: The name of the region associated with the client.
+ A client is associated with a single region.
+
+ :type api_version: string
+ :param api_version: The API version to use. By default, botocore will
+ use the latest API version when creating a client. You only need
+ to specify this parameter if you want to use a previous API version
+ of the client.
+
+ :type use_ssl: boolean
+ :param use_ssl: Whether or not to use SSL. By default, SSL is used.
+ Note that not all services support non-ssl connections.
+
+ :type verify: boolean/string
+ :param verify: Whether or not to verify SSL certificates.
+ By default SSL certificates are verified. You can provide the
+ following values:
+
+ * False - do not validate SSL certificates. SSL will still be
+ used (unless use_ssl is False), but SSL certificates
+ will not be verified.
+ * path/to/cert/bundle.pem - A filename of the CA cert bundle to
+              use.  You can specify this argument if you want to use a
+ different CA cert bundle than the one used by botocore.
+
+ :type endpoint_url: string
+ :param endpoint_url: The complete URL to use for the constructed
+ client. Normally, botocore will automatically construct the
+ appropriate URL to use when communicating with a service. You can
+ specify a complete URL (including the "http/https" scheme) to
+ override this behavior. If this value is provided, then
+ ``use_ssl`` is ignored.
+
+ :type aws_access_key_id: string
+ :param aws_access_key_id: The access key to use when creating
+ the client. This is entirely optional, and if not provided,
+ the credentials configured for the session will automatically
+ be used. You only need to provide this argument if you want
+ to override the credentials used for this specific client.
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: The secret key to use when creating
+ the client. Same semantics as aws_access_key_id above.
+
+ :type aws_session_token: string
+ :param aws_session_token: The session token to use when creating
+ the client. Same semantics as aws_access_key_id above.
+
+ :type config: botocore.client.Config
+ :param config: Advanced client configuration options. If a value
+ is specified in the client config, its value will take precedence
+ over environment variables and configuration values, but not over
+ a value passed explicitly to the method. If a default config
+ object is set on the session, the config object used when creating
+ the client will be the result of calling ``merge()`` on the
+ default config with the config provided to this call.
+
+ :rtype: botocore.client.BaseClient
+ :return: A botocore client instance
+
+ """
+ default_client_config = self.get_default_client_config()
+ # If a config is provided and a default config is set, then
+ # use the config resulting from merging the two.
+ if config is not None and default_client_config is not None:
+ config = default_client_config.merge(config)
+ # If a config was not provided then use the default
+ # client config from the session
+ elif default_client_config is not None:
+ config = default_client_config
+
region_name = self._resolve_region_name(region_name, config)
-
-        # Figure out the verify value based on the various
- # configuration options.
- if verify is None:
- verify = self.get_config_variable('ca_bundle')
-
- if api_version is None:
- api_version = self.get_config_variable('api_versions').get(
- service_name, None)
-
- loader = self.get_component('data_loader')
- event_emitter = self.get_component('event_emitter')
- response_parser_factory = self.get_component(
- 'response_parser_factory')
+
+        # Figure out the verify value based on the various
+ # configuration options.
+ if verify is None:
+ verify = self.get_config_variable('ca_bundle')
+
+ if api_version is None:
+ api_version = self.get_config_variable('api_versions').get(
+ service_name, None)
+
+ loader = self.get_component('data_loader')
+ event_emitter = self.get_component('event_emitter')
+ response_parser_factory = self.get_component(
+ 'response_parser_factory')
if config is not None and config.signature_version is UNSIGNED:
credentials = None
elif aws_access_key_id is not None and aws_secret_access_key is not None:
- credentials = botocore.credentials.Credentials(
- access_key=aws_access_key_id,
- secret_key=aws_secret_access_key,
- token=aws_session_token)
- elif self._missing_cred_vars(aws_access_key_id,
- aws_secret_access_key):
- raise PartialCredentialsError(
- provider='explicit',
- cred_var=self._missing_cred_vars(aws_access_key_id,
- aws_secret_access_key))
- else:
- credentials = self.get_credentials()
+ credentials = botocore.credentials.Credentials(
+ access_key=aws_access_key_id,
+ secret_key=aws_secret_access_key,
+ token=aws_session_token)
+ elif self._missing_cred_vars(aws_access_key_id,
+ aws_secret_access_key):
+ raise PartialCredentialsError(
+ provider='explicit',
+ cred_var=self._missing_cred_vars(aws_access_key_id,
+ aws_secret_access_key))
+ else:
+ credentials = self.get_credentials()
endpoint_resolver = self._get_internal_component('endpoint_resolver')
exceptions_factory = self._get_internal_component('exceptions_factory')
config_store = self.get_component('config_store')
- client_creator = botocore.client.ClientCreator(
- loader, endpoint_resolver, self.user_agent(), event_emitter,
- retryhandler, translate, response_parser_factory,
+ client_creator = botocore.client.ClientCreator(
+ loader, endpoint_resolver, self.user_agent(), event_emitter,
+ retryhandler, translate, response_parser_factory,
exceptions_factory, config_store)
- client = client_creator.create_client(
- service_name=service_name, region_name=region_name,
- is_secure=use_ssl, endpoint_url=endpoint_url, verify=verify,
- credentials=credentials, scoped_config=self.get_scoped_config(),
- client_config=config, api_version=api_version)
+ client = client_creator.create_client(
+ service_name=service_name, region_name=region_name,
+ is_secure=use_ssl, endpoint_url=endpoint_url, verify=verify,
+ credentials=credentials, scoped_config=self.get_scoped_config(),
+ client_config=config, api_version=api_version)
monitor = self._get_internal_component('monitor')
if monitor is not None:
monitor.register(client.meta.events)
- return client
-
+ return client
+
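A sketch of the config-merging behavior described in the docstring; the option values are illustrative:

    from botocore.client import Config

    session = Session()
    session.set_default_client_config(Config(retries={'max_attempts': 5}))
    # The per-call config is merged onto the session default via Config.merge().
    client = session.create_client(
        's3', region_name='us-east-1',
        config=Config(user_agent_extra='myapp'))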
def _resolve_region_name(self, region_name, config):
# Figure out the user-provided region based on the various
# configuration options.
@@ -876,89 +876,89 @@ class Session(object):
self._last_client_region_used = region_name
return region_name
- def _missing_cred_vars(self, access_key, secret_key):
- if access_key is not None and secret_key is None:
- return 'aws_secret_access_key'
- if secret_key is not None and access_key is None:
- return 'aws_access_key_id'
- return None
-
- def get_available_partitions(self):
- """Lists the available partitions found on disk
-
- :rtype: list
- :return: Returns a list of partition names (e.g., ["aws", "aws-cn"])
- """
+ def _missing_cred_vars(self, access_key, secret_key):
+ if access_key is not None and secret_key is None:
+ return 'aws_secret_access_key'
+ if secret_key is not None and access_key is None:
+ return 'aws_access_key_id'
+ return None
+
+ def get_available_partitions(self):
+ """Lists the available partitions found on disk
+
+ :rtype: list
+ :return: Returns a list of partition names (e.g., ["aws", "aws-cn"])
+ """
resolver = self._get_internal_component('endpoint_resolver')
- return resolver.get_available_partitions()
-
- def get_available_regions(self, service_name, partition_name='aws',
- allow_non_regional=False):
- """Lists the region and endpoint names of a particular partition.
-
- :type service_name: string
-        :param service_name: Name of a service to list endpoints for (e.g., s3).
- This parameter accepts a service name (e.g., "elb") or endpoint
- prefix (e.g., "elasticloadbalancing").
-
- :type partition_name: string
- :param partition_name: Name of the partition to limit endpoints to.
- (e.g., aws for the public AWS endpoints, aws-cn for AWS China
-            endpoints, aws-us-gov for AWS GovCloud (US) endpoints, etc.).
-
- :type allow_non_regional: bool
- :param allow_non_regional: Set to True to include endpoints that are
- not regional endpoints (e.g., s3-external-1,
- fips-us-gov-west-1, etc).
- :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
- """
+ return resolver.get_available_partitions()
+
+ def get_available_regions(self, service_name, partition_name='aws',
+ allow_non_regional=False):
+ """Lists the region and endpoint names of a particular partition.
+
+ :type service_name: string
+        :param service_name: Name of a service to list endpoints for (e.g., s3).
+ This parameter accepts a service name (e.g., "elb") or endpoint
+ prefix (e.g., "elasticloadbalancing").
+
+ :type partition_name: string
+ :param partition_name: Name of the partition to limit endpoints to.
+ (e.g., aws for the public AWS endpoints, aws-cn for AWS China
+            endpoints, aws-us-gov for AWS GovCloud (US) endpoints, etc.).
+
+ :type allow_non_regional: bool
+ :param allow_non_regional: Set to True to include endpoints that are
+ not regional endpoints (e.g., s3-external-1,
+ fips-us-gov-west-1, etc).
+ :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
+ """
resolver = self._get_internal_component('endpoint_resolver')
- results = []
- try:
- service_data = self.get_service_data(service_name)
- endpoint_prefix = service_data['metadata'].get(
- 'endpointPrefix', service_name)
- results = resolver.get_available_endpoints(
- endpoint_prefix, partition_name, allow_non_regional)
- except UnknownServiceError:
- pass
- return results
-
-
-class ComponentLocator(object):
- """Service locator for session components."""
- def __init__(self):
- self._components = {}
- self._deferred = {}
-
- def get_component(self, name):
- if name in self._deferred:
- factory = self._deferred[name]
- self._components[name] = factory()
- # Only delete the component from the deferred dict after
- # successfully creating the object from the factory as well as
- # injecting the instantiated value into the _components dict.
- del self._deferred[name]
- try:
- return self._components[name]
- except KeyError:
- raise ValueError("Unknown component: %s" % name)
-
- def register_component(self, name, component):
- self._components[name] = component
- try:
- del self._deferred[name]
- except KeyError:
- pass
-
- def lazy_register_component(self, name, no_arg_factory):
- self._deferred[name] = no_arg_factory
- try:
- del self._components[name]
- except KeyError:
- pass
-
-
+ results = []
+ try:
+ service_data = self.get_service_data(service_name)
+ endpoint_prefix = service_data['metadata'].get(
+ 'endpointPrefix', service_name)
+ results = resolver.get_available_endpoints(
+ endpoint_prefix, partition_name, allow_non_regional)
+ except UnknownServiceError:
+ pass
+ return results
+
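Example queries against the bundled endpoint data (outputs are illustrative):

    session = Session()
    print(session.get_available_partitions())        # e.g. ['aws', 'aws-cn', ...]
    print(session.get_available_regions('s3')[:3])
    # Include non-regional endpoints such as FIPS variants:
    print(session.get_available_regions('s3', allow_non_regional=True)[:3])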
+
+class ComponentLocator(object):
+ """Service locator for session components."""
+ def __init__(self):
+ self._components = {}
+ self._deferred = {}
+
+ def get_component(self, name):
+ if name in self._deferred:
+ factory = self._deferred[name]
+ self._components[name] = factory()
+ # Only delete the component from the deferred dict after
+ # successfully creating the object from the factory as well as
+ # injecting the instantiated value into the _components dict.
+ del self._deferred[name]
+ try:
+ return self._components[name]
+ except KeyError:
+ raise ValueError("Unknown component: %s" % name)
+
+ def register_component(self, name, component):
+ self._components[name] = component
+ try:
+ del self._deferred[name]
+ except KeyError:
+ pass
+
+ def lazy_register_component(self, name, no_arg_factory):
+ self._deferred[name] = no_arg_factory
+ try:
+ del self._components[name]
+ except KeyError:
+ pass
+
+
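A minimal sketch of the locator contract; the component names are placeholders:

    locator = ComponentLocator()
    locator.register_component('config', {'region': 'us-east-1'})
    locator.lazy_register_component('loader', lambda: object())  # built on demand
    first = locator.get_component('loader')
    assert locator.get_component('loader') is first  # factory ran exactly once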
class SessionVarDict(MutableMapping):
def __init__(self, session, session_vars):
self._session = session
@@ -1044,8 +1044,8 @@ class SubsetChainConfigFactory(object):
)
-def get_session(env_vars=None):
- """
- Return a new session object.
- """
- return Session(env_vars)
+def get_session(env_vars=None):
+ """
+ Return a new session object.
+ """
+ return Session(env_vars)
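Typical entry-point usage:

    from botocore.session import get_session

    session = get_session()
    client = session.create_client('ec2', region_name='us-west-2')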
diff --git a/contrib/python/botocore/botocore/signers.py b/contrib/python/botocore/botocore/signers.py
index 870a50f528..b055fea4bd 100644
--- a/contrib/python/botocore/botocore/signers.py
+++ b/contrib/python/botocore/botocore/signers.py
@@ -1,724 +1,724 @@
-# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import datetime
-import weakref
-import json
-import base64
-
-import botocore
-import botocore.auth
-from botocore.compat import six, OrderedDict
-from botocore.awsrequest import create_request_object, prepare_request_dict
-from botocore.exceptions import UnknownSignatureVersionError
-from botocore.exceptions import UnknownClientMethodError
-from botocore.exceptions import UnsupportedSignatureVersionError
-from botocore.utils import fix_s3_host, datetime2timestamp
-
-
-class RequestSigner(object):
- """
- An object to sign requests before they go out over the wire using
- one of the authentication mechanisms defined in ``auth.py``. This
- class fires two events scoped to a service and operation name:
-
- * choose-signer: Allows overriding the auth signer name.
- * before-sign: Allows mutating the request before signing.
-
- Together these events allow for customization of the request
- signing pipeline, including overrides, request path manipulation,
- and disabling signing per operation.
-
-
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import datetime
+import weakref
+import json
+import base64
+
+import botocore
+import botocore.auth
+from botocore.compat import six, OrderedDict
+from botocore.awsrequest import create_request_object, prepare_request_dict
+from botocore.exceptions import UnknownSignatureVersionError
+from botocore.exceptions import UnknownClientMethodError
+from botocore.exceptions import UnsupportedSignatureVersionError
+from botocore.utils import fix_s3_host, datetime2timestamp
+
+
+class RequestSigner(object):
+ """
+ An object to sign requests before they go out over the wire using
+ one of the authentication mechanisms defined in ``auth.py``. This
+ class fires two events scoped to a service and operation name:
+
+ * choose-signer: Allows overriding the auth signer name.
+ * before-sign: Allows mutating the request before signing.
+
+ Together these events allow for customization of the request
+ signing pipeline, including overrides, request path manipulation,
+ and disabling signing per operation.
+
+
:type service_id: botocore.model.ServiceId
:param service_id: The service id for the service, e.g. ``S3``
- :type region_name: string
- :param region_name: Name of the service region, e.g. ``us-east-1``
-
- :type signing_name: string
- :param signing_name: Service signing name. This is usually the
- same as the service name, but can differ. E.g.
- ``emr`` vs. ``elasticmapreduce``.
-
- :type signature_version: string
- :param signature_version: Signature name like ``v4``.
-
- :type credentials: :py:class:`~botocore.credentials.Credentials`
- :param credentials: User credentials with which to sign requests.
-
- :type event_emitter: :py:class:`~botocore.hooks.BaseEventHooks`
- :param event_emitter: Extension mechanism to fire events.
- """
+ :type region_name: string
+ :param region_name: Name of the service region, e.g. ``us-east-1``
+
+ :type signing_name: string
+ :param signing_name: Service signing name. This is usually the
+ same as the service name, but can differ. E.g.
+ ``emr`` vs. ``elasticmapreduce``.
+
+ :type signature_version: string
+ :param signature_version: Signature name like ``v4``.
+
+ :type credentials: :py:class:`~botocore.credentials.Credentials`
+ :param credentials: User credentials with which to sign requests.
+
+ :type event_emitter: :py:class:`~botocore.hooks.BaseEventHooks`
+ :param event_emitter: Extension mechanism to fire events.
+ """
def __init__(self, service_id, region_name, signing_name,
- signature_version, credentials, event_emitter):
- self._region_name = region_name
- self._signing_name = signing_name
- self._signature_version = signature_version
- self._credentials = credentials
+ signature_version, credentials, event_emitter):
+ self._region_name = region_name
+ self._signing_name = signing_name
+ self._signature_version = signature_version
+ self._credentials = credentials
self._service_id = service_id
-
- # We need weakref to prevent leaking memory in Python 2.6 on Linux 2.6
- self._event_emitter = weakref.proxy(event_emitter)
-
- @property
- def region_name(self):
- return self._region_name
-
- @property
- def signature_version(self):
- return self._signature_version
-
- @property
- def signing_name(self):
- return self._signing_name
-
- def handler(self, operation_name=None, request=None, **kwargs):
- # This is typically hooked up to the "request-created" event
- # from a client's event emitter. When a new request is created
- # this method is invoked to sign the request.
- # Don't call this method directly.
- return self.sign(operation_name, request)
-
- def sign(self, operation_name, request, region_name=None,
- signing_type='standard', expires_in=None, signing_name=None):
- """Sign a request before it goes out over the wire.
-
- :type operation_name: string
- :param operation_name: The name of the current operation, e.g.
- ``ListBuckets``.
- :type request: AWSRequest
- :param request: The request object to be sent over the wire.
-
- :type region_name: str
- :param region_name: The region to sign the request for.
-
- :type signing_type: str
- :param signing_type: The type of signing to perform. This can be one of
- three possible values:
-
- * 'standard' - This should be used for most requests.
- * 'presign-url' - This should be used when pre-signing a request.
- * 'presign-post' - This should be used when pre-signing an S3 post.
-
- :type expires_in: int
- :param expires_in: The number of seconds the presigned url is valid
- for. This parameter is only valid for signing type 'presign-url'.
-
- :type signing_name: str
- :param signing_name: The name to use for the service when signing.
- """
+
+ # We need weakref to prevent leaking memory in Python 2.6 on Linux 2.6
+ self._event_emitter = weakref.proxy(event_emitter)
+
+ @property
+ def region_name(self):
+ return self._region_name
+
+ @property
+ def signature_version(self):
+ return self._signature_version
+
+ @property
+ def signing_name(self):
+ return self._signing_name
+
+ def handler(self, operation_name=None, request=None, **kwargs):
+ # This is typically hooked up to the "request-created" event
+ # from a client's event emitter. When a new request is created
+ # this method is invoked to sign the request.
+ # Don't call this method directly.
+ return self.sign(operation_name, request)
+
+ def sign(self, operation_name, request, region_name=None,
+ signing_type='standard', expires_in=None, signing_name=None):
+ """Sign a request before it goes out over the wire.
+
+ :type operation_name: string
+ :param operation_name: The name of the current operation, e.g.
+ ``ListBuckets``.
+ :type request: AWSRequest
+ :param request: The request object to be sent over the wire.
+
+ :type region_name: str
+ :param region_name: The region to sign the request for.
+
+ :type signing_type: str
+ :param signing_type: The type of signing to perform. This can be one of
+ three possible values:
+
+ * 'standard' - This should be used for most requests.
+ * 'presign-url' - This should be used when pre-signing a request.
+ * 'presign-post' - This should be used when pre-signing an S3 post.
+
+ :type expires_in: int
+ :param expires_in: The number of seconds the presigned url is valid
+ for. This parameter is only valid for signing type 'presign-url'.
+
+ :type signing_name: str
+ :param signing_name: The name to use for the service when signing.
+ """
explicit_region_name = region_name
- if region_name is None:
- region_name = self._region_name
-
- if signing_name is None:
- signing_name = self._signing_name
-
- signature_version = self._choose_signer(
- operation_name, signing_type, request.context)
-
- # Allow mutating request before signing
- self._event_emitter.emit(
+ if region_name is None:
+ region_name = self._region_name
+
+ if signing_name is None:
+ signing_name = self._signing_name
+
+ signature_version = self._choose_signer(
+ operation_name, signing_type, request.context)
+
+ # Allow mutating request before signing
+ self._event_emitter.emit(
'before-sign.{0}.{1}'.format(
self._service_id.hyphenize(), operation_name),
- request=request, signing_name=signing_name,
- region_name=self._region_name,
+ request=request, signing_name=signing_name,
+ region_name=self._region_name,
signature_version=signature_version, request_signer=self,
operation_name=operation_name
)
-
- if signature_version != botocore.UNSIGNED:
- kwargs = {
- 'signing_name': signing_name,
- 'region_name': region_name,
- 'signature_version': signature_version
- }
- if expires_in is not None:
- kwargs['expires'] = expires_in
+
+ if signature_version != botocore.UNSIGNED:
+ kwargs = {
+ 'signing_name': signing_name,
+ 'region_name': region_name,
+ 'signature_version': signature_version
+ }
+ if expires_in is not None:
+ kwargs['expires'] = expires_in
signing_context = request.context.get('signing', {})
if not explicit_region_name and signing_context.get('region'):
kwargs['region_name'] = signing_context['region']
if signing_context.get('signing_name'):
kwargs['signing_name'] = signing_context['signing_name']
- try:
- auth = self.get_auth_instance(**kwargs)
- except UnknownSignatureVersionError as e:
- if signing_type != 'standard':
- raise UnsupportedSignatureVersionError(
- signature_version=signature_version)
- else:
- raise e
-
- auth.add_auth(request)
-
- def _choose_signer(self, operation_name, signing_type, context):
- """
- Allow setting the signature version via the choose-signer event.
- A value of `botocore.UNSIGNED` means no signing will be performed.
-
- :param operation_name: The operation to sign.
- :param signing_type: The type of signing that the signer is to be used
- for.
- :return: The signature version to sign with.
- """
- signing_type_suffix_map = {
- 'presign-post': '-presign-post',
- 'presign-url': '-query'
- }
- suffix = signing_type_suffix_map.get(signing_type, '')
-
- signature_version = self._signature_version
- if signature_version is not botocore.UNSIGNED and not \
- signature_version.endswith(suffix):
- signature_version += suffix
-
- handler, response = self._event_emitter.emit_until_response(
+ try:
+ auth = self.get_auth_instance(**kwargs)
+ except UnknownSignatureVersionError as e:
+ if signing_type != 'standard':
+ raise UnsupportedSignatureVersionError(
+ signature_version=signature_version)
+ else:
+ raise e
+
+ auth.add_auth(request)
+
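+ # A hedged usage sketch (not part of this module): ``sign`` is normally
+ # invoked for you during request handling, but can be called directly on
+ # an illustrative ``signer`` (a RequestSigner) with a prepared ``request``:
+ #
+ #     signer.sign('ListBuckets', request)
+ #     # or, for a presigned URL that expires in five minutes:
+ #     signer.sign('GetObject', request, signing_type='presign-url',
+ #                 expires_in=300)
+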
+ def _choose_signer(self, operation_name, signing_type, context):
+ """
+ Allow setting the signature version via the choose-signer event.
+ A value of `botocore.UNSIGNED` means no signing will be performed.
+
+ :param operation_name: The operation to sign.
+ :param signing_type: The type of signing that the signer is to be used
+ for.
+ :return: The signature version to sign with.
+ """
+ signing_type_suffix_map = {
+ 'presign-post': '-presign-post',
+ 'presign-url': '-query'
+ }
+ suffix = signing_type_suffix_map.get(signing_type, '')
+
+ signature_version = self._signature_version
+ if signature_version is not botocore.UNSIGNED and not \
+ signature_version.endswith(suffix):
+ signature_version += suffix
+
+ handler, response = self._event_emitter.emit_until_response(
'choose-signer.{0}.{1}'.format(
self._service_id.hyphenize(), operation_name),
- signing_name=self._signing_name, region_name=self._region_name,
- signature_version=signature_version, context=context)
-
- if response is not None:
- signature_version = response
- # The suffix needs to be checked again in case we get an improper
- # signature version from choose-signer.
- if signature_version is not botocore.UNSIGNED and not \
- signature_version.endswith(suffix):
- signature_version += suffix
-
- return signature_version
-
- def get_auth_instance(self, signing_name, region_name,
- signature_version=None, **kwargs):
- """
- Get an auth instance which can be used to sign a request
- using the given signature version.
-
- :type signing_name: string
- :param signing_name: Service signing name. This is usually the
- same as the service name, but can differ. E.g.
- ``emr`` vs. ``elasticmapreduce``.
-
- :type region_name: string
- :param region_name: Name of the service region, e.g. ``us-east-1``
-
- :type signature_version: string
- :param signature_version: Signature name like ``v4``.
-
- :rtype: :py:class:`~botocore.auth.BaseSigner`
- :return: Auth instance to sign a request.
- """
- if signature_version is None:
- signature_version = self._signature_version
-
- cls = botocore.auth.AUTH_TYPE_MAPS.get(signature_version)
- if cls is None:
- raise UnknownSignatureVersionError(
- signature_version=signature_version)
- # If no credentials are provided (i.e. credentials is None),
- # then we'll pass a value of "None" over to the auth classes,
- # which already handle the cases where no credentials have
- # been provided.
- frozen_credentials = None
- if self._credentials is not None:
- frozen_credentials = self._credentials.get_frozen_credentials()
- kwargs['credentials'] = frozen_credentials
- if cls.REQUIRES_REGION:
- if self._region_name is None:
- raise botocore.exceptions.NoRegionError()
- kwargs['region_name'] = region_name
- kwargs['service_name'] = signing_name
- auth = cls(**kwargs)
- return auth
-
- # Alias get_auth for backwards compatibility.
- get_auth = get_auth_instance
-
- def generate_presigned_url(self, request_dict, operation_name,
- expires_in=3600, region_name=None,
- signing_name=None):
- """Generates a presigned url
-
- :type request_dict: dict
- :param request_dict: The prepared request dictionary returned by
- ``botocore.awsrequest.prepare_request_dict()``
-
- :type operation_name: str
- :param operation_name: The operation being signed.
-
- :type expires_in: int
- :param expires_in: The number of seconds the presigned url is valid
- for. By default it expires in an hour (3600 seconds)
-
- :type region_name: string
- :param region_name: The region name to sign the presigned url.
-
- :type signing_name: str
- :param signing_name: The name to use for the service when signing.
-
- :returns: The presigned url
- """
- request = create_request_object(request_dict)
- self.sign(operation_name, request, region_name,
- 'presign-url', expires_in, signing_name)
-
- request.prepare()
- return request.url
-
-
-class CloudFrontSigner(object):
- '''A signer to create a signed CloudFront URL.
-
- First you create a cloudfront signer based on a normalized RSA signer::
-
- import rsa
- def rsa_signer(message):
- private_key = open('private_key.pem', 'r').read()
- return rsa.sign(
- message,
- rsa.PrivateKey.load_pkcs1(private_key.encode('utf8')),
- 'SHA-1') # CloudFront requires SHA-1 hash
- cf_signer = CloudFrontSigner(key_id, rsa_signer)
-
- To sign with a canned policy::
-
- signed_url = cf_signer.generate_presigned_url(
- url, date_less_than=datetime(2015, 12, 1))
-
- To sign with a custom policy::
-
- signed_url = cf_signer.generate_presigned_url(url, policy=my_policy)
- '''
-
- def __init__(self, key_id, rsa_signer):
- """Create a CloudFrontSigner.
-
- :type key_id: str
- :param key_id: The CloudFront Key Pair ID
-
- :type rsa_signer: callable
- :param rsa_signer: An RSA signer.
- Its only input parameter will be the message to be signed,
- and its output will be the signed content as a binary string.
- The hash algorithm needed by CloudFront is SHA-1.
- """
- self.key_id = key_id
- self.rsa_signer = rsa_signer
-
- def generate_presigned_url(self, url, date_less_than=None, policy=None):
- """Creates a signed CloudFront URL based on given parameters.
-
- :type url: str
- :param url: The URL of the protected object
-
- :type date_less_than: datetime
- :param date_less_than: The URL will expire after that date and time
-
- :type policy: str
- :param policy: The custom policy, possibly built by self.build_policy()
-
- :rtype: str
- :return: The signed URL.
- """
- if (date_less_than is not None and policy is not None or
- date_less_than is None and policy is None):
- e = 'Need to provide either date_less_than or policy, but not both'
- raise ValueError(e)
- if date_less_than is not None:
- # We still need to build a canned policy for signing purposes
- policy = self.build_policy(url, date_less_than)
- if isinstance(policy, six.text_type):
- policy = policy.encode('utf8')
- if date_less_than is not None:
- params = ['Expires=%s' % int(datetime2timestamp(date_less_than))]
- else:
- params = ['Policy=%s' % self._url_b64encode(policy).decode('utf8')]
- signature = self.rsa_signer(policy)
- params.extend([
- 'Signature=%s' % self._url_b64encode(signature).decode('utf8'),
- 'Key-Pair-Id=%s' % self.key_id,
- ])
- return self._build_url(url, params)
-
- def _build_url(self, base_url, extra_params):
- separator = '&' if '?' in base_url else '?'
- return base_url + separator + '&'.join(extra_params)
-
- def build_policy(self, resource, date_less_than,
- date_greater_than=None, ip_address=None):
- """A helper to build policy.
-
- :type resource: str
- :param resource: The URL or the stream filename of the protected object
-
- :type date_less_than: datetime
- :param date_less_than: The URL will expire after the time has passed
-
- :type date_greater_than: datetime
- :param date_greater_than: The URL will not be valid until this time
-
- :type ip_address: str
- :param ip_address: Use 'x.x.x.x' for an IP, or 'x.x.x.x/x' for a subnet
-
- :rtype: str
- :return: The policy in a compact string.
- """
- # Note:
- # 1. Order in canned policy is significant. Special care has been taken
- # to ensure the output will match the order defined by the document.
- # There is also a test case to ensure that order.
- # SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-canned-policy.html#private-content-canned-policy-creating-policy-statement
- # 2. Although the order in a custom policy is not required by CloudFront,
- # we still use OrderedDict internally to ensure the result is stable
- # and also matches canned policy requirement.
- # SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-custom-policy.html
- moment = int(datetime2timestamp(date_less_than))
- condition = OrderedDict({"DateLessThan": {"AWS:EpochTime": moment}})
- if ip_address:
- if '/' not in ip_address:
- ip_address += '/32'
- condition["IpAddress"] = {"AWS:SourceIp": ip_address}
- if date_greater_than:
- moment = int(datetime2timestamp(date_greater_than))
- condition["DateGreaterThan"] = {"AWS:EpochTime": moment}
- ordered_payload = [('Resource', resource), ('Condition', condition)]
- custom_policy = {"Statement": [OrderedDict(ordered_payload)]}
- return json.dumps(custom_policy, separators=(',', ':'))
-
- def _url_b64encode(self, data):
- # Required by CloudFront. See also:
- # http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-linux-openssl.html
- return base64.b64encode(
- data).replace(b'+', b'-').replace(b'=', b'_').replace(b'/', b'~')
-
-
-def add_generate_db_auth_token(class_attributes, **kwargs):
- class_attributes['generate_db_auth_token'] = generate_db_auth_token
-
-
-def generate_db_auth_token(self, DBHostname, Port, DBUsername, Region=None):
- """Generates an auth token used to connect to a db with IAM credentials.
-
- :type DBHostname: str
- :param DBHostname: The hostname of the database to connect to.
-
- :type Port: int
- :param Port: The port number the database is listening on.
-
- :type DBUsername: str
- :param DBUsername: The username to log in as.
-
- :type Region: str
- :param Region: The region the database is in. If None, the client
- region will be used.
-
- :return: A presigned url which can be used as an auth token.
- """
- region = Region
- if region is None:
- region = self.meta.region_name
-
- params = {
- 'Action': 'connect',
- 'DBUser': DBUsername,
- }
-
- request_dict = {
- 'url_path': '/',
- 'query_string': '',
- 'headers': {},
- 'body': params,
- 'method': 'GET'
- }
-
- # RDS requires that the scheme not be set when sent over. This can cause
- # issues when signing because the Python url parsing libraries follow
- # RFC 1808 closely, which states that a netloc must be introduced by `//`.
- # Otherwise the url is presumed to be relative, and thus the whole
- # netloc would be treated as a path component. To work around this we
- # introduce https here and remove it once we're done processing it.
- scheme = 'https://'
- endpoint_url = '%s%s:%s' % (scheme, DBHostname, Port)
- prepare_request_dict(request_dict, endpoint_url)
- presigned_url = self._request_signer.generate_presigned_url(
- operation_name='connect', request_dict=request_dict,
- region_name=region, expires_in=900, signing_name='rds-db'
- )
- return presigned_url[len(scheme):]
-
-
-class S3PostPresigner(object):
- def __init__(self, request_signer):
- self._request_signer = request_signer
-
- def generate_presigned_post(self, request_dict, fields=None,
- conditions=None, expires_in=3600,
- region_name=None):
- """Generates the url and the form fields used for a presigned s3 post
-
- :type request_dict: dict
- :param request_dict: The prepared request dictionary returned by
- ``botocore.awsrequest.prepare_request_dict()``
-
- :type fields: dict
- :param fields: A dictionary of prefilled form fields to build on top
- of.
-
- :type conditions: list
- :param conditions: A list of conditions to include in the policy. Each
- element can be either a list or a structure. For example:
- [
- {"acl": "public-read"},
- {"bucket": "mybucket"},
- ["starts-with", "$key", "mykey"]
- ]
-
- :type expires_in: int
- :param expires_in: The number of seconds the presigned post is valid
- for.
-
- :type region_name: string
- :param region_name: The region name to sign the presigned post to.
-
- :rtype: dict
- :returns: A dictionary with two elements: ``url`` and ``fields``.
- Url is the url to post to. Fields is a dictionary filled with
- the form fields and respective values to use when submitting the
- post. For example:
-
- {'url': 'https://mybucket.s3.amazonaws.com',
- 'fields': {'acl': 'public-read',
- 'key': 'mykey',
- 'signature': 'mysignature',
- 'policy': 'mybase64 encoded policy'}
- }
- """
- if fields is None:
- fields = {}
-
- if conditions is None:
- conditions = []
-
- # Create the policy for the post.
- policy = {}
-
- # Create an expiration date for the policy
- datetime_now = datetime.datetime.utcnow()
- expire_date = datetime_now + datetime.timedelta(seconds=expires_in)
- policy['expiration'] = expire_date.strftime(botocore.auth.ISO8601)
-
- # Append all of the conditions that the user supplied.
- policy['conditions'] = []
- for condition in conditions:
- policy['conditions'].append(condition)
-
- # Store the policy and the fields in the request for signing
- request = create_request_object(request_dict)
- request.context['s3-presign-post-fields'] = fields
- request.context['s3-presign-post-policy'] = policy
-
- self._request_signer.sign(
- 'PutObject', request, region_name, 'presign-post')
- # Return the url and the fields for the form to post.
- return {'url': request.url, 'fields': fields}
-
-
-def add_generate_presigned_url(class_attributes, **kwargs):
- class_attributes['generate_presigned_url'] = generate_presigned_url
-
-
-def generate_presigned_url(self, ClientMethod, Params=None, ExpiresIn=3600,
- HttpMethod=None):
- """Generate a presigned url given a client, its method, and arguments
-
- :type ClientMethod: string
- :param ClientMethod: The client method to presign for
-
- :type Params: dict
- :param Params: The parameters normally passed to
- ``ClientMethod``.
-
- :type ExpiresIn: int
- :param ExpiresIn: The number of seconds the presigned url is valid
- for. By default it expires in an hour (3600 seconds)
-
- :type HttpMethod: string
- :param HttpMethod: The http method to use on the generated url. By
- default, the http method is whatever is used in the method's model.
-
- :returns: The presigned url
- """
- client_method = ClientMethod
- params = Params
+ signing_name=self._signing_name, region_name=self._region_name,
+ signature_version=signature_version, context=context)
+
+ if response is not None:
+ signature_version = response
+ # The suffix needs to be checked again in case we get an improper
+ # signature version from choose-signer.
+ if signature_version is not botocore.UNSIGNED and not \
+ signature_version.endswith(suffix):
+ signature_version += suffix
+
+ return signature_version
+
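+ # A hedged sketch (names are illustrative): a handler registered on the
+ # ``choose-signer`` event emitted above can override the computed version,
+ # e.g. returning ``botocore.UNSIGNED`` to skip signing for one operation:
+ #
+ #     from botocore import UNSIGNED
+ #     s3.meta.events.register(
+ #         'choose-signer.s3.GetObject', lambda **kwargs: UNSIGNED)
+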
+ def get_auth_instance(self, signing_name, region_name,
+ signature_version=None, **kwargs):
+ """
+ Get an auth instance which can be used to sign a request
+ using the given signature version.
+
+ :type signing_name: string
+ :param signing_name: Service signing name. This is usually the
+ same as the service name, but can differ. E.g.
+ ``emr`` vs. ``elasticmapreduce``.
+
+ :type region_name: string
+ :param region_name: Name of the service region, e.g. ``us-east-1``
+
+ :type signature_version: string
+ :param signature_version: Signature name like ``v4``.
+
+ :rtype: :py:class:`~botocore.auth.BaseSigner`
+ :return: Auth instance to sign a request.
+ """
+ if signature_version is None:
+ signature_version = self._signature_version
+
+ cls = botocore.auth.AUTH_TYPE_MAPS.get(signature_version)
+ if cls is None:
+ raise UnknownSignatureVersionError(
+ signature_version=signature_version)
+ # If no credentials are provided (i.e. credentials is None),
+ # then we'll pass a value of "None" over to the auth classes,
+ # which already handle the cases where no credentials have
+ # been provided.
+ frozen_credentials = None
+ if self._credentials is not None:
+ frozen_credentials = self._credentials.get_frozen_credentials()
+ kwargs['credentials'] = frozen_credentials
+ if cls.REQUIRES_REGION:
+ if self._region_name is None:
+ raise botocore.exceptions.NoRegionError()
+ kwargs['region_name'] = region_name
+ kwargs['service_name'] = signing_name
+ auth = cls(**kwargs)
+ return auth
+
+ # Alias get_auth for backwards compatibility.
+ get_auth = get_auth_instance
+
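+ # A hedged sketch of direct use (``signer`` and ``request`` are
+ # illustrative; most code reaches this through ``sign`` instead):
+ #
+ #     auth = signer.get_auth_instance(
+ #         signing_name='s3', region_name='us-east-1',
+ #         signature_version='s3v4')
+ #     auth.add_auth(request)
+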
+ def generate_presigned_url(self, request_dict, operation_name,
+ expires_in=3600, region_name=None,
+ signing_name=None):
+ """Generates a presigned url
+
+ :type request_dict: dict
+ :param request_dict: The prepared request dictionary returned by
+ ``botocore.awsrequest.prepare_request_dict()``
+
+ :type operation_name: str
+ :param operation_name: The operation being signed.
+
+ :type expires_in: int
+ :param expires_in: The number of seconds the presigned url is valid
+ for. By default it expires in an hour (3600 seconds)
+
+ :type region_name: string
+ :param region_name: The region name to sign the presigned url.
+
+ :type signing_name: str
+ :param signing_name: The name to use for the service when signing.
+
+ :returns: The presigned url
+ """
+ request = create_request_object(request_dict)
+ self.sign(operation_name, request, region_name,
+ 'presign-url', expires_in, signing_name)
+
+ request.prepare()
+ return request.url
+
+
+class CloudFrontSigner(object):
+ '''A signer to create a signed CloudFront URL.
+
+ First you create a cloudfront signer based on a normalized RSA signer::
+
+ import rsa
+ def rsa_signer(message):
+ private_key = open('private_key.pem', 'r').read()
+ return rsa.sign(
+ message,
+ rsa.PrivateKey.load_pkcs1(private_key.encode('utf8')),
+ 'SHA-1') # CloudFront requires SHA-1 hash
+ cf_signer = CloudFrontSigner(key_id, rsa_signer)
+
+ To sign with a canned policy::
+
+ signed_url = cf_signer.generate_presigned_url(
+ url, date_less_than=datetime(2015, 12, 1))
+
+ To sign with a custom policy::
+
+ signed_url = cf_signer.generate_presigned_url(url, policy=my_policy)
+ '''
+
+ def __init__(self, key_id, rsa_signer):
+ """Create a CloudFrontSigner.
+
+ :type key_id: str
+ :param key_id: The CloudFront Key Pair ID
+
+ :type rsa_signer: callable
+ :param rsa_signer: An RSA signer.
+ Its only input parameter will be the message to be signed,
+ and its output will be the signed content as a binary string.
+ The hash algorithm needed by CloudFront is SHA-1.
+ """
+ self.key_id = key_id
+ self.rsa_signer = rsa_signer
+
+ def generate_presigned_url(self, url, date_less_than=None, policy=None):
+ """Creates a signed CloudFront URL based on given parameters.
+
+ :type url: str
+ :param url: The URL of the protected object
+
+ :type date_less_than: datetime
+ :param date_less_than: The URL will expire after that date and time
+
+ :type policy: str
+ :param policy: The custom policy, possibly built by self.build_policy()
+
+ :rtype: str
+ :return: The signed URL.
+ """
+ if (date_less_than is not None and policy is not None or
+ date_less_than is None and policy is None):
+ e = 'Need to provide either date_less_than or policy, but not both'
+ raise ValueError(e)
+ if date_less_than is not None:
+ # We still need to build a canned policy for signing purposes
+ policy = self.build_policy(url, date_less_than)
+ if isinstance(policy, six.text_type):
+ policy = policy.encode('utf8')
+ if date_less_than is not None:
+ params = ['Expires=%s' % int(datetime2timestamp(date_less_than))]
+ else:
+ params = ['Policy=%s' % self._url_b64encode(policy).decode('utf8')]
+ signature = self.rsa_signer(policy)
+ params.extend([
+ 'Signature=%s' % self._url_b64encode(signature).decode('utf8'),
+ 'Key-Pair-Id=%s' % self.key_id,
+ ])
+ return self._build_url(url, params)
+
+ def _build_url(self, base_url, extra_params):
+ separator = '&' if '?' in base_url else '?'
+ return base_url + separator + '&'.join(extra_params)
+
+ def build_policy(self, resource, date_less_than,
+ date_greater_than=None, ip_address=None):
+ """A helper to build policy.
+
+ :type resource: str
+ :param resource: The URL or the stream filename of the protected object
+
+ :type date_less_than: datetime
+ :param date_less_than: The URL will expire after the time has passed
+
+ :type date_greater_than: datetime
+ :param date_greater_than: The URL will not be valid until this time
+
+ :type ip_address: str
+ :param ip_address: Use 'x.x.x.x' for an IP, or 'x.x.x.x/x' for a subnet
+
+ :rtype: str
+ :return: The policy in a compact string.
+ """
+ # Note:
+ # 1. Order in canned policy is significant. Special care has been taken
+ # to ensure the output will match the order defined by the document.
+ # There is also a test case to ensure that order.
+ # SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-canned-policy.html#private-content-canned-policy-creating-policy-statement
+ # 2. Although the order in a custom policy is not required by CloudFront,
+ # we still use OrderedDict internally to ensure the result is stable
+ # and also matches canned policy requirement.
+ # SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-custom-policy.html
+ moment = int(datetime2timestamp(date_less_than))
+ condition = OrderedDict({"DateLessThan": {"AWS:EpochTime": moment}})
+ if ip_address:
+ if '/' not in ip_address:
+ ip_address += '/32'
+ condition["IpAddress"] = {"AWS:SourceIp": ip_address}
+ if date_greater_than:
+ moment = int(datetime2timestamp(date_greater_than))
+ condition["DateGreaterThan"] = {"AWS:EpochTime": moment}
+ ordered_payload = [('Resource', resource), ('Condition', condition)]
+ custom_policy = {"Statement": [OrderedDict(ordered_payload)]}
+ return json.dumps(custom_policy, separators=(',', ':'))
+
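+ # A hedged sketch pairing ``build_policy`` with ``generate_presigned_url``
+ # (``cf_signer`` and the URL are illustrative, as in the class docstring):
+ #
+ #     from datetime import datetime
+ #     url = 'https://d111.cloudfront.net/img.jpg'
+ #     policy = cf_signer.build_policy(
+ #         url, date_less_than=datetime(2023, 1, 1),
+ #         ip_address='192.0.2.0/24')
+ #     signed_url = cf_signer.generate_presigned_url(url, policy=policy)
+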
+ def _url_b64encode(self, data):
+ # Required by CloudFront. See also:
+ # http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-linux-openssl.html
+ return base64.b64encode(
+ data).replace(b'+', b'-').replace(b'=', b'_').replace(b'/', b'~')
+
+
+def add_generate_db_auth_token(class_attributes, **kwargs):
+ class_attributes['generate_db_auth_token'] = generate_db_auth_token
+
+
+def generate_db_auth_token(self, DBHostname, Port, DBUsername, Region=None):
+ """Generates an auth token used to connect to a db with IAM credentials.
+
+ :type DBHostname: str
+ :param DBHostname: The hostname of the database to connect to.
+
+ :type Port: int
+ :param Port: The port number the database is listening on.
+
+ :type DBUsername: str
+ :param DBUsername: The username to log in as.
+
+ :type Region: str
+ :param Region: The region the database is in. If None, the client
+ region will be used.
+
+ :return: A presigned url which can be used as an auth token.
+ """
+ region = Region
+ if region is None:
+ region = self.meta.region_name
+
+ params = {
+ 'Action': 'connect',
+ 'DBUser': DBUsername,
+ }
+
+ request_dict = {
+ 'url_path': '/',
+ 'query_string': '',
+ 'headers': {},
+ 'body': params,
+ 'method': 'GET'
+ }
+
+ # RDS requires that the scheme not be set when sent over. This can cause
+ # issues when signing because the Python url parsing libraries follow
+ # RFC 1808 closely, which states that a netloc must be introduced by `//`.
+ # Otherwise the url is presumed to be relative, and thus the whole
+ # netloc would be treated as a path component. To work around this we
+ # introduce https here and remove it once we're done processing it.
+ scheme = 'https://'
+ endpoint_url = '%s%s:%s' % (scheme, DBHostname, Port)
+ prepare_request_dict(request_dict, endpoint_url)
+ presigned_url = self._request_signer.generate_presigned_url(
+ operation_name='connect', request_dict=request_dict,
+ region_name=region, expires_in=900, signing_name='rds-db'
+ )
+ return presigned_url[len(scheme):]
+
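+ # A hedged usage sketch: ``add_generate_db_auth_token`` above attaches this
+ # as a client method, and the returned token is used as the database
+ # password. ``rds`` and the connection details are illustrative:
+ #
+ #     token = rds.generate_db_auth_token(
+ #         DBHostname='mydb.example.us-east-1.rds.amazonaws.com',
+ #         Port=3306, DBUsername='iam_user')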
+
+class S3PostPresigner(object):
+ def __init__(self, request_signer):
+ self._request_signer = request_signer
+
+ def generate_presigned_post(self, request_dict, fields=None,
+ conditions=None, expires_in=3600,
+ region_name=None):
+ """Generates the url and the form fields used for a presigned s3 post
+
+ :type request_dict: dict
+ :param request_dict: The prepared request dictionary returned by
+ ``botocore.awsrequest.prepare_request_dict()``
+
+ :type fields: dict
+ :param fields: A dictionary of prefilled form fields to build on top
+ of.
+
+ :type conditions: list
+ :param conditions: A list of conditions to include in the policy. Each
+ element can be either a list or a structure. For example:
+ [
+ {"acl": "public-read"},
+ {"bucket": "mybucket"},
+ ["starts-with", "$key", "mykey"]
+ ]
+
+ :type expires_in: int
+ :param expires_in: The number of seconds the presigned post is valid
+ for.
+
+ :type region_name: string
+ :param region_name: The region name to sign the presigned post to.
+
+ :rtype: dict
+ :returns: A dictionary with two elements: ``url`` and ``fields``.
+ Url is the url to post to. Fields is a dictionary filled with
+ the form fields and respective values to use when submitting the
+ post. For example:
+
+ {'url': 'https://mybucket.s3.amazonaws.com',
+ 'fields': {'acl': 'public-read',
+ 'key': 'mykey',
+ 'signature': 'mysignature',
+ 'policy': 'mybase64 encoded policy'}
+ }
+ """
+ if fields is None:
+ fields = {}
+
+ if conditions is None:
+ conditions = []
+
+ # Create the policy for the post.
+ policy = {}
+
+ # Create an expiration date for the policy
+ datetime_now = datetime.datetime.utcnow()
+ expire_date = datetime_now + datetime.timedelta(seconds=expires_in)
+ policy['expiration'] = expire_date.strftime(botocore.auth.ISO8601)
+
+ # Append all of the conditions that the user supplied.
+ policy['conditions'] = []
+ for condition in conditions:
+ policy['conditions'].append(condition)
+
+ # Store the policy and the fields in the request for signing
+ request = create_request_object(request_dict)
+ request.context['s3-presign-post-fields'] = fields
+ request.context['s3-presign-post-policy'] = policy
+
+ self._request_signer.sign(
+ 'PutObject', request, region_name, 'presign-post')
+ # Return the url and the fields for the form to post.
+ return {'url': request.url, 'fields': fields}
+
+
+def add_generate_presigned_url(class_attributes, **kwargs):
+ class_attributes['generate_presigned_url'] = generate_presigned_url
+
+
+def generate_presigned_url(self, ClientMethod, Params=None, ExpiresIn=3600,
+ HttpMethod=None):
+ """Generate a presigned url given a client, its method, and arguments
+
+ :type ClientMethod: string
+ :param ClientMethod: The client method to presign for
+
+ :type Params: dict
+ :param Params: The parameters normally passed to
+ ``ClientMethod``.
+
+ :type ExpiresIn: int
+ :param ExpiresIn: The number of seconds the presigned url is valid
+ for. By default it expires in an hour (3600 seconds)
+
+ :type HttpMethod: string
+ :param HttpMethod: The http method to use on the generated url. By
+ default, the http method is whatever is used in the method's model.
+
+ :returns: The presigned url
+ """
+ client_method = ClientMethod
+ params = Params
if params is None:
params = {}
- expires_in = ExpiresIn
- http_method = HttpMethod
+ expires_in = ExpiresIn
+ http_method = HttpMethod
context = {
'is_presign_request': True,
'use_global_endpoint': _should_use_global_endpoint(self),
}
-
- request_signer = self._request_signer
- serializer = self._serializer
-
- try:
- operation_name = self._PY_TO_OP_NAME[client_method]
- except KeyError:
- raise UnknownClientMethodError(method_name=client_method)
-
- operation_model = self.meta.service_model.operation_model(
- operation_name)
-
+
+ request_signer = self._request_signer
+ serializer = self._serializer
+
+ try:
+ operation_name = self._PY_TO_OP_NAME[client_method]
+ except KeyError:
+ raise UnknownClientMethodError(method_name=client_method)
+
+ operation_model = self.meta.service_model.operation_model(
+ operation_name)
+
params = self._emit_api_params(params, operation_model, context)
- # Create a request dict based on the params to serialize.
- request_dict = serializer.serialize_to_request(
- params, operation_model)
-
- # Switch out the http method if user specified it.
- if http_method is not None:
- request_dict['method'] = http_method
-
- # Prepare the request dict by including the client's endpoint url.
- prepare_request_dict(
+ # Create a request dict based on the params to serialize.
+ request_dict = serializer.serialize_to_request(
+ params, operation_model)
+
+ # Switch out the http method if user specified it.
+ if http_method is not None:
+ request_dict['method'] = http_method
+
+ # Prepare the request dict by including the client's endpoint url.
+ prepare_request_dict(
request_dict, endpoint_url=self.meta.endpoint_url, context=context)
-
- # Generate the presigned url.
- return request_signer.generate_presigned_url(
- request_dict=request_dict, expires_in=expires_in,
- operation_name=operation_name)
-
-
-def add_generate_presigned_post(class_attributes, **kwargs):
- class_attributes['generate_presigned_post'] = generate_presigned_post
-
-
-def generate_presigned_post(self, Bucket, Key, Fields=None, Conditions=None,
- ExpiresIn=3600):
- """Builds the url and the form fields used for a presigned s3 post
-
- :type Bucket: string
- :param Bucket: The name of the bucket to presign the post to. Note that
- bucket related conditions should not be included in the
- ``conditions`` parameter.
-
- :type Key: string
- :param Key: Key name, optionally add ${filename} to the end to
- attach the submitted filename. Note that key related conditions and
- fields are filled out for you and should not be included in the
- ``Fields`` or ``Conditions`` parameter.
-
- :type Fields: dict
- :param Fields: A dictionary of prefilled form fields to build on top
- of. Elements that may be included are acl, Cache-Control,
- Content-Type, Content-Disposition, Content-Encoding, Expires,
- success_action_redirect, redirect, success_action_status,
- and x-amz-meta-.
-
- Note that if a particular element is included in the fields
- dictionary it will not be automatically added to the conditions
- list. You must specify a condition for the element as well.
-
- :type Conditions: list
- :param Conditions: A list of conditions to include in the policy. Each
- element can be either a list or a structure. For example:
-
- [
- {"acl": "public-read"},
- ["content-length-range", 2, 5],
- ["starts-with", "$success_action_redirect", ""]
- ]
-
- Conditions that are included may pertain to acl,
- content-length-range, Cache-Control, Content-Type,
- Content-Disposition, Content-Encoding, Expires,
- success_action_redirect, redirect, success_action_status,
- and/or x-amz-meta-.
-
- Note that if you include a condition, you must specify
- a valid value in the fields dictionary as well. A value will
- not be added automatically to the fields dictionary based on the
- conditions.
-
- :type ExpiresIn: int
- :param ExpiresIn: The number of seconds the presigned post
- is valid for.
-
- :rtype: dict
- :returns: A dictionary with two elements: ``url`` and ``fields``.
- Url is the url to post to. Fields is a dictionary filled with
- the form fields and respective values to use when submitting the
- post. For example:
-
- {'url': 'https://mybucket.s3.amazonaws.com',
- 'fields': {'acl': 'public-read',
- 'key': 'mykey',
- 'signature': 'mysignature',
- 'policy': 'mybase64 encoded policy'}
- }
- """
- bucket = Bucket
- key = Key
- fields = Fields
- conditions = Conditions
- expires_in = ExpiresIn
-
- if fields is None:
- fields = {}
+
+ # Generate the presigned url.
+ return request_signer.generate_presigned_url(
+ request_dict=request_dict, expires_in=expires_in,
+ operation_name=operation_name)
+
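+ # A hedged usage sketch: once attached to a client by
+ # ``add_generate_presigned_url`` above, this is called as a bound method
+ # (``s3``, the bucket and the key are illustrative):
+ #
+ #     url = s3.generate_presigned_url(
+ #         'get_object',
+ #         Params={'Bucket': 'mybucket', 'Key': 'mykey'},
+ #         ExpiresIn=300)
+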
+
+def add_generate_presigned_post(class_attributes, **kwargs):
+ class_attributes['generate_presigned_post'] = generate_presigned_post
+
+
+def generate_presigned_post(self, Bucket, Key, Fields=None, Conditions=None,
+ ExpiresIn=3600):
+ """Builds the url and the form fields used for a presigned s3 post
+
+ :type Bucket: string
+ :param Bucket: The name of the bucket to presign the post to. Note that
+ bucket related conditions should not be included in the
+ ``conditions`` parameter.
+
+ :type Key: string
+ :param Key: Key name, optionally add ${filename} to the end to
+ attach the submitted filename. Note that key related conditions and
+ fields are filled out for you and should not be included in the
+ ``Fields`` or ``Conditions`` parameter.
+
+ :type Fields: dict
+ :param Fields: A dictionary of prefilled form fields to build on top
+ of. Elements that may be included are acl, Cache-Control,
+ Content-Type, Content-Disposition, Content-Encoding, Expires,
+ success_action_redirect, redirect, success_action_status,
+ and x-amz-meta-.
+
+ Note that if a particular element is included in the fields
+ dictionary it will not be automatically added to the conditions
+ list. You must specify a condition for the element as well.
+
+ :type Conditions: list
+ :param Conditions: A list of conditions to include in the policy. Each
+ element can be either a list or a structure. For example:
+
+ [
+ {"acl": "public-read"},
+ ["content-length-range", 2, 5],
+ ["starts-with", "$success_action_redirect", ""]
+ ]
+
+ Conditions that are included may pertain to acl,
+ content-length-range, Cache-Control, Content-Type,
+ Content-Disposition, Content-Encoding, Expires,
+ success_action_redirect, redirect, success_action_status,
+ and/or x-amz-meta-.
+
+ Note that if you include a condition, you must specify
+ a valid value in the fields dictionary as well. A value will
+ not be added automatically to the fields dictionary based on the
+ conditions.
+
+ :type ExpiresIn: int
+ :param ExpiresIn: The number of seconds the presigned post
+ is valid for.
+
+ :rtype: dict
+ :returns: A dictionary with two elements: ``url`` and ``fields``.
+ Url is the url to post to. Fields is a dictionary filled with
+ the form fields and respective values to use when submitting the
+ post. For example:
+
+ {'url': 'https://mybucket.s3.amazonaws.com',
+ 'fields': {'acl': 'public-read',
+ 'key': 'mykey',
+ 'signature': 'mysignature',
+ 'policy': 'mybase64 encoded policy'}
+ }
+ """
+ bucket = Bucket
+ key = Key
+ fields = Fields
+ conditions = Conditions
+ expires_in = ExpiresIn
+
+ if fields is None:
+ fields = {}
else:
fields = fields.copy()
-
- if conditions is None:
- conditions = []
-
- post_presigner = S3PostPresigner(self._request_signer)
- serializer = self._serializer
-
- # We choose the CreateBucket operation model because its url gets
- # serialized to what a presign post requires.
- operation_model = self.meta.service_model.operation_model(
- 'CreateBucket')
-
- # Create a request dict based on the params to serialize.
- request_dict = serializer.serialize_to_request(
- {'Bucket': bucket}, operation_model)
-
- # Prepare the request dict by including the client's endpoint url.
- prepare_request_dict(
+
+ if conditions is None:
+ conditions = []
+
+ post_presigner = S3PostPresigner(self._request_signer)
+ serializer = self._serializer
+
+ # We choose the CreateBucket operation model because its url gets
+ # serialized to what a presign post requires.
+ operation_model = self.meta.service_model.operation_model(
+ 'CreateBucket')
+
+ # Create a request dict based on the params to serialize.
+ request_dict = serializer.serialize_to_request(
+ {'Bucket': bucket}, operation_model)
+
+ # Prepare the request dict by including the client's endpoint url.
+ prepare_request_dict(
request_dict, endpoint_url=self.meta.endpoint_url,
context={
'is_presign_request': True,
'use_global_endpoint': _should_use_global_endpoint(self),
},
)
-
- # Append the bucket name to the list of conditions.
- conditions.append({'bucket': bucket})
-
- # If the key ends with ${filename}, the only constraint we can impose
- # is that the key starts with the specified prefix.
- if key.endswith('${filename}'):
- conditions.append(["starts-with", '$key', key[:-len('${filename}')]])
- else:
- conditions.append({'key': key})
-
- # Add the key to the fields.
- fields['key'] = key
-
- return post_presigner.generate_presigned_post(
- request_dict=request_dict, fields=fields, conditions=conditions,
- expires_in=expires_in)
+
+ # Append the bucket name to the list of conditions.
+ conditions.append({'bucket': bucket})
+
+ # If the key ends with ${filename}, the only constraint we can impose
+ # is that the key starts with the specified prefix.
+ if key.endswith('${filename}'):
+ conditions.append(["starts-with", '$key', key[:-len('${filename}')]])
+ else:
+ conditions.append({'key': key})
+
+ # Add the key to the fields.
+ fields['key'] = key
+
+ return post_presigner.generate_presigned_post(
+ request_dict=request_dict, fields=fields, conditions=conditions,
+ expires_in=expires_in)
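+
+ # A hedged usage sketch (``s3`` is an illustrative client and ``requests``
+ # is an assumed third-party HTTP library, not a dependency of this module):
+ #
+ #     post = s3.generate_presigned_post('mybucket', 'uploads/${filename}')
+ #     with open('local.txt', 'rb') as f:
+ #         requests.post(post['url'], data=post['fields'],
+ #                       files={'file': ('local.txt', f)})
+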
def _should_use_global_endpoint(client):
diff --git a/contrib/python/botocore/botocore/stub.py b/contrib/python/botocore/botocore/stub.py
index b84cc6a2cf..ca7536bb0f 100644
--- a/contrib/python/botocore/botocore/stub.py
+++ b/contrib/python/botocore/botocore/stub.py
@@ -1,370 +1,370 @@
-# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import copy
-from collections import deque
-from pprint import pformat
-
-from botocore.validate import validate_parameters
-from botocore.exceptions import ParamValidationError, \
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy
+from collections import deque
+from pprint import pformat
+
+from botocore.validate import validate_parameters
+from botocore.exceptions import ParamValidationError, \
StubResponseError, StubAssertionError, UnStubbedResponseError
from botocore.awsrequest import AWSResponse
-
-
-class _ANY(object):
- """
- A helper object that compares equal to everything. Copied from
- unittest.mock
- """
-
- def __eq__(self, other):
- return True
-
- def __ne__(self, other):
- return False
-
- def __repr__(self):
- return '<ANY>'
-
-ANY = _ANY()
-
-
-class Stubber(object):
- """
- This class will allow you to stub out requests so you don't have to hit
- an endpoint to write tests. Responses are returned first in, first out.
- If operations are called out of order, or are called with no remaining
- queued responses, an error will be raised.
-
- **Example:**
- ::
- import datetime
- import botocore.session
- from botocore.stub import Stubber
-
-
- s3 = botocore.session.get_session().create_client('s3')
- stubber = Stubber(s3)
-
- response = {
- 'IsTruncated': False,
- 'Name': 'test-bucket',
- 'MaxKeys': 1000, 'Prefix': '',
- 'Contents': [{
- 'Key': 'test.txt',
- 'ETag': '"abc123"',
- 'StorageClass': 'STANDARD',
- 'LastModified': datetime.datetime(2016, 1, 20, 22, 9),
- 'Owner': {'ID': 'abc123', 'DisplayName': 'myname'},
- 'Size': 14814
- }],
- 'EncodingType': 'url',
- 'ResponseMetadata': {
- 'RequestId': 'abc123',
- 'HTTPStatusCode': 200,
- 'HostId': 'abc123'
- },
- 'Marker': ''
- }
-
- expected_params = {'Bucket': 'test-bucket'}
-
- stubber.add_response('list_objects', response, expected_params)
- stubber.activate()
-
- service_response = s3.list_objects(Bucket='test-bucket')
- assert service_response == response
-
-
- This class can also be used as a context manager, which will handle
- activation / deactivation for you.
-
- **Example:**
- ::
- import datetime
- import botocore.session
- from botocore.stub import Stubber
-
-
- s3 = botocore.session.get_session().create_client('s3')
-
- response = {
- "Owner": {
- "ID": "foo",
- "DisplayName": "bar"
- },
- "Buckets": [{
- "CreationDate": datetime.datetime(2016, 1, 20, 22, 9),
- "Name": "baz"
- }]
- }
-
-
- with Stubber(s3) as stubber:
- stubber.add_response('list_buckets', response, {})
- service_response = s3.list_buckets()
-
- assert service_response == response
-
-
+
+
+class _ANY(object):
+ """
+ A helper object that compares equal to everything. Copied from
+ unittest.mock
+ """
+
+ def __eq__(self, other):
+ return True
+
+ def __ne__(self, other):
+ return False
+
+ def __repr__(self):
+ return '<ANY>'
+
+ANY = _ANY()
+
+
+class Stubber(object):
+ """
+ This class will allow you to stub out requests so you don't have to hit
+ an endpoint to write tests. Responses are returned first in, first out.
+ If operations are called out of order, or are called with no remaining
+ queued responses, an error will be raised.
+
+ **Example:**
+ ::
+ import datetime
+ import botocore.session
+ from botocore.stub import Stubber
+
+
+ s3 = botocore.session.get_session().create_client('s3')
+ stubber = Stubber(s3)
+
+ response = {
+ 'IsTruncated': False,
+ 'Name': 'test-bucket',
+ 'MaxKeys': 1000, 'Prefix': '',
+ 'Contents': [{
+ 'Key': 'test.txt',
+ 'ETag': '"abc123"',
+ 'StorageClass': 'STANDARD',
+ 'LastModified': datetime.datetime(2016, 1, 20, 22, 9),
+ 'Owner': {'ID': 'abc123', 'DisplayName': 'myname'},
+ 'Size': 14814
+ }],
+ 'EncodingType': 'url',
+ 'ResponseMetadata': {
+ 'RequestId': 'abc123',
+ 'HTTPStatusCode': 200,
+ 'HostId': 'abc123'
+ },
+ 'Marker': ''
+ }
+
+ expected_params = {'Bucket': 'test-bucket'}
+
+ stubber.add_response('list_objects', response, expected_params)
+ stubber.activate()
+
+ service_response = s3.list_objects(Bucket='test-bucket')
+ assert service_response == response
+
+
+ This class can also be used as a context manager, which will handle
+ activation / deactivation for you.
+
+ **Example:**
+ ::
+ import datetime
+ import botocore.session
+ from botocore.stub import Stubber
+
+
+ s3 = botocore.session.get_session().create_client('s3')
+
+ response = {
+ "Owner": {
+ "ID": "foo",
+ "DisplayName": "bar"
+ },
+ "Buckets": [{
+ "CreationDate": datetime.datetime(2016, 1, 20, 22, 9),
+ "Name": "baz"
+ }]
+ }
+
+
+ with Stubber(s3) as stubber:
+ stubber.add_response('list_buckets', response, {})
+ service_response = s3.list_buckets()
+
+ assert service_response == response
+
+
If you have an input parameter that is a randomly generated value, or you
otherwise don't care about its value, you can use ``stub.ANY`` to ignore
it in validation.
-
- **Example:**
- ::
- import datetime
- import botocore.session
- from botocore.stub import Stubber, ANY
-
-
- s3 = botocore.session.get_session().create_client('s3')
- stubber = Stubber(s3)
-
- response = {
- 'IsTruncated': False,
- 'Name': 'test-bucket',
- 'MaxKeys': 1000, 'Prefix': '',
- 'Contents': [{
- 'Key': 'test.txt',
- 'ETag': '"abc123"',
- 'StorageClass': 'STANDARD',
- 'LastModified': datetime.datetime(2016, 1, 20, 22, 9),
- 'Owner': {'ID': 'abc123', 'DisplayName': 'myname'},
- 'Size': 14814
- }],
- 'EncodingType': 'url',
- 'ResponseMetadata': {
- 'RequestId': 'abc123',
- 'HTTPStatusCode': 200,
- 'HostId': 'abc123'
- },
- 'Marker': ''
- }
-
- expected_params = {'Bucket': ANY}
- stubber.add_response('list_objects', response, expected_params)
-
- with stubber:
- service_response = s3.list_objects(Bucket='test-bucket')
-
- assert service_response == response
- """
- def __init__(self, client):
- """
- :param client: The client to add your stubs to.
- """
- self.client = client
- self._event_id = 'boto_stubber'
- self._expected_params_event_id = 'boto_stubber_expected_params'
- self._queue = deque()
-
- def __enter__(self):
- self.activate()
- return self
-
- def __exit__(self, exception_type, exception_value, traceback):
- self.deactivate()
-
- def activate(self):
- """
- Activates the stubber on the client
- """
- self.client.meta.events.register_first(
- 'before-parameter-build.*.*',
- self._assert_expected_params,
- unique_id=self._expected_params_event_id)
- self.client.meta.events.register(
- 'before-call.*.*',
- self._get_response_handler,
- unique_id=self._event_id)
-
- def deactivate(self):
- """
- Deactivates the stubber on the client
- """
- self.client.meta.events.unregister(
- 'before-parameter-build.*.*',
- self._assert_expected_params,
- unique_id=self._expected_params_event_id)
- self.client.meta.events.unregister(
- 'before-call.*.*',
- self._get_response_handler,
- unique_id=self._event_id)
-
- def add_response(self, method, service_response, expected_params=None):
- """
- Adds a service response to the response queue. This will be validated
- against the service model to ensure correctness. It should be noted,
- however, that while missing attributes are often considered correct,
- your code may not function properly if you leave them out. Therefore,
- you should always fill in every value you see in a typical response for
- your particular request.
-
- :param method: The name of the client method to stub.
- :type method: str
-
- :param service_response: A dict response stub. Provided parameters will
- be validated against the service model.
- :type service_response: dict
-
- :param expected_params: A dictionary of the expected parameters to
- be called for the provided service response. The parameters match
- the names of keyword arguments passed to that client call. If
- any of the parameters differ a ``StubResponseError`` is thrown.
- You can use stub.ANY to indicate a particular parameter to ignore
- in validation. stub.ANY is only valid for top level params.
- """
- self._add_response(method, service_response, expected_params)
-
- def _add_response(self, method, service_response, expected_params):
- if not hasattr(self.client, method):
- raise ValueError(
- "Client %s does not have method: %s"
- % (self.client.meta.service_model.service_name, method))
-
- # Create a successful http response
+
+ **Example:**
+ ::
+ import datetime
+ import botocore.session
+ from botocore.stub import Stubber, ANY
+
+
+ s3 = botocore.session.get_session().create_client('s3')
+ stubber = Stubber(s3)
+
+ response = {
+ 'IsTruncated': False,
+ 'Name': 'test-bucket',
+ 'MaxKeys': 1000, 'Prefix': '',
+ 'Contents': [{
+ 'Key': 'test.txt',
+ 'ETag': '"abc123"',
+ 'StorageClass': 'STANDARD',
+ 'LastModified': datetime.datetime(2016, 1, 20, 22, 9),
+ 'Owner': {'ID': 'abc123', 'DisplayName': 'myname'},
+ 'Size': 14814
+ }],
+ 'EncodingType': 'url',
+ 'ResponseMetadata': {
+ 'RequestId': 'abc123',
+ 'HTTPStatusCode': 200,
+ 'HostId': 'abc123'
+ },
+ 'Marker': ''
+ }
+
+ expected_params = {'Bucket': ANY}
+ stubber.add_response('list_objects', response, expected_params)
+
+ with stubber:
+ service_response = s3.list_objects(Bucket='test-bucket')
+
+ assert service_response == response
+ """
+ def __init__(self, client):
+ """
+ :param client: The client to add your stubs to.
+ """
+ self.client = client
+ self._event_id = 'boto_stubber'
+ self._expected_params_event_id = 'boto_stubber_expected_params'
+ self._queue = deque()
+
+ def __enter__(self):
+ self.activate()
+ return self
+
+ def __exit__(self, exception_type, exception_value, traceback):
+ self.deactivate()
+
+ def activate(self):
+ """
+ Activates the stubber on the client
+ """
+ self.client.meta.events.register_first(
+ 'before-parameter-build.*.*',
+ self._assert_expected_params,
+ unique_id=self._expected_params_event_id)
+ self.client.meta.events.register(
+ 'before-call.*.*',
+ self._get_response_handler,
+ unique_id=self._event_id)
+
+ def deactivate(self):
+ """
+ Deactivates the stubber on the client
+ """
+ self.client.meta.events.unregister(
+ 'before-parameter-build.*.*',
+ self._assert_expected_params,
+ unique_id=self._expected_params_event_id)
+ self.client.meta.events.unregister(
+ 'before-call.*.*',
+ self._get_response_handler,
+ unique_id=self._event_id)
+
+ def add_response(self, method, service_response, expected_params=None):
+ """
+ Adds a service response to the response queue. This will be validated
+ against the service model to ensure correctness. It should be noted,
+ however, that while missing attributes are often considered correct,
+ your code may not function properly if you leave them out. Therefore,
+ you should always fill in every value you see in a typical response for
+ your particular request.
+
+ :param method: The name of the client method to stub.
+ :type method: str
+
+ :param service_response: A dict response stub. Provided parameters will
+ be validated against the service model.
+ :type service_response: dict
+
+ :param expected_params: A dictionary of the expected parameters to
+ be called for the provided service response. The parameters match
+ the names of keyword arguments passed to that client call. If
+ any of the parameters differ a ``StubResponseError`` is thrown.
+ You can use stub.ANY to indicate a particular parameter to ignore
+ in validation. stub.ANY is only valid for top level params.
+ """
+ self._add_response(method, service_response, expected_params)
+
+ def _add_response(self, method, service_response, expected_params):
+ if not hasattr(self.client, method):
+ raise ValueError(
+ "Client %s does not have method: %s"
+ % (self.client.meta.service_model.service_name, method))
+
+ # Create a successful http response
http_response = AWSResponse(None, 200, {}, None)
-
- operation_name = self.client.meta.method_to_api_mapping.get(method)
- self._validate_response(operation_name, service_response)
-
- # Add the service_response to the queue for returning responses
- response = {
- 'operation_name': operation_name,
- 'response': (http_response, service_response),
- 'expected_params': expected_params
- }
- self._queue.append(response)
-
- def add_client_error(self, method, service_error_code='',
- service_message='', http_status_code=400,
+
+ operation_name = self.client.meta.method_to_api_mapping.get(method)
+ self._validate_response(operation_name, service_response)
+
+ # Add the service_response to the queue for returning responses
+ response = {
+ 'operation_name': operation_name,
+ 'response': (http_response, service_response),
+ 'expected_params': expected_params
+ }
+ self._queue.append(response)
+
+ def add_client_error(self, method, service_error_code='',
+ service_message='', http_status_code=400,
service_error_meta=None, expected_params=None,
response_meta=None):
- """
- Adds a ``ClientError`` to the response queue.
-
- :param method: The name of the service method to return the error on.
- :type method: str
-
- :param service_error_code: The service error code to return,
- e.g. ``NoSuchBucket``
- :type service_error_code: str
-
- :param service_message: The service message to return, e.g.
- 'The specified bucket does not exist.'
- :type service_message: str
-
- :param http_status_code: The HTTP status code to return, e.g. 404, etc
- :type http_status_code: int
-
- :param service_error_meta: Additional keys to be added to the
- service Error
- :type service_error_meta: dict
-
- :param expected_params: A dictionary of the expected parameters to
- be called for the provided service response. The parameters match
- the names of keyword arguments passed to that client call. If
- any of the parameters differ a ``StubResponseError`` is thrown.
- You can use stub.ANY to indicate a particular parameter to ignore
- in validation.
+ """
+ Adds a ``ClientError`` to the response queue.
+
+ :param method: The name of the service method to return the error on.
+ :type method: str
+
+ :param service_error_code: The service error code to return,
+ e.g. ``NoSuchBucket``
+ :type service_error_code: str
+
+ :param service_message: The service message to return, e.g.
+ 'The specified bucket does not exist.'
+ :type service_message: str
+
+ :param http_status_code: The HTTP status code to return, e.g. 404, etc
+ :type http_status_code: int
+
+ :param service_error_meta: Additional keys to be added to the
+ service Error
+ :type service_error_meta: dict
+
+ :param expected_params: A dictionary of the expected parameters to
+ be called for the provided service response. The parameters match
+ the names of keyword arguments passed to that client call. If
+ any of the parameters differ a ``StubResponseError`` is thrown.
+ You can use stub.ANY to indicate a particular parameter to ignore
+ in validation.
:param response_meta: Additional keys to be added to the
response's ResponseMetadata
:type response_meta: dict
- """
+ """
http_response = AWSResponse(None, http_status_code, {}, None)
-
- # We don't look to the model to build this because the caller would
- # need to know the details of what the HTTP body would need to
- # look like.
- parsed_response = {
- 'ResponseMetadata': {'HTTPStatusCode': http_status_code},
- 'Error': {
- 'Message': service_message,
- 'Code': service_error_code
- }
- }
-
- if service_error_meta is not None:
- parsed_response['Error'].update(service_error_meta)
-
+
+ # We don't look to the model to build this because the caller would
+ # need to know the details of what the HTTP body would need to
+ # look like.
+ parsed_response = {
+ 'ResponseMetadata': {'HTTPStatusCode': http_status_code},
+ 'Error': {
+ 'Message': service_message,
+ 'Code': service_error_code
+ }
+ }
+
+ if service_error_meta is not None:
+ parsed_response['Error'].update(service_error_meta)
+
if response_meta is not None:
parsed_response['ResponseMetadata'].update(response_meta)
- operation_name = self.client.meta.method_to_api_mapping.get(method)
- # Note that we do not allow for expected_params while
- # adding errors into the queue yet.
- response = {
- 'operation_name': operation_name,
- 'response': (http_response, parsed_response),
- 'expected_params': expected_params,
- }
- self._queue.append(response)
-
- def assert_no_pending_responses(self):
- """
- Asserts that all expected calls were made.
- """
- remaining = len(self._queue)
- if remaining != 0:
- raise AssertionError(
- "%d responses remaining in queue." % remaining)
-
- def _assert_expected_call_order(self, model, params):
- if not self._queue:
+ operation_name = self.client.meta.method_to_api_mapping.get(method)
+ # Note that we do not allow for expected_params while
+ # adding errors into the queue yet.
+ response = {
+ 'operation_name': operation_name,
+ 'response': (http_response, parsed_response),
+ 'expected_params': expected_params,
+ }
+ self._queue.append(response)
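+
+ # A hedged sketch (reusing the ``stubber`` and ``s3`` names from the
+ # class docstring above):
+ #
+ #     stubber.add_client_error(
+ #         'list_objects', service_error_code='NoSuchBucket',
+ #         service_message='The specified bucket does not exist.',
+ #         http_status_code=404)
+ #     with stubber:
+ #         s3.list_objects(Bucket='missing-bucket')  # raises ClientError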
+
+ def assert_no_pending_responses(self):
+ """
+ Asserts that all expected calls were made.
+ """
+ remaining = len(self._queue)
+ if remaining != 0:
+ raise AssertionError(
+ "%d responses remaining in queue." % remaining)
+
+ def _assert_expected_call_order(self, model, params):
+ if not self._queue:
raise UnStubbedResponseError(
- operation_name=model.name,
+ operation_name=model.name,
reason=(
'Unexpected API Call: A call was made but no additional calls expected. '
'Either the API Call was not stubbed or it was called multiple times.'
)
)
-
- name = self._queue[0]['operation_name']
- if name != model.name:
- raise StubResponseError(
- operation_name=model.name,
- reason='Operation mismatch: found response for %s.' % name)
-
+
+ name = self._queue[0]['operation_name']
+ if name != model.name:
+ raise StubResponseError(
+ operation_name=model.name,
+ reason='Operation mismatch: found response for %s.' % name)
+
def _get_response_handler(self, model, params, context, **kwargs):
- self._assert_expected_call_order(model, params)
- # Pop off the entire response once everything has been validated
- return self._queue.popleft()['response']
-
+ self._assert_expected_call_order(model, params)
+ # Pop off the entire response once everything has been validated
+ return self._queue.popleft()['response']
+
def _assert_expected_params(self, model, params, context, **kwargs):
if self._should_not_stub(context):
return
- self._assert_expected_call_order(model, params)
- expected_params = self._queue[0]['expected_params']
- if expected_params is None:
- return
-
- # Validate the parameters are equal
- for param, value in expected_params.items():
- if param not in params or expected_params[param] != params[param]:
- raise StubAssertionError(
- operation_name=model.name,
- reason='Expected parameters:\n%s,\nbut received:\n%s' % (
- pformat(expected_params), pformat(params)))
-
- # Ensure there are no extra params hanging around
- if sorted(expected_params.keys()) != sorted(params.keys()):
- raise StubAssertionError(
- operation_name=model.name,
- reason='Expected parameters:\n%s,\nbut received:\n%s' % (
- pformat(expected_params), pformat(params)))
-
+ self._assert_expected_call_order(model, params)
+ expected_params = self._queue[0]['expected_params']
+ if expected_params is None:
+ return
+
+ # Validate the parameters are equal
+ for param, value in expected_params.items():
+ if param not in params or expected_params[param] != params[param]:
+ raise StubAssertionError(
+ operation_name=model.name,
+ reason='Expected parameters:\n%s,\nbut received:\n%s' % (
+ pformat(expected_params), pformat(params)))
+
+ # Ensure there are no extra params hanging around
+ if sorted(expected_params.keys()) != sorted(params.keys()):
+ raise StubAssertionError(
+ operation_name=model.name,
+ reason='Expected parameters:\n%s,\nbut received:\n%s' % (
+ pformat(expected_params), pformat(params)))
+
def _should_not_stub(self, context):
# Do not include presign requests when processing stubbed client calls
# as a presign request will never have an HTTP request sent over the
@@ -372,23 +372,23 @@ class Stubber(object):
if context and context.get('is_presign_request'):
return True
- def _validate_response(self, operation_name, service_response):
- service_model = self.client.meta.service_model
- operation_model = service_model.operation_model(operation_name)
- output_shape = operation_model.output_shape
-
- # Remove ResponseMetadata so that the validator doesn't attempt to
- # perform validation on it.
- response = service_response
- if 'ResponseMetadata' in response:
- response = copy.copy(service_response)
- del response['ResponseMetadata']
-
- if output_shape is not None:
- validate_parameters(response, output_shape)
- elif response:
- # If the output shape is None, that means the response should be
- # empty apart from ResponseMetadata
- raise ParamValidationError(
- report=(
- "Service response should only contain ResponseMetadata."))
+ def _validate_response(self, operation_name, service_response):
+ service_model = self.client.meta.service_model
+ operation_model = service_model.operation_model(operation_name)
+ output_shape = operation_model.output_shape
+
+ # Remove ResponseMetadata so that the validator doesn't attempt to
+ # perform validation on it.
+ response = service_response
+ if 'ResponseMetadata' in response:
+ response = copy.copy(service_response)
+ del response['ResponseMetadata']
+
+ if output_shape is not None:
+ validate_parameters(response, output_shape)
+ elif response:
+ # If the output shape is None, that means the response should be
+ # empty apart from ResponseMetadata
+ raise ParamValidationError(
+ report=(
+ "Service response should only contain ResponseMetadata."))
diff --git a/contrib/python/botocore/botocore/translate.py b/contrib/python/botocore/botocore/translate.py
index 11c551bbb4..5a661ab063 100644
--- a/contrib/python/botocore/botocore/translate.py
+++ b/contrib/python/botocore/botocore/translate.py
@@ -1,76 +1,76 @@
-# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
-# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import copy
-
-from botocore.utils import merge_dicts
-
-
-def build_retry_config(endpoint_prefix, retry_model, definitions,
- client_retry_config=None):
- service_config = retry_model.get(endpoint_prefix, {})
- resolve_references(service_config, definitions)
- # We want to merge the global defaults with the service specific
- # defaults, with the service specific defaults taking precedence.
- # So we use the global defaults as the base.
- #
- # A deepcopy is done on the retry defaults because it ensures the
- # retry model has no chance of getting mutated when the service specific
- # configuration or client retry config is merged in.
- final_retry_config = {
- '__default__': copy.deepcopy(retry_model.get('__default__', {}))
- }
- resolve_references(final_retry_config, definitions)
- # Then merge the service specific config on top.
- merge_dicts(final_retry_config, service_config)
- if client_retry_config is not None:
- _merge_client_retry_config(final_retry_config, client_retry_config)
- return final_retry_config
-
-
-def _merge_client_retry_config(retry_config, client_retry_config):
- max_retry_attempts_override = client_retry_config.get('max_attempts')
- if max_retry_attempts_override is not None:
- # In the retry config, the max_attempts refers to the maximum number
- # of requests that will be made in total. However, for the client's
- # retry config it refers to how many retry attempts will be made at
- # most. So to translate this number from the client config, one is
- # added to convert it to the maximum number of requests that will be
- # made, including the initial request.
- #
- # It is also important to note that if we ever support per operation
- # configuration in the retry model via the client, we will need to
- # revisit this logic to make sure max_attempts gets applied
- # per operation.
- retry_config['__default__'][
- 'max_attempts'] = max_retry_attempts_override + 1
-
-
-def resolve_references(config, definitions):
- """Recursively replace $ref keys.
-
- To cut down on duplication, common definitions can be declared
- (and passed in via the ``definitions`` attribute) and then
- references as {"$ref": "name"}, when this happens the reference
- dict is placed with the value from the ``definition`` dict.
-
- This is recursively done.
-
- """
- for key, value in config.items():
- if isinstance(value, dict):
- if len(value) == 1 and list(value.keys())[0] == '$ref':
- # Then we need to resolve this reference.
- config[key] = definitions[list(value.values())[0]]
- else:
- resolve_references(value, definitions)
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy
+
+from botocore.utils import merge_dicts
+
+
+def build_retry_config(endpoint_prefix, retry_model, definitions,
+ client_retry_config=None):
+ service_config = retry_model.get(endpoint_prefix, {})
+ resolve_references(service_config, definitions)
+ # We want to merge the global defaults with the service specific
+ # defaults, with the service specific defaults taking precedence.
+ # So we use the global defaults as the base.
+ #
+ # A deepcopy is done on the retry defaults because it ensures the
+ # retry model has no chance of getting mutated when the service specific
+ # configuration or client retry config is merged in.
+ final_retry_config = {
+ '__default__': copy.deepcopy(retry_model.get('__default__', {}))
+ }
+ resolve_references(final_retry_config, definitions)
+ # Then merge the service specific config on top.
+ merge_dicts(final_retry_config, service_config)
+ if client_retry_config is not None:
+ _merge_client_retry_config(final_retry_config, client_retry_config)
+ return final_retry_config
+
+
+def _merge_client_retry_config(retry_config, client_retry_config):
+ max_retry_attempts_override = client_retry_config.get('max_attempts')
+ if max_retry_attempts_override is not None:
+ # In the retry config, the max_attempts refers to the maximum number
+ # of requests that will be made in total. However, for the client's
+ # retry config it refers to how many retry attempts will be made at
+ # most. So to translate this number from the client config, one is
+ # added to convert it to the maximum number of requests that will be
+ # made, including the initial request.
+ #
+ # It is also important to note that if we ever support per operation
+ # configuration in the retry model via the client, we will need to
+ # revisit this logic to make sure max_attempts gets applied
+ # per operation.
+ retry_config['__default__'][
+ 'max_attempts'] = max_retry_attempts_override + 1
+
+
+def resolve_references(config, definitions):
+ """Recursively replace $ref keys.
+
+ To cut down on duplication, common definitions can be declared
+ (and passed in via the ``definitions`` attribute) and then
+ references as {"$ref": "name"}, when this happens the reference
+ dict is placed with the value from the ``definition`` dict.
+
+ This is recursively done.
+
+ """
+ for key, value in config.items():
+ if isinstance(value, dict):
+ if len(value) == 1 and list(value.keys())[0] == '$ref':
+ # Then we need to resolve this reference.
+ config[key] = definitions[list(value.values())[0]]
+ else:
+ resolve_references(value, definitions)
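
As a brief illustration of resolve_references() above, a self-contained sketch with invented definitions and config; only the {"$ref": ...} handling is exercised:

    from botocore.translate import resolve_references

    definitions = {
        'throttling': {'applies_when': {'response': {
            'service_error_code': 'Throttling'}}},
    }
    config = {'__default__': {'policies': {'throttling': {'$ref': 'throttling'}}}}

    resolve_references(config, definitions)
    # The {'$ref': ...} marker has been replaced in place with the
    # shared definition object itself.
    assert config['__default__']['policies']['throttling'] is definitions['throttling']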
diff --git a/contrib/python/botocore/botocore/utils.py b/contrib/python/botocore/botocore/utils.py
index bda50e6644..d21a7622c1 100644
--- a/contrib/python/botocore/botocore/utils.py
+++ b/contrib/python/botocore/botocore/utils.py
@@ -1,34 +1,34 @@
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
import base64
-import re
-import time
-import logging
-import datetime
-import hashlib
-import binascii
-import functools
-import weakref
-import random
+import re
+import time
+import logging
+import datetime
+import hashlib
+import binascii
+import functools
+import weakref
+import random
import os
import socket
import cgi
import warnings
-
-import dateutil.parser
+
+import dateutil.parser
from dateutil.tz import tzutc
-
-import botocore
+
+import botocore
import botocore.awsrequest
import botocore.httpsession
from botocore.compat import (
@@ -46,21 +46,21 @@ from botocore.exceptions import (
InvalidHostLabelError, HTTPClientError, UnsupportedS3ConfigurationError,
)
from urllib3.exceptions import LocationParseError
-
-logger = logging.getLogger(__name__)
-DEFAULT_METADATA_SERVICE_TIMEOUT = 1
+
+logger = logging.getLogger(__name__)
+DEFAULT_METADATA_SERVICE_TIMEOUT = 1
METADATA_BASE_URL = 'http://169.254.169.254/'
METADATA_BASE_URL_IPv6 = 'http://[fe80:ec2::254%eth0]/'
-# These are chars that do not need to be urlencoded.
-# Based on rfc2986, section 2.3
-SAFE_CHARS = '-._~'
-LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]')
+# These are chars that do not need to be urlencoded.
+# Based on rfc2986, section 2.3
+SAFE_CHARS = '-._~'
+LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]')
RETRYABLE_HTTP_ERRORS = (
ReadTimeoutError, EndpointConnectionError, ConnectionClosedError,
ConnectTimeoutError,
)
-S3_ACCELERATE_WHITELIST = ['dualstack']
+S3_ACCELERATE_WHITELIST = ['dualstack']
# In switching events from using service name / endpoint prefix to service
# id, we have to preserve compatibility. This maps the instances where either
# is different than the transformed service id.
@@ -139,7 +139,7 @@ EVENT_ALIASES = {
"streams.dynamodb": "dynamodb-streams",
"tagging": "resource-groups-tagging-api"
}
-
+
# Vendoring IPv6 validation regex patterns from urllib3
# https://github.com/urllib3/urllib3/blob/7e856c0/src/urllib3/util/url.py
IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
@@ -166,7 +166,7 @@ _variations = [
# [ *6( h16 ":" ) h16 ] "::"
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
-
+
UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
@@ -179,51 +179,51 @@ UNSAFE_URL_CHARS = frozenset('\t\r\n')
def ensure_boolean(val):
"""Ensures a boolean value if a string or boolean is provided
-
+
For strings, the value for True/False is case insensitive
"""
if isinstance(val, bool):
return val
else:
return val.lower() == 'true'
-
-
-def is_json_value_header(shape):
- """Determines if the provided shape is the special header type jsonvalue.
-
- :type shape: botocore.shape
- :param shape: Shape to be inspected for the jsonvalue trait.
-
- :return: True if this type is a jsonvalue, False otherwise
- :rtype: Bool
- """
- return (hasattr(shape, 'serialization') and
- shape.serialization.get('jsonvalue', False) and
- shape.serialization.get('location') == 'header' and
- shape.type_name == 'string')
-
-
-def get_service_module_name(service_model):
- """Returns the module name for a service
-
- This is the value used in both the documentation and client class name
- """
- name = service_model.metadata.get(
- 'serviceAbbreviation',
- service_model.metadata.get(
- 'serviceFullName', service_model.service_name))
- name = name.replace('Amazon', '')
- name = name.replace('AWS', '')
- name = re.sub(r'\W+', '', name)
- return name
-
-
-def normalize_url_path(path):
- if not path:
- return '/'
- return remove_dot_segments(path)
-
-
+
+
+def is_json_value_header(shape):
+ """Determines if the provided shape is the special header type jsonvalue.
+
+ :type shape: botocore.shape
+ :param shape: Shape to be inspected for the jsonvalue trait.
+
+ :return: True if this type is a jsonvalue, False otherwise
+ :rtype: Bool
+ """
+ return (hasattr(shape, 'serialization') and
+ shape.serialization.get('jsonvalue', False) and
+ shape.serialization.get('location') == 'header' and
+ shape.type_name == 'string')
+
+
+def get_service_module_name(service_model):
+ """Returns the module name for a service
+
+ This is the value used in both the documentation and client class name
+ """
+ name = service_model.metadata.get(
+ 'serviceAbbreviation',
+ service_model.metadata.get(
+ 'serviceFullName', service_model.service_name))
+ name = name.replace('Amazon', '')
+ name = name.replace('AWS', '')
+ name = re.sub(r'\W+', '', name)
+ return name
+
+
+def normalize_url_path(path):
+ if not path:
+ return '/'
+ return remove_dot_segments(path)
+
+
def normalize_boolean(val):
"""Returns None if val is None, otherwise ensure value
converted to boolean"""
@@ -233,77 +233,77 @@ def normalize_boolean(val):
return ensure_boolean(val)
-def remove_dot_segments(url):
- # RFC 3986, section 5.2.4 "Remove Dot Segments"
- # Also, AWS services require consecutive slashes to be removed,
- # so that's done here as well
- if not url:
- return ''
- input_url = url.split('/')
- output_list = []
- for x in input_url:
- if x and x != '.':
- if x == '..':
- if output_list:
- output_list.pop()
- else:
- output_list.append(x)
-
- if url[0] == '/':
- first = '/'
- else:
- first = ''
- if url[-1] == '/' and output_list:
- last = '/'
- else:
- last = ''
- return first + '/'.join(output_list) + last
-
-
-def validate_jmespath_for_set(expression):
- # Validates a limited jmespath expression to determine if we can set a
- # value based on it. Only works with dotted paths.
- if not expression or expression == '.':
- raise InvalidExpressionError(expression=expression)
-
- for invalid in ['[', ']', '*']:
- if invalid in expression:
- raise InvalidExpressionError(expression=expression)
-
-
-def set_value_from_jmespath(source, expression, value, is_first=True):
- # This takes a (limited) jmespath-like expression & can set a value based
- # on it.
- # Limitations:
- # * Only handles dotted lookups
- # * No offsets/wildcards/slices/etc.
- if is_first:
- validate_jmespath_for_set(expression)
-
- bits = expression.split('.', 1)
- current_key, remainder = bits[0], bits[1] if len(bits) > 1 else ''
-
- if not current_key:
- raise InvalidExpressionError(expression=expression)
-
- if remainder:
- if current_key not in source:
- # We've got something in the expression that's not present in the
- # source (new key). If there are any more bits, we'll set the key
- # with an empty dictionary.
- source[current_key] = {}
-
- return set_value_from_jmespath(
- source[current_key],
- remainder,
- value,
- is_first=False
- )
-
- # If we're down to a single key, set it.
- source[current_key] = value
-
-
+def remove_dot_segments(url):
+ # RFC 3986, section 5.2.4 "Remove Dot Segments"
+ # Also, AWS services require consecutive slashes to be removed,
+ # so that's done here as well
+ if not url:
+ return ''
+ input_url = url.split('/')
+ output_list = []
+ for x in input_url:
+ if x and x != '.':
+ if x == '..':
+ if output_list:
+ output_list.pop()
+ else:
+ output_list.append(x)
+
+ if url[0] == '/':
+ first = '/'
+ else:
+ first = ''
+ if url[-1] == '/' and output_list:
+ last = '/'
+ else:
+ last = ''
+ return first + '/'.join(output_list) + last
+
+
+def validate_jmespath_for_set(expression):
+ # Validates a limited jmespath expression to determine if we can set a
+ # value based on it. Only works with dotted paths.
+ if not expression or expression == '.':
+ raise InvalidExpressionError(expression=expression)
+
+ for invalid in ['[', ']', '*']:
+ if invalid in expression:
+ raise InvalidExpressionError(expression=expression)
+
+
+def set_value_from_jmespath(source, expression, value, is_first=True):
+ # This takes a (limited) jmespath-like expression & can set a value based
+ # on it.
+ # Limitations:
+ # * Only handles dotted lookups
+ # * No offsets/wildcards/slices/etc.
+ if is_first:
+ validate_jmespath_for_set(expression)
+
+ bits = expression.split('.', 1)
+ current_key, remainder = bits[0], bits[1] if len(bits) > 1 else ''
+
+ if not current_key:
+ raise InvalidExpressionError(expression=expression)
+
+ if remainder:
+ if current_key not in source:
+ # We've got something in the expression that's not present in the
+ # source (new key). If there are any more bits, we'll set the key
+ # with an empty dictionary.
+ source[current_key] = {}
+
+ return set_value_from_jmespath(
+ source[current_key],
+ remainder,
+ value,
+ is_first=False
+ )
+
+ # If we're down to a single key, set it.
+ source[current_key] = value
+
+
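
A quick sketch of the set_value_from_jmespath() helper above; the expression and values are invented:

    from botocore.utils import set_value_from_jmespath

    source = {}
    # Intermediate keys ('Credentials') are created as empty dicts on
    # the way down; only dotted lookups are supported.
    set_value_from_jmespath(source, 'Credentials.AccessKeyId', 'AKID_EXAMPLE')
    assert source == {'Credentials': {'AccessKeyId': 'AKID_EXAMPLE'}}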
class _RetriesExceededError(Exception):
"""Internal exception used when the number of retries are exceeded."""
pass
@@ -320,11 +320,11 @@ class IMDSFetcher(object):
_TOKEN_PATH = 'latest/api/token'
_TOKEN_TTL = '21600'
- def __init__(self, timeout=DEFAULT_METADATA_SERVICE_TIMEOUT,
+ def __init__(self, timeout=DEFAULT_METADATA_SERVICE_TIMEOUT,
num_attempts=1, base_url=METADATA_BASE_URL,
env=None, user_agent=None, config=None):
- self._timeout = timeout
- self._num_attempts = num_attempts
+ self._timeout = timeout
+ self._num_attempts = num_attempts
self._base_url = self._select_base_url(base_url, config)
if env is None:
@@ -336,7 +336,7 @@ class IMDSFetcher(object):
timeout=self._timeout,
proxies=get_environ_proxies(self._base_url),
)
-
+
def get_base_url(self):
return self._base_url
@@ -423,18 +423,18 @@ class IMDSFetcher(object):
headers['x-aws-ec2-metadata-token'] = token
self._add_user_agent(headers)
for i in range(self._num_attempts):
- try:
+ try:
request = botocore.awsrequest.AWSRequest(
method='GET', url=url, headers=headers)
response = self._session.send(request.prepare())
if not retry_func(response):
return response
- except RETRYABLE_HTTP_ERRORS as e:
+ except RETRYABLE_HTTP_ERRORS as e:
logger.debug(
"Caught retryable HTTP exception while making metadata "
"service request to %s: %s", url, e, exc_info=True)
raise self._RETRIES_EXCEEDED_ERROR_CLS()
-
+
def _add_user_agent(self, headers):
if self._user_agent is not None:
headers['User-Agent'] = self._user_agent
@@ -482,8 +482,8 @@ class InstanceMetadataFetcher(IMDSFetcher):
'AccessKeyId', 'SecretAccessKey', 'Token', 'Expiration'
]
- def retrieve_iam_role_credentials(self):
- try:
+ def retrieve_iam_role_credentials(self):
+ try:
token = self._fetch_metadata_token()
role_name = self._get_iam_role(token)
credentials = self._get_credentials(role_name, token)
@@ -495,7 +495,7 @@ class InstanceMetadataFetcher(IMDSFetcher):
'token': credentials['Token'],
'expiry_time': credentials['Expiration'],
}
- else:
+ else:
# IMDS can return a 200 response that has a JSON formatted
# error message (i.e. if ec2 is not trusted entity for the
# attached role). We do not necessarily want to retry for
@@ -509,20 +509,20 @@ class InstanceMetadataFetcher(IMDSFetcher):
'credentials: %s.', credentials)
return {}
except self._RETRIES_EXCEEDED_ERROR_CLS:
- logger.debug("Max number of attempts exceeded (%s) when "
- "attempting to retrieve data from metadata service.",
+ logger.debug("Max number of attempts exceeded (%s) when "
+ "attempting to retrieve data from metadata service.",
self._num_attempts)
except BadIMDSRequestError as e:
logger.debug("Bad IMDS request: %s", e.request)
return {}
-
+
def _get_iam_role(self, token=None):
return self._get_request(
url_path=self._URL_PATH,
retry_func=self._needs_retry_for_role_name,
token=token,
).text
-
+
def _get_credentials(self, role_name, token=None):
r = self._get_request(
url_path=self._URL_PATH + role_name,
@@ -562,35 +562,35 @@ class InstanceMetadataFetcher(IMDSFetcher):
return True
-def merge_dicts(dict1, dict2, append_lists=False):
- """Given two dict, merge the second dict into the first.
-
- The dicts can have arbitrary nesting.
-
- :param append_lists: If true, instead of clobbering a list with the new
- value, append all of the new values onto the original list.
- """
- for key in dict2:
- if isinstance(dict2[key], dict):
- if key in dict1 and key in dict2:
- merge_dicts(dict1[key], dict2[key])
- else:
- dict1[key] = dict2[key]
- # If the value is a list and the ``append_lists`` flag is set,
- # append the new values onto the original list
- elif isinstance(dict2[key], list) and append_lists:
- # The value in dict1 must be a list in order to append new
- # values onto it.
- if key in dict1 and isinstance(dict1[key], list):
- dict1[key].extend(dict2[key])
- else:
- dict1[key] = dict2[key]
- else:
- # For scalar values, the value from the second dict simply
- # overwrites the value in the first.
- dict1[key] = dict2[key]
-
-
+def merge_dicts(dict1, dict2, append_lists=False):
+ """Given two dict, merge the second dict into the first.
+
+ The dicts can have arbitrary nesting.
+
+ :param append_lists: If true, instead of clobbering a list with the new
+ value, append all of the new values onto the original list.
+ """
+ for key in dict2:
+ if isinstance(dict2[key], dict):
+ if key in dict1 and key in dict2:
+ merge_dicts(dict1[key], dict2[key])
+ else:
+ dict1[key] = dict2[key]
+ # If the value is a list and the ``append_lists`` flag is set,
+ # append the new values onto the original list
+ elif isinstance(dict2[key], list) and append_lists:
+ # The value in dict1 must be a list in order to append new
+ # values onto it.
+ if key in dict1 and isinstance(dict1[key], list):
+ dict1[key].extend(dict2[key])
+ else:
+ dict1[key] = dict2[key]
+ else:
+ # For scalar values, the value from the second dict simply
+ # overwrites the value in the first.
+ dict1[key] = dict2[key]
+
+
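
A small sketch of merge_dicts() semantics with invented dicts, including the append_lists behavior described in the docstring above:

    from botocore.utils import merge_dicts

    base = {'retry': {'max_attempts': 3}, 'tags': ['a']}
    extra = {'retry': {'mode': 'standard'}, 'tags': ['b']}

    merge_dicts(base, extra, append_lists=True)
    # Nested dicts are merged key by key; lists are appended rather
    # than clobbered because append_lists=True.
    assert base == {'retry': {'max_attempts': 3, 'mode': 'standard'},
                    'tags': ['a', 'b']}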
def lowercase_dict(original):
"""Copies the given dictionary ensuring all keys are lowercase strings. """
copy = {}
@@ -599,106 +599,106 @@ def lowercase_dict(original):
return copy
-def parse_key_val_file(filename, _open=open):
- try:
- with _open(filename) as f:
- contents = f.read()
- return parse_key_val_file_contents(contents)
- except OSError:
- raise ConfigNotFound(path=filename)
-
-
-def parse_key_val_file_contents(contents):
- # This was originally extracted from the EC2 credential provider, which was
- # fairly lenient in its parsing. We only try to parse key/val pairs if
- # there's a '=' in the line.
- final = {}
- for line in contents.splitlines():
- if '=' not in line:
- continue
- key, val = line.split('=', 1)
- key = key.strip()
- val = val.strip()
- final[key] = val
- return final
-
-
-def percent_encode_sequence(mapping, safe=SAFE_CHARS):
- """Urlencode a dict or list into a string.
-
- This is similar to urllib.urlencode except that:
-
- * It uses quote, and not quote_plus
- * It has a default list of safe chars that don't need
- to be encoded, which matches what AWS services expect.
-
- If any value in the input ``mapping`` is a list type,
- then each list element will be serialized. This is equivalent
- to ``urlencode``'s ``doseq=True`` argument.
-
- This function should be preferred over the stdlib
- ``urlencode()`` function.
-
- :param mapping: Either a dict to urlencode or a list of
- ``(key, value)`` pairs.
-
- """
- encoded_pairs = []
- if hasattr(mapping, 'items'):
- pairs = mapping.items()
- else:
- pairs = mapping
- for key, value in pairs:
- if isinstance(value, list):
- for element in value:
- encoded_pairs.append('%s=%s' % (percent_encode(key),
- percent_encode(element)))
- else:
- encoded_pairs.append('%s=%s' % (percent_encode(key),
- percent_encode(value)))
- return '&'.join(encoded_pairs)
-
-
-def percent_encode(input_str, safe=SAFE_CHARS):
- """Urlencodes a string.
-
- Whereas percent_encode_sequence handles taking a dict/sequence and
- producing a percent encoded string, this function deals only with
- taking a string (not a dict/sequence) and percent encoding it.
-
- If given the binary type, will simply URL encode it. If given the
- text type, will produce the binary type by UTF-8 encoding the
+def parse_key_val_file(filename, _open=open):
+ try:
+ with _open(filename) as f:
+ contents = f.read()
+ return parse_key_val_file_contents(contents)
+ except OSError:
+ raise ConfigNotFound(path=filename)
+
+
+def parse_key_val_file_contents(contents):
+ # This was originally extracted from the EC2 credential provider, which was
+ # fairly lenient in its parsing. We only try to parse key/val pairs if
+ # there's a '=' in the line.
+ final = {}
+ for line in contents.splitlines():
+ if '=' not in line:
+ continue
+ key, val = line.split('=', 1)
+ key = key.strip()
+ val = val.strip()
+ final[key] = val
+ return final
+
+
+def percent_encode_sequence(mapping, safe=SAFE_CHARS):
+ """Urlencode a dict or list into a string.
+
+ This is similar to urllib.urlencode except that:
+
+ * It uses quote, and not quote_plus
+ * It has a default list of safe chars that don't need
+ to be encoded, which matches what AWS services expect.
+
+ If any value in the input ``mapping`` is a list type,
+ then each list element will be serialized. This is equivalent
+ to ``urlencode``'s ``doseq=True`` argument.
+
+ This function should be preferred over the stdlib
+ ``urlencode()`` function.
+
+ :param mapping: Either a dict to urlencode or a list of
+ ``(key, value)`` pairs.
+
+ """
+ encoded_pairs = []
+ if hasattr(mapping, 'items'):
+ pairs = mapping.items()
+ else:
+ pairs = mapping
+ for key, value in pairs:
+ if isinstance(value, list):
+ for element in value:
+ encoded_pairs.append('%s=%s' % (percent_encode(key),
+ percent_encode(element)))
+ else:
+ encoded_pairs.append('%s=%s' % (percent_encode(key),
+ percent_encode(value)))
+ return '&'.join(encoded_pairs)
+
+
+def percent_encode(input_str, safe=SAFE_CHARS):
+ """Urlencodes a string.
+
+ Whereas percent_encode_sequence handles taking a dict/sequence and
+ producing a percent encoded string, this function deals only with
+ taking a string (not a dict/sequence) and percent encoding it.
+
+ If given the binary type, will simply URL encode it. If given the
+ text type, will produce the binary type by UTF-8 encoding the
text. If given something else, will convert it to the text type
- first.
- """
- # If it's not a binary or text string, make it a text string.
- if not isinstance(input_str, (six.binary_type, six.text_type)):
- input_str = six.text_type(input_str)
- # If it's not bytes, make it bytes by UTF-8 encoding it.
- if not isinstance(input_str, six.binary_type):
- input_str = input_str.encode('utf-8')
- return quote(input_str, safe=safe)
-
-
+ first.
+ """
+ # If it's not a binary or text string, make it a text string.
+ if not isinstance(input_str, (six.binary_type, six.text_type)):
+ input_str = six.text_type(input_str)
+ # If it's not bytes, make it bytes by UTF-8 encoding it.
+ if not isinstance(input_str, six.binary_type):
+ input_str = input_str.encode('utf-8')
+ return quote(input_str, safe=safe)
+
+
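
A brief sketch of the two encoding helpers above; the inputs are invented:

    from botocore.utils import percent_encode, percent_encode_sequence

    # List values are serialized element by element, mirroring the
    # stdlib urlencode()'s doseq=True behavior.
    assert percent_encode_sequence(
        {'Action': 'Get', 'Ids': ['a b', 'c']}) == 'Action=Get&Ids=a%20b&Ids=c'

    # quote (not quote_plus) is used, so spaces become %20, and '/'
    # is not in SAFE_CHARS ('-._~'), so it is encoded too.
    assert percent_encode('a b/c') == 'a%20b%2Fc'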
def _parse_timestamp_with_tzinfo(value, tzinfo):
"""Parse timestamp with pluggable tzinfo options."""
- if isinstance(value, (int, float)):
- # Possibly an epoch time.
+ if isinstance(value, (int, float)):
+ # Possibly an epoch time.
return datetime.datetime.fromtimestamp(value, tzinfo())
- else:
- try:
+ else:
+ try:
return datetime.datetime.fromtimestamp(float(value), tzinfo())
- except (TypeError, ValueError):
- pass
- try:
- # In certain cases, a timestamp marked with GMT can be parsed into a
- # different time zone, so here we provide a context which will
- # enforce that GMT == UTC.
- return dateutil.parser.parse(value, tzinfos={'GMT': tzutc()})
- except (TypeError, ValueError) as e:
- raise ValueError('Invalid timestamp "%s": %s' % (value, e))
-
-
+ except (TypeError, ValueError):
+ pass
+ try:
+ # In certain cases, a timestamp marked with GMT can be parsed into a
+ # different time zone, so here we provide a context which will
+ # enforce that GMT == UTC.
+ return dateutil.parser.parse(value, tzinfos={'GMT': tzutc()})
+ except (TypeError, ValueError) as e:
+ raise ValueError('Invalid timestamp "%s": %s' % (value, e))
+
+
def parse_timestamp(value):
"""Parse a timestamp into a datetime object.
@@ -721,301 +721,301 @@ def parse_timestamp(value):
'"%s"' % value)
-def parse_to_aware_datetime(value):
- """Converted the passed in value to a datetime object with tzinfo.
-
- This function can be used to normalize all timestamp inputs. This
- function accepts a number of different types of inputs, but
- will always return a datetime.datetime object with time zone
- information.
-
- The input param ``value`` can be one of several types:
-
- * A datetime object (both naive and aware)
- * An integer representing the epoch time (can also be a string
- of the integer, i.e. '0', instead of 0). The epoch time is
- considered to be UTC.
- * An iso8601 formatted timestamp. This does not need to be
- a complete timestamp, it can contain just the date portion
- without the time component.
-
- The returned value will be a datetime object that will have tzinfo.
- If no timezone info was provided in the input value, then UTC is
- assumed, not local time.
-
- """
- # This is a general purpose method that handles several cases of
- # converting the provided value to a string timestamp suitable to be
- # serialized to an http request. It can handle:
- # 1) A datetime.datetime object.
- if isinstance(value, datetime.datetime):
- datetime_obj = value
- else:
- # 2) A string object that's formatted as a timestamp.
- # We document this as being an iso8601 timestamp, although
- # parse_timestamp is a bit more flexible.
- datetime_obj = parse_timestamp(value)
- if datetime_obj.tzinfo is None:
- # A case could be made that if no time zone is provided,
- # we should use the local time. However, to preserve backwards
- # compat, the previous behavior was to assume UTC, which is
- # what we do here.
- datetime_obj = datetime_obj.replace(tzinfo=tzutc())
- else:
- datetime_obj = datetime_obj.astimezone(tzutc())
- return datetime_obj
-
-
-def datetime2timestamp(dt, default_timezone=None):
- """Calculate the timestamp based on the given datetime instance.
-
- :type dt: datetime
- :param dt: A datetime object to be converted into timestamp
- :type default_timezone: tzinfo
- :param default_timezone: If it is provided as None, we treat it as tzutc().
- But it is only used when dt is a naive datetime.
- :returns: The timestamp
- """
- epoch = datetime.datetime(1970, 1, 1)
- if dt.tzinfo is None:
- if default_timezone is None:
- default_timezone = tzutc()
- dt = dt.replace(tzinfo=default_timezone)
- d = dt.replace(tzinfo=None) - dt.utcoffset() - epoch
- if hasattr(d, "total_seconds"):
- return d.total_seconds() # Works in Python 2.7+
- return (d.microseconds + (d.seconds + d.days * 24 * 3600) * 10**6) / 10**6
-
-
-def calculate_sha256(body, as_hex=False):
- """Calculate a sha256 checksum.
-
- This method will calculate the sha256 checksum of a file like
- object. Note that this method will iterate through the entire
- file contents. The caller is responsible for ensuring the proper
- starting position of the file and ``seek()``'ing the file back
- to its starting location if other consumers need to read from
- the file like object.
-
- :param body: Any file like object. The file must be opened
- in binary mode such that a ``.read()`` call returns bytes.
- :param as_hex: If True, then the hex digest is returned.
- If False, then the digest (as binary bytes) is returned.
-
- :returns: The sha256 checksum
-
- """
- checksum = hashlib.sha256()
- for chunk in iter(lambda: body.read(1024 * 1024), b''):
- checksum.update(chunk)
- if as_hex:
- return checksum.hexdigest()
- else:
- return checksum.digest()
-
-
-def calculate_tree_hash(body):
- """Calculate a tree hash checksum.
-
- For more information see:
-
- http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html
-
- :param body: Any file like object. This has the same constraints as
- the ``body`` param in calculate_sha256
-
- :rtype: str
- :returns: The hex version of the calculated tree hash
-
- """
- chunks = []
- required_chunk_size = 1024 * 1024
- sha256 = hashlib.sha256
- for chunk in iter(lambda: body.read(required_chunk_size), b''):
- chunks.append(sha256(chunk).digest())
- if not chunks:
- return sha256(b'').hexdigest()
- while len(chunks) > 1:
- new_chunks = []
- for first, second in _in_pairs(chunks):
- if second is not None:
- new_chunks.append(sha256(first + second).digest())
- else:
- # We're at the end of the list and there's no pair left.
- new_chunks.append(first)
- chunks = new_chunks
- return binascii.hexlify(chunks[0]).decode('ascii')
-
-
-def _in_pairs(iterable):
- # Creates an iterator that iterates over the list in pairs:
- # for a, b in _in_pairs([0, 1, 2, 3, 4]):
- # print(a, b)
- #
- # will print:
- # 0, 1
- # 2, 3
- # 4, None
- shared_iter = iter(iterable)
- # Note that zip_longest is a compat import that uses
- # the itertools izip_longest. This creates an iterator,
- # this call below does _not_ immediately create the list
- # of pairs.
- return zip_longest(shared_iter, shared_iter)
-
-
-class CachedProperty(object):
- """A read only property that caches the initially computed value.
-
- This descriptor will only call the provided ``fget`` function once.
- Subsequent access to this property will return the cached value.
-
- """
-
- def __init__(self, fget):
- self._fget = fget
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- else:
- computed_value = self._fget(obj)
- obj.__dict__[self._fget.__name__] = computed_value
- return computed_value
-
-
-class ArgumentGenerator(object):
- """Generate sample input based on a shape model.
-
- This class contains a ``generate_skeleton`` method that will take
- an input/output shape (created from ``botocore.model``) and generate
- a sample dictionary corresponding to the input/output shape.
-
- The specific values used are placeholder values. For strings, either an
- empty string or the member name is used; for numbers, 0 or 0.0 is used.
- The intended usage of this class is to generate the *shape* of the input
- structure.
-
- This can be useful for operations that have complex input shapes.
- This allows a user to just fill in the necessary data instead of
- worrying about the specific structure of the input arguments.
-
- Example usage::
-
- s = botocore.session.get_session()
- ddb = s.get_service_model('dynamodb')
- arg_gen = ArgumentGenerator()
- sample_input = arg_gen.generate_skeleton(
- ddb.operation_model('CreateTable').input_shape)
- print("Sample input for dynamodb.CreateTable: %s" % sample_input)
-
- """
- def __init__(self, use_member_names=False):
- self._use_member_names = use_member_names
-
- def generate_skeleton(self, shape):
- """Generate a sample input.
-
- :type shape: ``botocore.model.Shape``
- :param shape: The input shape.
-
- :return: The generated skeleton input corresponding to the
- provided input shape.
-
- """
- stack = []
- return self._generate_skeleton(shape, stack)
-
- def _generate_skeleton(self, shape, stack, name=''):
- stack.append(shape.name)
- try:
- if shape.type_name == 'structure':
- return self._generate_type_structure(shape, stack)
- elif shape.type_name == 'list':
- return self._generate_type_list(shape, stack)
- elif shape.type_name == 'map':
- return self._generate_type_map(shape, stack)
- elif shape.type_name == 'string':
- if self._use_member_names:
- return name
- if shape.enum:
- return random.choice(shape.enum)
- return ''
- elif shape.type_name in ['integer', 'long']:
- return 0
+def parse_to_aware_datetime(value):
+ """Converted the passed in value to a datetime object with tzinfo.
+
+ This function can be used to normalize all timestamp inputs. This
+ function accepts a number of different types of inputs, but
+ will always return a datetime.datetime object with time zone
+ information.
+
+ The input param ``value`` can be one of several types:
+
+ * A datetime object (both naive and aware)
+ * An integer representing the epoch time (can also be a string
+ of the integer, i.e. '0', instead of 0). The epoch time is
+ considered to be UTC.
+ * An iso8601 formatted timestamp. This does not need to be
+ a complete timestamp, it can contain just the date portion
+ without the time component.
+
+ The returned value will be a datetime object that will have tzinfo.
+ If no timezone info was provided in the input value, then UTC is
+ assumed, not local time.
+
+ """
+ # This is a general purpose method that handles several cases of
+ # converting the provided value to a string timestamp suitable to be
+ # serialized to an http request. It can handle:
+ # 1) A datetime.datetime object.
+ if isinstance(value, datetime.datetime):
+ datetime_obj = value
+ else:
+ # 2) A string object that's formatted as a timestamp.
+ # We document this as being an iso8601 timestamp, although
+ # parse_timestamp is a bit more flexible.
+ datetime_obj = parse_timestamp(value)
+ if datetime_obj.tzinfo is None:
+ # A case could be made that if no time zone is provided,
+ # we should use the local time. However, to preserve backwards
+ # compat, the previous behavior was to assume UTC, which is
+ # what we do here.
+ datetime_obj = datetime_obj.replace(tzinfo=tzutc())
+ else:
+ datetime_obj = datetime_obj.astimezone(tzutc())
+ return datetime_obj
+
+
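
A short sketch of the input forms parse_to_aware_datetime() accepts; all three normalize to the same aware UTC datetime:

    import datetime
    from dateutil.tz import tzutc
    from botocore.utils import parse_to_aware_datetime

    expected = datetime.datetime(1970, 1, 1, tzinfo=tzutc())
    assert parse_to_aware_datetime(0) == expected                      # epoch
    assert parse_to_aware_datetime('1970-01-01') == expected           # iso8601
    assert parse_to_aware_datetime(datetime.datetime(1970, 1, 1)) == expected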
+def datetime2timestamp(dt, default_timezone=None):
+ """Calculate the timestamp based on the given datetime instance.
+
+ :type dt: datetime
+ :param dt: A datetime object to be converted into timestamp
+ :type default_timezone: tzinfo
+ :param default_timezone: If it is provided as None, we treat it as tzutc().
+ But it is only used when dt is a naive datetime.
+ :returns: The timestamp
+ """
+ epoch = datetime.datetime(1970, 1, 1)
+ if dt.tzinfo is None:
+ if default_timezone is None:
+ default_timezone = tzutc()
+ dt = dt.replace(tzinfo=default_timezone)
+ d = dt.replace(tzinfo=None) - dt.utcoffset() - epoch
+ if hasattr(d, "total_seconds"):
+ return d.total_seconds() # Works in Python 2.7+
+ return (d.microseconds + (d.seconds + d.days * 24 * 3600) * 10**6) / 10**6
+
+
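
And the inverse direction, a sketch of datetime2timestamp() with naive versus aware input:

    import datetime
    from dateutil.tz import tzutc
    from botocore.utils import datetime2timestamp

    naive = datetime.datetime(1970, 1, 1, 0, 0, 1)
    aware = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=tzutc())
    # A naive datetime is assumed to be UTC (default_timezone=None).
    assert datetime2timestamp(naive) == 1.0
    assert datetime2timestamp(aware) == 1.0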
+def calculate_sha256(body, as_hex=False):
+ """Calculate a sha256 checksum.
+
+ This method will calculate the sha256 checksum of a file like
+ object. Note that this method will iterate through the entire
+ file contents. The caller is responsible for ensuring the proper
+ starting position of the file and ``seek()``'ing the file back
+ to its starting location if other consumers need to read from
+ the file like object.
+
+ :param body: Any file like object. The file must be opened
+ in binary mode such that a ``.read()`` call returns bytes.
+ :param as_hex: If True, then the hex digest is returned.
+ If False, then the digest (as binary bytes) is returned.
+
+ :returns: The sha256 checksum
+
+ """
+ checksum = hashlib.sha256()
+ for chunk in iter(lambda: body.read(1024 * 1024), b''):
+ checksum.update(chunk)
+ if as_hex:
+ return checksum.hexdigest()
+ else:
+ return checksum.digest()
+
+
+def calculate_tree_hash(body):
+ """Calculate a tree hash checksum.
+
+ For more information see:
+
+ http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html
+
+ :param body: Any file like object. This has the same constraints as
+ the ``body`` param in calculate_sha256
+
+ :rtype: str
+ :returns: The hex version of the calculated tree hash
+
+ """
+ chunks = []
+ required_chunk_size = 1024 * 1024
+ sha256 = hashlib.sha256
+ for chunk in iter(lambda: body.read(required_chunk_size), b''):
+ chunks.append(sha256(chunk).digest())
+ if not chunks:
+ return sha256(b'').hexdigest()
+ while len(chunks) > 1:
+ new_chunks = []
+ for first, second in _in_pairs(chunks):
+ if second is not None:
+ new_chunks.append(sha256(first + second).digest())
+ else:
+ # We're at the end of the list and there's no pair left.
+ new_chunks.append(first)
+ chunks = new_chunks
+ return binascii.hexlify(chunks[0]).decode('ascii')
+
+
+def _in_pairs(iterable):
+ # Creates an iterator that iterates over the list in pairs:
+ # for a, b in _in_pairs([0, 1, 2, 3, 4]):
+ # print(a, b)
+ #
+ # will print:
+ # 0, 1
+ # 2, 3
+ # 4, None
+ shared_iter = iter(iterable)
+ # Note that zip_longest is a compat import that uses
+ # the itertools izip_longest. This creates an iterator,
+ # this call below does _not_ immediately create the list
+ # of pairs.
+ return zip_longest(shared_iter, shared_iter)
+
+
+class CachedProperty(object):
+ """A read only property that caches the initially computed value.
+
+ This descriptor will only call the provided ``fget`` function once.
+ Subsequent access to this property will return the cached value.
+
+ """
+
+ def __init__(self, fget):
+ self._fget = fget
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ else:
+ computed_value = self._fget(obj)
+ obj.__dict__[self._fget.__name__] = computed_value
+ return computed_value
+
+
+class ArgumentGenerator(object):
+ """Generate sample input based on a shape model.
+
+ This class contains a ``generate_skeleton`` method that will take
+ an input/output shape (created from ``botocore.model``) and generate
+ a sample dictionary corresponding to the input/output shape.
+
+ The specific values used are placeholder values. For strings, either an
+ empty string or the member name is used; for numbers, 0 or 0.0 is used.
+ The intended usage of this class is to generate the *shape* of the input
+ structure.
+
+ This can be useful for operations that have complex input shapes.
+ This allows a user to just fill in the necessary data instead of
+ worrying about the specific structure of the input arguments.
+
+ Example usage::
+
+ s = botocore.session.get_session()
+ ddb = s.get_service_model('dynamodb')
+ arg_gen = ArgumentGenerator()
+ sample_input = arg_gen.generate_skeleton(
+ ddb.operation_model('CreateTable').input_shape)
+ print("Sample input for dynamodb.CreateTable: %s" % sample_input)
+
+ """
+ def __init__(self, use_member_names=False):
+ self._use_member_names = use_member_names
+
+ def generate_skeleton(self, shape):
+ """Generate a sample input.
+
+ :type shape: ``botocore.model.Shape``
+ :param shape: The input shape.
+
+ :return: The generated skeleton input corresponding to the
+ provided input shape.
+
+ """
+ stack = []
+ return self._generate_skeleton(shape, stack)
+
+ def _generate_skeleton(self, shape, stack, name=''):
+ stack.append(shape.name)
+ try:
+ if shape.type_name == 'structure':
+ return self._generate_type_structure(shape, stack)
+ elif shape.type_name == 'list':
+ return self._generate_type_list(shape, stack)
+ elif shape.type_name == 'map':
+ return self._generate_type_map(shape, stack)
+ elif shape.type_name == 'string':
+ if self._use_member_names:
+ return name
+ if shape.enum:
+ return random.choice(shape.enum)
+ return ''
+ elif shape.type_name in ['integer', 'long']:
+ return 0
elif shape.type_name in ['float', 'double']:
- return 0.0
- elif shape.type_name == 'boolean':
- return True
- elif shape.type_name == 'timestamp':
- return datetime.datetime(1970, 1, 1, 0, 0, 0)
- finally:
- stack.pop()
-
- def _generate_type_structure(self, shape, stack):
- if stack.count(shape.name) > 1:
- return {}
- skeleton = OrderedDict()
- for member_name, member_shape in shape.members.items():
- skeleton[member_name] = self._generate_skeleton(
- member_shape, stack, name=member_name)
- return skeleton
-
- def _generate_type_list(self, shape, stack):
- # For list elements we've arbitrarily decided to
- # return a single element for the skeleton list.
- name = ''
- if self._use_member_names:
- name = shape.member.name
- return [
- self._generate_skeleton(shape.member, stack, name),
- ]
-
- def _generate_type_map(self, shape, stack):
- key_shape = shape.key
- value_shape = shape.value
- assert key_shape.type_name == 'string'
- return OrderedDict([
- ('KeyName', self._generate_skeleton(value_shape, stack)),
- ])
-
-
+ return 0.0
+ elif shape.type_name == 'boolean':
+ return True
+ elif shape.type_name == 'timestamp':
+ return datetime.datetime(1970, 1, 1, 0, 0, 0)
+ finally:
+ stack.pop()
+
+ def _generate_type_structure(self, shape, stack):
+ if stack.count(shape.name) > 1:
+ return {}
+ skeleton = OrderedDict()
+ for member_name, member_shape in shape.members.items():
+ skeleton[member_name] = self._generate_skeleton(
+ member_shape, stack, name=member_name)
+ return skeleton
+
+ def _generate_type_list(self, shape, stack):
+ # For list elements we've arbitrarily decided to
+ # return a single element for the skeleton list.
+ name = ''
+ if self._use_member_names:
+ name = shape.member.name
+ return [
+ self._generate_skeleton(shape.member, stack, name),
+ ]
+
+ def _generate_type_map(self, shape, stack):
+ key_shape = shape.key
+ value_shape = shape.value
+ assert key_shape.type_name == 'string'
+ return OrderedDict([
+ ('KeyName', self._generate_skeleton(value_shape, stack)),
+ ])
+
+
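
A minimal sketch of the CachedProperty descriptor defined above; the class and its call counter are invented for illustration:

    from botocore.utils import CachedProperty

    class ServiceInfo(object):
        calls = 0

        @CachedProperty
        def endpoint(self):
            # Runs once; __get__ then stores the result in obj.__dict__,
            # which shadows the (non-data) descriptor on later reads.
            ServiceInfo.calls += 1
            return 'https://example.amazonaws.com'

    info = ServiceInfo()
    assert info.endpoint == info.endpoint
    assert ServiceInfo.calls == 1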
def is_valid_ipv6_endpoint_url(endpoint_url):
if UNSAFE_URL_CHARS.intersection(endpoint_url):
return False
netloc = urlparse(endpoint_url).netloc
return IPV6_ADDRZ_RE.match(netloc) is not None
-def is_valid_endpoint_url(endpoint_url):
- """Verify the endpoint_url is valid.
-
- :type endpoint_url: string
- :param endpoint_url: An endpoint_url. Must have at least a scheme
- and a hostname.
-
- :return: True if the endpoint url is valid. False otherwise.
-
- """
+def is_valid_endpoint_url(endpoint_url):
+ """Verify the endpoint_url is valid.
+
+ :type endpoint_url: string
+ :param endpoint_url: An endpoint_url. Must have at least a scheme
+ and a hostname.
+
+ :return: True if the endpoint url is valid. False otherwise.
+
+ """
# post-bpo-43882 urlsplit() strips unsafe characters from URL, causing
# it to pass hostname validation below. Detect them early to fix that.
if UNSAFE_URL_CHARS.intersection(endpoint_url):
return False
- parts = urlsplit(endpoint_url)
- hostname = parts.hostname
- if hostname is None:
- return False
- if len(hostname) > 255:
- return False
- if hostname[-1] == ".":
- hostname = hostname[:-1]
- allowed = re.compile(
- r"^((?!-)[A-Z\d-]{1,63}(?<!-)\.)*((?!-)[A-Z\d-]{1,63}(?<!-))$",
- re.IGNORECASE)
- return allowed.match(hostname)
-
+ parts = urlsplit(endpoint_url)
+ hostname = parts.hostname
+ if hostname is None:
+ return False
+ if len(hostname) > 255:
+ return False
+ if hostname[-1] == ".":
+ hostname = hostname[:-1]
+ allowed = re.compile(
+ r"^((?!-)[A-Z\d-]{1,63}(?<!-)\.)*((?!-)[A-Z\d-]{1,63}(?<!-))$",
+ re.IGNORECASE)
+ return allowed.match(hostname)
+
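
A few illustrative checks against the endpoint validation above; is_valid_endpoint_url() returns a truthy match (or False), so it composes with plain boolean tests:

    from botocore.utils import is_valid_endpoint_url

    assert is_valid_endpoint_url('https://s3.us-east-1.amazonaws.com')
    assert is_valid_endpoint_url('http://localhost:8000')
    # Unsafe characters and missing hostnames are rejected early.
    assert not is_valid_endpoint_url('https://exa\nmple.com')
    assert not is_valid_endpoint_url('https://')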
def is_valid_uri(endpoint_url):
return is_valid_endpoint_url(endpoint_url) or is_valid_ipv6_endpoint_url(endpoint_url)
-
+
def validate_region_name(region_name):
"""Provided region_name must be a valid host label."""
if region_name is None:
@@ -1026,220 +1026,220 @@ def validate_region_name(region_name):
raise InvalidRegionError(region_name=region_name)
-def check_dns_name(bucket_name):
- """
- Check to see if the ``bucket_name`` complies with the
- restricted DNS naming conventions necessary to allow
- access via virtual-hosting style.
-
- Even though "." characters are perfectly valid in this DNS
- naming scheme, we are going to punt on any name containing a
- "." character because these will cause SSL cert validation
- problems if we try to use virtual-hosting style addressing.
- """
- if '.' in bucket_name:
- return False
- n = len(bucket_name)
- if n < 3 or n > 63:
- # Wrong length
- return False
- match = LABEL_RE.match(bucket_name)
- if match is None or match.end() != len(bucket_name):
- return False
- return True
-
-
-def fix_s3_host(request, signature_version, region_name,
+def check_dns_name(bucket_name):
+ """
+ Check to see if the ``bucket_name`` complies with the
+ restricted DNS naming conventions necessary to allow
+ access via virtual-hosting style.
+
+ Even though "." characters are perfectly valid in this DNS
+ naming scheme, we are going to punt on any name containing a
+ "." character because these will cause SSL cert validation
+ problems if we try to use virtual-hosting style addressing.
+ """
+ if '.' in bucket_name:
+ return False
+ n = len(bucket_name)
+ if n < 3 or n > 63:
+ # Wrong length
+ return False
+ match = LABEL_RE.match(bucket_name)
+ if match is None or match.end() != len(bucket_name):
+ return False
+ return True
+
+
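
Illustrative inputs for check_dns_name() above; the bucket names are invented:

    from botocore.utils import check_dns_name

    assert check_dns_name('my-bucket')        # lowercase label, 3-63 chars
    assert not check_dns_name('my.bucket')    # dots would break SSL certs
    assert not check_dns_name('ab')           # too short
    assert not check_dns_name('MyBucket')     # uppercase fails LABEL_RE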
+def fix_s3_host(request, signature_version, region_name,
default_endpoint_url=None, **kwargs):
- """
- This handler looks at S3 requests just before they are signed.
- If there is a bucket name on the path (true for everything except
- ListAllBuckets) it checks to see if that bucket name conforms to
- the DNS naming conventions. If it does, it alters the request to
- use ``virtual hosting`` style addressing rather than ``path-style``
+ """
+ This handler looks at S3 requests just before they are signed.
+ If there is a bucket name on the path (true for everything except
+ ListAllBuckets) it checks to see if that bucket name conforms to
+ the DNS naming conventions. If it does, it alters the request to
+ use ``virtual hosting`` style addressing rather than ``path-style``
addressing.
- """
+ """
if request.context.get('use_global_endpoint', False):
default_endpoint_url = 's3.amazonaws.com'
- try:
- switch_to_virtual_host_style(
- request, signature_version, default_endpoint_url)
- except InvalidDNSNameError as e:
- bucket_name = e.kwargs['bucket_name']
- logger.debug('Not changing URI, bucket is not DNS compatible: %s',
- bucket_name)
-
-
-def switch_to_virtual_host_style(request, signature_version,
- default_endpoint_url=None, **kwargs):
- """
- This is a handler to force virtual host style s3 addressing no matter
- the signature version (which is taken into consideration for the default
- case). If the bucket is not DNS compatible, an InvalidDNSNameError is raised.
-
- :param request: An AWSRequest object that is about to be sent.
- :param signature_version: The signature version to sign with
- :param default_endpoint_url: The endpoint to use when switching to a
- virtual style. If None is supplied, the virtual host will be
- constructed from the url of the request.
- """
- if request.auth_path is not None:
- # The auth_path has already been applied (this may be a
- # retried request). We don't need to perform this
- # customization again.
- return
- elif _is_get_bucket_location_request(request):
- # For the GetBucketLocation response, we should not be using
- # the virtual host style addressing so we can avoid any sigv4
- # issues.
- logger.debug("Request is GetBucketLocation operation, not checking "
- "for DNS compatibility.")
- return
- parts = urlsplit(request.url)
- request.auth_path = parts.path
- path_parts = parts.path.split('/')
-
- # Retrieve the endpoint we will be prepending the bucket name to.
- if default_endpoint_url is None:
- default_endpoint_url = parts.netloc
-
- if len(path_parts) > 1:
- bucket_name = path_parts[1]
- if not bucket_name:
- # If the bucket name is empty we should not be checking for
- # dns compatibility.
- return
- logger.debug('Checking for DNS compatible bucket for: %s',
- request.url)
- if check_dns_name(bucket_name):
- # If the operation is on a bucket, the auth_path must be
- # terminated with a '/' character.
- if len(path_parts) == 2:
- if request.auth_path[-1] != '/':
- request.auth_path += '/'
- path_parts.remove(bucket_name)
- # At the very least the path must be a '/', such as with the
- # CreateBucket operation when DNS style is being used. If this
- # is not used you will get an empty path which is incorrect.
- path = '/'.join(path_parts) or '/'
- global_endpoint = default_endpoint_url
- host = bucket_name + '.' + global_endpoint
- new_tuple = (parts.scheme, host, path,
- parts.query, '')
- new_uri = urlunsplit(new_tuple)
- request.url = new_uri
- logger.debug('URI updated to: %s', new_uri)
- else:
- raise InvalidDNSNameError(bucket_name=bucket_name)
-
-
-def _is_get_bucket_location_request(request):
- return request.url.endswith('?location')
-
-
-def instance_cache(func):
- """Method decorator for caching method calls to a single instance.
-
- **This is not a general purpose caching decorator.**
-
- In order to use this, you *must* provide an ``_instance_cache``
- attribute on the instance.
-
- This decorator is used to cache method calls. The cache is only
- scoped to a single instance, so multiple instances
- will each maintain their own cache. In order to keep things simple,
- this decorator requires that you provide an ``_instance_cache``
- attribute on your instance.
-
- """
- func_name = func.__name__
-
- @functools.wraps(func)
- def _cache_guard(self, *args, **kwargs):
- cache_key = (func_name, args)
- if kwargs:
- kwarg_items = tuple(sorted(kwargs.items()))
- cache_key = (func_name, args, kwarg_items)
- result = self._instance_cache.get(cache_key)
- if result is not None:
- return result
- result = func(self, *args, **kwargs)
- self._instance_cache[cache_key] = result
- return result
- return _cache_guard
-
-
-def switch_host_s3_accelerate(request, operation_name, **kwargs):
- """Switches the current s3 endpoint with an S3 Accelerate endpoint"""
-
- # Note that when registered, the switching of the s3 host happens
- # before the URL is changed to virtual-host style. So we are not
- # concerned with ensuring that the bucket name is translated to the
- # virtual style here and we can hard-code the Accelerate endpoint.
- parts = urlsplit(request.url).netloc.split('.')
- parts = [p for p in parts if p in S3_ACCELERATE_WHITELIST]
- endpoint = 'https://s3-accelerate.'
- if len(parts) > 0:
- endpoint += '.'.join(parts) + '.'
- endpoint += 'amazonaws.com'
-
- if operation_name in ['ListBuckets', 'CreateBucket', 'DeleteBucket']:
- return
- _switch_hosts(request, endpoint, use_new_scheme=False)
-
-
-def switch_host_with_param(request, param_name):
- """Switches the host using a parameter value from a JSON request body"""
- request_json = json.loads(request.data.decode('utf-8'))
- if request_json.get(param_name):
- new_endpoint = request_json[param_name]
- _switch_hosts(request, new_endpoint)
-
-
-def _switch_hosts(request, new_endpoint, use_new_scheme=True):
- final_endpoint = _get_new_endpoint(
- request.url, new_endpoint, use_new_scheme)
- request.url = final_endpoint
-
-
-def _get_new_endpoint(original_endpoint, new_endpoint, use_new_scheme=True):
- new_endpoint_components = urlsplit(new_endpoint)
- original_endpoint_components = urlsplit(original_endpoint)
- scheme = original_endpoint_components.scheme
- if use_new_scheme:
- scheme = new_endpoint_components.scheme
- final_endpoint_components = (
- scheme,
- new_endpoint_components.netloc,
- original_endpoint_components.path,
- original_endpoint_components.query,
- ''
- )
- final_endpoint = urlunsplit(final_endpoint_components)
- logger.debug('Updating URI from %s to %s' % (
- original_endpoint, final_endpoint))
- return final_endpoint
-
-
-def deep_merge(base, extra):
- """Deeply two dictionaries, overriding existing keys in the base.
-
- :param base: The base dictionary which will be merged into.
- :param extra: The dictionary to merge into the base. Keys from this
- dictionary will take precedence.
- """
- for key in extra:
- # If the key represents a dict on both given dicts, merge the sub-dicts
- if key in base and isinstance(base[key], dict)\
- and isinstance(extra[key], dict):
- deep_merge(base[key], extra[key])
- continue
-
- # Otherwise, set the key on the base to be the value of the extra.
- base[key] = extra[key]
-
-
+ try:
+ switch_to_virtual_host_style(
+ request, signature_version, default_endpoint_url)
+ except InvalidDNSNameError as e:
+ bucket_name = e.kwargs['bucket_name']
+ logger.debug('Not changing URI, bucket is not DNS compatible: %s',
+ bucket_name)
+
+
+def switch_to_virtual_host_style(request, signature_version,
+ default_endpoint_url=None, **kwargs):
+ """
+    This is a handler to force virtual host style s3 addressing no matter
+    the signature version (which is taken into consideration for the
+    default case). If the bucket is not DNS compatible, an
+    InvalidDNSNameError is raised.
+
+    :param request: An AWSRequest object that is about to be sent.
+    :param signature_version: The signature version to sign with.
+    :param default_endpoint_url: The endpoint to use when switching to a
+        virtual style. If None is supplied, the virtual host will be
+        constructed from the url of the request.
+ """
+ if request.auth_path is not None:
+ # The auth_path has already been applied (this may be a
+ # retried request). We don't need to perform this
+ # customization again.
+ return
+ elif _is_get_bucket_location_request(request):
+ # For the GetBucketLocation response, we should not be using
+ # the virtual host style addressing so we can avoid any sigv4
+ # issues.
+ logger.debug("Request is GetBucketLocation operation, not checking "
+ "for DNS compatibility.")
+ return
+ parts = urlsplit(request.url)
+ request.auth_path = parts.path
+ path_parts = parts.path.split('/')
+
+    # Retrieve the endpoint we will be prepending the bucket name to.
+ if default_endpoint_url is None:
+ default_endpoint_url = parts.netloc
+
+ if len(path_parts) > 1:
+ bucket_name = path_parts[1]
+ if not bucket_name:
+ # If the bucket name is empty we should not be checking for
+ # dns compatibility.
+ return
+ logger.debug('Checking for DNS compatible bucket for: %s',
+ request.url)
+ if check_dns_name(bucket_name):
+ # If the operation is on a bucket, the auth_path must be
+ # terminated with a '/' character.
+ if len(path_parts) == 2:
+ if request.auth_path[-1] != '/':
+ request.auth_path += '/'
+ path_parts.remove(bucket_name)
+ # At the very least the path must be a '/', such as with the
+ # CreateBucket operation when DNS style is being used. If this
+ # is not used you will get an empty path which is incorrect.
+ path = '/'.join(path_parts) or '/'
+ global_endpoint = default_endpoint_url
+ host = bucket_name + '.' + global_endpoint
+ new_tuple = (parts.scheme, host, path,
+ parts.query, '')
+ new_uri = urlunsplit(new_tuple)
+ request.url = new_uri
+ logger.debug('URI updated to: %s', new_uri)
+ else:
+ raise InvalidDNSNameError(bucket_name=bucket_name)
+
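For reference, the host rewrite implemented above reduces to the following
standalone sketch (standard library only; the bucket and key names are
illustrative, not taken from the patch):

    from urllib.parse import urlsplit, urlunsplit

    url = 'https://s3.amazonaws.com/my-bucket/my-key'
    parts = urlsplit(url)
    path_parts = parts.path.split('/')    # ['', 'my-bucket', 'my-key']
    bucket = path_parts.pop(1)            # strip the bucket path segment
    host = bucket + '.' + parts.netloc    # prepend the bucket to the endpoint
    path = '/'.join(path_parts) or '/'    # never allow an empty path
    print(urlunsplit((parts.scheme, host, path, parts.query, '')))
    # https://my-bucket.s3.amazonaws.com/my-key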
+
+def _is_get_bucket_location_request(request):
+ return request.url.endswith('?location')
+
+
+def instance_cache(func):
+ """Method decorator for caching method calls to a single instance.
+
+ **This is not a general purpose caching decorator.**
+
+    This decorator caches method calls. The cache is scoped to a single
+    instance, so multiple instances each maintain their own cache. To
+    keep things simple, the decorator requires that you provide an
+    ``_instance_cache`` attribute on your instance.
+
+ """
+ func_name = func.__name__
+
+ @functools.wraps(func)
+ def _cache_guard(self, *args, **kwargs):
+ cache_key = (func_name, args)
+ if kwargs:
+ kwarg_items = tuple(sorted(kwargs.items()))
+ cache_key = (func_name, args, kwarg_items)
+ result = self._instance_cache.get(cache_key)
+ if result is not None:
+ return result
+ result = func(self, *args, **kwargs)
+ self._instance_cache[cache_key] = result
+ return result
+ return _cache_guard
+
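A minimal usage sketch (the class and method are hypothetical; the only
requirement, per the docstring, is the ``_instance_cache`` dict):

    class ServiceCatalog(object):
        def __init__(self):
            # Required by @instance_cache.
            self._instance_cache = {}

        @instance_cache
        def load_model(self, name, version='latest'):
            # Stand-in for expensive work; runs once per distinct
            # (name, version) pair on a given instance.
            return {'name': name, 'version': version}

    catalog = ServiceCatalog()
    catalog.load_model('s3')    # computed
    catalog.load_model('s3')    # returned from _instance_cache

Note that because the cache guard tests ``is not None``, a method that
returns None is re-executed on every call.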
+
+def switch_host_s3_accelerate(request, operation_name, **kwargs):
+    """Replaces the current s3 endpoint with an S3 Accelerate endpoint"""
+
+    # Note that when this handler is registered, the switching of the s3
+    # host happens before the host is changed to the virtual style, so we
+    # are not concerned with translating the bucket name to the virtual
+    # style here and we can hard code the Accelerate endpoint.
+ parts = urlsplit(request.url).netloc.split('.')
+ parts = [p for p in parts if p in S3_ACCELERATE_WHITELIST]
+ endpoint = 'https://s3-accelerate.'
+ if len(parts) > 0:
+ endpoint += '.'.join(parts) + '.'
+ endpoint += 'amazonaws.com'
+
+ if operation_name in ['ListBuckets', 'CreateBucket', 'DeleteBucket']:
+ return
+ _switch_hosts(request, endpoint, use_new_scheme=False)
+
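Concretely, the whitelist filter above keeps only recognized endpoint
qualifiers; assuming the module-level ``S3_ACCELERATE_WHITELIST`` value of
``['dualstack']`` used by botocore, the construction behaves as in this
sketch:

    S3_ACCELERATE_WHITELIST = ['dualstack']   # assumed module-level value

    def accelerate_endpoint(netloc):
        parts = [p for p in netloc.split('.') if p in S3_ACCELERATE_WHITELIST]
        endpoint = 'https://s3-accelerate.'
        if parts:
            endpoint += '.'.join(parts) + '.'
        return endpoint + 'amazonaws.com'

    accelerate_endpoint('mybucket.s3.us-east-1.amazonaws.com')
    # 'https://s3-accelerate.amazonaws.com'
    accelerate_endpoint('mybucket.s3.dualstack.us-east-1.amazonaws.com')
    # 'https://s3-accelerate.dualstack.amazonaws.com'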
+
+def switch_host_with_param(request, param_name):
+ """Switches the host using a parameter value from a JSON request body"""
+ request_json = json.loads(request.data.decode('utf-8'))
+ if request_json.get(param_name):
+ new_endpoint = request_json[param_name]
+ _switch_hosts(request, new_endpoint)
+
+
+def _switch_hosts(request, new_endpoint, use_new_scheme=True):
+ final_endpoint = _get_new_endpoint(
+ request.url, new_endpoint, use_new_scheme)
+ request.url = final_endpoint
+
+
+def _get_new_endpoint(original_endpoint, new_endpoint, use_new_scheme=True):
+ new_endpoint_components = urlsplit(new_endpoint)
+ original_endpoint_components = urlsplit(original_endpoint)
+ scheme = original_endpoint_components.scheme
+ if use_new_scheme:
+ scheme = new_endpoint_components.scheme
+ final_endpoint_components = (
+ scheme,
+ new_endpoint_components.netloc,
+ original_endpoint_components.path,
+ original_endpoint_components.query,
+ ''
+ )
+ final_endpoint = urlunsplit(final_endpoint_components)
+ logger.debug('Updating URI from %s to %s' % (
+ original_endpoint, final_endpoint))
+ return final_endpoint
+
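A quick illustration of the ``_get_new_endpoint`` semantics (the URLs are
made up):

    _get_new_endpoint('https://s3.us-west-2.amazonaws.com/my-bucket?list-type=2',
                      'http://s3.eu-west-1.amazonaws.com',
                      use_new_scheme=False)
    # 'https://s3.eu-west-1.amazonaws.com/my-bucket?list-type=2'
    # Only the netloc is swapped; the scheme, path and query of the
    # original endpoint are preserved because use_new_scheme=False.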
+
+def deep_merge(base, extra):
+    """Deeply merge two dictionaries, overriding existing keys in the base.
+
+ :param base: The base dictionary which will be merged into.
+ :param extra: The dictionary to merge into the base. Keys from this
+ dictionary will take precedence.
+ """
+ for key in extra:
+ # If the key represents a dict on both given dicts, merge the sub-dicts
+ if key in base and isinstance(base[key], dict)\
+ and isinstance(extra[key], dict):
+ deep_merge(base[key], extra[key])
+ continue
+
+ # Otherwise, set the key on the base to be the value of the extra.
+ base[key] = extra[key]
+
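For example (values are illustrative):

    base = {'retries': {'max_attempts': 5, 'mode': 'standard'}}
    extra = {'retries': {'max_attempts': 10}, 'region': 'us-east-1'}
    deep_merge(base, extra)
    # base is now:
    # {'retries': {'max_attempts': 10, 'mode': 'standard'},
    #  'region': 'us-east-1'}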
+
def hyphenize_service_id(service_id):
"""Translate the form used for event emitters.
@@ -1248,36 +1248,36 @@ def hyphenize_service_id(service_id):
return service_id.replace(' ', '-').lower()
-class S3RegionRedirector(object):
- def __init__(self, endpoint_bridge, client, cache=None):
- self._endpoint_resolver = endpoint_bridge
- self._cache = cache
- if self._cache is None:
- self._cache = {}
-
- # This needs to be a weak ref in order to prevent memory leaks on
- # python 2.6
- self._client = weakref.proxy(client)
-
- def register(self, event_emitter=None):
- emitter = event_emitter or self._client.meta.events
- emitter.register('needs-retry.s3', self.redirect_from_error)
- emitter.register('before-call.s3', self.set_request_url)
- emitter.register('before-parameter-build.s3',
- self.redirect_from_cache)
-
- def redirect_from_error(self, request_dict, response, operation, **kwargs):
- """
- An S3 request sent to the wrong region will return an error that
- contains the endpoint the request should be sent to. This handler
- will add the redirect information to the signing context and then
- redirect the request.
- """
- if response is None:
-            # This could be None if there was a ConnectionError or other
- # transport error.
- return
-
+class S3RegionRedirector(object):
+ def __init__(self, endpoint_bridge, client, cache=None):
+ self._endpoint_resolver = endpoint_bridge
+ self._cache = cache
+ if self._cache is None:
+ self._cache = {}
+
+ # This needs to be a weak ref in order to prevent memory leaks on
+ # python 2.6
+ self._client = weakref.proxy(client)
+
+ def register(self, event_emitter=None):
+ emitter = event_emitter or self._client.meta.events
+ emitter.register('needs-retry.s3', self.redirect_from_error)
+ emitter.register('before-call.s3', self.set_request_url)
+ emitter.register('before-parameter-build.s3',
+ self.redirect_from_cache)
+
+ def redirect_from_error(self, request_dict, response, operation, **kwargs):
+ """
+ An S3 request sent to the wrong region will return an error that
+ contains the endpoint the request should be sent to. This handler
+ will add the redirect information to the signing context and then
+ redirect the request.
+ """
+ if response is None:
+            # This could be None if there was a ConnectionError or other
+ # transport error.
+ return
+
if self._is_s3_accesspoint(request_dict.get('context', {})):
logger.debug(
'S3 request was previously to an accesspoint, not redirecting.'
@@ -1289,10 +1289,10 @@ class S3RegionRedirector(object):
'S3 request was previously redirected, not redirecting.')
return
- error = response[1].get('Error', {})
- error_code = error.get('Code')
+ error = response[1].get('Error', {})
+ error_code = error.get('Code')
response_metadata = response[1].get('ResponseMetadata', {})
-
+
# We have to account for 400 responses because
# if we sign a Head* request with the wrong region,
# we'll get a 400 Bad Request but we won't get a
@@ -1316,97 +1316,97 @@ class S3RegionRedirector(object):
if not any([is_special_head_object, is_wrong_signing_region,
is_permanent_redirect, is_special_head_bucket,
is_redirect_status]):
- return
-
- bucket = request_dict['context']['signing']['bucket']
- client_region = request_dict['context'].get('client_region')
- new_region = self.get_bucket_region(bucket, response)
-
- if new_region is None:
- logger.debug(
- "S3 client configured for region %s but the bucket %s is not "
- "in that region and the proper region could not be "
- "automatically determined." % (client_region, bucket))
- return
-
- logger.debug(
- "S3 client configured for region %s but the bucket %s is in region"
- " %s; Please configure the proper region to avoid multiple "
- "unnecessary redirects and signing attempts." % (
- client_region, bucket, new_region))
- endpoint = self._endpoint_resolver.resolve('s3', new_region)
- endpoint = endpoint['endpoint_url']
-
- signing_context = {
- 'region': new_region,
- 'bucket': bucket,
- 'endpoint': endpoint
- }
- request_dict['context']['signing'] = signing_context
-
- self._cache[bucket] = signing_context
- self.set_request_url(request_dict, request_dict['context'])
-
+ return
+
+ bucket = request_dict['context']['signing']['bucket']
+ client_region = request_dict['context'].get('client_region')
+ new_region = self.get_bucket_region(bucket, response)
+
+ if new_region is None:
+ logger.debug(
+ "S3 client configured for region %s but the bucket %s is not "
+ "in that region and the proper region could not be "
+ "automatically determined." % (client_region, bucket))
+ return
+
+ logger.debug(
+ "S3 client configured for region %s but the bucket %s is in region"
+ " %s; Please configure the proper region to avoid multiple "
+ "unnecessary redirects and signing attempts." % (
+ client_region, bucket, new_region))
+ endpoint = self._endpoint_resolver.resolve('s3', new_region)
+ endpoint = endpoint['endpoint_url']
+
+ signing_context = {
+ 'region': new_region,
+ 'bucket': bucket,
+ 'endpoint': endpoint
+ }
+ request_dict['context']['signing'] = signing_context
+
+ self._cache[bucket] = signing_context
+ self.set_request_url(request_dict, request_dict['context'])
+
request_dict['context']['s3_redirected'] = True
- # Return 0 so it doesn't wait to retry
- return 0
-
- def get_bucket_region(self, bucket, response):
- """
-        There are multiple potential sources for the new region to redirect to,
-        but they aren't all universally available for use. This will try to
-        find the region in the response elements, but will fall back to
-        calling HEAD on the bucket if all else fails.
-
- :param bucket: The bucket to find the region for. This is necessary if
- the region is not available in the error response.
- :param response: A response representing a service request that failed
- due to incorrect region configuration.
- """
- # First try to source the region from the headers.
- service_response = response[1]
- response_headers = service_response['ResponseMetadata']['HTTPHeaders']
- if 'x-amz-bucket-region' in response_headers:
- return response_headers['x-amz-bucket-region']
-
- # Next, check the error body
- region = service_response.get('Error', {}).get('Region', None)
- if region is not None:
- return region
-
- # Finally, HEAD the bucket. No other choice sadly.
- try:
- response = self._client.head_bucket(Bucket=bucket)
- headers = response['ResponseMetadata']['HTTPHeaders']
- except ClientError as e:
- headers = e.response['ResponseMetadata']['HTTPHeaders']
-
- region = headers.get('x-amz-bucket-region', None)
- return region
-
- def set_request_url(self, params, context, **kwargs):
- endpoint = context.get('signing', {}).get('endpoint', None)
- if endpoint is not None:
- params['url'] = _get_new_endpoint(params['url'], endpoint, False)
-
- def redirect_from_cache(self, params, context, **kwargs):
- """
- This handler retrieves a given bucket's signing context from the cache
- and adds it into the request context.
- """
+ # Return 0 so it doesn't wait to retry
+ return 0
+
+ def get_bucket_region(self, bucket, response):
+ """
+        There are multiple potential sources for the new region to redirect to,
+        but they aren't all universally available for use. This will try to
+        find the region in the response elements, but will fall back to
+        calling HEAD on the bucket if all else fails.
+
+ :param bucket: The bucket to find the region for. This is necessary if
+ the region is not available in the error response.
+ :param response: A response representing a service request that failed
+ due to incorrect region configuration.
+ """
+ # First try to source the region from the headers.
+ service_response = response[1]
+ response_headers = service_response['ResponseMetadata']['HTTPHeaders']
+ if 'x-amz-bucket-region' in response_headers:
+ return response_headers['x-amz-bucket-region']
+
+ # Next, check the error body
+ region = service_response.get('Error', {}).get('Region', None)
+ if region is not None:
+ return region
+
+ # Finally, HEAD the bucket. No other choice sadly.
+ try:
+ response = self._client.head_bucket(Bucket=bucket)
+ headers = response['ResponseMetadata']['HTTPHeaders']
+ except ClientError as e:
+ headers = e.response['ResponseMetadata']['HTTPHeaders']
+
+ region = headers.get('x-amz-bucket-region', None)
+ return region
+
+ def set_request_url(self, params, context, **kwargs):
+ endpoint = context.get('signing', {}).get('endpoint', None)
+ if endpoint is not None:
+ params['url'] = _get_new_endpoint(params['url'], endpoint, False)
+
+ def redirect_from_cache(self, params, context, **kwargs):
+ """
+ This handler retrieves a given bucket's signing context from the cache
+ and adds it into the request context.
+ """
if self._is_s3_accesspoint(context):
return
- bucket = params.get('Bucket')
- signing_context = self._cache.get(bucket)
- if signing_context is not None:
- context['signing'] = signing_context
- else:
- context['signing'] = {'bucket': bucket}
-
+ bucket = params.get('Bucket')
+ signing_context = self._cache.get(bucket)
+ if signing_context is not None:
+ context['signing'] = signing_context
+ else:
+ context['signing'] = {'bucket': bucket}
+
def _is_s3_accesspoint(self, context):
return 's3_accesspoint' in context
-
+
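How the redirector is wired up, as a sketch (``endpoint_bridge`` and
``client`` stand for the objects botocore's client creator normally
supplies; user code does not usually construct this class directly):

    redirector = S3RegionRedirector(endpoint_bridge, client)
    redirector.register()
    # From here on, a request signed for the wrong region triggers
    # redirect_from_error() via 'needs-retry.s3', the corrected signing
    # context is cached per bucket, and redirect_from_cache() applies it
    # to subsequent requests before they are built.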
class InvalidArnException(ValueError):
pass
@@ -2152,104 +2152,104 @@ class S3ControlArnParamHandler(object):
context['arn_details'] = arn_details
-class ContainerMetadataFetcher(object):
-
- TIMEOUT_SECONDS = 2
- RETRY_ATTEMPTS = 3
- SLEEP_TIME = 1
- IP_ADDRESS = '169.254.170.2'
- _ALLOWED_HOSTS = [IP_ADDRESS, 'localhost', '127.0.0.1']
-
- def __init__(self, session=None, sleep=time.sleep):
- if session is None:
+class ContainerMetadataFetcher(object):
+
+ TIMEOUT_SECONDS = 2
+ RETRY_ATTEMPTS = 3
+ SLEEP_TIME = 1
+ IP_ADDRESS = '169.254.170.2'
+ _ALLOWED_HOSTS = [IP_ADDRESS, 'localhost', '127.0.0.1']
+
+ def __init__(self, session=None, sleep=time.sleep):
+ if session is None:
session = botocore.httpsession.URLLib3Session(
timeout=self.TIMEOUT_SECONDS
)
- self._session = session
- self._sleep = sleep
-
- def retrieve_full_uri(self, full_url, headers=None):
- """Retrieve JSON metadata from container metadata.
-
- :type full_url: str
- :param full_url: The full URL of the metadata service.
-            This should include the scheme as well, e.g.
- "http://localhost:123/foo"
-
- """
- self._validate_allowed_url(full_url)
- return self._retrieve_credentials(full_url, headers)
-
- def _validate_allowed_url(self, full_url):
- parsed = botocore.compat.urlparse(full_url)
- is_whitelisted_host = self._check_if_whitelisted_host(
- parsed.hostname)
- if not is_whitelisted_host:
- raise ValueError(
- "Unsupported host '%s'. Can only "
- "retrieve metadata from these hosts: %s" %
- (parsed.hostname, ', '.join(self._ALLOWED_HOSTS)))
-
- def _check_if_whitelisted_host(self, host):
- if host in self._ALLOWED_HOSTS:
- return True
- return False
-
- def retrieve_uri(self, relative_uri):
- """Retrieve JSON metadata from ECS metadata.
-
- :type relative_uri: str
-        :param relative_uri: A relative URI, e.g. "/foo/bar?id=123"
-
- :return: The parsed JSON response.
-
- """
- full_url = self.full_url(relative_uri)
- return self._retrieve_credentials(full_url)
-
- def _retrieve_credentials(self, full_url, extra_headers=None):
- headers = {'Accept': 'application/json'}
- if extra_headers is not None:
- headers.update(extra_headers)
- attempts = 0
- while True:
- try:
+ self._session = session
+ self._sleep = sleep
+
+ def retrieve_full_uri(self, full_url, headers=None):
+ """Retrieve JSON metadata from container metadata.
+
+ :type full_url: str
+ :param full_url: The full URL of the metadata service.
+            This should include the scheme as well, e.g.
+ "http://localhost:123/foo"
+
+ """
+ self._validate_allowed_url(full_url)
+ return self._retrieve_credentials(full_url, headers)
+
+ def _validate_allowed_url(self, full_url):
+ parsed = botocore.compat.urlparse(full_url)
+ is_whitelisted_host = self._check_if_whitelisted_host(
+ parsed.hostname)
+ if not is_whitelisted_host:
+ raise ValueError(
+ "Unsupported host '%s'. Can only "
+ "retrieve metadata from these hosts: %s" %
+ (parsed.hostname, ', '.join(self._ALLOWED_HOSTS)))
+
+ def _check_if_whitelisted_host(self, host):
+ if host in self._ALLOWED_HOSTS:
+ return True
+ return False
+
+ def retrieve_uri(self, relative_uri):
+ """Retrieve JSON metadata from ECS metadata.
+
+ :type relative_uri: str
+        :param relative_uri: A relative URI, e.g. "/foo/bar?id=123"
+
+ :return: The parsed JSON response.
+
+ """
+ full_url = self.full_url(relative_uri)
+ return self._retrieve_credentials(full_url)
+
+ def _retrieve_credentials(self, full_url, extra_headers=None):
+ headers = {'Accept': 'application/json'}
+ if extra_headers is not None:
+ headers.update(extra_headers)
+ attempts = 0
+ while True:
+ try:
return self._get_response(
full_url, headers, self.TIMEOUT_SECONDS)
- except MetadataRetrievalError as e:
- logger.debug("Received error when attempting to retrieve "
- "container metadata: %s", e, exc_info=True)
- self._sleep(self.SLEEP_TIME)
- attempts += 1
- if attempts >= self.RETRY_ATTEMPTS:
- raise
-
- def _get_response(self, full_url, headers, timeout):
- try:
+ except MetadataRetrievalError as e:
+ logger.debug("Received error when attempting to retrieve "
+ "container metadata: %s", e, exc_info=True)
+ self._sleep(self.SLEEP_TIME)
+ attempts += 1
+ if attempts >= self.RETRY_ATTEMPTS:
+ raise
+
+ def _get_response(self, full_url, headers, timeout):
+ try:
AWSRequest = botocore.awsrequest.AWSRequest
request = AWSRequest(method='GET', url=full_url, headers=headers)
response = self._session.send(request.prepare())
response_text = response.content.decode('utf-8')
- if response.status_code != 200:
- raise MetadataRetrievalError(
+ if response.status_code != 200:
+ raise MetadataRetrievalError(
error_msg=(
"Received non 200 response (%s) from ECS metadata: %s"
) % (response.status_code, response_text))
- try:
+ try:
return json.loads(response_text)
- except ValueError:
+ except ValueError:
error_msg = (
"Unable to parse JSON returned from ECS metadata services"
)
logger.debug('%s:%s', error_msg, response_text)
raise MetadataRetrievalError(error_msg=error_msg)
- except RETRYABLE_HTTP_ERRORS as e:
- error_msg = ("Received error when attempting to retrieve "
- "ECS metadata: %s" % e)
- raise MetadataRetrievalError(error_msg=error_msg)
-
- def full_url(self, relative_uri):
- return 'http://%s%s' % (self.IP_ADDRESS, relative_uri)
+ except RETRYABLE_HTTP_ERRORS as e:
+ error_msg = ("Received error when attempting to retrieve "
+ "ECS metadata: %s" % e)
+ raise MetadataRetrievalError(error_msg=error_msg)
+
+ def full_url(self, relative_uri):
+ return 'http://%s%s' % (self.IP_ADDRESS, relative_uri)
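A usage sketch (the relative URI below is illustrative; in a real ECS task
it comes from the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI environment
variable):

    fetcher = ContainerMetadataFetcher()
    fetcher.full_url('/v2/credentials/task-role')
    # 'http://169.254.170.2/v2/credentials/task-role'

    # Inside a container with access to the metadata endpoint this
    # returns the parsed JSON credential document:
    # creds = fetcher.retrieve_uri('/v2/credentials/task-role')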
def get_environ_proxies(url):
diff --git a/contrib/python/botocore/botocore/validate.py b/contrib/python/botocore/botocore/validate.py
index 4443aee7a5..5fa55d48c9 100644
--- a/contrib/python/botocore/botocore/validate.py
+++ b/contrib/python/botocore/botocore/validate.py
@@ -1,193 +1,193 @@
-"""User input parameter validation.
-
-This module handles user input parameter validation
-against a provided input model.
-
-Note that the objects in this module do *not* mutate any
-arguments. No type conversion happens here. It is up to another
-layer to properly convert arguments to any required types.
-
-Validation Errors
------------------
-
-
-"""
-
-from botocore.compat import six
-import decimal
-import json
-from datetime import datetime
-
-from botocore.utils import parse_to_aware_datetime
-from botocore.utils import is_json_value_header
-from botocore.exceptions import ParamValidationError
-
-
-def validate_parameters(params, shape):
- """Validates input parameters against a schema.
-
- This is a convenience function that validates parameters against a schema.
- You can also instantiate and use the ParamValidator class directly if you
- want more control.
-
- If there are any validation errors then a ParamValidationError
-    will be raised. If there are no validation errors then no exception
- is raised and a value of None is returned.
-
- :param params: The user provided input parameters.
-
- :type shape: botocore.model.Shape
- :param shape: The schema which the input parameters should
- adhere to.
-
- :raise: ParamValidationError
-
- """
- validator = ParamValidator()
- report = validator.validate(params, shape)
- if report.has_errors():
- raise ParamValidationError(report=report.generate_report())
-
-
-def type_check(valid_types):
- def _create_type_check_guard(func):
- def _on_passes_type_check(self, param, shape, errors, name):
- if _type_check(param, errors, name):
- return func(self, param, shape, errors, name)
-
- def _type_check(param, errors, name):
- if not isinstance(param, valid_types):
- valid_type_names = [six.text_type(t) for t in valid_types]
- errors.report(name, 'invalid type', param=param,
- valid_types=valid_type_names)
- return False
- return True
-
- return _on_passes_type_check
- return _create_type_check_guard
-
-
-def range_check(name, value, shape, error_type, errors):
- failed = False
- min_allowed = float('-inf')
- if 'min' in shape.metadata:
- min_allowed = shape.metadata['min']
- if value < min_allowed:
- failed = True
+"""User input parameter validation.
+
+This module handles user input parameter validation
+against a provided input model.
+
+Note that the objects in this module do *not* mutate any
+arguments. No type conversion happens here. It is up to another
+layer to properly convert arguments to any required types.
+
+Validation Errors
+-----------------
+
+
+"""
+
+from botocore.compat import six
+import decimal
+import json
+from datetime import datetime
+
+from botocore.utils import parse_to_aware_datetime
+from botocore.utils import is_json_value_header
+from botocore.exceptions import ParamValidationError
+
+
+def validate_parameters(params, shape):
+ """Validates input parameters against a schema.
+
+ This is a convenience function that validates parameters against a schema.
+ You can also instantiate and use the ParamValidator class directly if you
+ want more control.
+
+ If there are any validation errors then a ParamValidationError
+    will be raised. If there are no validation errors then no exception
+ is raised and a value of None is returned.
+
+ :param params: The user provided input parameters.
+
+ :type shape: botocore.model.Shape
+ :param shape: The schema which the input parameters should
+ adhere to.
+
+ :raise: ParamValidationError
+
+ """
+ validator = ParamValidator()
+ report = validator.validate(params, shape)
+ if report.has_errors():
+ raise ParamValidationError(report=report.generate_report())
+
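A usage sketch against a real service shape (assumes botocore's bundled
data files are available; the table name is illustrative):

    import botocore.session
    from botocore.validate import validate_parameters

    shape = (botocore.session.get_session()
             .get_service_model('dynamodb')
             .operation_model('GetItem')
             .input_shape)
    validate_parameters({'TableName': 'my-table'}, shape)
    # Raises ParamValidationError: GetItem also requires "Key".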
+
+def type_check(valid_types):
+ def _create_type_check_guard(func):
+ def _on_passes_type_check(self, param, shape, errors, name):
+ if _type_check(param, errors, name):
+ return func(self, param, shape, errors, name)
+
+ def _type_check(param, errors, name):
+ if not isinstance(param, valid_types):
+ valid_type_names = [six.text_type(t) for t in valid_types]
+ errors.report(name, 'invalid type', param=param,
+ valid_types=valid_type_names)
+ return False
+ return True
+
+ return _on_passes_type_check
+ return _create_type_check_guard
+
+
+def range_check(name, value, shape, error_type, errors):
+ failed = False
+ min_allowed = float('-inf')
+ if 'min' in shape.metadata:
+ min_allowed = shape.metadata['min']
+ if value < min_allowed:
+ failed = True
elif hasattr(shape, 'serialization'):
# Members that can be bound to the host have an implicit min of 1
if shape.serialization.get('hostLabel'):
min_allowed = 1
if value < min_allowed:
failed = True
- if failed:
+ if failed:
errors.report(name, error_type, param=value, min_allowed=min_allowed)
-
-
-class ValidationErrors(object):
- def __init__(self):
- self._errors = []
-
- def has_errors(self):
- if self._errors:
- return True
- return False
-
- def generate_report(self):
- error_messages = []
- for error in self._errors:
- error_messages.append(self._format_error(error))
- return '\n'.join(error_messages)
-
- def _format_error(self, error):
- error_type, name, additional = error
- name = self._get_name(name)
- if error_type == 'missing required field':
- return 'Missing required parameter in %s: "%s"' % (
- name, additional['required_name'])
- elif error_type == 'unknown field':
- return 'Unknown parameter in %s: "%s", must be one of: %s' % (
- name, additional['unknown_param'],
- ', '.join(additional['valid_names']))
- elif error_type == 'invalid type':
- return 'Invalid type for parameter %s, value: %s, type: %s, ' \
- 'valid types: %s' % (name, additional['param'],
- str(type(additional['param'])),
- ', '.join(additional['valid_types']))
- elif error_type == 'invalid range':
+
+
+class ValidationErrors(object):
+ def __init__(self):
+ self._errors = []
+
+ def has_errors(self):
+ if self._errors:
+ return True
+ return False
+
+ def generate_report(self):
+ error_messages = []
+ for error in self._errors:
+ error_messages.append(self._format_error(error))
+ return '\n'.join(error_messages)
+
+ def _format_error(self, error):
+ error_type, name, additional = error
+ name = self._get_name(name)
+ if error_type == 'missing required field':
+ return 'Missing required parameter in %s: "%s"' % (
+ name, additional['required_name'])
+ elif error_type == 'unknown field':
+ return 'Unknown parameter in %s: "%s", must be one of: %s' % (
+ name, additional['unknown_param'],
+ ', '.join(additional['valid_names']))
+ elif error_type == 'invalid type':
+ return 'Invalid type for parameter %s, value: %s, type: %s, ' \
+ 'valid types: %s' % (name, additional['param'],
+ str(type(additional['param'])),
+ ', '.join(additional['valid_types']))
+ elif error_type == 'invalid range':
min_allowed = additional['min_allowed']
return ('Invalid value for parameter %s, value: %s, '
'valid min value: %s' % (name, additional['param'],
min_allowed))
- elif error_type == 'invalid length':
+ elif error_type == 'invalid length':
min_allowed = additional['min_allowed']
return ('Invalid length for parameter %s, value: %s, '
'valid min length: %s' % (name, additional['param'],
min_allowed))
- elif error_type == 'unable to encode to json':
- return 'Invalid parameter %s must be json serializable: %s' \
- % (name, additional['type_error'])
+ elif error_type == 'unable to encode to json':
+ return 'Invalid parameter %s must be json serializable: %s' \
+ % (name, additional['type_error'])
elif error_type == 'invalid type for document':
return 'Invalid type for document parameter %s, value: %s, type: %s, ' \
'valid types: %s' % (name, additional['param'],
str(type(additional['param'])),
', '.join(additional['valid_types']))
-
- def _get_name(self, name):
- if not name:
- return 'input'
- elif name.startswith('.'):
- return name[1:]
- else:
- return name
-
- def report(self, name, reason, **kwargs):
- self._errors.append((reason, name, kwargs))
-
-
-class ParamValidator(object):
- """Validates parameters against a shape model."""
-
- def validate(self, params, shape):
- """Validate parameters against a shape model.
-
- This method will validate the parameters against a provided shape model.
- All errors will be collected before returning to the caller. This means
-        that this method will not stop at the first error; it will return all
- possible errors.
-
- :param params: User provided dict of parameters
- :param shape: A shape model describing the expected input.
-
- :return: A list of errors.
-
- """
- errors = ValidationErrors()
- self._validate(params, shape, errors, name='')
- return errors
-
- def _check_special_validation_cases(self, shape):
- if is_json_value_header(shape):
- return self._validate_jsonvalue_string
+
+ def _get_name(self, name):
+ if not name:
+ return 'input'
+ elif name.startswith('.'):
+ return name[1:]
+ else:
+ return name
+
+ def report(self, name, reason, **kwargs):
+ self._errors.append((reason, name, kwargs))
+
+
+class ParamValidator(object):
+ """Validates parameters against a shape model."""
+
+ def validate(self, params, shape):
+ """Validate parameters against a shape model.
+
+ This method will validate the parameters against a provided shape model.
+ All errors will be collected before returning to the caller. This means
+        that this method will not stop at the first error; it will return all
+ possible errors.
+
+ :param params: User provided dict of parameters
+ :param shape: A shape model describing the expected input.
+
+ :return: A list of errors.
+
+ """
+ errors = ValidationErrors()
+ self._validate(params, shape, errors, name='')
+ return errors
+
+ def _check_special_validation_cases(self, shape):
+ if is_json_value_header(shape):
+ return self._validate_jsonvalue_string
if shape.type_name == 'structure' and shape.is_document_type:
return self._validate_document
-
- def _validate(self, params, shape, errors, name):
- special_validator = self._check_special_validation_cases(shape)
- if special_validator:
- special_validator(params, shape, errors, name)
- else:
- getattr(self, '_validate_%s' % shape.type_name)(
- params, shape, errors, name)
-
- def _validate_jsonvalue_string(self, params, shape, errors, name):
- # Check to see if a value marked as a jsonvalue can be dumped to
- # a json string.
- try:
- json.dumps(params)
- except (ValueError, TypeError) as e:
- errors.report(name, 'unable to encode to json', type_error=e)
-
+
+ def _validate(self, params, shape, errors, name):
+ special_validator = self._check_special_validation_cases(shape)
+ if special_validator:
+ special_validator(params, shape, errors, name)
+ else:
+ getattr(self, '_validate_%s' % shape.type_name)(
+ params, shape, errors, name)
+
+ def _validate_jsonvalue_string(self, params, shape, errors, name):
+ # Check to see if a value marked as a jsonvalue can be dumped to
+ # a json string.
+ try:
+ json.dumps(params)
+ except (ValueError, TypeError) as e:
+ errors.report(name, 'unable to encode to json', type_error=e)
+
def _validate_document(self, params, shape, errors, name):
if params is None:
return
@@ -207,115 +207,115 @@ class ParamValidator(object):
param_type=type(params),
valid_types=valid_type_names)
- @type_check(valid_types=(dict,))
- def _validate_structure(self, params, shape, errors, name):
- # Validate required fields.
- for required_member in shape.metadata.get('required', []):
- if required_member not in params:
- errors.report(name, 'missing required field',
- required_name=required_member, user_params=params)
- members = shape.members
- known_params = []
- # Validate known params.
- for param in params:
- if param not in members:
- errors.report(name, 'unknown field', unknown_param=param,
- valid_names=list(members))
- else:
- known_params.append(param)
- # Validate structure members.
- for param in known_params:
- self._validate(params[param], shape.members[param],
- errors, '%s.%s' % (name, param))
-
- @type_check(valid_types=six.string_types)
- def _validate_string(self, param, shape, errors, name):
-        # Validate range. For a string, the min/max constraints
-        # apply to the string length.
- # Looks like:
- # "WorkflowId":{
- # "type":"string",
- # "min":1,
- # "max":256
- # }
- range_check(name, len(param), shape, 'invalid length', errors)
-
- @type_check(valid_types=(list, tuple))
- def _validate_list(self, param, shape, errors, name):
- member_shape = shape.member
- range_check(name, len(param), shape, 'invalid length', errors)
- for i, item in enumerate(param):
- self._validate(item, member_shape, errors, '%s[%s]' % (name, i))
-
- @type_check(valid_types=(dict,))
- def _validate_map(self, param, shape, errors, name):
- key_shape = shape.key
- value_shape = shape.value
- for key, value in param.items():
- self._validate(key, key_shape, errors, "%s (key: %s)"
- % (name, key))
- self._validate(value, value_shape, errors, '%s.%s' % (name, key))
-
- @type_check(valid_types=six.integer_types)
- def _validate_integer(self, param, shape, errors, name):
- range_check(name, param, shape, 'invalid range', errors)
-
- def _validate_blob(self, param, shape, errors, name):
- if isinstance(param, (bytes, bytearray, six.text_type)):
- return
- elif hasattr(param, 'read'):
- # File like objects are also allowed for blob types.
- return
- else:
- errors.report(name, 'invalid type', param=param,
- valid_types=[str(bytes), str(bytearray),
- 'file-like object'])
-
- @type_check(valid_types=(bool,))
- def _validate_boolean(self, param, shape, errors, name):
- pass
-
- @type_check(valid_types=(float, decimal.Decimal) + six.integer_types)
- def _validate_double(self, param, shape, errors, name):
- range_check(name, param, shape, 'invalid range', errors)
-
- _validate_float = _validate_double
-
- @type_check(valid_types=six.integer_types)
- def _validate_long(self, param, shape, errors, name):
- range_check(name, param, shape, 'invalid range', errors)
-
- def _validate_timestamp(self, param, shape, errors, name):
- # We don't use @type_check because datetimes are a bit
- # more flexible. You can either provide a datetime
- # object, or a string that parses to a datetime.
- is_valid_type = self._type_check_datetime(param)
- if not is_valid_type:
- valid_type_names = [six.text_type(datetime), 'timestamp-string']
- errors.report(name, 'invalid type', param=param,
- valid_types=valid_type_names)
-
- def _type_check_datetime(self, value):
- try:
- parse_to_aware_datetime(value)
- return True
- except (TypeError, ValueError, AttributeError):
- # Yes, dateutil can sometimes raise an AttributeError
- # when parsing timestamps.
- return False
-
-
-class ParamValidationDecorator(object):
- def __init__(self, param_validator, serializer):
- self._param_validator = param_validator
- self._serializer = serializer
-
- def serialize_to_request(self, parameters, operation_model):
- input_shape = operation_model.input_shape
- if input_shape is not None:
- report = self._param_validator.validate(parameters,
- operation_model.input_shape)
- if report.has_errors():
- raise ParamValidationError(report=report.generate_report())
- return self._serializer.serialize_to_request(parameters,
- operation_model)
+ @type_check(valid_types=(dict,))
+ def _validate_structure(self, params, shape, errors, name):
+ # Validate required fields.
+ for required_member in shape.metadata.get('required', []):
+ if required_member not in params:
+ errors.report(name, 'missing required field',
+ required_name=required_member, user_params=params)
+ members = shape.members
+ known_params = []
+ # Validate known params.
+ for param in params:
+ if param not in members:
+ errors.report(name, 'unknown field', unknown_param=param,
+ valid_names=list(members))
+ else:
+ known_params.append(param)
+ # Validate structure members.
+ for param in known_params:
+ self._validate(params[param], shape.members[param],
+ errors, '%s.%s' % (name, param))
+
+ @type_check(valid_types=six.string_types)
+ def _validate_string(self, param, shape, errors, name):
+        # Validate range. For a string, the min/max constraints
+        # apply to the string length.
+ # Looks like:
+ # "WorkflowId":{
+ # "type":"string",
+ # "min":1,
+ # "max":256
+ # }
+ range_check(name, len(param), shape, 'invalid length', errors)
+
+ @type_check(valid_types=(list, tuple))
+ def _validate_list(self, param, shape, errors, name):
+ member_shape = shape.member
+ range_check(name, len(param), shape, 'invalid length', errors)
+ for i, item in enumerate(param):
+ self._validate(item, member_shape, errors, '%s[%s]' % (name, i))
+
+ @type_check(valid_types=(dict,))
+ def _validate_map(self, param, shape, errors, name):
+ key_shape = shape.key
+ value_shape = shape.value
+ for key, value in param.items():
+ self._validate(key, key_shape, errors, "%s (key: %s)"
+ % (name, key))
+ self._validate(value, value_shape, errors, '%s.%s' % (name, key))
+
+ @type_check(valid_types=six.integer_types)
+ def _validate_integer(self, param, shape, errors, name):
+ range_check(name, param, shape, 'invalid range', errors)
+
+ def _validate_blob(self, param, shape, errors, name):
+ if isinstance(param, (bytes, bytearray, six.text_type)):
+ return
+ elif hasattr(param, 'read'):
+ # File like objects are also allowed for blob types.
+ return
+ else:
+ errors.report(name, 'invalid type', param=param,
+ valid_types=[str(bytes), str(bytearray),
+ 'file-like object'])
+
+ @type_check(valid_types=(bool,))
+ def _validate_boolean(self, param, shape, errors, name):
+ pass
+
+ @type_check(valid_types=(float, decimal.Decimal) + six.integer_types)
+ def _validate_double(self, param, shape, errors, name):
+ range_check(name, param, shape, 'invalid range', errors)
+
+ _validate_float = _validate_double
+
+ @type_check(valid_types=six.integer_types)
+ def _validate_long(self, param, shape, errors, name):
+ range_check(name, param, shape, 'invalid range', errors)
+
+ def _validate_timestamp(self, param, shape, errors, name):
+ # We don't use @type_check because datetimes are a bit
+ # more flexible. You can either provide a datetime
+ # object, or a string that parses to a datetime.
+ is_valid_type = self._type_check_datetime(param)
+ if not is_valid_type:
+ valid_type_names = [six.text_type(datetime), 'timestamp-string']
+ errors.report(name, 'invalid type', param=param,
+ valid_types=valid_type_names)
+
+ def _type_check_datetime(self, value):
+ try:
+ parse_to_aware_datetime(value)
+ return True
+ except (TypeError, ValueError, AttributeError):
+ # Yes, dateutil can sometimes raise an AttributeError
+ # when parsing timestamps.
+ return False
+
+
+class ParamValidationDecorator(object):
+ def __init__(self, param_validator, serializer):
+ self._param_validator = param_validator
+ self._serializer = serializer
+
+ def serialize_to_request(self, parameters, operation_model):
+ input_shape = operation_model.input_shape
+ if input_shape is not None:
+ report = self._param_validator.validate(parameters,
+ operation_model.input_shape)
+ if report.has_errors():
+ raise ParamValidationError(report=report.generate_report())
+ return self._serializer.serialize_to_request(parameters,
+ operation_model)
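The decorator is normally attached by ``botocore.serialize.create_serializer``
when validation is enabled; a sketch (the parameter values are illustrative):

    import botocore.session
    from botocore import serialize

    # include_validation=True wraps the protocol serializer in a
    # ParamValidationDecorator.
    serializer = serialize.create_serializer('json', include_validation=True)
    op = (botocore.session.get_session()
          .get_service_model('dynamodb')
          .operation_model('GetItem'))
    request = serializer.serialize_to_request(
        {'TableName': 't', 'Key': {'pk': {'S': '1'}}}, op)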
diff --git a/contrib/python/botocore/botocore/waiter.py b/contrib/python/botocore/botocore/waiter.py
index 6f03e85165..6e8834a22d 100644
--- a/contrib/python/botocore/botocore/waiter.py
+++ b/contrib/python/botocore/botocore/waiter.py
@@ -1,80 +1,80 @@
-# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-# http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-import jmespath
-import logging
-import time
-
-from botocore.utils import get_service_module_name
-from botocore.docs.docstring import WaiterDocstring
-from .exceptions import WaiterError, ClientError, WaiterConfigError
-from . import xform_name
-
-
-logger = logging.getLogger(__name__)
-
-
-def create_waiter_with_client(waiter_name, waiter_model, client):
- """
-
- :type waiter_name: str
-    :param waiter_name: The name of the waiter. It should match the key
-        name in the waiter model file, including the casing (typically
-        CamelCase).
-
- :type waiter_model: botocore.waiter.WaiterModel
- :param waiter_model: The model for the waiter configuration.
-
- :type client: botocore.client.BaseClient
- :param client: The botocore client associated with the service.
-
- :rtype: botocore.waiter.Waiter
- :return: The waiter object.
-
- """
- single_waiter_config = waiter_model.get_waiter(waiter_name)
- operation_name = xform_name(single_waiter_config.operation)
- operation_method = NormalizedOperationMethod(
- getattr(client, operation_name))
-
- # Create a new wait method that will serve as a proxy to the underlying
- # Waiter.wait method. This is needed to attach a docstring to the
- # method.
- def wait(self, **kwargs):
- Waiter.wait(self, **kwargs)
-
- wait.__doc__ = WaiterDocstring(
- waiter_name=waiter_name,
- event_emitter=client.meta.events,
- service_model=client.meta.service_model,
- service_waiter_model=waiter_model,
- include_signature=False
- )
-
- # Rename the waiter class based on the type of waiter.
- waiter_class_name = str('%s.Waiter.%s' % (
- get_service_module_name(client.meta.service_model),
- waiter_name))
-
- # Create the new waiter class
- documented_waiter_cls = type(
- waiter_class_name, (Waiter,), {'wait': wait})
-
- # Return an instance of the new waiter class.
- return documented_waiter_cls(
- waiter_name, single_waiter_config, operation_method
- )
-
-
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import jmespath
+import logging
+import time
+
+from botocore.utils import get_service_module_name
+from botocore.docs.docstring import WaiterDocstring
+from .exceptions import WaiterError, ClientError, WaiterConfigError
+from . import xform_name
+
+
+logger = logging.getLogger(__name__)
+
+
+def create_waiter_with_client(waiter_name, waiter_model, client):
+ """
+
+ :type waiter_name: str
+    :param waiter_name: The name of the waiter. It should match the key
+        name in the waiter model file, including the casing (typically
+        CamelCase).
+
+ :type waiter_model: botocore.waiter.WaiterModel
+ :param waiter_model: The model for the waiter configuration.
+
+ :type client: botocore.client.BaseClient
+ :param client: The botocore client associated with the service.
+
+ :rtype: botocore.waiter.Waiter
+ :return: The waiter object.
+
+ """
+ single_waiter_config = waiter_model.get_waiter(waiter_name)
+ operation_name = xform_name(single_waiter_config.operation)
+ operation_method = NormalizedOperationMethod(
+ getattr(client, operation_name))
+
+ # Create a new wait method that will serve as a proxy to the underlying
+ # Waiter.wait method. This is needed to attach a docstring to the
+ # method.
+ def wait(self, **kwargs):
+ Waiter.wait(self, **kwargs)
+
+ wait.__doc__ = WaiterDocstring(
+ waiter_name=waiter_name,
+ event_emitter=client.meta.events,
+ service_model=client.meta.service_model,
+ service_waiter_model=waiter_model,
+ include_signature=False
+ )
+
+ # Rename the waiter class based on the type of waiter.
+ waiter_class_name = str('%s.Waiter.%s' % (
+ get_service_module_name(client.meta.service_model),
+ waiter_name))
+
+ # Create the new waiter class
+ documented_waiter_cls = type(
+ waiter_class_name, (Waiter,), {'wait': wait})
+
+ # Return an instance of the new waiter class.
+ return documented_waiter_cls(
+ waiter_name, single_waiter_config, operation_method
+ )
+
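In practice this factory sits behind ``client.get_waiter()``; a usage
sketch (the bucket name is illustrative):

    import boto3

    s3 = boto3.client('s3')
    waiter = s3.get_waiter('bucket_exists')   # built via create_waiter_with_client
    waiter.wait(Bucket='my-bucket',
                WaiterConfig={'Delay': 5, 'MaxAttempts': 20})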
+
def is_valid_waiter_error(response):
error = response.get('Error')
if isinstance(error, dict) and 'Code' in error:
@@ -82,94 +82,94 @@ def is_valid_waiter_error(response):
return False
-class NormalizedOperationMethod(object):
- def __init__(self, client_method):
- self._client_method = client_method
-
- def __call__(self, **kwargs):
- try:
- return self._client_method(**kwargs)
- except ClientError as e:
- return e.response
-
-
-class WaiterModel(object):
- SUPPORTED_VERSION = 2
-
- def __init__(self, waiter_config):
- """
-
- Note that the WaiterModel takes ownership of the waiter_config.
- It may or may not mutate the waiter_config. If this is a concern,
- it is best to make a copy of the waiter config before passing it to
- the WaiterModel.
-
- :type waiter_config: dict
- :param waiter_config: The loaded waiter config
- from the <service>*.waiters.json file. This can be
- obtained from a botocore Loader object as well.
-
- """
- self._waiter_config = waiter_config['waiters']
-
- # These are part of the public API. Changing these
- # will result in having to update the consuming code,
- # so don't change unless you really need to.
- version = waiter_config.get('version', 'unknown')
- self._verify_supported_version(version)
- self.version = version
- self.waiter_names = list(sorted(waiter_config['waiters'].keys()))
-
- def _verify_supported_version(self, version):
- if version != self.SUPPORTED_VERSION:
- raise WaiterConfigError(
- error_msg=("Unsupported waiter version, supported version "
- "must be: %s, but version of waiter config "
- "is: %s" % (self.SUPPORTED_VERSION,
- version)))
-
- def get_waiter(self, waiter_name):
- try:
- single_waiter_config = self._waiter_config[waiter_name]
- except KeyError:
- raise ValueError("Waiter does not exist: %s" % waiter_name)
- return SingleWaiterConfig(single_waiter_config)
-
-
-class SingleWaiterConfig(object):
- """Represents the waiter configuration for a single waiter.
-
- A single waiter is considered the configuration for a single
-    value associated with a named waiter (e.g. TableExists).
-
- """
- def __init__(self, single_waiter_config):
- self._config = single_waiter_config
-
- # These attributes are part of the public API.
- self.description = single_waiter_config.get('description', '')
- # Per the spec, these three fields are required.
- self.operation = single_waiter_config['operation']
- self.delay = single_waiter_config['delay']
- self.max_attempts = single_waiter_config['maxAttempts']
-
- @property
- def acceptors(self):
- acceptors = []
- for acceptor_config in self._config['acceptors']:
- acceptor = AcceptorConfig(acceptor_config)
- acceptors.append(acceptor)
- return acceptors
-
-
-class AcceptorConfig(object):
- def __init__(self, config):
- self.state = config['state']
- self.matcher = config['matcher']
- self.expected = config['expected']
- self.argument = config.get('argument')
- self.matcher_func = self._create_matcher_func()
-
+class NormalizedOperationMethod(object):
+ def __init__(self, client_method):
+ self._client_method = client_method
+
+ def __call__(self, **kwargs):
+ try:
+ return self._client_method(**kwargs)
+ except ClientError as e:
+ return e.response
+
+
+class WaiterModel(object):
+ SUPPORTED_VERSION = 2
+
+ def __init__(self, waiter_config):
+ """
+
+ Note that the WaiterModel takes ownership of the waiter_config.
+ It may or may not mutate the waiter_config. If this is a concern,
+ it is best to make a copy of the waiter config before passing it to
+ the WaiterModel.
+
+ :type waiter_config: dict
+ :param waiter_config: The loaded waiter config
+ from the <service>*.waiters.json file. This can be
+ obtained from a botocore Loader object as well.
+
+ """
+ self._waiter_config = waiter_config['waiters']
+
+ # These are part of the public API. Changing these
+ # will result in having to update the consuming code,
+ # so don't change unless you really need to.
+ version = waiter_config.get('version', 'unknown')
+ self._verify_supported_version(version)
+ self.version = version
+ self.waiter_names = list(sorted(waiter_config['waiters'].keys()))
+
+ def _verify_supported_version(self, version):
+ if version != self.SUPPORTED_VERSION:
+ raise WaiterConfigError(
+ error_msg=("Unsupported waiter version, supported version "
+ "must be: %s, but version of waiter config "
+ "is: %s" % (self.SUPPORTED_VERSION,
+ version)))
+
+ def get_waiter(self, waiter_name):
+ try:
+ single_waiter_config = self._waiter_config[waiter_name]
+ except KeyError:
+ raise ValueError("Waiter does not exist: %s" % waiter_name)
+ return SingleWaiterConfig(single_waiter_config)
+
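A minimal, hand-written config accepted by the model (field values are
illustrative):

    config = {
        'version': 2,    # must equal WaiterModel.SUPPORTED_VERSION
        'waiters': {
            'BucketExists': {
                'operation': 'HeadBucket',
                'delay': 5,
                'maxAttempts': 20,
                'acceptors': [
                    {'state': 'success', 'matcher': 'status', 'expected': 200},
                ],
            },
        },
    }
    model = WaiterModel(config)
    model.waiter_names                           # ['BucketExists']
    model.get_waiter('BucketExists').operation   # 'HeadBucket'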
+
+class SingleWaiterConfig(object):
+ """Represents the waiter configuration for a single waiter.
+
+ A single waiter is considered the configuration for a single
+    value associated with a named waiter (e.g. TableExists).
+
+ """
+ def __init__(self, single_waiter_config):
+ self._config = single_waiter_config
+
+ # These attributes are part of the public API.
+ self.description = single_waiter_config.get('description', '')
+ # Per the spec, these three fields are required.
+ self.operation = single_waiter_config['operation']
+ self.delay = single_waiter_config['delay']
+ self.max_attempts = single_waiter_config['maxAttempts']
+
+ @property
+ def acceptors(self):
+ acceptors = []
+ for acceptor_config in self._config['acceptors']:
+ acceptor = AcceptorConfig(acceptor_config)
+ acceptors.append(acceptor)
+ return acceptors
+
+
+class AcceptorConfig(object):
+ def __init__(self, config):
+ self.state = config['state']
+ self.matcher = config['matcher']
+ self.expected = config['expected']
+ self.argument = config.get('argument')
+ self.matcher_func = self._create_matcher_func()
+
@property
def explanation(self):
if self.matcher == 'path':
@@ -185,183 +185,183 @@ class AcceptorConfig(object):
else:
return 'No explanation for unknown waiter type: "%s"' % self.matcher
- def _create_matcher_func(self):
-        # An acceptor function is a callable that takes a single value:
-        # the parsed AWS response. Note that the parsed error response is
-        # also provided in the case of errors, so it's entirely possible
-        # to handle all the available matcher capabilities in the future.
-        # There are only a handful of supported matchers, so for now this
-        # is all contained in a single method. If this grows, we can
-        # expand this out to separate methods or even objects.
-
- if self.matcher == 'path':
- return self._create_path_matcher()
- elif self.matcher == 'pathAll':
- return self._create_path_all_matcher()
- elif self.matcher == 'pathAny':
- return self._create_path_any_matcher()
- elif self.matcher == 'status':
- return self._create_status_matcher()
- elif self.matcher == 'error':
- return self._create_error_matcher()
- else:
- raise WaiterConfigError(
- error_msg="Unknown acceptor: %s" % self.matcher)
-
- def _create_path_matcher(self):
- expression = jmespath.compile(self.argument)
- expected = self.expected
-
- def acceptor_matches(response):
+ def _create_matcher_func(self):
+        # An acceptor function is a callable that takes a single value:
+        # the parsed AWS response. Note that the parsed error response is
+        # also provided in the case of errors, so it's entirely possible
+        # to handle all the available matcher capabilities in the future.
+        # There are only a handful of supported matchers, so for now this
+        # is all contained in a single method. If this grows, we can
+        # expand this out to separate methods or even objects.
+
+ if self.matcher == 'path':
+ return self._create_path_matcher()
+ elif self.matcher == 'pathAll':
+ return self._create_path_all_matcher()
+ elif self.matcher == 'pathAny':
+ return self._create_path_any_matcher()
+ elif self.matcher == 'status':
+ return self._create_status_matcher()
+ elif self.matcher == 'error':
+ return self._create_error_matcher()
+ else:
+ raise WaiterConfigError(
+ error_msg="Unknown acceptor: %s" % self.matcher)
+
+ def _create_path_matcher(self):
+ expression = jmespath.compile(self.argument)
+ expected = self.expected
+
+ def acceptor_matches(response):
if is_valid_waiter_error(response):
- return
- return expression.search(response) == expected
- return acceptor_matches
-
- def _create_path_all_matcher(self):
- expression = jmespath.compile(self.argument)
- expected = self.expected
-
- def acceptor_matches(response):
+ return
+ return expression.search(response) == expected
+ return acceptor_matches
+
+ def _create_path_all_matcher(self):
+ expression = jmespath.compile(self.argument)
+ expected = self.expected
+
+ def acceptor_matches(response):
if is_valid_waiter_error(response):
- return
- result = expression.search(response)
- if not isinstance(result, list) or not result:
- # pathAll matcher must result in a list.
- # Also we require at least one element in the list,
- # that is, an empty list should not result in this
- # acceptor match.
- return False
- for element in result:
- if element != expected:
- return False
- return True
- return acceptor_matches
-
- def _create_path_any_matcher(self):
- expression = jmespath.compile(self.argument)
- expected = self.expected
-
- def acceptor_matches(response):
+ return
+ result = expression.search(response)
+ if not isinstance(result, list) or not result:
+ # pathAll matcher must result in a list.
+ # Also we require at least one element in the list,
+ # that is, an empty list should not result in this
+ # acceptor match.
+ return False
+ for element in result:
+ if element != expected:
+ return False
+ return True
+ return acceptor_matches
+
+ def _create_path_any_matcher(self):
+ expression = jmespath.compile(self.argument)
+ expected = self.expected
+
+ def acceptor_matches(response):
if is_valid_waiter_error(response):
- return
- result = expression.search(response)
- if not isinstance(result, list) or not result:
- # pathAny matcher must result in a list.
- # Also we require at least one element in the list,
- # that is, an empty list should not result in this
- # acceptor match.
- return False
- for element in result:
- if element == expected:
- return True
- return False
- return acceptor_matches
-
- def _create_status_matcher(self):
- expected = self.expected
-
- def acceptor_matches(response):
- # We don't have any requirements on the expected incoming data
- # other than it is a dict, so we don't assume there's
- # a ResponseMetadata.HTTPStatusCode.
- status_code = response.get('ResponseMetadata', {}).get(
- 'HTTPStatusCode')
- return status_code == expected
- return acceptor_matches
-
- def _create_error_matcher(self):
- expected = self.expected
-
- def acceptor_matches(response):
- # When the client encounters an error, it will normally raise
- # an exception. However, the waiter implementation will catch
- # this exception, and instead send us the parsed error
- # response. So response is still a dictionary, and in the case
- # of an error response will contain the "Error" and
- # "ResponseMetadata" key.
- return response.get("Error", {}).get("Code", "") == expected
- return acceptor_matches
-
-
-class Waiter(object):
- def __init__(self, name, config, operation_method):
- """
-
- :type name: string
- :param name: The name of the waiter
-
- :type config: botocore.waiter.SingleWaiterConfig
- :param config: The configuration for the waiter.
-
- :type operation_method: callable
- :param operation_method: A callable that accepts **kwargs
- and returns a response. For example, this can be
- a method from a botocore client.
-
- """
- self._operation_method = operation_method
- # The two attributes are exposed to allow for introspection
- # and documentation.
- self.name = name
- self.config = config
-
- def wait(self, **kwargs):
- acceptors = list(self.config.acceptors)
- current_state = 'waiting'
- # Pop the invocation-specific config.
- config = kwargs.pop('WaiterConfig', {})
- sleep_amount = config.get('Delay', self.config.delay)
- max_attempts = config.get('MaxAttempts', self.config.max_attempts)
+ return
+ result = expression.search(response)
+ if not isinstance(result, list) or not result:
+ # The pathAny matcher must result in a list.
+ # We also require at least one element in the list;
+ # that is, an empty list must not cause this
+ # acceptor to match.
+ return False
+ for element in result:
+ if element == expected:
+ return True
+ return False
+ return acceptor_matches
+
+ def _create_status_matcher(self):
+ expected = self.expected
+
+ def acceptor_matches(response):
+ # We don't have any requirements on the expected incoming data
+ # other than that it is a dict, so we don't assume there's
+ # a ResponseMetadata.HTTPStatusCode.
+ status_code = response.get('ResponseMetadata', {}).get(
+ 'HTTPStatusCode')
+ return status_code == expected
+ return acceptor_matches
+
+ def _create_error_matcher(self):
+ expected = self.expected
+
+ def acceptor_matches(response):
+ # When the client encounters an error, it will normally raise
+ # an exception. However, the waiter implementation will catch
+ # this exception, and instead send us the parsed error
+ # response. So response is still a dictionary, and in the case
+ # of an error response will contain the "Error" and
+ # "ResponseMetadata" key.
+ return response.get("Error", {}).get("Code", "") == expected
+ return acceptor_matches
+
+
+class Waiter(object):
+ def __init__(self, name, config, operation_method):
+ """
+
+ :type name: string
+ :param name: The name of the waiter
+
+ :type config: botocore.waiter.SingleWaiterConfig
+ :param config: The configuration for the waiter.
+
+ :type operation_method: callable
+ :param operation_method: A callable that accepts **kwargs
+ and returns a response. For example, this can be
+ a method from a botocore client.
+
+ """
+ self._operation_method = operation_method
+ # The two attributes are exposed to allow for introspection
+ # and documentation.
+ self.name = name
+ self.config = config
+
+ def wait(self, **kwargs):
+ acceptors = list(self.config.acceptors)
+ current_state = 'waiting'
+ # Pop the invocation-specific config.
+ config = kwargs.pop('WaiterConfig', {})
+ sleep_amount = config.get('Delay', self.config.delay)
+ max_attempts = config.get('MaxAttempts', self.config.max_attempts)
last_matched_acceptor = None
- num_attempts = 0
-
- while True:
- response = self._operation_method(**kwargs)
- num_attempts += 1
- for acceptor in acceptors:
- if acceptor.matcher_func(response):
+ num_attempts = 0
+
+ while True:
+ response = self._operation_method(**kwargs)
+ num_attempts += 1
+ for acceptor in acceptors:
+ if acceptor.matcher_func(response):
last_matched_acceptor = acceptor
- current_state = acceptor.state
- break
- else:
- # If none of the acceptors matched, we should
- # transition to the failure state if an error
- # response was received.
+ current_state = acceptor.state
+ break
+ else:
+ # If none of the acceptors matched, we should
+ # transition to the failure state if an error
+ # response was received.
if is_valid_waiter_error(response):
- # Transition to a failure state, which we
- # can just handle here by raising an exception.
- raise WaiterError(
- name=self.name,
+ # Transition to a failure state, which we
+ # can just handle here by raising an exception.
+ raise WaiterError(
+ name=self.name,
reason='An error occurred (%s): %s' % (
response['Error'].get('Code', 'Unknown'),
response['Error'].get('Message', 'Unknown'),
),
last_response=response,
- )
- if current_state == 'success':
- logger.debug("Waiting complete, waiter matched the "
- "success state.")
- return
- if current_state == 'failure':
+ )
+ if current_state == 'success':
+ logger.debug("Waiting complete, waiter matched the "
+ "success state.")
+ return
+ if current_state == 'failure':
reason = 'Waiter encountered a terminal failure state: %s' % (
acceptor.explanation
)
- raise WaiterError(
- name=self.name,
+ raise WaiterError(
+ name=self.name,
reason=reason,
- last_response=response,
- )
- if num_attempts >= max_attempts:
+ last_response=response,
+ )
+ if num_attempts >= max_attempts:
if last_matched_acceptor is None:
reason = 'Max attempts exceeded'
else:
reason = 'Max attempts exceeded. Previously accepted state: %s' % (
last_matched_acceptor.explanation
)
- raise WaiterError(
- name=self.name,
+ raise WaiterError(
+ name=self.name,
reason=reason,
last_response=response,
- )
- time.sleep(sleep_amount)
+ )
+ time.sleep(sleep_amount)
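
The wait() method above is the polling loop behind every generated client
waiter: it calls the operation, runs each acceptor's matcher_func over the
parsed response, and then returns, raises WaiterError, or sleeps and retries.
A minimal usage sketch, assuming boto3 (vendored in this same commit) and
valid AWS credentials; 'instance_running' is a real EC2 waiter name, but the
instance id is illustrative:

    import boto3

    ec2 = boto3.client('ec2')
    waiter = ec2.get_waiter('instance_running')
    # WaiterConfig overrides the model's delay/max attempts, matching the
    # kwargs.pop('WaiterConfig', {}) handling at the top of wait().
    waiter.wait(
        InstanceIds=['i-0123456789abcdef0'],  # illustrative id
        WaiterConfig={'Delay': 5, 'MaxAttempts': 12},
    )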
diff --git a/contrib/python/botocore/ya.make b/contrib/python/botocore/ya.make
index 92027671ce..dc02d4a419 100644
--- a/contrib/python/botocore/ya.make
+++ b/contrib/python/botocore/ya.make
@@ -1,31 +1,31 @@
PY23_LIBRARY()
-
+
OWNER(g:python-contrib)
VERSION(1.20.112)
-
+
LICENSE(Apache-2.0)
-
-PEERDIR(
- contrib/python/dateutil
- contrib/python/jmespath
+
+PEERDIR(
+ contrib/python/dateutil
+ contrib/python/jmespath
contrib/python/six
contrib/python/urllib3
library/python/resource
-)
-
+)
+
NO_LINT()
NO_CHECK_IMPORTS(
botocore.crt.auth
)
-PY_SRCS(
- TOP_LEVEL
+PY_SRCS(
+ TOP_LEVEL
botocore/__init__.py
botocore/args.py
botocore/auth.py
- botocore/awsrequest.py
+ botocore/awsrequest.py
botocore/client.py
botocore/compat.py
botocore/config.py
@@ -43,12 +43,12 @@ PY_SRCS(
botocore/docs/client.py
botocore/docs/docstring.py
botocore/docs/example.py
- botocore/docs/method.py
+ botocore/docs/method.py
botocore/docs/paginator.py
- botocore/docs/params.py
+ botocore/docs/params.py
botocore/docs/service.py
- botocore/docs/shape.py
- botocore/docs/sharedexample.py
+ botocore/docs/shape.py
+ botocore/docs/sharedexample.py
botocore/docs/utils.py
botocore/docs/waiter.py
botocore/endpoint.py
@@ -89,8 +89,8 @@ PY_SRCS(
botocore/vendored/requests/packages/urllib3/__init__.py
botocore/vendored/requests/packages/urllib3/exceptions.py
botocore/waiter.py
-)
-
+)
+
RESOURCE_FILES(
PREFIX contrib/python/botocore/
.dist-info/METADATA
@@ -916,7 +916,7 @@ RESOURCE_FILES(
#botocore/data/xray/2016-04-12/service-2.json
)
-END()
+END()
RECURSE_FOR_TESTS(
tests
diff --git a/contrib/python/jmespath/jmespath/__init__.py b/contrib/python/jmespath/jmespath/__init__.py
index 430c031828..99482dba8e 100644
--- a/contrib/python/jmespath/jmespath/__init__.py
+++ b/contrib/python/jmespath/jmespath/__init__.py
@@ -1,11 +1,11 @@
import warnings
import sys
-from jmespath import parser
-from jmespath.visitor import Options
-
+from jmespath import parser
+from jmespath.visitor import Options
+
__version__ = '0.10.0'
-
-
+
+
if sys.version_info[:2] <= (2, 6) or ((3, 0) <= sys.version_info[:2] <= (3, 3)):
python_ver = '.'.join(str(x) for x in sys.version_info[:3])
@@ -15,9 +15,9 @@ if sys.version_info[:2] <= (2, 6) or ((3, 0) <= sys.version_info[:2] <= (3, 3)):
DeprecationWarning)
-def compile(expression):
- return parser.Parser().parse(expression)
-
-
-def search(expression, data, options=None):
- return parser.Parser().parse(expression).search(data, options=options)
+def compile(expression):
+ return parser.Parser().parse(expression)
+
+
+def search(expression, data, options=None):
+ return parser.Parser().parse(expression).search(data, options=options)
diff --git a/contrib/python/jmespath/jmespath/ast.py b/contrib/python/jmespath/jmespath/ast.py
index 3c25dc67d7..dd56c6ed6b 100644
--- a/contrib/python/jmespath/jmespath/ast.py
+++ b/contrib/python/jmespath/jmespath/ast.py
@@ -1,90 +1,90 @@
-# AST nodes have this structure:
-# {"type": <node type>", children: [], "value": ""}
-
-
-def comparator(name, first, second):
- return {'type': 'comparator', 'children': [first, second], 'value': name}
-
-
-def current_node():
- return {'type': 'current', 'children': []}
-
-
-def expref(expression):
- return {'type': 'expref', 'children': [expression]}
-
-
-def function_expression(name, args):
- return {'type': 'function_expression', 'children': args, 'value': name}
-
-
-def field(name):
- return {"type": "field", "children": [], "value": name}
-
-
-def filter_projection(left, right, comparator):
- return {'type': 'filter_projection', 'children': [left, right, comparator]}
-
-
-def flatten(node):
- return {'type': 'flatten', 'children': [node]}
-
-
-def identity():
- return {"type": "identity", 'children': []}
-
-
-def index(index):
- return {"type": "index", "value": index, "children": []}
-
-
-def index_expression(children):
- return {"type": "index_expression", 'children': children}
-
-
-def key_val_pair(key_name, node):
- return {"type": "key_val_pair", 'children': [node], "value": key_name}
-
-
-def literal(literal_value):
- return {'type': 'literal', 'value': literal_value, 'children': []}
-
-
-def multi_select_dict(nodes):
- return {"type": "multi_select_dict", "children": nodes}
-
-
-def multi_select_list(nodes):
- return {"type": "multi_select_list", "children": nodes}
-
-
-def or_expression(left, right):
- return {"type": "or_expression", "children": [left, right]}
-
-
-def and_expression(left, right):
- return {"type": "and_expression", "children": [left, right]}
-
-
-def not_expression(expr):
- return {"type": "not_expression", "children": [expr]}
-
-
-def pipe(left, right):
- return {'type': 'pipe', 'children': [left, right]}
-
-
-def projection(left, right):
- return {'type': 'projection', 'children': [left, right]}
-
-
-def subexpression(children):
- return {"type": "subexpression", 'children': children}
-
-
-def slice(start, end, step):
- return {"type": "slice", "children": [start, end, step]}
-
-
-def value_projection(left, right):
- return {'type': 'value_projection', 'children': [left, right]}
+# AST nodes have this structure:
+# {"type": <node type>", children: [], "value": ""}
+
+
+def comparator(name, first, second):
+ return {'type': 'comparator', 'children': [first, second], 'value': name}
+
+
+def current_node():
+ return {'type': 'current', 'children': []}
+
+
+def expref(expression):
+ return {'type': 'expref', 'children': [expression]}
+
+
+def function_expression(name, args):
+ return {'type': 'function_expression', 'children': args, 'value': name}
+
+
+def field(name):
+ return {"type": "field", "children": [], "value": name}
+
+
+def filter_projection(left, right, comparator):
+ return {'type': 'filter_projection', 'children': [left, right, comparator]}
+
+
+def flatten(node):
+ return {'type': 'flatten', 'children': [node]}
+
+
+def identity():
+ return {"type": "identity", 'children': []}
+
+
+def index(index):
+ return {"type": "index", "value": index, "children": []}
+
+
+def index_expression(children):
+ return {"type": "index_expression", 'children': children}
+
+
+def key_val_pair(key_name, node):
+ return {"type": "key_val_pair", 'children': [node], "value": key_name}
+
+
+def literal(literal_value):
+ return {'type': 'literal', 'value': literal_value, 'children': []}
+
+
+def multi_select_dict(nodes):
+ return {"type": "multi_select_dict", "children": nodes}
+
+
+def multi_select_list(nodes):
+ return {"type": "multi_select_list", "children": nodes}
+
+
+def or_expression(left, right):
+ return {"type": "or_expression", "children": [left, right]}
+
+
+def and_expression(left, right):
+ return {"type": "and_expression", "children": [left, right]}
+
+
+def not_expression(expr):
+ return {"type": "not_expression", "children": [expr]}
+
+
+def pipe(left, right):
+ return {'type': 'pipe', 'children': [left, right]}
+
+
+def projection(left, right):
+ return {'type': 'projection', 'children': [left, right]}
+
+
+def subexpression(children):
+ return {"type": "subexpression", 'children': children}
+
+
+def slice(start, end, step):
+ return {"type": "slice", "children": [start, end, step]}
+
+
+def value_projection(left, right):
+ return {'type': 'value_projection', 'children': [left, right]}
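
Every constructor above returns a plain dict in the
{"type": ..., "children": [...], "value": ...} shape, so a full AST is just
nested dicts. As a sketch, this is roughly the tree the parser produces for
the expression "foo.bar":

    from jmespath import ast

    tree = ast.subexpression([ast.field('foo'), ast.field('bar')])
    print(tree)
    # {'type': 'subexpression', 'children': [
    #     {'type': 'field', 'children': [], 'value': 'foo'},
    #     {'type': 'field', 'children': [], 'value': 'bar'}]}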
diff --git a/contrib/python/jmespath/jmespath/compat.py b/contrib/python/jmespath/jmespath/compat.py
index df3a0e3a87..2ed0fe7879 100644
--- a/contrib/python/jmespath/jmespath/compat.py
+++ b/contrib/python/jmespath/jmespath/compat.py
@@ -1,65 +1,65 @@
-import sys
-import inspect
-
-PY2 = sys.version_info[0] == 2
-
-
-def with_metaclass(meta, *bases):
- # Taken from flask/six.
- class metaclass(meta):
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-if PY2:
- text_type = unicode
- string_type = basestring
- from itertools import izip_longest as zip_longest
-
- def with_str_method(cls):
- """Class decorator that handles __str__ compat between py2 and py3."""
- # In python2, the text version should live in __unicode__,
- # and __str__ should return bytes.
- cls.__unicode__ = cls.__str__
- def __str__(self):
- return self.__unicode__().encode('utf-8')
- cls.__str__ = __str__
- return cls
-
- def with_repr_method(cls):
- """Class decorator that handle __repr__ with py2 and py3."""
- # This is almost the same thing as with_str_method *except*
- # it uses the unicode_escape encoding. This also means we need to be
- # careful encoding the input multiple times, so we only encode
- # if we get a unicode type.
- original_repr_method = cls.__repr__
- def __repr__(self):
- original_repr = original_repr_method(self)
- if isinstance(original_repr, text_type):
- original_repr = original_repr.encode('unicode_escape')
- return original_repr
- cls.__repr__ = __repr__
- return cls
-
- def get_methods(cls):
- for name, method in inspect.getmembers(cls,
- predicate=inspect.ismethod):
- yield name, method
-
-else:
- text_type = str
- string_type = str
- from itertools import zip_longest
-
- def with_str_method(cls):
- # In python3, we don't need to do anything, we return a str type.
- return cls
-
- def with_repr_method(cls):
- return cls
-
- def get_methods(cls):
- for name, method in inspect.getmembers(cls,
- predicate=inspect.isfunction):
- yield name, method
+import sys
+import inspect
+
+PY2 = sys.version_info[0] == 2
+
+
+def with_metaclass(meta, *bases):
+ # Taken from flask/six.
+ class metaclass(meta):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+if PY2:
+ text_type = unicode
+ string_type = basestring
+ from itertools import izip_longest as zip_longest
+
+ def with_str_method(cls):
+ """Class decorator that handles __str__ compat between py2 and py3."""
+ # In python2, the text version should live in __unicode__,
+ # and __str__ should return bytes.
+ cls.__unicode__ = cls.__str__
+ def __str__(self):
+ return self.__unicode__().encode('utf-8')
+ cls.__str__ = __str__
+ return cls
+
+ def with_repr_method(cls):
+ """Class decorator that handle __repr__ with py2 and py3."""
+ # This is almost the same thing as with_str_method *except*
+ # it uses the unicode_escape encoding. This also means we need to be
+ # careful encoding the input multiple times, so we only encode
+ # if we get a unicode type.
+ original_repr_method = cls.__repr__
+ def __repr__(self):
+ original_repr = original_repr_method(self)
+ if isinstance(original_repr, text_type):
+ original_repr = original_repr.encode('unicode_escape')
+ return original_repr
+ cls.__repr__ = __repr__
+ return cls
+
+ def get_methods(cls):
+ for name, method in inspect.getmembers(cls,
+ predicate=inspect.ismethod):
+ yield name, method
+
+else:
+ text_type = str
+ string_type = str
+ from itertools import zip_longest
+
+ def with_str_method(cls):
+ # In python3, we don't need to do anything, we return a str type.
+ return cls
+
+ def with_repr_method(cls):
+ return cls
+
+ def get_methods(cls):
+ for name, method in inspect.getmembers(cls,
+ predicate=inspect.isfunction):
+ yield name, method
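
On python3 both decorators are deliberate no-ops and text_type/string_type
simply alias str, so code written against these shims runs unchanged. A small
sketch of the python3 branch:

    from jmespath.compat import text_type, with_str_method

    @with_str_method        # no-op on py3; wires __unicode__ on py2
    class Greeting(object):
        def __str__(self):
            return u'h\u00e9llo'

    print(text_type is str)  # True on python3
    print(Greeting())        # héllo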
diff --git a/contrib/python/jmespath/jmespath/exceptions.py b/contrib/python/jmespath/jmespath/exceptions.py
index 32f2eec4e4..0156015918 100644
--- a/contrib/python/jmespath/jmespath/exceptions.py
+++ b/contrib/python/jmespath/jmespath/exceptions.py
@@ -1,122 +1,122 @@
-from jmespath.compat import with_str_method
-
-
-class JMESPathError(ValueError):
- pass
-
-
-@with_str_method
-class ParseError(JMESPathError):
- _ERROR_MESSAGE = 'Invalid jmespath expression'
- def __init__(self, lex_position, token_value, token_type,
- msg=_ERROR_MESSAGE):
- super(ParseError, self).__init__(lex_position, token_value, token_type)
- self.lex_position = lex_position
- self.token_value = token_value
- self.token_type = token_type.upper()
- self.msg = msg
- # Whatever catches the ParseError can fill in the full expression
- self.expression = None
-
- def __str__(self):
- # self.lex_position +1 to account for the starting double quote char.
- underline = ' ' * (self.lex_position + 1) + '^'
- return (
- '%s: Parse error at column %s, '
- 'token "%s" (%s), for expression:\n"%s"\n%s' % (
- self.msg, self.lex_position, self.token_value, self.token_type,
- self.expression, underline))
-
-
-@with_str_method
-class IncompleteExpressionError(ParseError):
- def set_expression(self, expression):
- self.expression = expression
- self.lex_position = len(expression)
- self.token_type = None
- self.token_value = None
-
- def __str__(self):
- # self.lex_position +1 to account for the starting double quote char.
- underline = ' ' * (self.lex_position + 1) + '^'
- return (
- 'Invalid jmespath expression: Incomplete expression:\n'
- '"%s"\n%s' % (self.expression, underline))
-
-
-@with_str_method
-class LexerError(ParseError):
- def __init__(self, lexer_position, lexer_value, message, expression=None):
- self.lexer_position = lexer_position
- self.lexer_value = lexer_value
- self.message = message
- super(LexerError, self).__init__(lexer_position,
- lexer_value,
- message)
- # Whatever catches LexerError can set this.
- self.expression = expression
-
- def __str__(self):
- underline = ' ' * self.lexer_position + '^'
- return 'Bad jmespath expression: %s:\n%s\n%s' % (
- self.message, self.expression, underline)
-
-
-@with_str_method
-class ArityError(ParseError):
- def __init__(self, expected, actual, name):
- self.expected_arity = expected
- self.actual_arity = actual
- self.function_name = name
- self.expression = None
-
- def __str__(self):
- return ("Expected %s %s for function %s(), "
- "received %s" % (
- self.expected_arity,
- self._pluralize('argument', self.expected_arity),
- self.function_name,
- self.actual_arity))
-
- def _pluralize(self, word, count):
- if count == 1:
- return word
- else:
- return word + 's'
-
-
-@with_str_method
-class VariadictArityError(ArityError):
- def __str__(self):
- return ("Expected at least %s %s for function %s(), "
- "received %s" % (
- self.expected_arity,
- self._pluralize('argument', self.expected_arity),
- self.function_name,
- self.actual_arity))
-
-
-@with_str_method
-class JMESPathTypeError(JMESPathError):
- def __init__(self, function_name, current_value, actual_type,
- expected_types):
- self.function_name = function_name
- self.current_value = current_value
- self.actual_type = actual_type
- self.expected_types = expected_types
-
- def __str__(self):
- return ('In function %s(), invalid type for value: %s, '
- 'expected one of: %s, received: "%s"' % (
- self.function_name, self.current_value,
- self.expected_types, self.actual_type))
-
-
-class EmptyExpressionError(JMESPathError):
- def __init__(self):
- super(EmptyExpressionError, self).__init__(
- "Invalid JMESPath expression: cannot be empty.")
-
-
-class UnknownFunctionError(JMESPathError):
- pass
+from jmespath.compat import with_str_method
+
+
+class JMESPathError(ValueError):
+ pass
+
+
+@with_str_method
+class ParseError(JMESPathError):
+ _ERROR_MESSAGE = 'Invalid jmespath expression'
+ def __init__(self, lex_position, token_value, token_type,
+ msg=_ERROR_MESSAGE):
+ super(ParseError, self).__init__(lex_position, token_value, token_type)
+ self.lex_position = lex_position
+ self.token_value = token_value
+ self.token_type = token_type.upper()
+ self.msg = msg
+ # Whatever catches the ParseError can fill in the full expression
+ self.expression = None
+
+ def __str__(self):
+ # self.lex_position +1 to account for the starting double quote char.
+ underline = ' ' * (self.lex_position + 1) + '^'
+ return (
+ '%s: Parse error at column %s, '
+ 'token "%s" (%s), for expression:\n"%s"\n%s' % (
+ self.msg, self.lex_position, self.token_value, self.token_type,
+ self.expression, underline))
+
+
+@with_str_method
+class IncompleteExpressionError(ParseError):
+ def set_expression(self, expression):
+ self.expression = expression
+ self.lex_position = len(expression)
+ self.token_type = None
+ self.token_value = None
+
+ def __str__(self):
+ # self.lex_position +1 to account for the starting double quote char.
+ underline = ' ' * (self.lex_position + 1) + '^'
+ return (
+ 'Invalid jmespath expression: Incomplete expression:\n'
+ '"%s"\n%s' % (self.expression, underline))
+
+
+@with_str_method
+class LexerError(ParseError):
+ def __init__(self, lexer_position, lexer_value, message, expression=None):
+ self.lexer_position = lexer_position
+ self.lexer_value = lexer_value
+ self.message = message
+ super(LexerError, self).__init__(lexer_position,
+ lexer_value,
+ message)
+ # Whatever catches LexerError can set this.
+ self.expression = expression
+
+ def __str__(self):
+ underline = ' ' * self.lexer_position + '^'
+ return 'Bad jmespath expression: %s:\n%s\n%s' % (
+ self.message, self.expression, underline)
+
+
+@with_str_method
+class ArityError(ParseError):
+ def __init__(self, expected, actual, name):
+ self.expected_arity = expected
+ self.actual_arity = actual
+ self.function_name = name
+ self.expression = None
+
+ def __str__(self):
+ return ("Expected %s %s for function %s(), "
+ "received %s" % (
+ self.expected_arity,
+ self._pluralize('argument', self.expected_arity),
+ self.function_name,
+ self.actual_arity))
+
+ def _pluralize(self, word, count):
+ if count == 1:
+ return word
+ else:
+ return word + 's'
+
+
+@with_str_method
+class VariadictArityError(ArityError):
+ def __str__(self):
+ return ("Expected at least %s %s for function %s(), "
+ "received %s" % (
+ self.expected_arity,
+ self._pluralize('argument', self.expected_arity),
+ self.function_name,
+ self.actual_arity))
+
+
+@with_str_method
+class JMESPathTypeError(JMESPathError):
+ def __init__(self, function_name, current_value, actual_type,
+ expected_types):
+ self.function_name = function_name
+ self.current_value = current_value
+ self.actual_type = actual_type
+ self.expected_types = expected_types
+
+ def __str__(self):
+ return ('In function %s(), invalid type for value: %s, '
+ 'expected one of: %s, received: "%s"' % (
+ self.function_name, self.current_value,
+ self.expected_types, self.actual_type))
+
+
+class EmptyExpressionError(JMESPathError):
+ def __init__(self):
+ super(EmptyExpressionError, self).__init__(
+ "Invalid JMESPath expression: cannot be empty.")
+
+
+class UnknownFunctionError(JMESPathError):
+ pass
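
All parse-time failures derive from ParseError, whose __str__ renders the
expression with a caret underline at the offending column. A sketch that
triggers one; the deliberately unfinished expression is illustrative:

    import jmespath
    from jmespath.exceptions import ParseError

    try:
        jmespath.compile('foo.[')   # incomplete on purpose
    except ParseError as e:
        print(e)   # expression echoed with a ^ under the failure point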
diff --git a/contrib/python/jmespath/jmespath/functions.py b/contrib/python/jmespath/jmespath/functions.py
index 2ab913387b..31dab05169 100644
--- a/contrib/python/jmespath/jmespath/functions.py
+++ b/contrib/python/jmespath/jmespath/functions.py
@@ -1,362 +1,362 @@
-import math
-import json
-
-from jmespath import exceptions
-from jmespath.compat import string_type as STRING_TYPE
-from jmespath.compat import get_methods, with_metaclass
-
-
-# python types -> jmespath types
-TYPES_MAP = {
- 'bool': 'boolean',
- 'list': 'array',
- 'dict': 'object',
- 'NoneType': 'null',
- 'unicode': 'string',
- 'str': 'string',
- 'float': 'number',
- 'int': 'number',
- 'long': 'number',
- 'OrderedDict': 'object',
- '_Projection': 'array',
- '_Expression': 'expref',
-}
-
-
-# jmespath types -> python types
-REVERSE_TYPES_MAP = {
- 'boolean': ('bool',),
- 'array': ('list', '_Projection'),
- 'object': ('dict', 'OrderedDict',),
- 'null': ('NoneType',),
- 'string': ('unicode', 'str'),
- 'number': ('float', 'int', 'long'),
- 'expref': ('_Expression',),
-}
-
-
-def signature(*arguments):
- def _record_signature(func):
- func.signature = arguments
- return func
- return _record_signature
-
-
-class FunctionRegistry(type):
- def __init__(cls, name, bases, attrs):
- cls._populate_function_table()
- super(FunctionRegistry, cls).__init__(name, bases, attrs)
-
- def _populate_function_table(cls):
- function_table = {}
- # Any method with a @signature decorator that also
- # starts with "_func_" is registered as a function.
- # _func_max_by -> max_by function.
- for name, method in get_methods(cls):
- if not name.startswith('_func_'):
- continue
- signature = getattr(method, 'signature', None)
- if signature is not None:
- function_table[name[6:]] = {
- 'function': method,
- 'signature': signature,
- }
- cls.FUNCTION_TABLE = function_table
-
-
-class Functions(with_metaclass(FunctionRegistry, object)):
-
- FUNCTION_TABLE = {
- }
-
- def call_function(self, function_name, resolved_args):
- try:
- spec = self.FUNCTION_TABLE[function_name]
- except KeyError:
- raise exceptions.UnknownFunctionError(
- "Unknown function: %s()" % function_name)
- function = spec['function']
- signature = spec['signature']
- self._validate_arguments(resolved_args, signature, function_name)
- return function(self, *resolved_args)
-
- def _validate_arguments(self, args, signature, function_name):
- if signature and signature[-1].get('variadic'):
- if len(args) < len(signature):
- raise exceptions.VariadictArityError(
- len(signature), len(args), function_name)
- elif len(args) != len(signature):
- raise exceptions.ArityError(
- len(signature), len(args), function_name)
- return self._type_check(args, signature, function_name)
-
- def _type_check(self, actual, signature, function_name):
- for i in range(len(signature)):
- allowed_types = signature[i]['types']
- if allowed_types:
- self._type_check_single(actual[i], allowed_types,
- function_name)
-
- def _type_check_single(self, current, types, function_name):
- # Type checking involves checking the top level type,
- # and in the case of arrays, potentially checking the types
- # of each element.
- allowed_types, allowed_subtypes = self._get_allowed_pytypes(types)
- # We're not using isinstance() on purpose.
- # The type model for jmespath does not map
- # 1-1 with python types (booleans are considered
- # integers in python for example).
- actual_typename = type(current).__name__
- if actual_typename not in allowed_types:
- raise exceptions.JMESPathTypeError(
- function_name, current,
- self._convert_to_jmespath_type(actual_typename), types)
- # If we're dealing with a list type, we can have
- # additional restrictions on the type of the list
- # elements (for example a function can require a
- # list of numbers or a list of strings).
- # Arrays are the only types that can have subtypes.
- if allowed_subtypes:
- self._subtype_check(current, allowed_subtypes,
- types, function_name)
-
- def _get_allowed_pytypes(self, types):
- allowed_types = []
- allowed_subtypes = []
- for t in types:
- type_ = t.split('-', 1)
- if len(type_) == 2:
- type_, subtype = type_
- allowed_subtypes.append(REVERSE_TYPES_MAP[subtype])
- else:
- type_ = type_[0]
- allowed_types.extend(REVERSE_TYPES_MAP[type_])
- return allowed_types, allowed_subtypes
-
- def _subtype_check(self, current, allowed_subtypes, types, function_name):
- if len(allowed_subtypes) == 1:
- # The easy case, we know up front what type
- # we need to validate.
- allowed_subtypes = allowed_subtypes[0]
- for element in current:
- actual_typename = type(element).__name__
- if actual_typename not in allowed_subtypes:
- raise exceptions.JMESPathTypeError(
- function_name, element, actual_typename, types)
- elif len(allowed_subtypes) > 1 and current:
- # Dynamic type validation. Based on the first
- # type we see, we validate that the remaining types
- # match.
- first = type(current[0]).__name__
- for subtypes in allowed_subtypes:
- if first in subtypes:
- allowed = subtypes
- break
- else:
- raise exceptions.JMESPathTypeError(
- function_name, current[0], first, types)
- for element in current:
- actual_typename = type(element).__name__
- if actual_typename not in allowed:
- raise exceptions.JMESPathTypeError(
- function_name, element, actual_typename, types)
-
- @signature({'types': ['number']})
- def _func_abs(self, arg):
- return abs(arg)
-
- @signature({'types': ['array-number']})
- def _func_avg(self, arg):
- if arg:
- return sum(arg) / float(len(arg))
- else:
- return None
-
- @signature({'types': [], 'variadic': True})
- def _func_not_null(self, *arguments):
- for argument in arguments:
- if argument is not None:
- return argument
-
- @signature({'types': []})
- def _func_to_array(self, arg):
- if isinstance(arg, list):
- return arg
- else:
- return [arg]
-
- @signature({'types': []})
- def _func_to_string(self, arg):
- if isinstance(arg, STRING_TYPE):
- return arg
- else:
- return json.dumps(arg, separators=(',', ':'),
- default=str)
-
- @signature({'types': []})
- def _func_to_number(self, arg):
- if isinstance(arg, (list, dict, bool)):
- return None
- elif arg is None:
- return None
- elif isinstance(arg, (int, float)):
- return arg
- else:
- try:
- return int(arg)
- except ValueError:
- try:
- return float(arg)
- except ValueError:
- return None
-
- @signature({'types': ['array', 'string']}, {'types': []})
- def _func_contains(self, subject, search):
- return search in subject
-
- @signature({'types': ['string', 'array', 'object']})
- def _func_length(self, arg):
- return len(arg)
-
- @signature({'types': ['string']}, {'types': ['string']})
- def _func_ends_with(self, search, suffix):
- return search.endswith(suffix)
-
- @signature({'types': ['string']}, {'types': ['string']})
- def _func_starts_with(self, search, suffix):
- return search.startswith(suffix)
-
- @signature({'types': ['array', 'string']})
- def _func_reverse(self, arg):
- if isinstance(arg, STRING_TYPE):
- return arg[::-1]
- else:
- return list(reversed(arg))
-
- @signature({"types": ['number']})
- def _func_ceil(self, arg):
- return math.ceil(arg)
-
- @signature({"types": ['number']})
- def _func_floor(self, arg):
- return math.floor(arg)
-
- @signature({"types": ['string']}, {"types": ['array-string']})
- def _func_join(self, separator, array):
- return separator.join(array)
-
- @signature({'types': ['expref']}, {'types': ['array']})
- def _func_map(self, expref, arg):
- result = []
- for element in arg:
- result.append(expref.visit(expref.expression, element))
- return result
-
- @signature({"types": ['array-number', 'array-string']})
- def _func_max(self, arg):
- if arg:
- return max(arg)
- else:
- return None
-
- @signature({"types": ["object"], "variadic": True})
- def _func_merge(self, *arguments):
- merged = {}
- for arg in arguments:
- merged.update(arg)
- return merged
-
- @signature({"types": ['array-number', 'array-string']})
- def _func_min(self, arg):
- if arg:
- return min(arg)
- else:
- return None
-
- @signature({"types": ['array-string', 'array-number']})
- def _func_sort(self, arg):
- return list(sorted(arg))
-
- @signature({"types": ['array-number']})
- def _func_sum(self, arg):
- return sum(arg)
-
- @signature({"types": ['object']})
- def _func_keys(self, arg):
- # To be consistent with .values()
- # should we also return the indices of a list?
- return list(arg.keys())
-
- @signature({"types": ['object']})
- def _func_values(self, arg):
- return list(arg.values())
-
- @signature({'types': []})
- def _func_type(self, arg):
- if isinstance(arg, STRING_TYPE):
- return "string"
- elif isinstance(arg, bool):
- return "boolean"
- elif isinstance(arg, list):
- return "array"
- elif isinstance(arg, dict):
- return "object"
- elif isinstance(arg, (float, int)):
- return "number"
- elif arg is None:
- return "null"
-
- @signature({'types': ['array']}, {'types': ['expref']})
- def _func_sort_by(self, array, expref):
- if not array:
- return array
- # sort_by allows for the expref to be either a number or
- # a string, so we have some special logic to handle this.
- # We evaluate the first array element and verify that it's
- # either a string or a number. We then create a key function
- # that validates that type, which requires that remaining array
- # elements resolve to the same type as the first element.
- required_type = self._convert_to_jmespath_type(
- type(expref.visit(expref.expression, array[0])).__name__)
- if required_type not in ['number', 'string']:
- raise exceptions.JMESPathTypeError(
- 'sort_by', array[0], required_type, ['string', 'number'])
- keyfunc = self._create_key_func(expref,
- [required_type],
- 'sort_by')
- return list(sorted(array, key=keyfunc))
-
- @signature({'types': ['array']}, {'types': ['expref']})
- def _func_min_by(self, array, expref):
- keyfunc = self._create_key_func(expref,
- ['number', 'string'],
- 'min_by')
- if array:
- return min(array, key=keyfunc)
- else:
- return None
-
- @signature({'types': ['array']}, {'types': ['expref']})
- def _func_max_by(self, array, expref):
- keyfunc = self._create_key_func(expref,
- ['number', 'string'],
- 'max_by')
- if array:
- return max(array, key=keyfunc)
- else:
- return None
-
- def _create_key_func(self, expref, allowed_types, function_name):
- def keyfunc(x):
- result = expref.visit(expref.expression, x)
- actual_typename = type(result).__name__
- jmespath_type = self._convert_to_jmespath_type(actual_typename)
- # allowed_types is in terms of jmespath types, not python types.
- if jmespath_type not in allowed_types:
- raise exceptions.JMESPathTypeError(
- function_name, result, jmespath_type, allowed_types)
- return result
- return keyfunc
-
- def _convert_to_jmespath_type(self, pyobject):
- return TYPES_MAP.get(pyobject, 'unknown')
+import math
+import json
+
+from jmespath import exceptions
+from jmespath.compat import string_type as STRING_TYPE
+from jmespath.compat import get_methods, with_metaclass
+
+
+# python types -> jmespath types
+TYPES_MAP = {
+ 'bool': 'boolean',
+ 'list': 'array',
+ 'dict': 'object',
+ 'NoneType': 'null',
+ 'unicode': 'string',
+ 'str': 'string',
+ 'float': 'number',
+ 'int': 'number',
+ 'long': 'number',
+ 'OrderedDict': 'object',
+ '_Projection': 'array',
+ '_Expression': 'expref',
+}
+
+
+# jmespath types -> python types
+REVERSE_TYPES_MAP = {
+ 'boolean': ('bool',),
+ 'array': ('list', '_Projection'),
+ 'object': ('dict', 'OrderedDict',),
+ 'null': ('NoneType',),
+ 'string': ('unicode', 'str'),
+ 'number': ('float', 'int', 'long'),
+ 'expref': ('_Expression',),
+}
+
+
+def signature(*arguments):
+ def _record_signature(func):
+ func.signature = arguments
+ return func
+ return _record_signature
+
+
+class FunctionRegistry(type):
+ def __init__(cls, name, bases, attrs):
+ cls._populate_function_table()
+ super(FunctionRegistry, cls).__init__(name, bases, attrs)
+
+ def _populate_function_table(cls):
+ function_table = {}
+ # Any method with a @signature decorator that also
+ # starts with "_func_" is registered as a function.
+ # _func_max_by -> max_by function.
+ for name, method in get_methods(cls):
+ if not name.startswith('_func_'):
+ continue
+ signature = getattr(method, 'signature', None)
+ if signature is not None:
+ function_table[name[6:]] = {
+ 'function': method,
+ 'signature': signature,
+ }
+ cls.FUNCTION_TABLE = function_table
+
+
+class Functions(with_metaclass(FunctionRegistry, object)):
+
+ FUNCTION_TABLE = {
+ }
+
+ def call_function(self, function_name, resolved_args):
+ try:
+ spec = self.FUNCTION_TABLE[function_name]
+ except KeyError:
+ raise exceptions.UnknownFunctionError(
+ "Unknown function: %s()" % function_name)
+ function = spec['function']
+ signature = spec['signature']
+ self._validate_arguments(resolved_args, signature, function_name)
+ return function(self, *resolved_args)
+
+ def _validate_arguments(self, args, signature, function_name):
+ if signature and signature[-1].get('variadic'):
+ if len(args) < len(signature):
+ raise exceptions.VariadictArityError(
+ len(signature), len(args), function_name)
+ elif len(args) != len(signature):
+ raise exceptions.ArityError(
+ len(signature), len(args), function_name)
+ return self._type_check(args, signature, function_name)
+
+ def _type_check(self, actual, signature, function_name):
+ for i in range(len(signature)):
+ allowed_types = signature[i]['types']
+ if allowed_types:
+ self._type_check_single(actual[i], allowed_types,
+ function_name)
+
+ def _type_check_single(self, current, types, function_name):
+ # Type checking involves checking the top level type,
+ # and in the case of arrays, potentially checking the types
+ # of each element.
+ allowed_types, allowed_subtypes = self._get_allowed_pytypes(types)
+ # We're not using isinstance() on purpose.
+ # The type model for jmespath does not map
+ # 1-1 with python types (booleans are considered
+ # integers in python for example).
+ actual_typename = type(current).__name__
+ if actual_typename not in allowed_types:
+ raise exceptions.JMESPathTypeError(
+ function_name, current,
+ self._convert_to_jmespath_type(actual_typename), types)
+ # If we're dealing with a list type, we can have
+ # additional restrictions on the type of the list
+ # elements (for example a function can require a
+ # list of numbers or a list of strings).
+ # Arrays are the only types that can have subtypes.
+ if allowed_subtypes:
+ self._subtype_check(current, allowed_subtypes,
+ types, function_name)
+
+ def _get_allowed_pytypes(self, types):
+ allowed_types = []
+ allowed_subtypes = []
+ for t in types:
+ type_ = t.split('-', 1)
+ if len(type_) == 2:
+ type_, subtype = type_
+ allowed_subtypes.append(REVERSE_TYPES_MAP[subtype])
+ else:
+ type_ = type_[0]
+ allowed_types.extend(REVERSE_TYPES_MAP[type_])
+ return allowed_types, allowed_subtypes
+
+ def _subtype_check(self, current, allowed_subtypes, types, function_name):
+ if len(allowed_subtypes) == 1:
+ # The easy case, we know up front what type
+ # we need to validate.
+ allowed_subtypes = allowed_subtypes[0]
+ for element in current:
+ actual_typename = type(element).__name__
+ if actual_typename not in allowed_subtypes:
+ raise exceptions.JMESPathTypeError(
+ function_name, element, actual_typename, types)
+ elif len(allowed_subtypes) > 1 and current:
+ # Dynamic type validation. Based on the first
+ # type we see, we validate that the remaining types
+ # match.
+ first = type(current[0]).__name__
+ for subtypes in allowed_subtypes:
+ if first in subtypes:
+ allowed = subtypes
+ break
+ else:
+ raise exceptions.JMESPathTypeError(
+ function_name, current[0], first, types)
+ for element in current:
+ actual_typename = type(element).__name__
+ if actual_typename not in allowed:
+ raise exceptions.JMESPathTypeError(
+ function_name, element, actual_typename, types)
+
+ @signature({'types': ['number']})
+ def _func_abs(self, arg):
+ return abs(arg)
+
+ @signature({'types': ['array-number']})
+ def _func_avg(self, arg):
+ if arg:
+ return sum(arg) / float(len(arg))
+ else:
+ return None
+
+ @signature({'types': [], 'variadic': True})
+ def _func_not_null(self, *arguments):
+ for argument in arguments:
+ if argument is not None:
+ return argument
+
+ @signature({'types': []})
+ def _func_to_array(self, arg):
+ if isinstance(arg, list):
+ return arg
+ else:
+ return [arg]
+
+ @signature({'types': []})
+ def _func_to_string(self, arg):
+ if isinstance(arg, STRING_TYPE):
+ return arg
+ else:
+ return json.dumps(arg, separators=(',', ':'),
+ default=str)
+
+ @signature({'types': []})
+ def _func_to_number(self, arg):
+ if isinstance(arg, (list, dict, bool)):
+ return None
+ elif arg is None:
+ return None
+ elif isinstance(arg, (int, float)):
+ return arg
+ else:
+ try:
+ return int(arg)
+ except ValueError:
+ try:
+ return float(arg)
+ except ValueError:
+ return None
+
+ @signature({'types': ['array', 'string']}, {'types': []})
+ def _func_contains(self, subject, search):
+ return search in subject
+
+ @signature({'types': ['string', 'array', 'object']})
+ def _func_length(self, arg):
+ return len(arg)
+
+ @signature({'types': ['string']}, {'types': ['string']})
+ def _func_ends_with(self, search, suffix):
+ return search.endswith(suffix)
+
+ @signature({'types': ['string']}, {'types': ['string']})
+ def _func_starts_with(self, search, suffix):
+ return search.startswith(suffix)
+
+ @signature({'types': ['array', 'string']})
+ def _func_reverse(self, arg):
+ if isinstance(arg, STRING_TYPE):
+ return arg[::-1]
+ else:
+ return list(reversed(arg))
+
+ @signature({"types": ['number']})
+ def _func_ceil(self, arg):
+ return math.ceil(arg)
+
+ @signature({"types": ['number']})
+ def _func_floor(self, arg):
+ return math.floor(arg)
+
+ @signature({"types": ['string']}, {"types": ['array-string']})
+ def _func_join(self, separator, array):
+ return separator.join(array)
+
+ @signature({'types': ['expref']}, {'types': ['array']})
+ def _func_map(self, expref, arg):
+ result = []
+ for element in arg:
+ result.append(expref.visit(expref.expression, element))
+ return result
+
+ @signature({"types": ['array-number', 'array-string']})
+ def _func_max(self, arg):
+ if arg:
+ return max(arg)
+ else:
+ return None
+
+ @signature({"types": ["object"], "variadic": True})
+ def _func_merge(self, *arguments):
+ merged = {}
+ for arg in arguments:
+ merged.update(arg)
+ return merged
+
+ @signature({"types": ['array-number', 'array-string']})
+ def _func_min(self, arg):
+ if arg:
+ return min(arg)
+ else:
+ return None
+
+ @signature({"types": ['array-string', 'array-number']})
+ def _func_sort(self, arg):
+ return list(sorted(arg))
+
+ @signature({"types": ['array-number']})
+ def _func_sum(self, arg):
+ return sum(arg)
+
+ @signature({"types": ['object']})
+ def _func_keys(self, arg):
+ # To be consistent with .values()
+ # should we also return the indices of a list?
+ return list(arg.keys())
+
+ @signature({"types": ['object']})
+ def _func_values(self, arg):
+ return list(arg.values())
+
+ @signature({'types': []})
+ def _func_type(self, arg):
+ if isinstance(arg, STRING_TYPE):
+ return "string"
+ elif isinstance(arg, bool):
+ return "boolean"
+ elif isinstance(arg, list):
+ return "array"
+ elif isinstance(arg, dict):
+ return "object"
+ elif isinstance(arg, (float, int)):
+ return "number"
+ elif arg is None:
+ return "null"
+
+ @signature({'types': ['array']}, {'types': ['expref']})
+ def _func_sort_by(self, array, expref):
+ if not array:
+ return array
+ # sort_by allows for the expref to be either a number or
+ # a string, so we have some special logic to handle this.
+ # We evaluate the first array element and verify that it's
+ # either a string or a number. We then create a key function
+ # that validates that type, which requires that remaining array
+ # elements resolve to the same type as the first element.
+ required_type = self._convert_to_jmespath_type(
+ type(expref.visit(expref.expression, array[0])).__name__)
+ if required_type not in ['number', 'string']:
+ raise exceptions.JMESPathTypeError(
+ 'sort_by', array[0], required_type, ['string', 'number'])
+ keyfunc = self._create_key_func(expref,
+ [required_type],
+ 'sort_by')
+ return list(sorted(array, key=keyfunc))
+
+ @signature({'types': ['array']}, {'types': ['expref']})
+ def _func_min_by(self, array, expref):
+ keyfunc = self._create_key_func(expref,
+ ['number', 'string'],
+ 'min_by')
+ if array:
+ return min(array, key=keyfunc)
+ else:
+ return None
+
+ @signature({'types': ['array']}, {'types': ['expref']})
+ def _func_max_by(self, array, expref):
+ keyfunc = self._create_key_func(expref,
+ ['number', 'string'],
+ 'max_by')
+ if array:
+ return max(array, key=keyfunc)
+ else:
+ return None
+
+ def _create_key_func(self, expref, allowed_types, function_name):
+ def keyfunc(x):
+ result = expref.visit(expref.expression, x)
+ actual_typename = type(result).__name__
+ jmespath_type = self._convert_to_jmespath_type(actual_typename)
+ # allowed_types is in terms of jmespath types, not python types.
+ if jmespath_type not in allowed_types:
+ raise exceptions.JMESPathTypeError(
+ function_name, result, jmespath_type, allowed_types)
+ return result
+ return keyfunc
+
+ def _convert_to_jmespath_type(self, pyobject):
+ return TYPES_MAP.get(pyobject, 'unknown')
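
Because FunctionRegistry is a metaclass, extending the language is just
subclassing: any method named _func_<name> that carries a @signature
decorator is registered automatically, and its arguments are arity- and
type-checked before dispatch. A sketch with an invented shout() function:

    import jmespath
    from jmespath import functions

    class CustomFunctions(functions.Functions):
        # Registered as shout(); the signature enforces one string argument.
        @functions.signature({'types': ['string']})
        def _func_shout(self, s):
            return s.upper()

    options = jmespath.Options(custom_functions=CustomFunctions())
    print(jmespath.search('shout(greeting)', {'greeting': 'hi'},
                          options=options))   # HI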
diff --git a/contrib/python/jmespath/jmespath/lexer.py b/contrib/python/jmespath/jmespath/lexer.py
index 6062946889..8db05e3760 100644
--- a/contrib/python/jmespath/jmespath/lexer.py
+++ b/contrib/python/jmespath/jmespath/lexer.py
@@ -1,208 +1,208 @@
-import string
-import warnings
-from json import loads
-
-from jmespath.exceptions import LexerError, EmptyExpressionError
-
-
-class Lexer(object):
- START_IDENTIFIER = set(string.ascii_letters + '_')
- VALID_IDENTIFIER = set(string.ascii_letters + string.digits + '_')
- VALID_NUMBER = set(string.digits)
- WHITESPACE = set(" \t\n\r")
- SIMPLE_TOKENS = {
- '.': 'dot',
- '*': 'star',
- ']': 'rbracket',
- ',': 'comma',
- ':': 'colon',
- '@': 'current',
- '(': 'lparen',
- ')': 'rparen',
- '{': 'lbrace',
- '}': 'rbrace',
- }
-
- def tokenize(self, expression):
- self._initialize_for_expression(expression)
- while self._current is not None:
- if self._current in self.SIMPLE_TOKENS:
- yield {'type': self.SIMPLE_TOKENS[self._current],
- 'value': self._current,
- 'start': self._position, 'end': self._position + 1}
- self._next()
- elif self._current in self.START_IDENTIFIER:
- start = self._position
- buff = self._current
- while self._next() in self.VALID_IDENTIFIER:
- buff += self._current
- yield {'type': 'unquoted_identifier', 'value': buff,
- 'start': start, 'end': start + len(buff)}
- elif self._current in self.WHITESPACE:
- self._next()
- elif self._current == '[':
- start = self._position
- next_char = self._next()
- if next_char == ']':
- self._next()
- yield {'type': 'flatten', 'value': '[]',
- 'start': start, 'end': start + 2}
- elif next_char == '?':
- self._next()
- yield {'type': 'filter', 'value': '[?',
- 'start': start, 'end': start + 2}
- else:
- yield {'type': 'lbracket', 'value': '[',
- 'start': start, 'end': start + 1}
- elif self._current == "'":
- yield self._consume_raw_string_literal()
- elif self._current == '|':
- yield self._match_or_else('|', 'or', 'pipe')
- elif self._current == '&':
- yield self._match_or_else('&', 'and', 'expref')
- elif self._current == '`':
- yield self._consume_literal()
- elif self._current in self.VALID_NUMBER:
- start = self._position
- buff = self._consume_number()
- yield {'type': 'number', 'value': int(buff),
- 'start': start, 'end': start + len(buff)}
- elif self._current == '-':
- # Negative number.
- start = self._position
- buff = self._consume_number()
- if len(buff) > 1:
- yield {'type': 'number', 'value': int(buff),
- 'start': start, 'end': start + len(buff)}
- else:
- raise LexerError(lexer_position=start,
- lexer_value=buff,
- message="Unknown token '%s'" % buff)
- elif self._current == '"':
- yield self._consume_quoted_identifier()
- elif self._current == '<':
- yield self._match_or_else('=', 'lte', 'lt')
- elif self._current == '>':
- yield self._match_or_else('=', 'gte', 'gt')
- elif self._current == '!':
- yield self._match_or_else('=', 'ne', 'not')
- elif self._current == '=':
- if self._next() == '=':
- yield {'type': 'eq', 'value': '==',
- 'start': self._position - 1, 'end': self._position}
- self._next()
- else:
- if self._current is None:
- # If we're at the EOF, we never advanced
- # the position so we don't need to rewind
- # it back one location.
- position = self._position
- else:
- position = self._position - 1
- raise LexerError(
- lexer_position=position,
- lexer_value='=',
- message="Unknown token '='")
- else:
- raise LexerError(lexer_position=self._position,
- lexer_value=self._current,
- message="Unknown token %s" % self._current)
- yield {'type': 'eof', 'value': '',
- 'start': self._length, 'end': self._length}
-
- def _consume_number(self):
- start = self._position
- buff = self._current
- while self._next() in self.VALID_NUMBER:
- buff += self._current
- return buff
-
- def _initialize_for_expression(self, expression):
- if not expression:
- raise EmptyExpressionError()
- self._position = 0
- self._expression = expression
- self._chars = list(self._expression)
- self._current = self._chars[self._position]
- self._length = len(self._expression)
-
- def _next(self):
- if self._position == self._length - 1:
- self._current = None
- else:
- self._position += 1
- self._current = self._chars[self._position]
- return self._current
-
- def _consume_until(self, delimiter):
- # Consume until the delimiter is reached,
- # allowing for the delimiter to be escaped with "\".
- start = self._position
- buff = ''
- self._next()
- while self._current != delimiter:
- if self._current == '\\':
- buff += '\\'
- self._next()
- if self._current is None:
- # We're at the EOF.
- raise LexerError(lexer_position=start,
- lexer_value=self._expression[start:],
- message="Unclosed %s delimiter" % delimiter)
- buff += self._current
- self._next()
- # Skip the closing delimiter.
- self._next()
- return buff
-
- def _consume_literal(self):
- start = self._position
- lexeme = self._consume_until('`').replace('\\`', '`')
- try:
- # Assume it is valid JSON and attempt to parse.
- parsed_json = loads(lexeme)
- except ValueError:
- try:
- # Invalid JSON values should be converted to quoted
- # JSON strings during the JEP-12 deprecation period.
- parsed_json = loads('"%s"' % lexeme.lstrip())
- warnings.warn("deprecated string literal syntax",
- PendingDeprecationWarning)
- except ValueError:
- raise LexerError(lexer_position=start,
- lexer_value=self._expression[start:],
- message="Bad token %s" % lexeme)
- token_len = self._position - start
- return {'type': 'literal', 'value': parsed_json,
- 'start': start, 'end': token_len}
-
- def _consume_quoted_identifier(self):
- start = self._position
- lexeme = '"' + self._consume_until('"') + '"'
- try:
- token_len = self._position - start
- return {'type': 'quoted_identifier', 'value': loads(lexeme),
- 'start': start, 'end': token_len}
- except ValueError as e:
- error_message = str(e).split(':')[0]
- raise LexerError(lexer_position=start,
- lexer_value=lexeme,
- message=error_message)
-
- def _consume_raw_string_literal(self):
- start = self._position
- lexeme = self._consume_until("'").replace("\\'", "'")
- token_len = self._position - start
- return {'type': 'literal', 'value': lexeme,
- 'start': start, 'end': token_len}
-
- def _match_or_else(self, expected, match_type, else_type):
- start = self._position
- current = self._current
- next_char = self._next()
- if next_char == expected:
- self._next()
- return {'type': match_type, 'value': current + next_char,
- 'start': start, 'end': start + 1}
- return {'type': else_type, 'value': current,
- 'start': start, 'end': start}
+import string
+import warnings
+from json import loads
+
+from jmespath.exceptions import LexerError, EmptyExpressionError
+
+
+class Lexer(object):
+ START_IDENTIFIER = set(string.ascii_letters + '_')
+ VALID_IDENTIFIER = set(string.ascii_letters + string.digits + '_')
+ VALID_NUMBER = set(string.digits)
+ WHITESPACE = set(" \t\n\r")
+ SIMPLE_TOKENS = {
+ '.': 'dot',
+ '*': 'star',
+ ']': 'rbracket',
+ ',': 'comma',
+ ':': 'colon',
+ '@': 'current',
+ '(': 'lparen',
+ ')': 'rparen',
+ '{': 'lbrace',
+ '}': 'rbrace',
+ }
+
+ def tokenize(self, expression):
+ self._initialize_for_expression(expression)
+ while self._current is not None:
+ if self._current in self.SIMPLE_TOKENS:
+ yield {'type': self.SIMPLE_TOKENS[self._current],
+ 'value': self._current,
+ 'start': self._position, 'end': self._position + 1}
+ self._next()
+ elif self._current in self.START_IDENTIFIER:
+ start = self._position
+ buff = self._current
+ while self._next() in self.VALID_IDENTIFIER:
+ buff += self._current
+ yield {'type': 'unquoted_identifier', 'value': buff,
+ 'start': start, 'end': start + len(buff)}
+ elif self._current in self.WHITESPACE:
+ self._next()
+ elif self._current == '[':
+ start = self._position
+ next_char = self._next()
+ if next_char == ']':
+ self._next()
+ yield {'type': 'flatten', 'value': '[]',
+ 'start': start, 'end': start + 2}
+ elif next_char == '?':
+ self._next()
+ yield {'type': 'filter', 'value': '[?',
+ 'start': start, 'end': start + 2}
+ else:
+ yield {'type': 'lbracket', 'value': '[',
+ 'start': start, 'end': start + 1}
+ elif self._current == "'":
+ yield self._consume_raw_string_literal()
+ elif self._current == '|':
+ yield self._match_or_else('|', 'or', 'pipe')
+ elif self._current == '&':
+ yield self._match_or_else('&', 'and', 'expref')
+ elif self._current == '`':
+ yield self._consume_literal()
+ elif self._current in self.VALID_NUMBER:
+ start = self._position
+ buff = self._consume_number()
+ yield {'type': 'number', 'value': int(buff),
+ 'start': start, 'end': start + len(buff)}
+ elif self._current == '-':
+ # Negative number.
+ start = self._position
+ buff = self._consume_number()
+ if len(buff) > 1:
+ yield {'type': 'number', 'value': int(buff),
+ 'start': start, 'end': start + len(buff)}
+ else:
+ raise LexerError(lexer_position=start,
+ lexer_value=buff,
+ message="Unknown token '%s'" % buff)
+ elif self._current == '"':
+ yield self._consume_quoted_identifier()
+ elif self._current == '<':
+ yield self._match_or_else('=', 'lte', 'lt')
+ elif self._current == '>':
+ yield self._match_or_else('=', 'gte', 'gt')
+ elif self._current == '!':
+ yield self._match_or_else('=', 'ne', 'not')
+ elif self._current == '=':
+ if self._next() == '=':
+ yield {'type': 'eq', 'value': '==',
+ 'start': self._position - 1, 'end': self._position}
+ self._next()
+ else:
+ if self._current is None:
+ # If we're at the EOF, we never advanced
+ # the position so we don't need to rewind
+ # it back one location.
+ position = self._position
+ else:
+ position = self._position - 1
+ raise LexerError(
+ lexer_position=position,
+ lexer_value='=',
+ message="Unknown token '='")
+ else:
+ raise LexerError(lexer_position=self._position,
+ lexer_value=self._current,
+ message="Unknown token %s" % self._current)
+ yield {'type': 'eof', 'value': '',
+ 'start': self._length, 'end': self._length}
+
+ def _consume_number(self):
+ start = self._position
+ buff = self._current
+ while self._next() in self.VALID_NUMBER:
+ buff += self._current
+ return buff
+
+ def _initialize_for_expression(self, expression):
+ if not expression:
+ raise EmptyExpressionError()
+ self._position = 0
+ self._expression = expression
+ self._chars = list(self._expression)
+ self._current = self._chars[self._position]
+ self._length = len(self._expression)
+
+ def _next(self):
+ if self._position == self._length - 1:
+ self._current = None
+ else:
+ self._position += 1
+ self._current = self._chars[self._position]
+ return self._current
+
+ def _consume_until(self, delimiter):
+ # Consume until the delimiter is reached,
+ # allowing for the delimiter to be escaped with "\".
+ start = self._position
+ buff = ''
+ self._next()
+ while self._current != delimiter:
+ if self._current == '\\':
+ buff += '\\'
+ self._next()
+ if self._current is None:
+ # We're at the EOF.
+ raise LexerError(lexer_position=start,
+ lexer_value=self._expression[start:],
+ message="Unclosed %s delimiter" % delimiter)
+ buff += self._current
+ self._next()
+ # Skip the closing delimiter.
+ self._next()
+ return buff
+
+ def _consume_literal(self):
+ start = self._position
+ lexeme = self._consume_until('`').replace('\\`', '`')
+ try:
+ # Assume it is valid JSON and attempt to parse.
+ parsed_json = loads(lexeme)
+ except ValueError:
+ try:
+ # Invalid JSON values should be converted to quoted
+ # JSON strings during the JEP-12 deprecation period.
+ parsed_json = loads('"%s"' % lexeme.lstrip())
+ warnings.warn("deprecated string literal syntax",
+ PendingDeprecationWarning)
+ except ValueError:
+ raise LexerError(lexer_position=start,
+ lexer_value=self._expression[start:],
+ message="Bad token %s" % lexeme)
+ token_len = self._position - start
+ return {'type': 'literal', 'value': parsed_json,
+ 'start': start, 'end': token_len}
+
+ def _consume_quoted_identifier(self):
+ start = self._position
+ lexeme = '"' + self._consume_until('"') + '"'
+ try:
+ token_len = self._position - start
+ return {'type': 'quoted_identifier', 'value': loads(lexeme),
+ 'start': start, 'end': token_len}
+ except ValueError as e:
+ error_message = str(e).split(':')[0]
+ raise LexerError(lexer_position=start,
+ lexer_value=lexeme,
+ message=error_message)
+
+ def _consume_raw_string_literal(self):
+ start = self._position
+ lexeme = self._consume_until("'").replace("\\'", "'")
+ token_len = self._position - start
+ return {'type': 'literal', 'value': lexeme,
+ 'start': start, 'end': token_len}
+
+ def _match_or_else(self, expected, match_type, else_type):
+ start = self._position
+ current = self._current
+ next_char = self._next()
+ if next_char == expected:
+ self._next()
+ return {'type': match_type, 'value': current + next_char,
+ 'start': start, 'end': start + 1}
+ return {'type': else_type, 'value': current,
+ 'start': start, 'end': start}
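The lexer restored above is the entry point for the parser that follows: tokenize() yields token dicts with 'type', 'value', 'start' and 'end' keys and finishes with a zero-width 'eof' token. A minimal sketch of driving it directly (the expression is an arbitrary example; only the Lexer class from this file is assumed):

    from jmespath import lexer

    for tok in lexer.Lexer().tokenize('foo != `-1`'):
        # Prints, in order: unquoted_identifier 'foo', ne '!=',
        # literal -1, then the trailing zero-width eof token.
        print(tok['type'], repr(tok['value']), tok['start'], tok['end'])
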
diff --git a/contrib/python/jmespath/jmespath/parser.py b/contrib/python/jmespath/jmespath/parser.py
index 7508a555f8..eeac38fa89 100644
--- a/contrib/python/jmespath/jmespath/parser.py
+++ b/contrib/python/jmespath/jmespath/parser.py
@@ -1,527 +1,527 @@
-"""Top down operator precedence parser.
-
-This is an implementation of Vaughan R. Pratt's
-"Top Down Operator Precedence" parser.
-(http://dl.acm.org/citation.cfm?doid=512927.512931).
-
-These are some additional resources that help explain the
-general idea behind a Pratt parser:
-
-* http://effbot.org/zone/simple-top-down-parsing.htm
-* http://javascript.crockford.com/tdop/tdop.html
-
-A few notes on the implementation.
-
-* All the nud/led tokens are on the Parser class itself, and are dispatched
- using getattr(). This keeps all the parsing logic contained to a single
- class.
-* We use two passes through the data. One to create a list of tokens,
-  then one pass through the tokens to create the AST. While the lexer actually
-  yields tokens, we convert it to a list so we can easily implement two tokens
-  of lookahead. A previous implementation used a fixed circular buffer, but it
-  was significantly slower. Also, the average jmespath expression typically
-  does not have a large number of tokens, so this is not an issue. And
- interestingly enough, creating a token list first is actually faster than
- consuming from the token iterator one token at a time.
-
-"""
-import random
-
-from jmespath import lexer
-from jmespath.compat import with_repr_method
-from jmespath import ast
-from jmespath import exceptions
-from jmespath import visitor
-
-
-class Parser(object):
- BINDING_POWER = {
- 'eof': 0,
- 'unquoted_identifier': 0,
- 'quoted_identifier': 0,
- 'literal': 0,
- 'rbracket': 0,
- 'rparen': 0,
- 'comma': 0,
- 'rbrace': 0,
- 'number': 0,
- 'current': 0,
- 'expref': 0,
- 'colon': 0,
- 'pipe': 1,
- 'or': 2,
- 'and': 3,
- 'eq': 5,
- 'gt': 5,
- 'lt': 5,
- 'gte': 5,
- 'lte': 5,
- 'ne': 5,
- 'flatten': 9,
- # Everything above stops a projection.
- 'star': 20,
- 'filter': 21,
- 'dot': 40,
- 'not': 45,
- 'lbrace': 50,
- 'lbracket': 55,
- 'lparen': 60,
- }
-    # Tokens with a binding power below this value
-    # stop a projection.
-    _PROJECTION_STOP = 10
- # The _MAX_SIZE most recent expressions are cached in
- # _CACHE dict.
- _CACHE = {}
- _MAX_SIZE = 128
-
- def __init__(self, lookahead=2):
- self.tokenizer = None
- self._tokens = [None] * lookahead
- self._buffer_size = lookahead
- self._index = 0
-
- def parse(self, expression):
- cached = self._CACHE.get(expression)
- if cached is not None:
- return cached
- parsed_result = self._do_parse(expression)
- self._CACHE[expression] = parsed_result
- if len(self._CACHE) > self._MAX_SIZE:
- self._free_cache_entries()
- return parsed_result
-
- def _do_parse(self, expression):
- try:
- return self._parse(expression)
- except exceptions.LexerError as e:
- e.expression = expression
- raise
- except exceptions.IncompleteExpressionError as e:
- e.set_expression(expression)
- raise
- except exceptions.ParseError as e:
- e.expression = expression
- raise
-
- def _parse(self, expression):
- self.tokenizer = lexer.Lexer().tokenize(expression)
- self._tokens = list(self.tokenizer)
- self._index = 0
- parsed = self._expression(binding_power=0)
- if not self._current_token() == 'eof':
- t = self._lookahead_token(0)
- raise exceptions.ParseError(t['start'], t['value'], t['type'],
- "Unexpected token: %s" % t['value'])
- return ParsedResult(expression, parsed)
-
- def _expression(self, binding_power=0):
- left_token = self._lookahead_token(0)
- self._advance()
- nud_function = getattr(
- self, '_token_nud_%s' % left_token['type'],
- self._error_nud_token)
- left = nud_function(left_token)
- current_token = self._current_token()
- while binding_power < self.BINDING_POWER[current_token]:
- led = getattr(self, '_token_led_%s' % current_token, None)
- if led is None:
- error_token = self._lookahead_token(0)
- self._error_led_token(error_token)
- else:
- self._advance()
- left = led(left)
- current_token = self._current_token()
- return left
-
- def _token_nud_literal(self, token):
- return ast.literal(token['value'])
-
- def _token_nud_unquoted_identifier(self, token):
- return ast.field(token['value'])
-
- def _token_nud_quoted_identifier(self, token):
- field = ast.field(token['value'])
- # You can't have a quoted identifier as a function
- # name.
- if self._current_token() == 'lparen':
- t = self._lookahead_token(0)
- raise exceptions.ParseError(
- 0, t['value'], t['type'],
- 'Quoted identifier not allowed for function names.')
- return field
-
- def _token_nud_star(self, token):
- left = ast.identity()
- if self._current_token() == 'rbracket':
- right = ast.identity()
- else:
- right = self._parse_projection_rhs(self.BINDING_POWER['star'])
- return ast.value_projection(left, right)
-
- def _token_nud_filter(self, token):
- return self._token_led_filter(ast.identity())
-
- def _token_nud_lbrace(self, token):
- return self._parse_multi_select_hash()
-
- def _token_nud_lparen(self, token):
- expression = self._expression()
- self._match('rparen')
- return expression
-
- def _token_nud_flatten(self, token):
- left = ast.flatten(ast.identity())
- right = self._parse_projection_rhs(
- self.BINDING_POWER['flatten'])
- return ast.projection(left, right)
-
- def _token_nud_not(self, token):
- expr = self._expression(self.BINDING_POWER['not'])
- return ast.not_expression(expr)
-
- def _token_nud_lbracket(self, token):
- if self._current_token() in ['number', 'colon']:
- right = self._parse_index_expression()
- # We could optimize this and remove the identity() node.
- # We don't really need an index_expression node, we can
-            # just emit an index node here if we're not dealing
- # with a slice.
- return self._project_if_slice(ast.identity(), right)
- elif self._current_token() == 'star' and \
- self._lookahead(1) == 'rbracket':
- self._advance()
- self._advance()
- right = self._parse_projection_rhs(self.BINDING_POWER['star'])
- return ast.projection(ast.identity(), right)
- else:
- return self._parse_multi_select_list()
-
- def _parse_index_expression(self):
- # We're here:
- # [<current>
- # ^
- # | current token
- if (self._lookahead(0) == 'colon' or
- self._lookahead(1) == 'colon'):
- return self._parse_slice_expression()
- else:
- # Parse the syntax [number]
- node = ast.index(self._lookahead_token(0)['value'])
- self._advance()
- self._match('rbracket')
- return node
-
- def _parse_slice_expression(self):
- # [start:end:step]
- # Where start, end, and step are optional.
- # The last colon is optional as well.
- parts = [None, None, None]
- index = 0
- current_token = self._current_token()
- while not current_token == 'rbracket' and index < 3:
- if current_token == 'colon':
- index += 1
- if index == 3:
- self._raise_parse_error_for_token(
- self._lookahead_token(0), 'syntax error')
- self._advance()
- elif current_token == 'number':
- parts[index] = self._lookahead_token(0)['value']
- self._advance()
- else:
- self._raise_parse_error_for_token(
- self._lookahead_token(0), 'syntax error')
- current_token = self._current_token()
- self._match('rbracket')
- return ast.slice(*parts)
-
- def _token_nud_current(self, token):
- return ast.current_node()
-
- def _token_nud_expref(self, token):
- expression = self._expression(self.BINDING_POWER['expref'])
- return ast.expref(expression)
-
- def _token_led_dot(self, left):
- if not self._current_token() == 'star':
- right = self._parse_dot_rhs(self.BINDING_POWER['dot'])
- if left['type'] == 'subexpression':
- left['children'].append(right)
- return left
- else:
- return ast.subexpression([left, right])
- else:
- # We're creating a projection.
- self._advance()
- right = self._parse_projection_rhs(
- self.BINDING_POWER['dot'])
- return ast.value_projection(left, right)
-
- def _token_led_pipe(self, left):
- right = self._expression(self.BINDING_POWER['pipe'])
- return ast.pipe(left, right)
-
- def _token_led_or(self, left):
- right = self._expression(self.BINDING_POWER['or'])
- return ast.or_expression(left, right)
-
- def _token_led_and(self, left):
- right = self._expression(self.BINDING_POWER['and'])
- return ast.and_expression(left, right)
-
- def _token_led_lparen(self, left):
- if left['type'] != 'field':
- # 0 - first func arg or closing paren.
- # -1 - '(' token
- # -2 - invalid function "name".
- prev_t = self._lookahead_token(-2)
- raise exceptions.ParseError(
- prev_t['start'], prev_t['value'], prev_t['type'],
- "Invalid function name '%s'" % prev_t['value'])
- name = left['value']
- args = []
- while not self._current_token() == 'rparen':
- expression = self._expression()
- if self._current_token() == 'comma':
- self._match('comma')
- args.append(expression)
- self._match('rparen')
- function_node = ast.function_expression(name, args)
- return function_node
-
- def _token_led_filter(self, left):
- # Filters are projections.
- condition = self._expression(0)
- self._match('rbracket')
- if self._current_token() == 'flatten':
- right = ast.identity()
- else:
- right = self._parse_projection_rhs(self.BINDING_POWER['filter'])
- return ast.filter_projection(left, right, condition)
-
- def _token_led_eq(self, left):
- return self._parse_comparator(left, 'eq')
-
- def _token_led_ne(self, left):
- return self._parse_comparator(left, 'ne')
-
- def _token_led_gt(self, left):
- return self._parse_comparator(left, 'gt')
-
- def _token_led_gte(self, left):
- return self._parse_comparator(left, 'gte')
-
- def _token_led_lt(self, left):
- return self._parse_comparator(left, 'lt')
-
- def _token_led_lte(self, left):
- return self._parse_comparator(left, 'lte')
-
- def _token_led_flatten(self, left):
- left = ast.flatten(left)
- right = self._parse_projection_rhs(
- self.BINDING_POWER['flatten'])
- return ast.projection(left, right)
-
- def _token_led_lbracket(self, left):
- token = self._lookahead_token(0)
- if token['type'] in ['number', 'colon']:
- right = self._parse_index_expression()
- if left['type'] == 'index_expression':
- # Optimization: if the left node is an index expr,
- # we can avoid creating another node and instead just add
- # the right node as a child of the left.
- left['children'].append(right)
- return left
- else:
- return self._project_if_slice(left, right)
- else:
- # We have a projection
- self._match('star')
- self._match('rbracket')
- right = self._parse_projection_rhs(self.BINDING_POWER['star'])
- return ast.projection(left, right)
-
- def _project_if_slice(self, left, right):
- index_expr = ast.index_expression([left, right])
- if right['type'] == 'slice':
- return ast.projection(
- index_expr,
- self._parse_projection_rhs(self.BINDING_POWER['star']))
- else:
- return index_expr
-
- def _parse_comparator(self, left, comparator):
- right = self._expression(self.BINDING_POWER[comparator])
- return ast.comparator(comparator, left, right)
-
- def _parse_multi_select_list(self):
- expressions = []
- while True:
- expression = self._expression()
- expressions.append(expression)
- if self._current_token() == 'rbracket':
- break
- else:
- self._match('comma')
- self._match('rbracket')
- return ast.multi_select_list(expressions)
-
- def _parse_multi_select_hash(self):
- pairs = []
- while True:
- key_token = self._lookahead_token(0)
- # Before getting the token value, verify it's
- # an identifier.
- self._match_multiple_tokens(
- token_types=['quoted_identifier', 'unquoted_identifier'])
- key_name = key_token['value']
- self._match('colon')
- value = self._expression(0)
- node = ast.key_val_pair(key_name=key_name, node=value)
- pairs.append(node)
- if self._current_token() == 'comma':
- self._match('comma')
- elif self._current_token() == 'rbrace':
- self._match('rbrace')
- break
- return ast.multi_select_dict(nodes=pairs)
-
- def _parse_projection_rhs(self, binding_power):
- # Parse the right hand side of the projection.
- if self.BINDING_POWER[self._current_token()] < self._PROJECTION_STOP:
-            # Tokens with a binding power below _PROJECTION_STOP (10) stop a projection.
- right = ast.identity()
- elif self._current_token() == 'lbracket':
- right = self._expression(binding_power)
- elif self._current_token() == 'filter':
- right = self._expression(binding_power)
- elif self._current_token() == 'dot':
- self._match('dot')
- right = self._parse_dot_rhs(binding_power)
- else:
- self._raise_parse_error_for_token(self._lookahead_token(0),
- 'syntax error')
- return right
-
- def _parse_dot_rhs(self, binding_power):
- # From the grammar:
- # expression '.' ( identifier /
- # multi-select-list /
- # multi-select-hash /
- # function-expression /
- # *
- # In terms of tokens that means that after a '.',
- # you can have:
- lookahead = self._current_token()
- # Common case "foo.bar", so first check for an identifier.
- if lookahead in ['quoted_identifier', 'unquoted_identifier', 'star']:
- return self._expression(binding_power)
- elif lookahead == 'lbracket':
- self._match('lbracket')
- return self._parse_multi_select_list()
- elif lookahead == 'lbrace':
- self._match('lbrace')
- return self._parse_multi_select_hash()
- else:
- t = self._lookahead_token(0)
- allowed = ['quoted_identifier', 'unquoted_identifier',
- 'lbracket', 'lbrace']
- msg = (
- "Expecting: %s, got: %s" % (allowed, t['type'])
- )
- self._raise_parse_error_for_token(t, msg)
-
- def _error_nud_token(self, token):
- if token['type'] == 'eof':
- raise exceptions.IncompleteExpressionError(
- token['start'], token['value'], token['type'])
- self._raise_parse_error_for_token(token, 'invalid token')
-
- def _error_led_token(self, token):
- self._raise_parse_error_for_token(token, 'invalid token')
-
-    def _match(self, token_type=None):
-        # If the current token matches the expected type,
-        if self._current_token() == token_type:
-            # consume it; otherwise raise the appropriate error below.
-            self._advance()
- else:
- self._raise_parse_error_maybe_eof(
- token_type, self._lookahead_token(0))
-
- def _match_multiple_tokens(self, token_types):
- if self._current_token() not in token_types:
- self._raise_parse_error_maybe_eof(
- token_types, self._lookahead_token(0))
- self._advance()
-
- def _advance(self):
- self._index += 1
-
- def _current_token(self):
- return self._tokens[self._index]['type']
-
- def _lookahead(self, number):
- return self._tokens[self._index + number]['type']
-
- def _lookahead_token(self, number):
- return self._tokens[self._index + number]
-
- def _raise_parse_error_for_token(self, token, reason):
- lex_position = token['start']
- actual_value = token['value']
- actual_type = token['type']
- raise exceptions.ParseError(lex_position, actual_value,
- actual_type, reason)
-
- def _raise_parse_error_maybe_eof(self, expected_type, token):
- lex_position = token['start']
- actual_value = token['value']
- actual_type = token['type']
- if actual_type == 'eof':
- raise exceptions.IncompleteExpressionError(
- lex_position, actual_value, actual_type)
- message = 'Expecting: %s, got: %s' % (expected_type,
- actual_type)
- raise exceptions.ParseError(
- lex_position, actual_value, actual_type, message)
-
- def _free_cache_entries(self):
-        for key in random.sample(list(self._CACHE.keys()), int(self._MAX_SIZE / 2)):
+"""Top down operator precedence parser.
+
+This is an implementation of Vaughan R. Pratt's
+"Top Down Operator Precedence" parser.
+(http://dl.acm.org/citation.cfm?doid=512927.512931).
+
+These are some additional resources that help explain the
+general idea behind a Pratt parser:
+
+* http://effbot.org/zone/simple-top-down-parsing.htm
+* http://javascript.crockford.com/tdop/tdop.html
+
+A few notes on the implementation.
+
+* All the nud/led tokens are on the Parser class itself, and are dispatched
+ using getattr(). This keeps all the parsing logic contained to a single
+ class.
+* We use two passes through the data. One to create a list of tokens,
+  then one pass through the tokens to create the AST. While the lexer actually
+  yields tokens, we convert it to a list so we can easily implement two tokens
+  of lookahead. A previous implementation used a fixed circular buffer, but it
+  was significantly slower. Also, the average jmespath expression typically
+  does not have a large number of tokens, so this is not an issue. And
+ interestingly enough, creating a token list first is actually faster than
+ consuming from the token iterator one token at a time.
+
+"""
+import random
+
+from jmespath import lexer
+from jmespath.compat import with_repr_method
+from jmespath import ast
+from jmespath import exceptions
+from jmespath import visitor
+
+
+class Parser(object):
+ BINDING_POWER = {
+ 'eof': 0,
+ 'unquoted_identifier': 0,
+ 'quoted_identifier': 0,
+ 'literal': 0,
+ 'rbracket': 0,
+ 'rparen': 0,
+ 'comma': 0,
+ 'rbrace': 0,
+ 'number': 0,
+ 'current': 0,
+ 'expref': 0,
+ 'colon': 0,
+ 'pipe': 1,
+ 'or': 2,
+ 'and': 3,
+ 'eq': 5,
+ 'gt': 5,
+ 'lt': 5,
+ 'gte': 5,
+ 'lte': 5,
+ 'ne': 5,
+ 'flatten': 9,
+ # Everything above stops a projection.
+ 'star': 20,
+ 'filter': 21,
+ 'dot': 40,
+ 'not': 45,
+ 'lbrace': 50,
+ 'lbracket': 55,
+ 'lparen': 60,
+ }
+    # Tokens with a binding power below this value
+    # stop a projection.
+    _PROJECTION_STOP = 10
+ # The _MAX_SIZE most recent expressions are cached in
+ # _CACHE dict.
+ _CACHE = {}
+ _MAX_SIZE = 128
+
+ def __init__(self, lookahead=2):
+ self.tokenizer = None
+ self._tokens = [None] * lookahead
+ self._buffer_size = lookahead
+ self._index = 0
+
+ def parse(self, expression):
+ cached = self._CACHE.get(expression)
+ if cached is not None:
+ return cached
+ parsed_result = self._do_parse(expression)
+ self._CACHE[expression] = parsed_result
+ if len(self._CACHE) > self._MAX_SIZE:
+ self._free_cache_entries()
+ return parsed_result
+
+ def _do_parse(self, expression):
+ try:
+ return self._parse(expression)
+ except exceptions.LexerError as e:
+ e.expression = expression
+ raise
+ except exceptions.IncompleteExpressionError as e:
+ e.set_expression(expression)
+ raise
+ except exceptions.ParseError as e:
+ e.expression = expression
+ raise
+
+ def _parse(self, expression):
+ self.tokenizer = lexer.Lexer().tokenize(expression)
+ self._tokens = list(self.tokenizer)
+ self._index = 0
+ parsed = self._expression(binding_power=0)
+ if not self._current_token() == 'eof':
+ t = self._lookahead_token(0)
+ raise exceptions.ParseError(t['start'], t['value'], t['type'],
+ "Unexpected token: %s" % t['value'])
+ return ParsedResult(expression, parsed)
+
+ def _expression(self, binding_power=0):
+ left_token = self._lookahead_token(0)
+ self._advance()
+ nud_function = getattr(
+ self, '_token_nud_%s' % left_token['type'],
+ self._error_nud_token)
+ left = nud_function(left_token)
+ current_token = self._current_token()
+ while binding_power < self.BINDING_POWER[current_token]:
+ led = getattr(self, '_token_led_%s' % current_token, None)
+ if led is None:
+ error_token = self._lookahead_token(0)
+ self._error_led_token(error_token)
+ else:
+ self._advance()
+ left = led(left)
+ current_token = self._current_token()
+ return left
+
+ def _token_nud_literal(self, token):
+ return ast.literal(token['value'])
+
+ def _token_nud_unquoted_identifier(self, token):
+ return ast.field(token['value'])
+
+ def _token_nud_quoted_identifier(self, token):
+ field = ast.field(token['value'])
+ # You can't have a quoted identifier as a function
+ # name.
+ if self._current_token() == 'lparen':
+ t = self._lookahead_token(0)
+ raise exceptions.ParseError(
+ 0, t['value'], t['type'],
+ 'Quoted identifier not allowed for function names.')
+ return field
+
+ def _token_nud_star(self, token):
+ left = ast.identity()
+ if self._current_token() == 'rbracket':
+ right = ast.identity()
+ else:
+ right = self._parse_projection_rhs(self.BINDING_POWER['star'])
+ return ast.value_projection(left, right)
+
+ def _token_nud_filter(self, token):
+ return self._token_led_filter(ast.identity())
+
+ def _token_nud_lbrace(self, token):
+ return self._parse_multi_select_hash()
+
+ def _token_nud_lparen(self, token):
+ expression = self._expression()
+ self._match('rparen')
+ return expression
+
+ def _token_nud_flatten(self, token):
+ left = ast.flatten(ast.identity())
+ right = self._parse_projection_rhs(
+ self.BINDING_POWER['flatten'])
+ return ast.projection(left, right)
+
+ def _token_nud_not(self, token):
+ expr = self._expression(self.BINDING_POWER['not'])
+ return ast.not_expression(expr)
+
+ def _token_nud_lbracket(self, token):
+ if self._current_token() in ['number', 'colon']:
+ right = self._parse_index_expression()
+ # We could optimize this and remove the identity() node.
+ # We don't really need an index_expression node, we can
+            # just emit an index node here if we're not dealing
+ # with a slice.
+ return self._project_if_slice(ast.identity(), right)
+ elif self._current_token() == 'star' and \
+ self._lookahead(1) == 'rbracket':
+ self._advance()
+ self._advance()
+ right = self._parse_projection_rhs(self.BINDING_POWER['star'])
+ return ast.projection(ast.identity(), right)
+ else:
+ return self._parse_multi_select_list()
+
+ def _parse_index_expression(self):
+ # We're here:
+ # [<current>
+ # ^
+ # | current token
+ if (self._lookahead(0) == 'colon' or
+ self._lookahead(1) == 'colon'):
+ return self._parse_slice_expression()
+ else:
+ # Parse the syntax [number]
+ node = ast.index(self._lookahead_token(0)['value'])
+ self._advance()
+ self._match('rbracket')
+ return node
+
+ def _parse_slice_expression(self):
+ # [start:end:step]
+ # Where start, end, and step are optional.
+ # The last colon is optional as well.
+ parts = [None, None, None]
+ index = 0
+ current_token = self._current_token()
+ while not current_token == 'rbracket' and index < 3:
+ if current_token == 'colon':
+ index += 1
+ if index == 3:
+ self._raise_parse_error_for_token(
+ self._lookahead_token(0), 'syntax error')
+ self._advance()
+ elif current_token == 'number':
+ parts[index] = self._lookahead_token(0)['value']
+ self._advance()
+ else:
+ self._raise_parse_error_for_token(
+ self._lookahead_token(0), 'syntax error')
+ current_token = self._current_token()
+ self._match('rbracket')
+ return ast.slice(*parts)
+
+ def _token_nud_current(self, token):
+ return ast.current_node()
+
+ def _token_nud_expref(self, token):
+ expression = self._expression(self.BINDING_POWER['expref'])
+ return ast.expref(expression)
+
+ def _token_led_dot(self, left):
+ if not self._current_token() == 'star':
+ right = self._parse_dot_rhs(self.BINDING_POWER['dot'])
+ if left['type'] == 'subexpression':
+ left['children'].append(right)
+ return left
+ else:
+ return ast.subexpression([left, right])
+ else:
+ # We're creating a projection.
+ self._advance()
+ right = self._parse_projection_rhs(
+ self.BINDING_POWER['dot'])
+ return ast.value_projection(left, right)
+
+ def _token_led_pipe(self, left):
+ right = self._expression(self.BINDING_POWER['pipe'])
+ return ast.pipe(left, right)
+
+ def _token_led_or(self, left):
+ right = self._expression(self.BINDING_POWER['or'])
+ return ast.or_expression(left, right)
+
+ def _token_led_and(self, left):
+ right = self._expression(self.BINDING_POWER['and'])
+ return ast.and_expression(left, right)
+
+ def _token_led_lparen(self, left):
+ if left['type'] != 'field':
+ # 0 - first func arg or closing paren.
+ # -1 - '(' token
+ # -2 - invalid function "name".
+ prev_t = self._lookahead_token(-2)
+ raise exceptions.ParseError(
+ prev_t['start'], prev_t['value'], prev_t['type'],
+ "Invalid function name '%s'" % prev_t['value'])
+ name = left['value']
+ args = []
+ while not self._current_token() == 'rparen':
+ expression = self._expression()
+ if self._current_token() == 'comma':
+ self._match('comma')
+ args.append(expression)
+ self._match('rparen')
+ function_node = ast.function_expression(name, args)
+ return function_node
+
+ def _token_led_filter(self, left):
+ # Filters are projections.
+ condition = self._expression(0)
+ self._match('rbracket')
+ if self._current_token() == 'flatten':
+ right = ast.identity()
+ else:
+ right = self._parse_projection_rhs(self.BINDING_POWER['filter'])
+ return ast.filter_projection(left, right, condition)
+
+ def _token_led_eq(self, left):
+ return self._parse_comparator(left, 'eq')
+
+ def _token_led_ne(self, left):
+ return self._parse_comparator(left, 'ne')
+
+ def _token_led_gt(self, left):
+ return self._parse_comparator(left, 'gt')
+
+ def _token_led_gte(self, left):
+ return self._parse_comparator(left, 'gte')
+
+ def _token_led_lt(self, left):
+ return self._parse_comparator(left, 'lt')
+
+ def _token_led_lte(self, left):
+ return self._parse_comparator(left, 'lte')
+
+ def _token_led_flatten(self, left):
+ left = ast.flatten(left)
+ right = self._parse_projection_rhs(
+ self.BINDING_POWER['flatten'])
+ return ast.projection(left, right)
+
+ def _token_led_lbracket(self, left):
+ token = self._lookahead_token(0)
+ if token['type'] in ['number', 'colon']:
+ right = self._parse_index_expression()
+ if left['type'] == 'index_expression':
+ # Optimization: if the left node is an index expr,
+ # we can avoid creating another node and instead just add
+ # the right node as a child of the left.
+ left['children'].append(right)
+ return left
+ else:
+ return self._project_if_slice(left, right)
+ else:
+ # We have a projection
+ self._match('star')
+ self._match('rbracket')
+ right = self._parse_projection_rhs(self.BINDING_POWER['star'])
+ return ast.projection(left, right)
+
+ def _project_if_slice(self, left, right):
+ index_expr = ast.index_expression([left, right])
+ if right['type'] == 'slice':
+ return ast.projection(
+ index_expr,
+ self._parse_projection_rhs(self.BINDING_POWER['star']))
+ else:
+ return index_expr
+
+ def _parse_comparator(self, left, comparator):
+ right = self._expression(self.BINDING_POWER[comparator])
+ return ast.comparator(comparator, left, right)
+
+ def _parse_multi_select_list(self):
+ expressions = []
+ while True:
+ expression = self._expression()
+ expressions.append(expression)
+ if self._current_token() == 'rbracket':
+ break
+ else:
+ self._match('comma')
+ self._match('rbracket')
+ return ast.multi_select_list(expressions)
+
+ def _parse_multi_select_hash(self):
+ pairs = []
+ while True:
+ key_token = self._lookahead_token(0)
+ # Before getting the token value, verify it's
+ # an identifier.
+ self._match_multiple_tokens(
+ token_types=['quoted_identifier', 'unquoted_identifier'])
+ key_name = key_token['value']
+ self._match('colon')
+ value = self._expression(0)
+ node = ast.key_val_pair(key_name=key_name, node=value)
+ pairs.append(node)
+ if self._current_token() == 'comma':
+ self._match('comma')
+ elif self._current_token() == 'rbrace':
+ self._match('rbrace')
+ break
+ return ast.multi_select_dict(nodes=pairs)
+
+ def _parse_projection_rhs(self, binding_power):
+ # Parse the right hand side of the projection.
+ if self.BINDING_POWER[self._current_token()] < self._PROJECTION_STOP:
+            # Tokens with a binding power below _PROJECTION_STOP (10) stop a projection.
+ right = ast.identity()
+ elif self._current_token() == 'lbracket':
+ right = self._expression(binding_power)
+ elif self._current_token() == 'filter':
+ right = self._expression(binding_power)
+ elif self._current_token() == 'dot':
+ self._match('dot')
+ right = self._parse_dot_rhs(binding_power)
+ else:
+ self._raise_parse_error_for_token(self._lookahead_token(0),
+ 'syntax error')
+ return right
+
+ def _parse_dot_rhs(self, binding_power):
+ # From the grammar:
+ # expression '.' ( identifier /
+ # multi-select-list /
+ # multi-select-hash /
+ # function-expression /
+ # *
+ # In terms of tokens that means that after a '.',
+ # you can have:
+ lookahead = self._current_token()
+ # Common case "foo.bar", so first check for an identifier.
+ if lookahead in ['quoted_identifier', 'unquoted_identifier', 'star']:
+ return self._expression(binding_power)
+ elif lookahead == 'lbracket':
+ self._match('lbracket')
+ return self._parse_multi_select_list()
+ elif lookahead == 'lbrace':
+ self._match('lbrace')
+ return self._parse_multi_select_hash()
+ else:
+ t = self._lookahead_token(0)
+ allowed = ['quoted_identifier', 'unquoted_identifier',
+ 'lbracket', 'lbrace']
+ msg = (
+ "Expecting: %s, got: %s" % (allowed, t['type'])
+ )
+ self._raise_parse_error_for_token(t, msg)
+
+ def _error_nud_token(self, token):
+ if token['type'] == 'eof':
+ raise exceptions.IncompleteExpressionError(
+ token['start'], token['value'], token['type'])
+ self._raise_parse_error_for_token(token, 'invalid token')
+
+ def _error_led_token(self, token):
+ self._raise_parse_error_for_token(token, 'invalid token')
+
+    def _match(self, token_type=None):
+        # If the current token matches the expected type,
+        if self._current_token() == token_type:
+            # consume it; otherwise raise the appropriate error below.
+            self._advance()
+ else:
+ self._raise_parse_error_maybe_eof(
+ token_type, self._lookahead_token(0))
+
+ def _match_multiple_tokens(self, token_types):
+ if self._current_token() not in token_types:
+ self._raise_parse_error_maybe_eof(
+ token_types, self._lookahead_token(0))
+ self._advance()
+
+ def _advance(self):
+ self._index += 1
+
+ def _current_token(self):
+ return self._tokens[self._index]['type']
+
+ def _lookahead(self, number):
+ return self._tokens[self._index + number]['type']
+
+ def _lookahead_token(self, number):
+ return self._tokens[self._index + number]
+
+ def _raise_parse_error_for_token(self, token, reason):
+ lex_position = token['start']
+ actual_value = token['value']
+ actual_type = token['type']
+ raise exceptions.ParseError(lex_position, actual_value,
+ actual_type, reason)
+
+ def _raise_parse_error_maybe_eof(self, expected_type, token):
+ lex_position = token['start']
+ actual_value = token['value']
+ actual_type = token['type']
+ if actual_type == 'eof':
+ raise exceptions.IncompleteExpressionError(
+ lex_position, actual_value, actual_type)
+ message = 'Expecting: %s, got: %s' % (expected_type,
+ actual_type)
+ raise exceptions.ParseError(
+ lex_position, actual_value, actual_type, message)
+
+ def _free_cache_entries(self):
+        for key in random.sample(list(self._CACHE.keys()), int(self._MAX_SIZE / 2)):
self._CACHE.pop(key, None)
-
- @classmethod
- def purge(cls):
- """Clear the expression compilation cache."""
- cls._CACHE.clear()
-
-
-@with_repr_method
-class ParsedResult(object):
- def __init__(self, expression, parsed):
- self.expression = expression
- self.parsed = parsed
-
- def search(self, value, options=None):
- interpreter = visitor.TreeInterpreter(options)
- result = interpreter.visit(self.parsed, value)
- return result
-
- def _render_dot_file(self):
- """Render the parsed AST as a dot file.
-
- Note that this is marked as an internal method because
- the AST is an implementation detail and is subject
- to change. This method can be used to help troubleshoot
- or for development purposes, but is not considered part
- of the public supported API. Use at your own risk.
-
- """
- renderer = visitor.GraphvizVisitor()
- contents = renderer.visit(self.parsed)
- return contents
-
- def __repr__(self):
- return repr(self.parsed)
+
+ @classmethod
+ def purge(cls):
+ """Clear the expression compilation cache."""
+ cls._CACHE.clear()
+
+
+@with_repr_method
+class ParsedResult(object):
+ def __init__(self, expression, parsed):
+ self.expression = expression
+ self.parsed = parsed
+
+ def search(self, value, options=None):
+ interpreter = visitor.TreeInterpreter(options)
+ result = interpreter.visit(self.parsed, value)
+ return result
+
+ def _render_dot_file(self):
+ """Render the parsed AST as a dot file.
+
+ Note that this is marked as an internal method because
+ the AST is an implementation detail and is subject
+ to change. This method can be used to help troubleshoot
+ or for development purposes, but is not considered part
+ of the public supported API. Use at your own risk.
+
+ """
+ renderer = visitor.GraphvizVisitor()
+ contents = renderer.visit(self.parsed)
+ return contents
+
+ def __repr__(self):
+ return repr(self.parsed)
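Taken together, Parser and ParsedResult above form the small public surface of this module: parse() memoizes a ParsedResult per expression in the class-level _CACHE, and search() hands the AST to the TreeInterpreter restored below. A minimal usage sketch, assuming only the classes in this file:

    from jmespath import parser

    p = parser.Parser()
    result = p.parse('foo.bar[0]')          # ParsedResult, now cached
    print(result.search({'foo': {'bar': ['baz']}}))   # -> baz
    # Once the cache grows past _MAX_SIZE (128) entries, half of them
    # are evicted at random by _free_cache_entries().
    parser.Parser.purge()                   # clear the shared cache
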
diff --git a/contrib/python/jmespath/jmespath/visitor.py b/contrib/python/jmespath/jmespath/visitor.py
index 34f4da5973..b3e846b761 100644
--- a/contrib/python/jmespath/jmespath/visitor.py
+++ b/contrib/python/jmespath/jmespath/visitor.py
@@ -1,328 +1,328 @@
-import operator
-
-from jmespath import functions
-from jmespath.compat import string_type
-from numbers import Number
-
-
-def _equals(x, y):
- if _is_special_integer_case(x, y):
- return False
- else:
- return x == y
-
-
-def _is_special_integer_case(x, y):
-    # We need to special-case comparing 0 or 1 to
-    # True/False. Normally, comparing any integer
-    # other than 0/1 to True/False will always
-    # return False. However, 0/1 behave like this:
- # >>> 0 == True
- # False
- # >>> 0 == False
- # True
- # >>> 1 == True
- # True
- # >>> 1 == False
- # False
- #
- # Also need to consider that:
- # >>> 0 in [True, False]
- # True
+import operator
+
+from jmespath import functions
+from jmespath.compat import string_type
+from numbers import Number
+
+
+def _equals(x, y):
+ if _is_special_integer_case(x, y):
+ return False
+ else:
+ return x == y
+
+
+def _is_special_integer_case(x, y):
+    # We need to special-case comparing 0 or 1 to
+    # True/False. Normally, comparing any integer
+    # other than 0/1 to True/False will always
+    # return False. However, 0/1 behave like this:
+ # >>> 0 == True
+ # False
+ # >>> 0 == False
+ # True
+ # >>> 1 == True
+ # True
+ # >>> 1 == False
+ # False
+ #
+ # Also need to consider that:
+ # >>> 0 in [True, False]
+ # True
if type(x) is int and (x == 0 or x == 1):
- return y is True or y is False
+ return y is True or y is False
elif type(y) is int and (y == 0 or y == 1):
- return x is True or x is False
-
-
-def _is_comparable(x):
- # The spec doesn't officially support string types yet,
- # but enough people are relying on this behavior that
- # it's been added back. This should eventually become
- # part of the official spec.
- return _is_actual_number(x) or isinstance(x, string_type)
-
-
-def _is_actual_number(x):
- # We need to handle python's quirkiness with booleans,
- # specifically:
- #
- # >>> isinstance(False, int)
- # True
- # >>> isinstance(True, int)
- # True
- if x is True or x is False:
- return False
- return isinstance(x, Number)
-
-
-class Options(object):
- """Options to control how a JMESPath function is evaluated."""
- def __init__(self, dict_cls=None, custom_functions=None):
- #: The class to use when creating a dict. The interpreter
- # may create dictionaries during the evaluation of a JMESPath
- # expression. For example, a multi-select hash will
- # create a dictionary. By default we use a dict() type.
- # You can set this value to change what dict type is used.
- # The most common reason you would change this is if you
- # want to set a collections.OrderedDict so that you can
- # have predictable key ordering.
- self.dict_cls = dict_cls
- self.custom_functions = custom_functions
-
-
-class _Expression(object):
- def __init__(self, expression, interpreter):
- self.expression = expression
- self.interpreter = interpreter
-
- def visit(self, node, *args, **kwargs):
- return self.interpreter.visit(node, *args, **kwargs)
-
-
-class Visitor(object):
- def __init__(self):
- self._method_cache = {}
-
- def visit(self, node, *args, **kwargs):
- node_type = node['type']
- method = self._method_cache.get(node_type)
- if method is None:
- method = getattr(
- self, 'visit_%s' % node['type'], self.default_visit)
- self._method_cache[node_type] = method
- return method(node, *args, **kwargs)
-
- def default_visit(self, node, *args, **kwargs):
- raise NotImplementedError("default_visit")
-
-
-class TreeInterpreter(Visitor):
- COMPARATOR_FUNC = {
- 'eq': _equals,
- 'ne': lambda x, y: not _equals(x, y),
- 'lt': operator.lt,
- 'gt': operator.gt,
- 'lte': operator.le,
- 'gte': operator.ge
- }
- _EQUALITY_OPS = ['eq', 'ne']
- MAP_TYPE = dict
-
- def __init__(self, options=None):
- super(TreeInterpreter, self).__init__()
- self._dict_cls = self.MAP_TYPE
- if options is None:
- options = Options()
- self._options = options
- if options.dict_cls is not None:
- self._dict_cls = self._options.dict_cls
- if options.custom_functions is not None:
- self._functions = self._options.custom_functions
- else:
- self._functions = functions.Functions()
-
- def default_visit(self, node, *args, **kwargs):
- raise NotImplementedError(node['type'])
-
- def visit_subexpression(self, node, value):
- result = value
- for node in node['children']:
- result = self.visit(node, result)
- return result
-
- def visit_field(self, node, value):
- try:
- return value.get(node['value'])
- except AttributeError:
- return None
-
- def visit_comparator(self, node, value):
- # Common case: comparator is == or !=
- comparator_func = self.COMPARATOR_FUNC[node['value']]
- if node['value'] in self._EQUALITY_OPS:
- return comparator_func(
- self.visit(node['children'][0], value),
- self.visit(node['children'][1], value)
- )
- else:
-            # Ordering operators are only valid for numbers and,
-            # per _is_comparable(), strings. Any other operand type
-            # makes the comparison yield None.
- left = self.visit(node['children'][0], value)
- right = self.visit(node['children'][1], value)
- num_types = (int, float)
- if not (_is_comparable(left) and
- _is_comparable(right)):
- return None
- return comparator_func(left, right)
-
- def visit_current(self, node, value):
- return value
-
- def visit_expref(self, node, value):
- return _Expression(node['children'][0], self)
-
- def visit_function_expression(self, node, value):
- resolved_args = []
- for child in node['children']:
- current = self.visit(child, value)
- resolved_args.append(current)
- return self._functions.call_function(node['value'], resolved_args)
-
- def visit_filter_projection(self, node, value):
- base = self.visit(node['children'][0], value)
- if not isinstance(base, list):
- return None
- comparator_node = node['children'][2]
- collected = []
- for element in base:
- if self._is_true(self.visit(comparator_node, element)):
- current = self.visit(node['children'][1], element)
- if current is not None:
- collected.append(current)
- return collected
-
- def visit_flatten(self, node, value):
- base = self.visit(node['children'][0], value)
- if not isinstance(base, list):
- # Can't flatten the object if it's not a list.
- return None
- merged_list = []
- for element in base:
- if isinstance(element, list):
- merged_list.extend(element)
- else:
- merged_list.append(element)
- return merged_list
-
- def visit_identity(self, node, value):
- return value
-
- def visit_index(self, node, value):
- # Even though we can index strings, we don't
- # want to support that.
- if not isinstance(value, list):
- return None
- try:
- return value[node['value']]
- except IndexError:
- return None
-
- def visit_index_expression(self, node, value):
- result = value
- for node in node['children']:
- result = self.visit(node, result)
- return result
-
- def visit_slice(self, node, value):
- if not isinstance(value, list):
- return None
- s = slice(*node['children'])
- return value[s]
-
- def visit_key_val_pair(self, node, value):
- return self.visit(node['children'][0], value)
-
- def visit_literal(self, node, value):
- return node['value']
-
- def visit_multi_select_dict(self, node, value):
- if value is None:
- return None
- collected = self._dict_cls()
- for child in node['children']:
- collected[child['value']] = self.visit(child, value)
- return collected
-
- def visit_multi_select_list(self, node, value):
- if value is None:
- return None
- collected = []
- for child in node['children']:
- collected.append(self.visit(child, value))
- return collected
-
- def visit_or_expression(self, node, value):
- matched = self.visit(node['children'][0], value)
- if self._is_false(matched):
- matched = self.visit(node['children'][1], value)
- return matched
-
- def visit_and_expression(self, node, value):
- matched = self.visit(node['children'][0], value)
- if self._is_false(matched):
- return matched
- return self.visit(node['children'][1], value)
-
- def visit_not_expression(self, node, value):
- original_result = self.visit(node['children'][0], value)
+ return x is True or x is False
+
+
+def _is_comparable(x):
+ # The spec doesn't officially support string types yet,
+ # but enough people are relying on this behavior that
+ # it's been added back. This should eventually become
+ # part of the official spec.
+ return _is_actual_number(x) or isinstance(x, string_type)
+
+
+def _is_actual_number(x):
+ # We need to handle python's quirkiness with booleans,
+ # specifically:
+ #
+ # >>> isinstance(False, int)
+ # True
+ # >>> isinstance(True, int)
+ # True
+ if x is True or x is False:
+ return False
+ return isinstance(x, Number)
+
+
+class Options(object):
+ """Options to control how a JMESPath function is evaluated."""
+ def __init__(self, dict_cls=None, custom_functions=None):
+ #: The class to use when creating a dict. The interpreter
+ # may create dictionaries during the evaluation of a JMESPath
+ # expression. For example, a multi-select hash will
+ # create a dictionary. By default we use a dict() type.
+ # You can set this value to change what dict type is used.
+ # The most common reason you would change this is if you
+ # want to set a collections.OrderedDict so that you can
+ # have predictable key ordering.
+ self.dict_cls = dict_cls
+ self.custom_functions = custom_functions
+
+
+class _Expression(object):
+ def __init__(self, expression, interpreter):
+ self.expression = expression
+ self.interpreter = interpreter
+
+ def visit(self, node, *args, **kwargs):
+ return self.interpreter.visit(node, *args, **kwargs)
+
+
+class Visitor(object):
+ def __init__(self):
+ self._method_cache = {}
+
+ def visit(self, node, *args, **kwargs):
+ node_type = node['type']
+ method = self._method_cache.get(node_type)
+ if method is None:
+ method = getattr(
+ self, 'visit_%s' % node['type'], self.default_visit)
+ self._method_cache[node_type] = method
+ return method(node, *args, **kwargs)
+
+ def default_visit(self, node, *args, **kwargs):
+ raise NotImplementedError("default_visit")
+
+
+class TreeInterpreter(Visitor):
+ COMPARATOR_FUNC = {
+ 'eq': _equals,
+ 'ne': lambda x, y: not _equals(x, y),
+ 'lt': operator.lt,
+ 'gt': operator.gt,
+ 'lte': operator.le,
+ 'gte': operator.ge
+ }
+ _EQUALITY_OPS = ['eq', 'ne']
+ MAP_TYPE = dict
+
+ def __init__(self, options=None):
+ super(TreeInterpreter, self).__init__()
+ self._dict_cls = self.MAP_TYPE
+ if options is None:
+ options = Options()
+ self._options = options
+ if options.dict_cls is not None:
+ self._dict_cls = self._options.dict_cls
+ if options.custom_functions is not None:
+ self._functions = self._options.custom_functions
+ else:
+ self._functions = functions.Functions()
+
+ def default_visit(self, node, *args, **kwargs):
+ raise NotImplementedError(node['type'])
+
+ def visit_subexpression(self, node, value):
+ result = value
+ for node in node['children']:
+ result = self.visit(node, result)
+ return result
+
+ def visit_field(self, node, value):
+ try:
+ return value.get(node['value'])
+ except AttributeError:
+ return None
+
+ def visit_comparator(self, node, value):
+ # Common case: comparator is == or !=
+ comparator_func = self.COMPARATOR_FUNC[node['value']]
+ if node['value'] in self._EQUALITY_OPS:
+ return comparator_func(
+ self.visit(node['children'][0], value),
+ self.visit(node['children'][1], value)
+ )
+ else:
+            # Ordering operators are only valid for numbers and,
+            # per _is_comparable(), strings. Any other operand type
+            # makes the comparison yield None.
+ left = self.visit(node['children'][0], value)
+ right = self.visit(node['children'][1], value)
+ num_types = (int, float)
+ if not (_is_comparable(left) and
+ _is_comparable(right)):
+ return None
+ return comparator_func(left, right)
+
+ def visit_current(self, node, value):
+ return value
+
+ def visit_expref(self, node, value):
+ return _Expression(node['children'][0], self)
+
+ def visit_function_expression(self, node, value):
+ resolved_args = []
+ for child in node['children']:
+ current = self.visit(child, value)
+ resolved_args.append(current)
+ return self._functions.call_function(node['value'], resolved_args)
+
+ def visit_filter_projection(self, node, value):
+ base = self.visit(node['children'][0], value)
+ if not isinstance(base, list):
+ return None
+ comparator_node = node['children'][2]
+ collected = []
+ for element in base:
+ if self._is_true(self.visit(comparator_node, element)):
+ current = self.visit(node['children'][1], element)
+ if current is not None:
+ collected.append(current)
+ return collected
+
+ def visit_flatten(self, node, value):
+ base = self.visit(node['children'][0], value)
+ if not isinstance(base, list):
+ # Can't flatten the object if it's not a list.
+ return None
+ merged_list = []
+ for element in base:
+ if isinstance(element, list):
+ merged_list.extend(element)
+ else:
+ merged_list.append(element)
+ return merged_list
+
+ def visit_identity(self, node, value):
+ return value
+
+ def visit_index(self, node, value):
+ # Even though we can index strings, we don't
+ # want to support that.
+ if not isinstance(value, list):
+ return None
+ try:
+ return value[node['value']]
+ except IndexError:
+ return None
+
+ def visit_index_expression(self, node, value):
+ result = value
+ for node in node['children']:
+ result = self.visit(node, result)
+ return result
+
+ def visit_slice(self, node, value):
+ if not isinstance(value, list):
+ return None
+ s = slice(*node['children'])
+ return value[s]
+
+ def visit_key_val_pair(self, node, value):
+ return self.visit(node['children'][0], value)
+
+ def visit_literal(self, node, value):
+ return node['value']
+
+ def visit_multi_select_dict(self, node, value):
+ if value is None:
+ return None
+ collected = self._dict_cls()
+ for child in node['children']:
+ collected[child['value']] = self.visit(child, value)
+ return collected
+
+ def visit_multi_select_list(self, node, value):
+ if value is None:
+ return None
+ collected = []
+ for child in node['children']:
+ collected.append(self.visit(child, value))
+ return collected
+
+ def visit_or_expression(self, node, value):
+ matched = self.visit(node['children'][0], value)
+ if self._is_false(matched):
+ matched = self.visit(node['children'][1], value)
+ return matched
+
+ def visit_and_expression(self, node, value):
+ matched = self.visit(node['children'][0], value)
+ if self._is_false(matched):
+ return matched
+ return self.visit(node['children'][1], value)
+
+ def visit_not_expression(self, node, value):
+ original_result = self.visit(node['children'][0], value)
if type(original_result) is int and original_result == 0:
- # Special case for 0, !0 should be false, not true.
- # 0 is not a special cased integer in jmespath.
- return False
- return not original_result
-
- def visit_pipe(self, node, value):
- result = value
- for node in node['children']:
- result = self.visit(node, result)
- return result
-
- def visit_projection(self, node, value):
- base = self.visit(node['children'][0], value)
- if not isinstance(base, list):
- return None
- collected = []
- for element in base:
- current = self.visit(node['children'][1], element)
- if current is not None:
- collected.append(current)
- return collected
-
- def visit_value_projection(self, node, value):
- base = self.visit(node['children'][0], value)
- try:
- base = base.values()
- except AttributeError:
- return None
- collected = []
- for element in base:
- current = self.visit(node['children'][1], element)
- if current is not None:
- collected.append(current)
- return collected
-
- def _is_false(self, value):
- # This looks weird, but we're explicitly using equality checks
- # because the truth/false values are different between
- # python and jmespath.
- return (value == '' or value == [] or value == {} or value is None or
- value is False)
-
- def _is_true(self, value):
- return not self._is_false(value)
-
-
-class GraphvizVisitor(Visitor):
- def __init__(self):
- super(GraphvizVisitor, self).__init__()
- self._lines = []
- self._count = 1
-
- def visit(self, node, *args, **kwargs):
- self._lines.append('digraph AST {')
- current = '%s%s' % (node['type'], self._count)
- self._count += 1
- self._visit(node, current)
- self._lines.append('}')
- return '\n'.join(self._lines)
-
- def _visit(self, node, current):
- self._lines.append('%s [label="%s(%s)"]' % (
- current, node['type'], node.get('value', '')))
- for child in node.get('children', []):
- child_name = '%s%s' % (child['type'], self._count)
- self._count += 1
- self._lines.append(' %s -> %s' % (current, child_name))
- self._visit(child, child_name)
+ # Special case for 0, !0 should be false, not true.
+ # 0 is not a special cased integer in jmespath.
+ return False
+ return not original_result
+
+ def visit_pipe(self, node, value):
+ result = value
+ for node in node['children']:
+ result = self.visit(node, result)
+ return result
+
+ def visit_projection(self, node, value):
+ base = self.visit(node['children'][0], value)
+ if not isinstance(base, list):
+ return None
+ collected = []
+ for element in base:
+ current = self.visit(node['children'][1], element)
+ if current is not None:
+ collected.append(current)
+ return collected
+
+ def visit_value_projection(self, node, value):
+ base = self.visit(node['children'][0], value)
+ try:
+ base = base.values()
+ except AttributeError:
+ return None
+ collected = []
+ for element in base:
+ current = self.visit(node['children'][1], element)
+ if current is not None:
+ collected.append(current)
+ return collected
+
+ def _is_false(self, value):
+ # This looks weird, but we're explicitly using equality checks
+ # because the truth/false values are different between
+ # python and jmespath.
+ return (value == '' or value == [] or value == {} or value is None or
+ value is False)
+
+ def _is_true(self, value):
+ return not self._is_false(value)
+
+
+class GraphvizVisitor(Visitor):
+ def __init__(self):
+ super(GraphvizVisitor, self).__init__()
+ self._lines = []
+ self._count = 1
+
+ def visit(self, node, *args, **kwargs):
+ self._lines.append('digraph AST {')
+ current = '%s%s' % (node['type'], self._count)
+ self._count += 1
+ self._visit(node, current)
+ self._lines.append('}')
+ return '\n'.join(self._lines)
+
+ def _visit(self, node, current):
+ self._lines.append('%s [label="%s(%s)"]' % (
+ current, node['type'], node.get('value', '')))
+ for child in node.get('children', []):
+ child_name = '%s%s' % (child['type'], self._count)
+ self._count += 1
+ self._lines.append(' %s -> %s' % (current, child_name))
+ self._visit(child, child_name)
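The visitor module closes the loop: Visitor.visit() dispatches on node['type'] through a cached getattr() lookup, and Options controls the dict class and function table TreeInterpreter uses. A small direct-use sketch (the hand-built node mirrors the shape the ast helpers produce; normally ParsedResult.search() constructs the interpreter for you):

    import collections
    from jmespath import visitor

    opts = visitor.Options(dict_cls=collections.OrderedDict)
    interp = visitor.TreeInterpreter(opts)
    node = {'type': 'field', 'value': 'name', 'children': []}
    print(interp.visit(node, {'name': 'ydb'}))   # -> ydb
    # JMESPath falseness per _is_false(): '', [], {}, None and False are
    # false, but 0 is true, hence the integer special case in
    # visit_not_expression().
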
diff --git a/contrib/python/jmespath/ya.make b/contrib/python/jmespath/ya.make
index c4d8868206..1ffbd236bc 100644
--- a/contrib/python/jmespath/ya.make
+++ b/contrib/python/jmespath/ya.make
@@ -1,32 +1,32 @@
PY23_LIBRARY()
-
+
OWNER(g:python-contrib)
-
+
VERSION(0.10.0)
-
+
LICENSE(MIT)
NO_LINT()
-PY_SRCS(
- TOP_LEVEL
+PY_SRCS(
+ TOP_LEVEL
jmespath/__init__.py
jmespath/ast.py
jmespath/compat.py
- jmespath/exceptions.py
- jmespath/functions.py
- jmespath/lexer.py
- jmespath/parser.py
+ jmespath/exceptions.py
+ jmespath/functions.py
+ jmespath/lexer.py
+ jmespath/parser.py
jmespath/visitor.py
-)
-
+)
+
RESOURCE_FILES(
PREFIX contrib/python/jmespath/
.dist-info/METADATA
.dist-info/top_level.txt
)
-END()
+END()
RECURSE_FOR_TESTS(
tests
diff --git a/contrib/python/ya.make b/contrib/python/ya.make
index dd925a5697..d01ced9f3a 100644
--- a/contrib/python/ya.make
+++ b/contrib/python/ya.make
@@ -44,7 +44,7 @@ RECURSE(
alabaster
alembic
allpairspy
- amqp
+ amqp
aniso8601
annoy
antlr4
@@ -105,7 +105,7 @@ RECURSE(
behave
betamax
betamax-serializers
- billiard
+ billiard
binaryornot
bincopy
biplist
@@ -119,8 +119,8 @@ RECURSE(
boolean.py
bootstrapped
boto
- boto3
- botocore
+ boto3
+ botocore
braceexpand
bravado
bravado-core
@@ -134,7 +134,7 @@ RECURSE(
cattrs
cbor2
cchardet
- celery
+ celery
celery-mock
Cerberus
certifi
@@ -548,7 +548,7 @@ RECURSE(
jinja2-time
jmespath
joblib
- jmespath
+ jmespath
json-rpc
json2html
jsondiff
@@ -573,7 +573,7 @@ RECURSE(
kazoo
Keras-Preprocessing
kiwisolver
- kombu
+ kombu
korean-lunar-calendar
kubernetes
langcodes