commit 0b92fa3b174bf62a8449af50ee14bb7794dcb02d Author: Eric Evans Date: Fri Nov 27 16:25:33 2015 -0600 Imported Upstream version 1.2.2 diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..aa1f854 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,4 @@ +[run] +branch = True +include = + boto3/* diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..bee41a4 --- /dev/null +++ b/.gitignore @@ -0,0 +1,24 @@ +.DS_Store +*.pyc +__pycache__ +dist +build +docs/build +docs/source/reference/services +tests/cover +tests/.coverage +*.egg-info + +# Test state / virtualenvs +.tox +.coverage +coverage.xml +nosetests.xml + +# Common virtualenv names +venv +env2 +env3 + +# IntelliJ / PyCharm IDE +.idea/ \ No newline at end of file diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..b50b382 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,10 @@ +language: python +python: + - "2.6" + - "2.7" + - "3.3" + - "3.4" +sudo: false +install: + - python scripts/ci/install +script: python scripts/ci/run-tests diff --git a/CHANGELOG.rst b/CHANGELOG.rst new file mode 100644 index 0000000..5ffc633 --- /dev/null +++ b/CHANGELOG.rst @@ -0,0 +1,429 @@ +Changelog +========= + +1.2.2 - (2015-11-19) +-------------------- + +* feature:Dependencies: Relax version constraint of ``futures`` to support + version 3.x. +* feature:Resources: Allow ``config`` object to be provided when creating + resources + (`issue 325 `__) +* feature:Documentation: Add docstrings for resource collections and waiters + (`issue 267 `__, + `issue 261 `__) + + +1.2.1 - (2015-10-22) +-------------------- + +* bugfix:setup.cfg: Fix issue in formatting that broke PyPI distributable + + +1.2.0 - (2015-10-22) +-------------------- + +* feature:Docstrings: Add docstrings for resource identifiers, attributes, + references, and subresources. + (`issue 239 `__) +* feature:``S3``: Add ability to configure host addressing style when making + requests to Amazon S3. 
+ (`botocore issue 673 `__) +* bugfix:``IAM``: Fix model issue with attached groups, roles, and policies. + (`issue 304 `__) +* bugfix:``EC2.ServiceResource.create_key_pair``: Fix model issue where + creating key pair does not have a ``key_material`` on ``KeyPair`` resource. + (`issue 290 `__) + + +1.1.4 - (2015-09-24) +-------------------- + +* bugfix:Identifier: Make resource identifiers immutable. + (`issue 246 `__) +* feature: Both S3 Bucket and Object obtain upload_file() and download_file() + (`issue 243 `__) + + +1.1.3 - 2015-09-03 +------------------ + +* feature:``aws storagegateway``: Add support for resource tagging. +* feature: Add support for customizable timeouts. + + +1.1.2 - 2015-08-25 +------------------ + +* feature:``session.Session``: Add ``events`` property to access session's + event emitter. + (`issue 204 `__) +* bugfix:``DynamoDB``: Fix misspelling of error class to + ``DynamoDBOperationNotSupportedError``. + (`issue 218 `__) + + +1.1.1 - 2015-07-23 +------------------ + +* bugfix:``EC2.ServiceResource.create_tags``: Fix issue when creating + multiple tags. + (`issue 160 `__) + + +1.1.0 - 2015-07-07 +------------------ +* bugfix:``EC2.Vpc.filter``: Fix issue with clobbering of ``Filtering`` + paramter. + (`issue 154 `https://github.com/boto/boto3/pull/154`__) + + +1.0.1 - 2015-06-24 +------------------ +* feature: Update documentation + + +1.0.0 - 2015-06-22 +------------------ +* feature: Announced GA + + +0.0.22 - 2015-06-12 +------------------- + +* bugfix:``s3.client.upload_file``: Fix double invocation of callbacks when + using signature version 4. + (`issue 133 `__) +* bugfix::``s3.Bucket.load``: Add custom load method for Bucket resource. + (`issue 128 `__) + + +0.0.21 - 2015-06-12 +------------------- + +* bugfix:Installation: Fix regression when installing via older versions of + pip on python 2.6. 
+ (`issue 132 `__) + + +0.0.19 - 2015-06-04 +------------------- + +* breakingchange:Collections: Remove the ``page_count`` and ``limit`` + arguments from ``all()``. Undocument support for the two arguments in the + ``filter()`` method. + (`issue 119 `__) +* feature:DynamoDB: Add batch writer. + (`issue 118 `__) + + +0.0.18 - 2015-06-01 +------------------- + +* feature:DynamoDB: Add document level interface for Table resource + (`issue 103 `__) +* feature:DynamoDB: Add ConditionExpression interface for querying and + filtering Table resource. + (`issue 103 `__) +* feature:Clients: Add support for passing of ``botocore.client.Config`` object + to instantiation of clients. + +0.0.17 - 2015-05-07 +------------------- + +* feature:Botocore: Update to Botocore 0.107.0. + + * Adopt new data structure model. + +0.0.16 - 2015-04-20 +------------------- + +* bugfix:Packaging: Fix release sdist and whl files from 0.0.15. +* feature:Amazon Dynamodb: Add resource model for Amazon DynamoDB. + +0.0.15 - 2015-04-13 +------------------- + +* bugfix:Packaging: Fix an issue with the Amazon S3 ``upload_file`` and + ``download_file`` customization. + (`issue 85 `__) +* bugfix:Resource: Fix an issue with the Amazon S3 ``BucketNofitication`` + resource. +* feature:Botocore: Update to Botocore 0.103.0. + + * Documentation updates for Amazon EC2 Container Service. + +0.0.14 - 2015-04-02 +------------------- + +* feature:Resources: Update to the latest resource models for: + + * AWS CloudFormation + * Amazon EC2 + * AWS IAM + +* feature:Amazon S3: Add an ``upload_file`` and ``download_file`` + to S3 clients that transparently handle parallel multipart transfers. +* feature:Botocore: Update to Botocore 0.102.0. + + * Add support for Amazon Machine Learning. + * Add support for Amazon Workspaces. + * Update ``requests`` to 2.6.0. + * Update AWS Lambda to the latest API. + * Update Amazon EC2 Container Service to the latest API. + * Update Amazon S3 to the latest API. 
+ * Add ``DBSnapshotCompleted`` support to Amazon RDS waiters. + * Fixes for the REST-JSON protocol. + +0.0.13 - 2015-04-02 +------------------- + +* feature:Botocore: Update to Botocore 0.100.0. + + * Update AWS CodeDeploy to the latest service API. + * Update Amazon RDS to support the ``describe_certificates`` + service operation. + * Update Amazon Elastic Transcoder to support PlayReady DRM. + * Update Amazon EC2 to support D2 instance types. + +0.0.12 - 2015-03-26 +------------------- + +* feature:Resources: Add the ability to load resource data from a + ``has`` relationship. This saves a call to ``load`` when available, + and otherwise fixes a problem where there was no way to get at + certain resource data. + (`issue 74 `__, +* feature:Botocore: Update to Botocore 0.99.0 + + * Update service models for amazon Elastic Transcoder, AWS IAM + and AWS OpsWorks to the latest versions. + * Add deprecation warnings for old interface. + +0.0.11 - 2015-03-24 +------------------- + +* feature:Resources: Add Amazon EC2 support for ClassicLink actions + and add a delete action to EC2 ``Volume`` resources. +* feature:Resources: Add a ``load`` operation and ``user`` reference + to AWS IAM's ``CurrentUser`` resource. + (`issue 72 `__, +* feature:Resources: Add resources for AWS IAM managed policies. + (`issue 71 `__) +* feature:Botocore: Update to Botocore 0.97.0 + + * Add new Amazon EC2 waiters. + * Add support for Amazon S3 cross region replication. + * Fix an issue where empty config values could not be specified for + Amazon S3's bucket notifications. + (`botocore issue 495 `__) + * Update Amazon CloudWatch Logs to the latest API. + * Update Amazon Elastic Transcoder to the latest API. + * Update AWS CloudTrail to the latest API. + * Fix bug where explicitly passed ``profile_name`` will now override + any access and secret keys set in environment variables. + (`botocore issue 486 `__) + * Add ``endpoint_url`` to ``client.meta``. 
+ * Better error messages for invalid regions. + * Fix creating clients with unicode service name. + +0.0.10 - 2015-03-05 +------------------- + +* bugfix:Documentation: Name collisions are now handled at the resource + model layer instead of the factory, meaning that the documentation + now uses the correct names. + (`issue 67 `__) +* feature:Session: Add a ``region_name`` option when creating a session. + (`issue 69 `__, + `issue 21 `__) +* feature:Botocore: Update to Botocore 0.94.0 + + * Update to the latest Amazon CloudeSearch API. + * Add support for near-realtime data updates and exporting historical + data from Amazon Cognito Sync. + * **Removed** the ability to clone a low-level client. Instead, create + a new client with the same parameters. + * Add support for URL paths in an endpoint URL. + * Multithreading signature fixes. + * Add support for listing hosted zones by name and getting hosted zone + counts from Amazon Route53. + * Add support for tagging to AWS Data Pipeline. + +0.0.9 - 2015-02-19 +------------------ + +* feature:Botocore: Update to Botocore 0.92.0 + + * Add support for the latest Amazon EC2 Container Service API. + * Allow calling AWS STS ``assume_role_with_saml`` without credentials. + * Update to latest Amazon CloudFront API + * Add support for AWS STS regionalized calls by passing both a region + name and an endpoint URL. + (`botocore issue 464 `__) + * Add support for Amazon Simple Systems Management Service (SSM) + * Fix Amazon S3 auth errors when uploading large files + to the ``eu-central-1`` and ``cn-north-1`` regions. + (`botocore issue 462 `__) + * Add support for AWS IAM managed policies + * Add support for Amazon ElastiCache tagging + * Add support for Amazon Route53 Domains tagging of domains + +0.0.8 - 2015-02-10 +------------------ + +* bugfix:Resources: Fix Amazon S3 resource identifier order. + (`issue 62 `__) +* bugfix:Resources: Fix collection resource hydration path. 
+ (`issue 61 `__) +* bugfix:Resources: Re-enable service-level access to all resources, + allowing e.g. ``obj = s3.Object('bucket', 'key')``. + (`issue 60 `__) +* feature:Botocore: Update to Botocore 0.87.0 + + * Add support for Amazon DynamoDB secondary index scanning. + * Upgrade to ``requests`` 2.5.1. + * Add support for anonymous (unsigned) clients. + (`botocore issue 448 `__) + +0.0.7 - 2015-02-05 +------------------ + +* feature:Resources: Enable support for Amazon Glacier. +* feature:Resources: Support plural references and nested JMESPath + queries for data members when building parameters and identifiers. + (`issue 52 `__) +* feature:Resources: Update to the latest resource JSON format. This is + a **backward-incompatible** change as not all resources are exposed + at the service level anymore. For example, ``s3.Object('bucket', 'key')`` + is now ``s3.Bucket('bucket').Object('key')``. + (`issue 51 `__) +* feature:Resources: Make ``resource.meta`` a proper object. This allows + you to do things like ``resource.meta.client``. This is a **backward- + incompatible** change. + (`issue 45 `__) +* feature:Dependency: Update to JMESPath 0.6.1 +* feature:Botocore: Update to Botocore 0.86.0 + + * Add support for AWS CloudHSM + * Add support for Amazon EC2 and Autoscaling ClassicLink + * Add support for Amazon EC2 Container Service (ECS) + * Add support for encryption at rest and CloudHSM to Amazon RDS + * Add support for Amazon DynamoDB online indexing. + * Add support for AWS ImportExport ``get_shipping_label``. + * Add support for Amazon Glacier. + * Add waiters for AWS ElastiCache. + (`botocore issue 443 `__) + * Fix an issue with Amazon CloudFront waiters. + (`botocore issue 426 `_) + * Allow binary data to be passed to ``UserData``. + (`botocore issue 416 `_) + * Fix Amazon EMR endpoints for ``eu-central-1`` and ``cn-north-1``. + (`botocore issue 423 `__) + * Fix issue with base64 encoding of blob types for Amazon EMR. 
+ (`botocore issue 413 `__) + +0.0.6 - 2014-12-18 +------------------ + +* feature:Amazon SQS: Add ``purge`` action to queue resources +* feature:Waiters: Add documentation for client and resource waiters + (`issue 44 `__) +* feature:Waiters: Add support for resource waiters + (`issue 43 `__) +* bugfix:Installation: Remove dependency on the unused ``six`` module + (`issue 42 `__) +* feature:Botocore: Update to Botocore 0.80.0 + + * Update Amazon Simple Workflow Service (SWF) to the latest version + * Update AWS Storage Gateway to the latest version + * Update Amazon Elastic MapReduce (EMR) to the latest version + * Update AWS Elastic Transcoder to the latest version + * Enable use of ``page_size`` for clients + (`botocore issue 408 `__) + +0.0.5 - 2014-12-09 +------------------ + +* feature: Add support for batch actions on collections. + (`issue 32 `__) +* feature: Update to Botocore 0.78.0 + + * Add support for Amazon Simple Queue Service purge queue which allows + users to delete the messages in their queue. + * Add AWS OpsWorks support for registering and assigning existing Amazon + EC2 instances and on-premises servers. + * Fix issue with expired signatures when retrying failed requests + (`botocore issue 399 `__) + * Port Route53 resource ID customizations from AWS CLI to Botocore. + (`botocore issue 398 `__) + * Fix handling of blob type serialization for JSON services. + (`botocore issue 397 `__) + +0.0.4 - 2014-12-04 +------------------ + +* feature: Update to Botocore 0.77.0 + + * Add support for Kinesis PutRecords operation. It writes multiple + data records from a producer into an Amazon Kinesis stream in a + single call. + * Add support for IAM GetAccountAuthorizationDetails operation. It + retrieves information about all IAM users, groups, and roles in + your account, including their relationships to one another and + their attached policies. + * Add support for updating the comment of a Route53 hosted zone. 
+ * Fix base64 serialization for JSON protocol services. + * Fix issue where certain timestamps were not being accepted as valid input + (`botocore issue 389 `__) + +* feature: Update `Amazon EC2 `_ resource model. +* feature: Support `belongsTo` resource reference as well as `path` + specified in an action's resource definition. +* bugfix: Fix an issue accessing SQS message bodies + (`issue 33 `__) + +0.0.3 - 2014-11-26 +------------------ + +* feature: Update to Botocore 0.76.0. + + * Add support for using AWS Data Pipeline templates to create + pipelines and bind values to parameters in the pipeline + * Add support to Amazon Elastic Transcoder client for encryption of files + in Amazon S3. + * Fix issue where Amazon S3 requests were not being + resigned correctly when using Signature Version 4. + (`botocore issue 388 `__) + * Add support for custom response parsing in Botocore clients. + (`botocore issue 387 `__) + +0.0.2 - 2014-11-20 +------------------ + +* Adds resources for + `AWS CloudFormation `_ and + `AWS OpsWorks `_. +* Update to Botocore 0.73.0 and JMESPath 0.5.0 +* Adds support for + `AWS CodeDeploy `_, + `AWS Config `_, + `AWS KMS `_, + `AWS Lambda `_. +* Make requests with a customized HTTP user-agent + +0.0.1 - 2014-11-11 +------------------ + +* Initial developer preview refresh of Boto 3 +* Supports S3, EC2, SQS, SNS, and IAM resources +* Supports low-level clients for most services diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000..8df9c42 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,39 @@ +For more information, please see the official docs at +http://boto3.readthedocs.org/ + +Contributing Code +----------------- +A good pull request: + +- Is clear. +- Works across all supported versions of Python. +- Follows the existing style of the code base (PEP-8). +- Has comments included as needed. 
+ +- A test case that demonstrates the previous flaw that now passes with + the included patch, or demonstrates the newly added feature. +- If it adds/changes a public API, it must also include documentation + for those changes. +- Must be appropriately licensed (Apache 2.0). + +Reporting An Issue/Feature +-------------------------- +First, check to see if there's an existing issue/pull request for the +bug/feature. All issues are at +https://github.com/boto/boto3/issues and pull reqs are at +https://github.com/boto/boto3/pulls. + +If there isn't an existing issue there, please file an issue. The +ideal report includes: + +- A description of the problem/suggestion. +- How to recreate the bug. +- If relevant, including the versions of your: + + - Python interpreter + - Boto 3 + - Optionally of the other dependencies involved (e.g. Botocore) + +- If possible, create a pull request with a (failing) test case + demonstrating what's wrong. This makes the process for fixing bugs + quicker & gets issues resolved sooner. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..fe7176a --- /dev/null +++ b/LICENSE @@ -0,0 +1,12 @@ +Copyright 2013-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"). You +may not use this file except in compliance with the License. A copy of +the License is located at + + http://aws.amazon.com/apache2.0/ + +or in the "license" file accompanying this file. This file is +distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +ANY KIND, either express or implied. See the License for the specific +language governing permissions and limitations under the License. 
diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..5133aa0 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,5 @@ +include CONTRIBUTING.rst +include README.rst +include LICENSE +include requirements.txt +recursive-include boto3/data *.json diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..bc0711f --- /dev/null +++ b/README.rst @@ -0,0 +1,112 @@ +=============================== +Boto 3 - The AWS SDK for Python +=============================== + +|Build Status| |Docs| |Version| + +Boto3 is the Amazon Web Services (AWS) Software Development Kit (SDK) for +Python, which allows Python developers to write software that makes use +of services like Amazon S3 and Amazon EC2. You can find the latest, most +up to date, documentation at `Read the Docs`_, including a list of +services that are supported. + + +.. _boto: https://docs.pythonboto.org/ +.. _`Read the Docs`: https://boto3.readthedocs.org/en/latest/ +.. |Build Status| image:: http://img.shields.io/travis/boto/boto3/develop.svg?style=flat + :target: https://travis-ci.org/boto/boto3 + :alt: Build Status +.. |Docs| image:: https://readthedocs.org/projects/boto3/badge/?version=latest&style=flat + :target: https://boto3.readthedocs.org/en/latest/ + :alt: Read the docs +.. |Downloads| image:: http://img.shields.io/pypi/dm/boto3.svg?style=flat + :target: https://pypi.python.org/pypi/boto3/ + :alt: Downloads +.. |Version| image:: http://img.shields.io/pypi/v/boto3.svg?style=flat + :target: https://pypi.python.org/pypi/boto3/ + :alt: Version +.. |License| image:: http://img.shields.io/pypi/l/boto3.svg?style=flat + :target: https://github.com/boto/boto3/blob/develop/LICENSE + :alt: License + +Quick Start +----------- +First, install the library and set a default region: + +.. code-block:: sh + + $ pip install boto3 + +Next, set up credentials (in e.g. ``~/.aws/credentials``): + +.. 
code-block:: ini + + [default] + aws_access_key_id = YOUR_KEY + aws_secret_access_key = YOUR_SECRET + +Then, set up a default region (in e.g. ``~/.aws/config``): + +.. code-block:: ini + + [default] + region=us-east-1 + +Then, from a Python interpreter: + +.. code-block:: python + + >>> import boto3 + >>> s3 = boto3.resource('s3') + >>> for bucket in s3.buckets.all(): + print(bucket.name) + +Development +----------- + +Getting Started +~~~~~~~~~~~~~~~ +Assuming that you have Python and ``virtualenv`` installed, set up your +environment and install the required dependencies like this instead of +the ``pip install boto3`` defined above: + +.. code-block:: sh + + $ git clone https://github.com/boto/boto3.git + $ cd boto3 + $ virtualenv venv + ... + $ . venv/bin/activate + $ pip install -r requirements.txt + $ pip install -e . + +Running Tests +~~~~~~~~~~~~~ +You can run tests in all supported Python versions using ``tox``. By default, +it will run all of the unit tests, but you can also specify your own +``nosetests`` options. Note that this requires that you have all supported +versions of Python installed, otherwise you must pass ``-e`` or run the +``nosetests`` command directly: + +.. code-block:: sh + + $ tox + $ tox tests/unit/test_session.py + $ tox -e py26,py33 tests/integration + +You can also run individual tests with your default Python version: + +.. code-block:: sh + + $ nosetests tests/unit + +Generating Documentation +~~~~~~~~~~~~~~~~~~~~~~~~ +Sphinx is used for documentation. You can generate HTML locally with the +following: + +.. code-block:: sh + + $ pip install -r requirements-docs.txt + $ cd docs + $ make html diff --git a/boto3/__init__.py b/boto3/__init__.py new file mode 100644 index 0000000..e5ef2e2 --- /dev/null +++ b/boto3/__init__.py @@ -0,0 +1,96 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). 
You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import logging + +from boto3.session import Session + + +__author__ = 'Amazon Web Services' +__version__ = '1.2.2' + + +# The default Boto3 session; autoloaded when needed. +DEFAULT_SESSION = None + +def setup_default_session(**kwargs): + """ + Set up a default session, passing through any parameters to the session + constructor. There is no need to call this unless you wish to pass custom + parameters, because a default session will be created for you. + """ + global DEFAULT_SESSION + DEFAULT_SESSION = Session(**kwargs) + +def set_stream_logger(name='boto3', level=logging.DEBUG, format_string=None): + """ + Add a stream handler for the given name and level to the logging module. + By default, this logs all boto3 messages to ``stdout``. + + >>> import boto3 + >>> boto3.set_stream_logger('boto3.resources', logging.INFO) + + :type name: string + :param name: Log name + :type level: int + :param level: Logging level, e.g. ``logging.INFO`` + :type format_string: str + :param format_string: Log message format + """ + if format_string is None: + format_string = "%(asctime)s %(name)s [%(levelname)s] %(message)s" + + logger = logging.getLogger(name) + logger.setLevel(level) + handler = logging.StreamHandler() + handler.setLevel(level) + formatter = logging.Formatter(format_string) + handler.setFormatter(formatter) + logger.addHandler(handler) + +def _get_default_session(): + """ + Get the default session, creating one if needed. 
+ + :rtype: :py:class:`~boto3.session.Sesssion` + :return: The default session + """ + if DEFAULT_SESSION is None: + setup_default_session() + + return DEFAULT_SESSION + +def client(*args, **kwargs): + """ + Create a low-level service client by name using the default session. + + See :py:meth:`boto3.session.Session.client`. + """ + return _get_default_session().client(*args, **kwargs) + +def resource(*args, **kwargs): + """ + Create a resource service client by name using the default session. + + See :py:meth:`boto3.session.Session.resource`. + """ + return _get_default_session().resource(*args, **kwargs) + +# Set up logging to ``/dev/null`` like a library is supposed to. +# http://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library +class NullHandler(logging.Handler): + def emit(self, record): + pass + + +logging.getLogger('boto3').addHandler(NullHandler()) diff --git a/boto3/compat.py b/boto3/compat.py new file mode 100644 index 0000000..324b60a --- /dev/null +++ b/boto3/compat.py @@ -0,0 +1,31 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import sys +import os +import errno + + +if sys.platform.startswith('win'): + def rename_file(current_filename, new_filename): + try: + os.remove(new_filename) + except OSError as e: + if not e.errno == errno.ENOENT: + # We only want to a ignore trying to remove + # a file that does not exist. If it fails + # for any other reason we should be propagating + # that exception. 
+ raise + os.rename(current_filename, new_filename) +else: + rename_file = os.rename diff --git a/boto3/data/cloudformation/2010-05-15/resources-1.json b/boto3/data/cloudformation/2010-05-15/resources-1.json new file mode 100644 index 0000000..31e017c --- /dev/null +++ b/boto3/data/cloudformation/2010-05-15/resources-1.json @@ -0,0 +1,194 @@ +{ + "service": { + "actions": { + "CreateStack": { + "request": { "operation": "CreateStack" }, + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "StackName" } + ] + } + } + }, + "has": { + "Event": { + "resource": { + "type": "Event", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Stack": { + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + } + }, + "hasMany": { + "Stacks": { + "request": { "operation": "DescribeStacks" }, + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Name", "source": "response", "path": "Stacks[].StackName" } + ] + } + } + } + }, + "resources": { + "Event": { + "identifiers": [ + { + "name": "Id", + "memberName": "EventId" + } + ], + "shape": "StackEvent" + }, + "Stack": { + "identifiers": [ + { + "name": "Name", + "memberName": "StackName" + } + ], + "shape": "Stack", + "load": { + "request": { + "operation": "DescribeStacks", + "params": [ + { "target": "StackName", "source": "identifier", "name": "Name" } + ] + }, + "path": "Stacks[0]" + }, + "actions": { + "CancelUpdate": { + "request": { + "operation": "CancelUpdateStack", + "params": [ + { "target": "StackName", "source": "identifier", "name": "Name" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteStack", + "params": [ + { "target": "StackName", "source": "identifier", "name": "Name" } + ] + } + }, + "Update": { + "request": { + "operation": "UpdateStack", + "params": [ + { "target": "StackName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "has": { + 
"Resource": { + "resource": { + "type": "StackResource", + "identifiers": [ + { "target": "StackName", "source": "identifier", "name": "Name" }, + { "target": "LogicalId", "source": "input" } + ] + } + } + }, + "hasMany": { + "Events": { + "request": { + "operation": "DescribeStackEvents", + "params": [ + { "target": "StackName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Event", + "identifiers": [ + { "target": "Id", "source": "response", "path": "StackEvents[].EventId" } + ], + "path": "StackEvents[]" + } + }, + "ResourceSummaries": { + "request": { + "operation": "ListStackResources", + "params": [ + { "target": "StackName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "StackResourceSummary", + "identifiers": [ + { "target": "LogicalId", "source": "response", "path": "StackResourceSummaries[].LogicalResourceId" }, + { "target": "StackName", "source": "requestParameter", "path": "StackName" } + ], + "path": "StackResourceSummaries[]" + } + } + } + }, + "StackResource": { + "identifiers": [ + { "name": "StackName" }, + { + "name": "LogicalId", + "memberName": "LogicalResourceId" + } + ], + "shape": "StackResourceDetail", + "load": { + "request": { + "operation": "DescribeStackResource", + "params": [ + { "target": "LogicalResourceId", "source": "identifier", "name": "LogicalId" }, + { "target": "StackName", "source": "identifier", "name": "StackName" } + ] + }, + "path": "StackResourceDetail" + }, + "has": { + "Stack": { + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "StackName" } + ] + } + } + } + }, + "StackResourceSummary": { + "identifiers": [ + { "name": "StackName" }, + { + "name": "LogicalId", + "memberName": "LogicalResourceId" + } + ], + "shape": "StackResourceSummary", + "has": { + "Resource": { + "resource": { + "type": "StackResource", + "identifiers": [ + { "target": "LogicalId", "source": "identifier", "name": "LogicalId" }, + 
{ "target": "StackName", "source": "identifier", "name": "StackName" } + ] + } + } + } + } + } +} diff --git a/boto3/data/dynamodb/2012-08-10/resources-1.json b/boto3/data/dynamodb/2012-08-10/resources-1.json new file mode 100644 index 0000000..408f1a9 --- /dev/null +++ b/boto3/data/dynamodb/2012-08-10/resources-1.json @@ -0,0 +1,136 @@ +{ + "service": { + "actions": { + "BatchGetItem": { + "request": { "operation": "BatchGetItem" } + }, + "BatchWriteItem": { + "request": { "operation": "BatchWriteItem" } + }, + "CreateTable": { + "request": { "operation": "CreateTable" }, + "resource": { + "type": "Table", + "identifiers": [ + { "target": "Name", "source": "response", "path": "TableDescription.TableName" } + ], + "path": "TableDescription" + } + } + }, + "has": { + "Table": { + "resource": { + "type": "Table", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + } + }, + "hasMany": { + "Tables": { + "request": { "operation": "ListTables" }, + "resource": { + "type": "Table", + "identifiers": [ + { "target": "Name", "source": "response", "path": "TableNames[]" } + ] + } + } + } + }, + "resources": { + "Table": { + "identifiers": [ + { + "name": "Name", + "memberName": "TableName" + } + ], + "shape": "TableDescription", + "load": { + "request": { + "operation": "DescribeTable", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + }, + "path": "Table" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteTable", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + } + }, + "DeleteItem": { + "request": { + "operation": "DeleteItem", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + } + }, + "GetItem": { + "request": { + "operation": "GetItem", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + } + }, + "PutItem": { + "request": { + "operation": "PutItem", + "params": [ + { "target": 
"TableName", "source": "identifier", "name": "Name" } + ] + } + }, + "Query": { + "request": { + "operation": "Query", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + } + }, + "Scan": { + "request": { + "operation": "Scan", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + } + }, + "Update": { + "request": { + "operation": "UpdateTable", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Table", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "Name" } + ], + "path": "TableDescription" + } + }, + "UpdateItem": { + "request": { + "operation": "UpdateItem", + "params": [ + { "target": "TableName", "source": "identifier", "name": "Name" } + ] + } + } + } + } + } +} diff --git a/boto3/data/ec2/2014-10-01/resources-1.json b/boto3/data/ec2/2014-10-01/resources-1.json new file mode 100644 index 0000000..8ccf160 --- /dev/null +++ b/boto3/data/ec2/2014-10-01/resources-1.json @@ -0,0 +1,2289 @@ +{ + "service": { + "actions": { + "CreateDhcpOptions": { + "request": { "operation": "CreateDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } + ], + "path": "DhcpOptions" + } + }, + "CreateInstances": { + "request": { "operation": "RunInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateInternetGateway": { + "request": { "operation": "CreateInternetGateway" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } + ], + "path": "InternetGateway" + } + }, + "CreateKeyPair": { + "request": { "operation": "CreateKeyPair" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + 
{ "target": "Name", "source": "response", "path": "KeyName" } + ] + } + }, + "CreateNetworkAcl": { + "request": { "operation": "CreateNetworkAcl" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateNetworkInterface": { + "request": { "operation": "CreateNetworkInterface" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + "CreatePlacementGroup": { + "request": { "operation": "CreatePlacementGroup" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "GroupName" } + ] + } + }, + "CreateRouteTable": { + "request": { "operation": "CreateRouteTable" }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + "CreateSecurityGroup": { + "request": { "operation": "CreateSecurityGroup" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSnapshot": { + "request": { "operation": "CreateSnapshot" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateSubnet": { + "request": { "operation": "CreateSubnet" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": { "operation": "CreateTags" }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "requestParameter", "path": "Resources[]" }, + { "target": "Key", "source": "requestParameter", 
"path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "CreateVolume": { + "request": { "operation": "CreateVolume" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VolumeId" } + ], + "path": "@" + } + }, + "CreateVpc": { + "request": { "operation": "CreateVpc" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Vpc.VpcId" } + ], + "path": "Vpc" + } + }, + "CreateVpcPeeringConnection": { + "request": { "operation": "CreateVpcPeeringConnection" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + }, + "DisassociateRouteTable": { + "request": { "operation": "DisassociateRouteTable" } + }, + "ImportKeyPair": { + "request": { "operation": "ImportKeyPair" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyName" } + ] + } + }, + "RegisterImage": { + "request": { "operation": "RegisterImage" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + } + }, + "has": { + "DhcpOptions": { + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Instance": { + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "InternetGateway": { + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + 
"NetworkAcl": { + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "NetworkInterface": { + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "PlacementGroup": { + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "RouteTable": { + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "RouteTableAssociation": { + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "SecurityGroup": { + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Snapshot": { + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "VpcPeeringConnection": { + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + } + }, + "hasMany": { + "DhcpOptionsSets": { + "request": { "operation": "DescribeDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } + ], + "path": "DhcpOptions[]" + } + }, + "Images": { + "request": { "operation": "DescribeImages" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Images[].ImageId" } + ], + "path": "Images[]" + } + }, + "Instances": { + "request": { 
"operation": "DescribeInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { "operation": "DescribeInternetGateways" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "KeyPairs": { + "request": { "operation": "DescribeKeyPairs" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } + ], + "path": "KeyPairs[]" + } + }, + "NetworkAcls": { + "request": { "operation": "DescribeNetworkAcls" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": { "operation": "DescribeNetworkInterfaces" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "PlacementGroups": { + "request": { "operation": "DescribePlacementGroups" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } + ], + "path": "PlacementGroups[]" + } + }, + "RouteTables": { + "request": { "operation": "DescribeRouteTables" }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { "operation": "DescribeSecurityGroups" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", 
"path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Snapshots": { + "request": { "operation": "DescribeSnapshots" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + }, + "Subnets": { + "request": { "operation": "DescribeSubnets" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + }, + "Volumes": { + "request": { "operation": "DescribeVolumes" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + }, + "VpcPeeringConnections": { + "request": { "operation": "DescribeVpcPeeringConnections" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Vpcs": { + "request": { "operation": "DescribeVpcs" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } + ], + "path": "Vpcs[]" + } + } + } + }, + "resources": { + "DhcpOptions": { + "identifiers": [ + { + "name": "Id", + "memberName": "DhcpOptionsId" + } + ], + "shape": "DhcpOptions", + "load": { + "request": { + "operation": "DescribeDhcpOptions", + "params": [ + { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "DhcpOptions[0]" + }, + "actions": { + "AssociateWithVpc": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": 
{ + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Image": { + "identifiers": [ + { + "name": "Id", + "memberName": "ImageId" + } + ], + "shape": "Image", + "load": { + "request": { + "operation": "DescribeImages", + "params": [ + { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Images[0]" + }, + "actions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Deregister": { + "request": { + "operation": "DeregisterImage", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Instance": { + "identifiers": [ + { + "name": "Id", + "memberName": "InstanceId" + } + ], + "shape": "Instance", + "load": { + "request": { + 
"operation": "DescribeInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Reservations[0].Instances[0]" + }, + "actions": { + "AttachClassicLinkVpc": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachVolume": { + "request": { + "operation": "AttachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ConsoleOutput": { + "request": { + "operation": "GetConsoleOutput", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateImage": { + "request": { + "operation": "CreateImage", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkVpc": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachVolume": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": 
"ModifyInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "PasswordData": { + "request": { + "operation": "GetPasswordData", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "ReportStatus": { + "request": { + "operation": "ReportInstanceStatus", + "params": [ + { "target": "Instances[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetKernel": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "kernel" } + ] + } + }, + "ResetRamdisk": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "ramdisk" } + ] + } + }, + "ResetSourceDestCheck": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + 
"Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + "operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "batchActions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + "operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Exists": { + "waiterName": "InstanceExists", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Running": { + "waiterName": "InstanceRunning", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Stopped": { + "waiterName": "InstanceStopped", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" 
} + ], + "path": "Reservations[0].Instances[0]" + }, + "Terminated": { + "waiterName": "InstanceTerminated", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + } + }, + "has": { + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "data", "path": "ImageId" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "data", "path": "KeyName" } + ] + } + }, + "PlacementGroup": { + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "data", "path": "Placement.GroupName" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Volumes": { + "request": { + "operation": "DescribeVolumes", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + } + } + }, + "InternetGateway": { + "identifiers": [ + { + "name": "Id", + "memberName": "InternetGatewayId" + } + ], + "shape": "InternetGateway", + "load": { + "request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "InternetGateways[0]" + }, + "actions": { + "AttachToVpc": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + 
"operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachFromVpc": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "KeyPair": { + "identifiers": [ + { + "name": "Name", + "memberName": "KeyName" + } + ], + "shape": "KeyPairInfo", + "load": { + "request": { + "operation": "DescribeKeyPairs", + "params": [ + { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "KeyPairs[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteKeyPair", + "params": [ + { "target": "KeyName", "source": "identifier", "name": "Name" } + ] + } + } + } + }, + "NetworkAcl": { + "identifiers": [ + { + "name": "Id", + "memberName": "NetworkAclId" + } + ], + "shape": "NetworkAcl", + "load": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "NetworkAcls[0]" + }, + "actions": { + "CreateEntry": { + "request": { + "operation": "CreateNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", 
"name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkAcl", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "DeleteEntry": { + "request": { + "operation": "DeleteNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceAssociation": { + "request": { + "operation": "ReplaceNetworkAclAssociation", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceEntry": { + "request": { + "operation": "ReplaceNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "NetworkInterface": { + "identifiers": [ + { + "name": "Id", + "memberName": "NetworkInterfaceId" + } + ], + "shape": "NetworkInterface", + "load": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "NetworkInterfaces[0]" + }, + "actions": { + "AssignPrivateIpAddresses": { + "request": { + "operation": "AssignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Attach": { + "request": { + "operation": "AttachNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": 
"identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Detach": { + "request": { + "operation": "DetachNetworkInterface", + "params": [ + { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "UnassignPrivateIpAddresses": { + "request": { + "operation": "UnassignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "PlacementGroup": { + "identifiers": [ + { + "name": "Name", + "memberName": "GroupName" + } + ], + "shape": "PlacementGroup", + "load": { + "request": { + "operation": "DescribePlacementGroups", + "params": [ + { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "PlacementGroups[0]" + }, + "actions": { + "Delete": { + "request": { + 
"operation": "DeletePlacementGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + } + } + }, + "RouteTable": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableId" + } + ], + "shape": "RouteTable", + "load": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "RouteTables[0]" + }, + "actions": { + "AssociateWithSubnet": { + "request": { + "operation": "AssociateRouteTable", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "AssociationId" } + ] + } + }, + "CreateRoute": { + "request": { + "operation": "CreateRoute", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteRouteTable", + "params": [ + { "target": "RouteTableId", "source": 
"identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Associations": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } + ], + "path": "RouteTables[0].Associations[]" + } + } + } + }, + "RouteTableAssociation": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableAssociationId" + } + ], + "shape": "RouteTableAssociation", + "actions": { + "Delete": { + "request": { + "operation": "DisassociateRouteTable", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceSubnet": { + "request": { + "operation": "ReplaceRouteTableAssociation", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NewAssociationId" } + ] + } + } + }, + "has": { + "RouteTable": { + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RouteTableId" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + } + } + }, + "SecurityGroup": { + "identifiers": [ + { + "name": "Id", + "memberName": "GroupId" + } + ], + "shape": "SecurityGroup", + "load": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "SecurityGroups[0]" + }, + "actions": { + "AuthorizeEgress": { + "request": { + "operation": 
"AuthorizeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "AuthorizeIngress": { + "request": { + "operation": "AuthorizeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSecurityGroup", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeEgress": { + "request": { + "operation": "RevokeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeIngress": { + "request": { + "operation": "RevokeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Snapshot": { + "identifiers": [ + { + "name": "Id", + "memberName": "SnapshotId" + } + ], + "shape": "Snapshot", + "load": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Snapshots[0]" + }, + "actions": { + "Copy": { + "request": { + "operation": "CopySnapshot", + "params": [ + { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, 
+ { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSnapshot", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifySnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Completed": { + "waiterName": "SnapshotCompleted", + "params": [ + { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Snapshots[]" + } + }, + "has": { + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VolumeId" } + ] + } + } + } + }, + "Subnet": { + "identifiers": [ + { + "name": "Id", + "memberName": "SubnetId" + } + ], + "shape": "Subnet", + "load": { + "request": { + "operation": "DescribeSubnets", + "params": [ + { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Subnets[0]" + }, + "actions": { + "CreateInstances": { + "request": { + "operation": "RunInstances", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateNetworkInterface": { + "request": { + "operation": "CreateNetworkInterface", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" 
} + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSubnet", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "NetworkInterfaces": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + } + } + }, + "Tag": { + "identifiers": [ + { + "name": "ResourceId", + "memberName": "ResourceId" + }, + { + "name": "Key", 
+ "memberName": "Key" + }, + { + "name": "Value", + "memberName": "Value" + } + ], + "shape": "TagDescription", + "load": { + "request": { + "operation": "DescribeTags", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "key" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, + { "target": "Filters[1].Name", "source": "string", "value": "value" }, + { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } + ] + }, + "path": "Tags[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": "DeleteTags", + "params": [ + { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } + ] + } + } + } + }, + "Volume": { + "identifiers": [ + { + "name": "Id", + "memberName": "VolumeId" + } + ], + "shape": "Volume", + "load": { + "request": { + "operation": "DescribeVolumes", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Volumes[0]" + }, + "actions": { + "AttachToInstance": { + "request": { + "operation": "AttachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateSnapshot": { + "request": { + "operation": "CreateSnapshot", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateTags": { + "request": { + 
"operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeVolumeAttribute", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeStatus": { + "request": { + "operation": "DescribeVolumeStatus", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachFromInstance": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableIo": { + "request": { + "operation": "EnableVolumeIO", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVolumeAttribute", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "hasMany": { + "Snapshots": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + } + } + }, + "Vpc": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcId" + } + ], + "shape": "Vpc", + "load": { + "request": { + "operation": "DescribeVpcs", + 
"params": [ + { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Vpcs[0]" + }, + "actions": { + "AssociateDhcpOptions": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachClassicLinkInstance": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachInternetGateway": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateNetworkAcl": { + "request": { + "operation": "CreateNetworkAcl", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateRouteTable": { + "request": { + "operation": "CreateRouteTable", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + "CreateSecurityGroup": { + "request": { + "operation": "CreateSecurityGroup", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSubnet": { + "request": { + "operation": "CreateSubnet", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ 
+ { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkInstance": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachInternetGateway": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DisableClassicLink": { + "request": { + "operation": "DisableVpcClassicLink", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableClassicLink": { + "request": { + "operation": "EnableVpcClassicLink", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "RequestVpcPeeringConnection": { + "request": { + "operation": "CreateVpcPeeringConnection", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + } + }, + "has": { + "DhcpOptions": { + "resource": { + "type": "DhcpOptions", + 
"identifiers": [ + { "target": "Id", "source": "data", "path": "DhcpOptionsId" } + ] + } + } + }, + "hasMany": { + "AcceptedVpcPeeringConnections": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "NetworkAcls": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": 
{ + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "RequestedVpcPeeringConnections": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "RouteTables": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Subnets": { + "request": { + "operation": "DescribeSubnets", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": 
"identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + } + } + }, + "VpcPeeringConnection": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcPeeringConnectionId" + } + ], + "shape": "VpcPeeringConnection", + "load": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "VpcPeeringConnections[0]" + }, + "actions": { + "Accept": { + "request": { + "operation": "AcceptVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Reject": { + "request": { + "operation": "RejectVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "AccepterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } + ] + } + }, + "RequesterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } + ] + } + } + } + } + } +} diff --git a/boto3/data/ec2/2015-03-01/resources-1.json b/boto3/data/ec2/2015-03-01/resources-1.json new file mode 100644 index 0000000..8ccf160 --- /dev/null +++ b/boto3/data/ec2/2015-03-01/resources-1.json @@ -0,0 +1,2289 @@ +{ + "service": { + "actions": { + "CreateDhcpOptions": { + "request": { "operation": "CreateDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } + ], + "path": "DhcpOptions" + } + }, + 
"CreateInstances": { + "request": { "operation": "RunInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateInternetGateway": { + "request": { "operation": "CreateInternetGateway" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } + ], + "path": "InternetGateway" + } + }, + "CreateKeyPair": { + "request": { "operation": "CreateKeyPair" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyName" } + ] + } + }, + "CreateNetworkAcl": { + "request": { "operation": "CreateNetworkAcl" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateNetworkInterface": { + "request": { "operation": "CreateNetworkInterface" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + "CreatePlacementGroup": { + "request": { "operation": "CreatePlacementGroup" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "GroupName" } + ] + } + }, + "CreateRouteTable": { + "request": { "operation": "CreateRouteTable" }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + "CreateSecurityGroup": { + "request": { "operation": "CreateSecurityGroup" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSnapshot": { + "request": { "operation": 
"CreateSnapshot" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateSubnet": { + "request": { "operation": "CreateSubnet" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": { "operation": "CreateTags" }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "requestParameter", "path": "Resources[]" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "CreateVolume": { + "request": { "operation": "CreateVolume" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VolumeId" } + ], + "path": "@" + } + }, + "CreateVpc": { + "request": { "operation": "CreateVpc" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Vpc.VpcId" } + ], + "path": "Vpc" + } + }, + "CreateVpcPeeringConnection": { + "request": { "operation": "CreateVpcPeeringConnection" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + }, + "DisassociateRouteTable": { + "request": { "operation": "DisassociateRouteTable" } + }, + "ImportKeyPair": { + "request": { "operation": "ImportKeyPair" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyName" } + ] + } + }, + "RegisterImage": { + "request": { "operation": "RegisterImage" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + } + }, + "has": { + 
"DhcpOptions": { + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Instance": { + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "InternetGateway": { + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "NetworkAcl": { + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "NetworkInterface": { + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "PlacementGroup": { + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "RouteTable": { + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "RouteTableAssociation": { + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "SecurityGroup": { + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Snapshot": { + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "VpcPeeringConnection": { + "resource": 
{ + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + } + }, + "hasMany": { + "DhcpOptionsSets": { + "request": { "operation": "DescribeDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } + ], + "path": "DhcpOptions[]" + } + }, + "Images": { + "request": { "operation": "DescribeImages" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Images[].ImageId" } + ], + "path": "Images[]" + } + }, + "Instances": { + "request": { "operation": "DescribeInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { "operation": "DescribeInternetGateways" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "KeyPairs": { + "request": { "operation": "DescribeKeyPairs" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } + ], + "path": "KeyPairs[]" + } + }, + "NetworkAcls": { + "request": { "operation": "DescribeNetworkAcls" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": { "operation": "DescribeNetworkInterfaces" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "PlacementGroups": { + "request": { "operation": 
"DescribePlacementGroups" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } + ], + "path": "PlacementGroups[]" + } + }, + "RouteTables": { + "request": { "operation": "DescribeRouteTables" }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { "operation": "DescribeSecurityGroups" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Snapshots": { + "request": { "operation": "DescribeSnapshots" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + }, + "Subnets": { + "request": { "operation": "DescribeSubnets" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + }, + "Volumes": { + "request": { "operation": "DescribeVolumes" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + }, + "VpcPeeringConnections": { + "request": { "operation": "DescribeVpcPeeringConnections" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Vpcs": { + "request": { "operation": "DescribeVpcs" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } + ], + "path": "Vpcs[]" + } + } + } + }, + "resources": { + "DhcpOptions": { + 
"identifiers": [ + { + "name": "Id", + "memberName": "DhcpOptionsId" + } + ], + "shape": "DhcpOptions", + "load": { + "request": { + "operation": "DescribeDhcpOptions", + "params": [ + { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "DhcpOptions[0]" + }, + "actions": { + "AssociateWithVpc": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Image": { + "identifiers": [ + { + "name": "Id", + "memberName": "ImageId" + } + ], + "shape": "Image", + "load": { + "request": { + "operation": "DescribeImages", + "params": [ + { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Images[0]" + }, + "actions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Deregister": { + "request": { + "operation": "DeregisterImage", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + 
"DescribeAttribute": { + "request": { + "operation": "DescribeImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Instance": { + "identifiers": [ + { + "name": "Id", + "memberName": "InstanceId" + } + ], + "shape": "Instance", + "load": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Reservations[0].Instances[0]" + }, + "actions": { + "AttachClassicLinkVpc": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachVolume": { + "request": { + "operation": "AttachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ConsoleOutput": { + "request": { + "operation": "GetConsoleOutput", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateImage": { + "request": { + "operation": "CreateImage", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": 
"Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkVpc": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachVolume": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "PasswordData": { + "request": { + "operation": "GetPasswordData", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "ReportStatus": { + "request": { + "operation": "ReportInstanceStatus", + "params": [ + { "target": "Instances[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetKernel": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "kernel" } + ] + } + }, + "ResetRamdisk": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { 
"target": "Attribute", "source": "string", "value": "ramdisk" } + ] + } + }, + "ResetSourceDestCheck": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + "operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "batchActions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + 
"operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Exists": { + "waiterName": "InstanceExists", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Running": { + "waiterName": "InstanceRunning", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Stopped": { + "waiterName": "InstanceStopped", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Terminated": { + "waiterName": "InstanceTerminated", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + } + }, + "has": { + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "data", "path": "ImageId" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "data", "path": "KeyName" } + ] + } + }, + "PlacementGroup": { + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "data", "path": "Placement.GroupName" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Volumes": { + "request": { + "operation": "DescribeVolumes", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": 
"response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + } + } + }, + "InternetGateway": { + "identifiers": [ + { + "name": "Id", + "memberName": "InternetGatewayId" + } + ], + "shape": "InternetGateway", + "load": { + "request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "InternetGateways[0]" + }, + "actions": { + "AttachToVpc": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachFromVpc": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "KeyPair": { + "identifiers": [ + { + "name": "Name", + "memberName": "KeyName" + } + ], + "shape": "KeyPairInfo", + "load": { + "request": { + "operation": "DescribeKeyPairs", + "params": [ + { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "KeyPairs[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteKeyPair", + "params": [ + { "target": "KeyName", "source": "identifier", "name": "Name" } + ] + } + } + } + }, + "NetworkAcl": { + "identifiers": [ + { + "name": "Id", + "memberName": "NetworkAclId" + } + ], + 
"shape": "NetworkAcl", + "load": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "NetworkAcls[0]" + }, + "actions": { + "CreateEntry": { + "request": { + "operation": "CreateNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkAcl", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "DeleteEntry": { + "request": { + "operation": "DeleteNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceAssociation": { + "request": { + "operation": "ReplaceNetworkAclAssociation", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceEntry": { + "request": { + "operation": "ReplaceNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "NetworkInterface": { + "identifiers": [ + { + "name": "Id", + "memberName": "NetworkInterfaceId" + } + ], + "shape": "NetworkInterface", + "load": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + 
"path": "NetworkInterfaces[0]" + }, + "actions": { + "AssignPrivateIpAddresses": { + "request": { + "operation": "AssignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Attach": { + "request": { + "operation": "AttachNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Detach": { + "request": { + "operation": "DetachNetworkInterface", + "params": [ + { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "UnassignPrivateIpAddresses": { + "request": { + "operation": "UnassignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + 
"Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "PlacementGroup": { + "identifiers": [ + { + "name": "Name", + "memberName": "GroupName" + } + ], + "shape": "PlacementGroup", + "load": { + "request": { + "operation": "DescribePlacementGroups", + "params": [ + { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "PlacementGroups[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeletePlacementGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + } + } + }, + "RouteTable": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableId" + } + ], + "shape": "RouteTable", + "load": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "RouteTables[0]" + }, + "actions": { + "AssociateWithSubnet": { + "request": { + "operation": "AssociateRouteTable", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "AssociationId" } + ] + } + }, + "CreateRoute": { + "request": { + "operation": "CreateRoute", + 
"params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteRouteTable", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Associations": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } + ], + "path": "RouteTables[0].Associations[]" + } + } + } + }, + "RouteTableAssociation": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableAssociationId" + } + ], + "shape": "RouteTableAssociation", + "actions": { + "Delete": { + "request": { + "operation": "DisassociateRouteTable", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceSubnet": { + "request": { + "operation": "ReplaceRouteTableAssociation", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NewAssociationId" } + ] + } + } + }, + "has": { + "RouteTable": { + "resource": { + "type": 
"RouteTable", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RouteTableId" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + } + } + }, + "SecurityGroup": { + "identifiers": [ + { + "name": "Id", + "memberName": "GroupId" + } + ], + "shape": "SecurityGroup", + "load": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "SecurityGroups[0]" + }, + "actions": { + "AuthorizeEgress": { + "request": { + "operation": "AuthorizeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "AuthorizeIngress": { + "request": { + "operation": "AuthorizeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSecurityGroup", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeEgress": { + "request": { + "operation": "RevokeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeIngress": { + "request": { + "operation": "RevokeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Snapshot": { + "identifiers": [ + { + "name": "Id", + "memberName": "SnapshotId" + } + ], + "shape": 
"Snapshot", + "load": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Snapshots[0]" + }, + "actions": { + "Copy": { + "request": { + "operation": "CopySnapshot", + "params": [ + { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSnapshot", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifySnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Completed": { + "waiterName": "SnapshotCompleted", + "params": [ + { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Snapshots[]" + } + }, + "has": { + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VolumeId" } + ] + } + } + } + }, + "Subnet": { + "identifiers": [ + { + "name": "Id", + "memberName": "SubnetId" + } + ], + "shape": "Subnet", + "load": { + "request": { + "operation": 
"DescribeSubnets", + "params": [ + { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Subnets[0]" + }, + "actions": { + "CreateInstances": { + "request": { + "operation": "RunInstances", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateNetworkInterface": { + "request": { + "operation": "CreateNetworkInterface", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSubnet", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": 
"Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "NetworkInterfaces": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + } + } + }, + "Tag": { + "identifiers": [ + { + "name": "ResourceId", + "memberName": "ResourceId" + }, + { + "name": "Key", + "memberName": "Key" + }, + { + "name": "Value", + "memberName": "Value" + } + ], + "shape": "TagDescription", + "load": { + "request": { + "operation": "DescribeTags", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "key" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, + { "target": "Filters[1].Name", "source": "string", "value": "value" }, + { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } + ] + }, + "path": "Tags[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": "DeleteTags", + "params": [ + { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } + ] + } + } + } + }, + "Volume": { + "identifiers": [ + { + "name": "Id", + "memberName": "VolumeId" + } + ], + "shape": "Volume", + "load": { + "request": { + 
"operation": "DescribeVolumes", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Volumes[0]" + }, + "actions": { + "AttachToInstance": { + "request": { + "operation": "AttachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateSnapshot": { + "request": { + "operation": "CreateSnapshot", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeVolumeAttribute", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeStatus": { + "request": { + "operation": "DescribeVolumeStatus", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachFromInstance": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableIo": { + "request": { + "operation": "EnableVolumeIO", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVolumeAttribute", + "params": [ + { 
"target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "hasMany": { + "Snapshots": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + } + } + }, + "Vpc": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcId" + } + ], + "shape": "Vpc", + "load": { + "request": { + "operation": "DescribeVpcs", + "params": [ + { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Vpcs[0]" + }, + "actions": { + "AssociateDhcpOptions": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachClassicLinkInstance": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachInternetGateway": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateNetworkAcl": { + "request": { + "operation": "CreateNetworkAcl", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateRouteTable": { + "request": { + "operation": "CreateRouteTable", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + 
"CreateSecurityGroup": { + "request": { + "operation": "CreateSecurityGroup", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSubnet": { + "request": { + "operation": "CreateSubnet", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkInstance": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachInternetGateway": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DisableClassicLink": { + "request": { + "operation": "DisableVpcClassicLink", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableClassicLink": { + "request": { + "operation": "EnableVpcClassicLink", + "params": [ + { "target": "VpcId", 
"source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "RequestVpcPeeringConnection": { + "request": { + "operation": "CreateVpcPeeringConnection", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + } + }, + "has": { + "DhcpOptions": { + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "data", "path": "DhcpOptionsId" } + ] + } + } + }, + "hasMany": { + "AcceptedVpcPeeringConnections": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", 
"name": "Id" } + ] + }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "NetworkAcls": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "RequestedVpcPeeringConnections": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "RouteTables": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": 
"RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Subnets": { + "request": { + "operation": "DescribeSubnets", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + } + } + }, + "VpcPeeringConnection": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcPeeringConnectionId" + } + ], + "shape": "VpcPeeringConnection", + "load": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "VpcPeeringConnections[0]" + }, + "actions": { + "Accept": { + "request": { + "operation": "AcceptVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Reject": { + "request": { + "operation": "RejectVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "AccepterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": 
"AccepterVpcInfo.VpcId" } + ] + } + }, + "RequesterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } + ] + } + } + } + } + } +} diff --git a/boto3/data/ec2/2015-04-15/resources-1.json b/boto3/data/ec2/2015-04-15/resources-1.json new file mode 100644 index 0000000..8ccf160 --- /dev/null +++ b/boto3/data/ec2/2015-04-15/resources-1.json @@ -0,0 +1,2289 @@ +{ + "service": { + "actions": { + "CreateDhcpOptions": { + "request": { "operation": "CreateDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } + ], + "path": "DhcpOptions" + } + }, + "CreateInstances": { + "request": { "operation": "RunInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateInternetGateway": { + "request": { "operation": "CreateInternetGateway" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateway.InternetGatewayId" } + ], + "path": "InternetGateway" + } + }, + "CreateKeyPair": { + "request": { "operation": "CreateKeyPair" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyName" } + ] + } + }, + "CreateNetworkAcl": { + "request": { "operation": "CreateNetworkAcl" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateNetworkInterface": { + "request": { "operation": "CreateNetworkInterface" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + 
"CreatePlacementGroup": { + "request": { "operation": "CreatePlacementGroup" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "GroupName" } + ] + } + }, + "CreateRouteTable": { + "request": { "operation": "CreateRouteTable" }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + "CreateSecurityGroup": { + "request": { "operation": "CreateSecurityGroup" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSnapshot": { + "request": { "operation": "CreateSnapshot" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateSubnet": { + "request": { "operation": "CreateSubnet" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": { "operation": "CreateTags" }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "requestParameter", "path": "Resources[]" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "CreateVolume": { + "request": { "operation": "CreateVolume" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VolumeId" } + ], + "path": "@" + } + }, + "CreateVpc": { + "request": { "operation": "CreateVpc" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Vpc.VpcId" } + ], + "path": "Vpc" + } + }, + "CreateVpcPeeringConnection": { + "request": { "operation": 
"CreateVpcPeeringConnection" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + }, + "DisassociateRouteTable": { + "request": { "operation": "DisassociateRouteTable" } + }, + "ImportKeyPair": { + "request": { "operation": "ImportKeyPair" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyName" } + ] + } + }, + "RegisterImage": { + "request": { "operation": "RegisterImage" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + } + }, + "has": { + "DhcpOptions": { + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Instance": { + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "InternetGateway": { + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "NetworkAcl": { + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "NetworkInterface": { + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "PlacementGroup": { + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "RouteTable": { + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "RouteTableAssociation": { + "resource": { + "type": 
"RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "SecurityGroup": { + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Snapshot": { + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "VpcPeeringConnection": { + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + } + }, + "hasMany": { + "DhcpOptionsSets": { + "request": { "operation": "DescribeDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "response", "path": "DhcpOptions[].DhcpOptionsId" } + ], + "path": "DhcpOptions[]" + } + }, + "Images": { + "request": { "operation": "DescribeImages" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Images[].ImageId" } + ], + "path": "Images[]" + } + }, + "Instances": { + "request": { "operation": "DescribeInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { "operation": "DescribeInternetGateways" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "KeyPairs": { + "request": { "operation": "DescribeKeyPairs" }, + "resource": 
{ + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } + ], + "path": "KeyPairs[]" + } + }, + "NetworkAcls": { + "request": { "operation": "DescribeNetworkAcls" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": { "operation": "DescribeNetworkInterfaces" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "PlacementGroups": { + "request": { "operation": "DescribePlacementGroups" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } + ], + "path": "PlacementGroups[]" + } + }, + "RouteTables": { + "request": { "operation": "DescribeRouteTables" }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { "operation": "DescribeSecurityGroups" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Snapshots": { + "request": { "operation": "DescribeSnapshots" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + }, + "Subnets": { + "request": { "operation": "DescribeSubnets" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + }, + "Volumes": { + "request": { "operation": 
"DescribeVolumes" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + }, + "VpcPeeringConnections": { + "request": { "operation": "DescribeVpcPeeringConnections" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Vpcs": { + "request": { "operation": "DescribeVpcs" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Vpcs[].VpcId" } + ], + "path": "Vpcs[]" + } + } + } + }, + "resources": { + "DhcpOptions": { + "identifiers": [ + { + "name": "Id", + "memberName": "DhcpOptionsId" + } + ], + "shape": "DhcpOptions", + "load": { + "request": { + "operation": "DescribeDhcpOptions", + "params": [ + { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "DhcpOptions[0]" + }, + "actions": { + "AssociateWithVpc": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Image": { + "identifiers": [ + { + "name": "Id", + "memberName": "ImageId" + } + ], + "shape": "Image", + "load": { + "request": { + "operation": 
"DescribeImages", + "params": [ + { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Images[0]" + }, + "actions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Deregister": { + "request": { + "operation": "DeregisterImage", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Instance": { + "identifiers": [ + { + "name": "Id", + "memberName": "InstanceId" + } + ], + "shape": "Instance", + "load": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Reservations[0].Instances[0]" + }, + "actions": { + "AttachClassicLinkVpc": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachVolume": { + "request": { + "operation": "AttachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ConsoleOutput": { + "request": { + "operation": 
"GetConsoleOutput", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateImage": { + "request": { + "operation": "CreateImage", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkVpc": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachVolume": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "PasswordData": { + "request": { + "operation": "GetPasswordData", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } 
+ ] + } + }, + "ReportStatus": { + "request": { + "operation": "ReportInstanceStatus", + "params": [ + { "target": "Instances[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetKernel": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "kernel" } + ] + } + }, + "ResetRamdisk": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "ramdisk" } + ] + } + }, + "ResetSourceDestCheck": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + "operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "batchActions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { 
"target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + "operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Exists": { + "waiterName": "InstanceExists", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Running": { + "waiterName": "InstanceRunning", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Stopped": { + "waiterName": "InstanceStopped", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Terminated": { + "waiterName": "InstanceTerminated", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + } + }, + "has": { + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "data", "path": "ImageId" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "data", "path": "KeyName" } + ] + } + }, + "PlacementGroup": { + "resource": { + "type": 
"PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "data", "path": "Placement.GroupName" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Volumes": { + "request": { + "operation": "DescribeVolumes", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + } + } + }, + "InternetGateway": { + "identifiers": [ + { + "name": "Id", + "memberName": "InternetGatewayId" + } + ], + "shape": "InternetGateway", + "load": { + "request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "InternetGateways[0]" + }, + "actions": { + "AttachToVpc": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + 
"DetachFromVpc": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "KeyPair": { + "identifiers": [ + { + "name": "Name", + "memberName": "KeyName" + } + ], + "shape": "KeyPairInfo", + "load": { + "request": { + "operation": "DescribeKeyPairs", + "params": [ + { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "KeyPairs[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteKeyPair", + "params": [ + { "target": "KeyName", "source": "identifier", "name": "Name" } + ] + } + } + } + }, + "NetworkAcl": { + "identifiers": [ + { + "name": "Id", + "memberName": "NetworkAclId" + } + ], + "shape": "NetworkAcl", + "load": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "NetworkAcls[0]" + }, + "actions": { + "CreateEntry": { + "request": { + "operation": "CreateNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkAcl", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "DeleteEntry": { + "request": { + "operation": "DeleteNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceAssociation": { + "request": { + "operation": 
"ReplaceNetworkAclAssociation", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceEntry": { + "request": { + "operation": "ReplaceNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "NetworkInterface": { + "identifiers": [ + { + "name": "Id", + "memberName": "NetworkInterfaceId" + } + ], + "shape": "NetworkInterface", + "load": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "NetworkInterfaces[0]" + }, + "actions": { + "AssignPrivateIpAddresses": { + "request": { + "operation": "AssignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Attach": { + "request": { + "operation": "AttachNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + 
"Detach": { + "request": { + "operation": "DetachNetworkInterface", + "params": [ + { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "UnassignPrivateIpAddresses": { + "request": { + "operation": "UnassignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "PlacementGroup": { + "identifiers": [ + { + "name": "Name", + "memberName": "GroupName" + } + ], + "shape": "PlacementGroup", + "load": { + "request": { + "operation": "DescribePlacementGroups", + "params": [ + { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "PlacementGroups[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeletePlacementGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": 
"Reservations[].Instances[]" + } + } + } + }, + "RouteTable": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableId" + } + ], + "shape": "RouteTable", + "load": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "RouteTables[0]" + }, + "actions": { + "AssociateWithSubnet": { + "request": { + "operation": "AssociateRouteTable", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "AssociationId" } + ] + } + }, + "CreateRoute": { + "request": { + "operation": "CreateRoute", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteRouteTable", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Associations": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } + ], + "path": 
"RouteTables[0].Associations[]" + } + } + } + }, + "RouteTableAssociation": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableAssociationId" + } + ], + "shape": "RouteTableAssociation", + "actions": { + "Delete": { + "request": { + "operation": "DisassociateRouteTable", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceSubnet": { + "request": { + "operation": "ReplaceRouteTableAssociation", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NewAssociationId" } + ] + } + } + }, + "has": { + "RouteTable": { + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RouteTableId" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + } + } + }, + "SecurityGroup": { + "identifiers": [ + { + "name": "Id", + "memberName": "GroupId" + } + ], + "shape": "SecurityGroup", + "load": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "SecurityGroups[0]" + }, + "actions": { + "AuthorizeEgress": { + "request": { + "operation": "AuthorizeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "AuthorizeIngress": { + "request": { + "operation": "AuthorizeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { 
"target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSecurityGroup", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeEgress": { + "request": { + "operation": "RevokeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeIngress": { + "request": { + "operation": "RevokeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Snapshot": { + "identifiers": [ + { + "name": "Id", + "memberName": "SnapshotId" + } + ], + "shape": "Snapshot", + "load": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Snapshots[0]" + }, + "actions": { + "Copy": { + "request": { + "operation": "CopySnapshot", + "params": [ + { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSnapshot", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifySnapshotAttribute", 
+ "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Completed": { + "waiterName": "SnapshotCompleted", + "params": [ + { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Snapshots[]" + } + }, + "has": { + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VolumeId" } + ] + } + } + } + }, + "Subnet": { + "identifiers": [ + { + "name": "Id", + "memberName": "SubnetId" + } + ], + "shape": "Subnet", + "load": { + "request": { + "operation": "DescribeSubnets", + "params": [ + { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Subnets[0]" + }, + "actions": { + "CreateInstances": { + "request": { + "operation": "RunInstances", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateNetworkInterface": { + "request": { + "operation": "CreateNetworkInterface", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", 
"source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSubnet", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "NetworkInterfaces": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + } + } + }, + "Tag": { + "identifiers": [ + { + "name": "ResourceId", + "memberName": "ResourceId" + }, + { + "name": "Key", + "memberName": "Key" + }, + { + "name": "Value", + "memberName": "Value" + } + ], + "shape": "TagDescription", + "load": { + "request": { + "operation": "DescribeTags", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "key" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, + { "target": "Filters[1].Name", "source": "string", "value": "value" }, + { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } + ] + }, + "path": "Tags[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": 
"DeleteTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": "DeleteTags", + "params": [ + { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } + ] + } + } + } + }, + "Volume": { + "identifiers": [ + { + "name": "Id", + "memberName": "VolumeId" + } + ], + "shape": "Volume", + "load": { + "request": { + "operation": "DescribeVolumes", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Volumes[0]" + }, + "actions": { + "AttachToInstance": { + "request": { + "operation": "AttachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateSnapshot": { + "request": { + "operation": "CreateSnapshot", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + 
"request": { + "operation": "DescribeVolumeAttribute", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeStatus": { + "request": { + "operation": "DescribeVolumeStatus", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachFromInstance": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableIo": { + "request": { + "operation": "EnableVolumeIO", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVolumeAttribute", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "hasMany": { + "Snapshots": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + } + } + }, + "Vpc": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcId" + } + ], + "shape": "Vpc", + "load": { + "request": { + "operation": "DescribeVpcs", + "params": [ + { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Vpcs[0]" + }, + "actions": { + "AssociateDhcpOptions": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachClassicLinkInstance": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachInternetGateway": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": 
"VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateNetworkAcl": { + "request": { + "operation": "CreateNetworkAcl", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateRouteTable": { + "request": { + "operation": "CreateRouteTable", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + "CreateSecurityGroup": { + "request": { + "operation": "CreateSecurityGroup", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSubnet": { + "request": { + "operation": "CreateSubnet", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": 
"DescribeVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkInstance": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachInternetGateway": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DisableClassicLink": { + "request": { + "operation": "DisableVpcClassicLink", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableClassicLink": { + "request": { + "operation": "EnableVpcClassicLink", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "RequestVpcPeeringConnection": { + "request": { + "operation": "CreateVpcPeeringConnection", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + } + }, + "has": { + "DhcpOptions": { + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "data", "path": "DhcpOptionsId" } + ] + } + } + }, + "hasMany": { + "AcceptedVpcPeeringConnections": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": 
"VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "NetworkAcls": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "RequestedVpcPeeringConnections": { + "request": { + "operation": 
"DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "RouteTables": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Subnets": { + "request": { + "operation": "DescribeSubnets", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + } + } + }, + "VpcPeeringConnection": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcPeeringConnectionId" + } + ], + "shape": "VpcPeeringConnection", + "load": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": 
"VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "VpcPeeringConnections[0]" + }, + "actions": { + "Accept": { + "request": { + "operation": "AcceptVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Reject": { + "request": { + "operation": "RejectVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "AccepterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } + ] + } + }, + "RequesterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } + ] + } + } + } + } + } +} diff --git a/boto3/data/ec2/2015-10-01/resources-1.json b/boto3/data/ec2/2015-10-01/resources-1.json new file mode 100644 index 0000000..b30bb12 --- /dev/null +++ b/boto3/data/ec2/2015-10-01/resources-1.json @@ -0,0 +1,2491 @@ +{ + "service": { + "actions": { + "CreateDhcpOptions": { + "request": { "operation": "CreateDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "response", "path": "DhcpOptions.DhcpOptionsId" } + ], + "path": "DhcpOptions" + } + }, + "CreateInstances": { + "request": { "operation": "RunInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateInternetGateway": { + "request": { "operation": "CreateInternetGateway" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": 
"InternetGateway.InternetGatewayId" } + ], + "path": "InternetGateway" + } + }, + "CreateKeyPair": { + "request": { "operation": "CreateKeyPair" }, + "resource": { + "type": "KeyPair", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyName" } + ], + "path": "@" + } + }, + "CreateNetworkAcl": { + "request": { "operation": "CreateNetworkAcl" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateNetworkInterface": { + "request": { "operation": "CreateNetworkInterface" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + "CreatePlacementGroup": { + "request": { "operation": "CreatePlacementGroup" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "GroupName" } + ] + } + }, + "CreateRouteTable": { + "request": { "operation": "CreateRouteTable" }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + "CreateSecurityGroup": { + "request": { "operation": "CreateSecurityGroup" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSnapshot": { + "request": { "operation": "CreateSnapshot" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateSubnet": { + "request": { "operation": "CreateSubnet" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": 
{ "operation": "CreateTags" } + }, + "CreateVolume": { + "request": { "operation": "CreateVolume" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VolumeId" } + ], + "path": "@" + } + }, + "CreateVpc": { + "request": { "operation": "CreateVpc" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Vpc.VpcId" } + ], + "path": "Vpc" + } + }, + "CreateVpcPeeringConnection": { + "request": { "operation": "CreateVpcPeeringConnection" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + }, + "DisassociateRouteTable": { + "request": { "operation": "DisassociateRouteTable" } + }, + "ImportKeyPair": { + "request": { "operation": "ImportKeyPair" }, + "resource": { + "type": "KeyPairInfo", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyName" } + ] + } + }, + "RegisterImage": { + "request": { "operation": "RegisterImage" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + } + }, + "has": { + "DhcpOptions": { + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Instance": { + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "InternetGateway": { + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPairInfo", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "NetworkAcl": { + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { 
"target": "Id", "source": "input" } + ] + } + }, + "NetworkInterface": { + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "PlacementGroup": { + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "RouteTable": { + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "RouteTableAssociation": { + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "SecurityGroup": { + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Snapshot": { + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "VpcPeeringConnection": { + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + } + }, + "hasMany": { + "ClassicAddresses": { + "request": { + "operation": "DescribeAddresses", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "domain" }, + { "target": "Filters[0].Values[0]", "source": "string", "value": "standard" } + ] + }, + "resource": { + "type": "ClassicAddress", + "identifiers": [ + { "target": "PublicIp", "source": "response", "path": "Addresses[].PublicIp" } + ], + "path": "Addresses[]" + } + }, + "DhcpOptionsSets": { + "request": { "operation": "DescribeDhcpOptions" }, + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", 
"source": "response", "path": "DhcpOptions[].DhcpOptionsId" } + ], + "path": "DhcpOptions[]" + } + }, + "Images": { + "request": { "operation": "DescribeImages" }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Images[].ImageId" } + ], + "path": "Images[]" + } + }, + "Instances": { + "request": { "operation": "DescribeInstances" }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { "operation": "DescribeInternetGateways" }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "KeyPairs": { + "request": { "operation": "DescribeKeyPairs" }, + "resource": { + "type": "KeyPairInfo", + "identifiers": [ + { "target": "Name", "source": "response", "path": "KeyPairs[].KeyName" } + ], + "path": "KeyPairs[]" + } + }, + "NetworkAcls": { + "request": { "operation": "DescribeNetworkAcls" }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": { "operation": "DescribeNetworkInterfaces" }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "PlacementGroups": { + "request": { "operation": "DescribePlacementGroups" }, + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "response", "path": "PlacementGroups[].GroupName" } + ], + "path": "PlacementGroups[]" + } + }, + "RouteTables": { + "request": { "operation": "DescribeRouteTables" }, + 
"resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { "operation": "DescribeSecurityGroups" }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Snapshots": { + "request": { "operation": "DescribeSnapshots" }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + }, + "Subnets": { + "request": { "operation": "DescribeSubnets" }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + }, + "Volumes": { + "request": { "operation": "DescribeVolumes" }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + }, + "VpcAddresses": { + "request": { + "operation": "DescribeAddresses", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "domain" }, + { "target": "Filters[0].Values[0]", "source": "string", "value": "vpc" } + ] + }, + "resource": { + "type": "VpcAddress", + "identifiers": [ + { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } + ], + "path": "Addresses[]" + } + }, + "VpcPeeringConnections": { + "request": { "operation": "DescribeVpcPeeringConnections" }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Vpcs": { + "request": { "operation": "DescribeVpcs" }, + "resource": { + "type": "Vpc", + "identifiers": [ + { 
"target": "Id", "source": "response", "path": "Vpcs[].VpcId" } + ], + "path": "Vpcs[]" + } + } + } + }, + "resources": { + "ClassicAddress": { + "identifiers": [ + { + "name": "PublicIp" + } + ], + "shape": "Address", + "load": { + "request": { + "operation": "DescribeAddresses", + "params": [ + { "target": "PublicIp", "source": "identifier", "name": "PublicIp" } + ] + }, + "path": "Addresses[0]" + }, + "actions": { + "Associate": { + "request": { + "operation": "AssociateAddress", + "params": [ + { "target": "PublicIp", "source": "identifier", "name": "PublicIp" } + ] + } + }, + "Disassociate": { + "request": { + "operation": "DisassociateAddress", + "params": [ + { "target": "PublicIp", "source": "data", "path": "PublicIp" } + ] + } + }, + "Release": { + "request": { + "operation": "ReleaseAddress", + "params": [ + { "target": "PublicIp", "source": "data", "path": "PublicIp" } + ] + } + } + } + }, + "DhcpOptions": { + "identifiers": [ + { + "name": "Id", + "memberName": "DhcpOptionsId" + } + ], + "shape": "DhcpOptions", + "load": { + "request": { + "operation": "DescribeDhcpOptions", + "params": [ + { "target": "DhcpOptionsIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "DhcpOptions[0]" + }, + "actions": { + "AssociateWithVpc": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteDhcpOptions", + "params": [ + { "target": "DhcpOptionsId", "source": "identifier", 
"name": "Id" } + ] + } + } + } + }, + "Image": { + "identifiers": [ + { + "name": "Id", + "memberName": "ImageId" + } + ], + "shape": "Image", + "load": { + "request": { + "operation": "DescribeImages", + "params": [ + { "target": "ImageIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Images[0]" + }, + "actions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Deregister": { + "request": { + "operation": "DeregisterImage", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetImageAttribute", + "params": [ + { "target": "ImageId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Instance": { + "identifiers": [ + { + "name": "Id", + "memberName": "InstanceId" + } + ], + "shape": "Instance", + "load": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Reservations[0].Instances[0]" + }, + "actions": { + "AttachClassicLinkVpc": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachVolume": { + "request": { + 
"operation": "AttachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ConsoleOutput": { + "request": { + "operation": "GetConsoleOutput", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateImage": { + "request": { + "operation": "CreateImage", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ImageId" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkVpc": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachVolume": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "PasswordData": { + "request": { + "operation": "GetPasswordData", + "params": [ + { "target": "InstanceId", "source": "identifier", 
"name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "ReportStatus": { + "request": { + "operation": "ReportInstanceStatus", + "params": [ + { "target": "Instances[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetKernel": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "kernel" } + ] + } + }, + "ResetRamdisk": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "ramdisk" } + ] + } + }, + "ResetSourceDestCheck": { + "request": { + "operation": "ResetInstanceAttribute", + "params": [ + { "target": "InstanceId", "source": "identifier", "name": "Id" }, + { "target": "Attribute", "source": "string", "value": "sourceDestCheck" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + "operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[0]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "batchActions": { + "CreateTags": { + "request": { + "operation": "CreateTags", + 
"params": [ + { "target": "Resources[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Monitor": { + "request": { + "operation": "MonitorInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Reboot": { + "request": { + "operation": "RebootInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Start": { + "request": { + "operation": "StartInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Stop": { + "request": { + "operation": "StopInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Terminate": { + "request": { + "operation": "TerminateInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + }, + "Unmonitor": { + "request": { + "operation": "UnmonitorInstances", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Exists": { + "waiterName": "InstanceExists", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Running": { + "waiterName": "InstanceRunning", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Stopped": { + "waiterName": "InstanceStopped", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + }, + "Terminated": { + "waiterName": "InstanceTerminated", + "params": [ + { "target": "InstanceIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Reservations[0].Instances[0]" + } + }, + "has": { + "ClassicAddress": { + "resource": { + "type": "ClassicAddress", + "identifiers": [ + { "target": "PublicIp", "source": "data", "path": "PublicIpAddress" } + ] 
+ } + }, + "Image": { + "resource": { + "type": "Image", + "identifiers": [ + { "target": "Id", "source": "data", "path": "ImageId" } + ] + } + }, + "KeyPair": { + "resource": { + "type": "KeyPairInfo", + "identifiers": [ + { "target": "Name", "source": "data", "path": "KeyName" } + ] + } + }, + "NetworkInterfaces": { + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "data", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "PlacementGroup": { + "resource": { + "type": "PlacementGroup", + "identifiers": [ + { "target": "Name", "source": "data", "path": "Placement.GroupName" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Volumes": { + "request": { + "operation": "DescribeVolumes", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.instance-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Volumes[].VolumeId" } + ], + "path": "Volumes[]" + } + }, + "VpcAddresses": { + "request": { + "operation": "DescribeAddresses", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "instance-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcAddress", + "identifiers": [ + { "target": "AllocationId", "source": "response", "path": "Addresses[].AllocationId" } + ], + "path": "Addresses[]" + } + } + } + }, + "InternetGateway": { + "identifiers": [ + { + "name": "Id", + "memberName": "InternetGatewayId" + } + ], + "shape": "InternetGateway", + "load": { + 
"request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "InternetGatewayIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "InternetGateways[0]" + }, + "actions": { + "AttachToVpc": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachFromVpc": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "InternetGatewayId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "KeyPair": { + "identifiers": [ + { + "name": "Name", + "memberName": "KeyName" + } + ], + "shape": "KeyPair", + "actions": { + "Delete": { + "request": { + "operation": "DeleteKeyPair", + "params": [ + { "target": "KeyName", "source": "identifier", "name": "Name" } + ] + } + } + } + }, + "KeyPairInfo": { + "identifiers": [ + { + "name": "Name", + "memberName": "KeyName" + } + ], + "shape": "KeyPairInfo", + "load": { + "request": { + "operation": "DescribeKeyPairs", + "params": [ + { "target": "KeyNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "KeyPairs[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteKeyPair", + "params": [ + { "target": "KeyName", "source": "identifier", "name": "Name" } + ] + } + } + } + }, + "NetworkAcl": { + "identifiers": [ + { + 
"name": "Id", + "memberName": "NetworkAclId" + } + ], + "shape": "NetworkAcl", + "load": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "NetworkAclIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "NetworkAcls[0]" + }, + "actions": { + "CreateEntry": { + "request": { + "operation": "CreateNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkAcl", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "DeleteEntry": { + "request": { + "operation": "DeleteNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceAssociation": { + "request": { + "operation": "ReplaceNetworkAclAssociation", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceEntry": { + "request": { + "operation": "ReplaceNetworkAclEntry", + "params": [ + { "target": "NetworkAclId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "NetworkInterface": { + "identifiers": [ + { + "name": "Id", + "memberName": "NetworkInterfaceId" + } + ], + "shape": "NetworkInterface", + "load": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": 
"NetworkInterfaceIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "NetworkInterfaces[0]" + }, + "actions": { + "AssignPrivateIpAddresses": { + "request": { + "operation": "AssignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Attach": { + "request": { + "operation": "AttachNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteNetworkInterface", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "Detach": { + "request": { + "operation": "DetachNetworkInterface", + "params": [ + { "target": "AttachmentId", "source": "data", "path": "Attachment.AttachmentId" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetNetworkInterfaceAttribute", + "params": [ + { "target": "NetworkInterfaceId", "source": "identifier", "name": "Id" } + ] + } + }, + "UnassignPrivateIpAddresses": { + "request": { + "operation": "UnassignPrivateIpAddresses", + "params": [ + { "target": "NetworkInterfaceId", 
"source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Association": { + "resource": { + "type": "NetworkInterfaceAssociation", + "identifiers": [ + { "target": "Id", "source": "data", "path": "Association.AssociationId" } + ], + "path": "Association" + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + }, + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + } + }, + "NetworkInterfaceAssociation": { + "identifiers": [ + { + "name": "Id" + } + ], + "shape": "InstanceNetworkInterfaceAssociation", + "actions": { + "Delete": { + "request": { + "operation": "DisassociateAddress", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Address": { + "resource": { + "type": "VpcAddress", + "identifiers": [ + { "target": "AllocationId", "source": "data", "path": "AllocationId" } + ] + } + } + } + }, + "PlacementGroup": { + "identifiers": [ + { + "name": "Name", + "memberName": "GroupName" + } + ], + "shape": "PlacementGroup", + "load": { + "request": { + "operation": "DescribePlacementGroups", + "params": [ + { "target": "GroupNames[0]", "source": "identifier", "name": "Name" } + ] + }, + "path": "PlacementGroups[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeletePlacementGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "placement-group-name" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": 
"Reservations[].Instances[]" + } + } + } + }, + "RouteTable": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableId" + } + ], + "shape": "RouteTable", + "load": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "RouteTables[0]" + }, + "actions": { + "AssociateWithSubnet": { + "request": { + "operation": "AssociateRouteTable", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "AssociationId" } + ] + } + }, + "CreateRoute": { + "request": { + "operation": "CreateRoute", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteRouteTable", + "params": [ + { "target": "RouteTableId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Associations": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "RouteTableIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[0].Associations[].RouteTableAssociationId" } + ], + "path": 
"RouteTables[0].Associations[]" + } + } + } + }, + "RouteTableAssociation": { + "identifiers": [ + { + "name": "Id", + "memberName": "RouteTableAssociationId" + } + ], + "shape": "RouteTableAssociation", + "actions": { + "Delete": { + "request": { + "operation": "DisassociateRouteTable", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + } + }, + "ReplaceSubnet": { + "request": { + "operation": "ReplaceRouteTableAssociation", + "params": [ + { "target": "AssociationId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTableAssociation", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NewAssociationId" } + ] + } + } + }, + "has": { + "RouteTable": { + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RouteTableId" } + ] + } + }, + "Subnet": { + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "data", "path": "SubnetId" } + ] + } + } + } + }, + "SecurityGroup": { + "identifiers": [ + { + "name": "Id", + "memberName": "GroupId" + } + ], + "shape": "SecurityGroup", + "load": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "GroupIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "SecurityGroups[0]" + }, + "actions": { + "AuthorizeEgress": { + "request": { + "operation": "AuthorizeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "AuthorizeIngress": { + "request": { + "operation": "AuthorizeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { 
"target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSecurityGroup", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeEgress": { + "request": { + "operation": "RevokeSecurityGroupEgress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + }, + "RevokeIngress": { + "request": { + "operation": "RevokeSecurityGroupIngress", + "params": [ + { "target": "GroupId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "Snapshot": { + "identifiers": [ + { + "name": "Id", + "memberName": "SnapshotId" + } + ], + "shape": "Snapshot", + "load": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "SnapshotIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Snapshots[0]" + }, + "actions": { + "Copy": { + "request": { + "operation": "CopySnapshot", + "params": [ + { "target": "SourceSnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSnapshot", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": "DescribeSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifySnapshotAttribute", 
+ "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + }, + "ResetAttribute": { + "request": { + "operation": "ResetSnapshotAttribute", + "params": [ + { "target": "SnapshotId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Completed": { + "waiterName": "SnapshotCompleted", + "params": [ + { "target": "SnapshotIds[]", "source": "identifier", "name": "Id" } + ], + "path": "Snapshots[]" + } + }, + "has": { + "Volume": { + "resource": { + "type": "Volume", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VolumeId" } + ] + } + } + } + }, + "Subnet": { + "identifiers": [ + { + "name": "Id", + "memberName": "SubnetId" + } + ], + "shape": "Subnet", + "load": { + "request": { + "operation": "DescribeSubnets", + "params": [ + { "target": "SubnetIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Subnets[0]" + }, + "actions": { + "CreateInstances": { + "request": { + "operation": "RunInstances", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Instances[].InstanceId" } + ], + "path": "Instances[]" + } + }, + "CreateNetworkInterface": { + "request": { + "operation": "CreateNetworkInterface", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterface.NetworkInterfaceId" } + ], + "path": "NetworkInterface" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", 
"source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSubnet", + "params": [ + { "target": "SubnetId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "VpcId" } + ] + } + } + }, + "hasMany": { + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "NetworkInterfaces": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "subnet-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + } + } + }, + "Tag": { + "identifiers": [ + { + "name": "ResourceId", + "memberName": "ResourceId" + }, + { + "name": "Key", + "memberName": "Key" + }, + { + "name": "Value", + "memberName": "Value" + } + ], + "shape": "TagDescription", + "load": { + "request": { + "operation": "DescribeTags", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "key" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Key" }, + { "target": "Filters[1].Name", "source": "string", "value": "value" }, + { "target": "Filters[1].Values[0]", "source": "identifier", "name": "Value" } + ] + }, + "path": "Tags[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": 
"DeleteTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[0].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[0].Value", "source": "identifier", "name": "Value" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": "DeleteTags", + "params": [ + { "target": "Resources[]", "source": "identifier", "name": "ResourceId" }, + { "target": "Tags[*].Key", "source": "identifier", "name": "Key" }, + { "target": "Tags[*].Value", "source": "identifier", "name": "Value" } + ] + } + } + } + }, + "Volume": { + "identifiers": [ + { + "name": "Id", + "memberName": "VolumeId" + } + ], + "shape": "Volume", + "load": { + "request": { + "operation": "DescribeVolumes", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Volumes[0]" + }, + "actions": { + "AttachToInstance": { + "request": { + "operation": "AttachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateSnapshot": { + "request": { + "operation": "CreateSnapshot", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SnapshotId" } + ], + "path": "@" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + 
"request": { + "operation": "DescribeVolumeAttribute", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeStatus": { + "request": { + "operation": "DescribeVolumeStatus", + "params": [ + { "target": "VolumeIds[0]", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachFromInstance": { + "request": { + "operation": "DetachVolume", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableIo": { + "request": { + "operation": "EnableVolumeIO", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVolumeAttribute", + "params": [ + { "target": "VolumeId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "hasMany": { + "Snapshots": { + "request": { + "operation": "DescribeSnapshots", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "volume-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Snapshot", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Snapshots[].SnapshotId" } + ], + "path": "Snapshots[]" + } + } + } + }, + "Vpc": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcId" + } + ], + "shape": "Vpc", + "load": { + "request": { + "operation": "DescribeVpcs", + "params": [ + { "target": "VpcIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Vpcs[0]" + }, + "actions": { + "AssociateDhcpOptions": { + "request": { + "operation": "AssociateDhcpOptions", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachClassicLinkInstance": { + "request": { + "operation": "AttachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "AttachInternetGateway": { + "request": { + "operation": "AttachInternetGateway", + "params": [ + { "target": 
"VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "CreateNetworkAcl": { + "request": { + "operation": "CreateNetworkAcl", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcl.NetworkAclId" } + ], + "path": "NetworkAcl" + } + }, + "CreateRouteTable": { + "request": { + "operation": "CreateRouteTable", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTable.RouteTableId" } + ], + "path": "RouteTable" + } + }, + "CreateSecurityGroup": { + "request": { + "operation": "CreateSecurityGroup", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "GroupId" } + ] + } + }, + "CreateSubnet": { + "request": { + "operation": "CreateSubnet", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnet.SubnetId" } + ], + "path": "Subnet" + } + }, + "CreateTags": { + "request": { + "operation": "CreateTags", + "params": [ + { "target": "Resources[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Tag", + "identifiers": [ + { "target": "ResourceId", "source": "identifier", "name": "Id" }, + { "target": "Key", "source": "requestParameter", "path": "Tags[].Key" }, + { "target": "Value", "source": "requestParameter", "path": "Tags[].Value" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DescribeAttribute": { + "request": { + "operation": 
"DescribeVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachClassicLinkInstance": { + "request": { + "operation": "DetachClassicLinkVpc", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DetachInternetGateway": { + "request": { + "operation": "DetachInternetGateway", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "DisableClassicLink": { + "request": { + "operation": "DisableVpcClassicLink", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "EnableClassicLink": { + "request": { + "operation": "EnableVpcClassicLink", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "ModifyAttribute": { + "request": { + "operation": "ModifyVpcAttribute", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + } + }, + "RequestVpcPeeringConnection": { + "request": { + "operation": "CreateVpcPeeringConnection", + "params": [ + { "target": "VpcId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnection.VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnection" + } + } + }, + "has": { + "DhcpOptions": { + "resource": { + "type": "DhcpOptions", + "identifiers": [ + { "target": "Id", "source": "data", "path": "DhcpOptionsId" } + ] + } + } + }, + "hasMany": { + "AcceptedVpcPeeringConnections": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "accepter-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": 
"VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "Instances": { + "request": { + "operation": "DescribeInstances", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Instance", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Reservations[].Instances[].InstanceId" } + ], + "path": "Reservations[].Instances[]" + } + }, + "InternetGateways": { + "request": { + "operation": "DescribeInternetGateways", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "attachment.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "InternetGateway", + "identifiers": [ + { "target": "Id", "source": "response", "path": "InternetGateways[].InternetGatewayId" } + ], + "path": "InternetGateways[]" + } + }, + "NetworkAcls": { + "request": { + "operation": "DescribeNetworkAcls", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkAcl", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkAcls[].NetworkAclId" } + ], + "path": "NetworkAcls[]" + } + }, + "NetworkInterfaces": { + "request": { + "operation": "DescribeNetworkInterfaces", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "NetworkInterface", + "identifiers": [ + { "target": "Id", "source": "response", "path": "NetworkInterfaces[].NetworkInterfaceId" } + ], + "path": "NetworkInterfaces[]" + } + }, + "RequestedVpcPeeringConnections": { + "request": { + "operation": 
"DescribeVpcPeeringConnections", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "requester-vpc-info.vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "VpcPeeringConnection", + "identifiers": [ + { "target": "Id", "source": "response", "path": "VpcPeeringConnections[].VpcPeeringConnectionId" } + ], + "path": "VpcPeeringConnections[]" + } + }, + "RouteTables": { + "request": { + "operation": "DescribeRouteTables", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "RouteTable", + "identifiers": [ + { "target": "Id", "source": "response", "path": "RouteTables[].RouteTableId" } + ], + "path": "RouteTables[]" + } + }, + "SecurityGroups": { + "request": { + "operation": "DescribeSecurityGroups", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "SecurityGroup", + "identifiers": [ + { "target": "Id", "source": "response", "path": "SecurityGroups[].GroupId" } + ], + "path": "SecurityGroups[]" + } + }, + "Subnets": { + "request": { + "operation": "DescribeSubnets", + "params": [ + { "target": "Filters[0].Name", "source": "string", "value": "vpc-id" }, + { "target": "Filters[0].Values[0]", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Subnet", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Subnets[].SubnetId" } + ], + "path": "Subnets[]" + } + } + } + }, + "VpcPeeringConnection": { + "identifiers": [ + { + "name": "Id", + "memberName": "VpcPeeringConnectionId" + } + ], + "shape": "VpcPeeringConnection", + "load": { + "request": { + "operation": "DescribeVpcPeeringConnections", + "params": [ + { "target": 
"VpcPeeringConnectionIds[0]", "source": "identifier", "name": "Id" } + ] + }, + "path": "VpcPeeringConnections[0]" + }, + "actions": { + "Accept": { + "request": { + "operation": "AcceptVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Reject": { + "request": { + "operation": "RejectVpcPeeringConnection", + "params": [ + { "target": "VpcPeeringConnectionId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "AccepterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "AccepterVpcInfo.VpcId" } + ] + } + }, + "RequesterVpc": { + "resource": { + "type": "Vpc", + "identifiers": [ + { "target": "Id", "source": "data", "path": "RequesterVpcInfo.VpcId" } + ] + } + } + } + }, + "VpcAddress": { + "identifiers": [ + { + "name": "AllocationId" + } + ], + "shape": "Address", + "load": { + "request": { + "operation": "DescribeAddresses", + "params": [ + { "target": "AllocationIds[0]", "source": "identifier", "name": "AllocationId" } + ] + }, + "path": "Addresses[0]" + }, + "actions": { + "Associate": { + "request": { + "operation": "AssociateAddress", + "params": [ + { "target": "AllocationId", "source": "identifier", "name": "AllocationId" } + ] + } + }, + "Release": { + "request": { + "operation": "ReleaseAddress", + "params": [ + { "target": "AllocationId", "source": "data", "path": "AllocationId" } + ] + } + } + }, + "has": { + "Association": { + "resource": { + "type": "NetworkInterfaceAssociation", + "identifiers": [ + { "target": "Id", "source": "data", "path": "AssociationId" } + ] + } + } + } + } + } +} diff --git a/boto3/data/glacier/2012-06-01/resources-1.json b/boto3/data/glacier/2012-06-01/resources-1.json new file mode 100644 index 
0000000..06267cf --- /dev/null +++ b/boto3/data/glacier/2012-06-01/resources-1.json @@ -0,0 +1,563 @@ +{ + "service": { + "actions": { + "CreateVault": { + "request": { + "operation": "CreateVault", + "params": [ + { "target": "accountId", "source": "string", "value": "-" } + ] + }, + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "requestParameter", "path": "accountId" }, + { "target": "Name", "source": "requestParameter", "path": "vaultName" } + ] + } + } + }, + "has": { + "Account": { + "resource": { + "type": "Account", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + } + }, + "hasMany": { + "Vaults": { + "request": { + "operation": "ListVaults", + "params": [ + { "target": "accountId", "source": "string", "value": "-" } + ] + }, + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "requestParameter", "path": "accountId" }, + { "target": "Name", "source": "response", "path": "VaultList[].VaultName" } + ], + "path": "VaultList[]" + } + } + } + }, + "resources": { + "Account": { + "identifiers": [ + { "name": "Id" } + ], + "actions": { + "CreateVault": { + "request": { + "operation": "CreateVault", + "params": [ + { "target": "accountId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "Id" }, + { "target": "Name", "source": "requestParameter", "path": "vaultName" } + ] + } + } + }, + "has": { + "Vault": { + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "Id" }, + { "target": "Name", "source": "input" } + ] + } + } + }, + "hasMany": { + "Vaults": { + "request": { + "operation": "ListVaults", + "params": [ + { "target": "accountId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": 
"Id" }, + { "target": "Name", "source": "response", "path": "VaultList[].VaultName" } + ], + "path": "VaultList[]" + } + } + } + }, + "Archive": { + "identifiers": [ + { "name": "AccountId" }, + { "name": "VaultName" }, + { "name": "Id" } + ], + "actions": { + "Delete": { + "request": { + "operation": "DeleteArchive", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "archiveId", "source": "identifier", "name": "Id" } + ] + } + }, + "InitiateArchiveRetrieval": { + "request": { + "operation": "InitiateJob", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "jobParameters.Type", "source": "string", "value": "archive-retrieval" }, + { "target": "jobParameters.ArchiveId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Job", + "identifiers": [ + { "target": "Id", "source": "response", "path": "jobId" }, + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "VaultName" } + ] + } + } + }, + "has": { + "Vault": { + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "Name", "source": "identifier", "name": "VaultName" } + ] + } + } + } + }, + "Job": { + "identifiers": [ + { "name": "AccountId" }, + { "name": "VaultName" }, + { + "name": "Id", + "memberName": "JobId" + } + ], + "shape": "GlacierJobDescription", + "load": { + "request": { + "operation": "DescribeJob", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "jobId", "source": "identifier", "name": "Id" } + ] + }, + "path": "@" + }, + "actions": { + 
"GetOutput": { + "request": { + "operation": "GetJobOutput", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "jobId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Vault": { + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "Name", "source": "identifier", "name": "VaultName" } + ] + } + } + } + }, + "MultipartUpload": { + "identifiers": [ + { "name": "AccountId" }, + { "name": "VaultName" }, + { + "name": "Id", + "memberName": "MultipartUploadId" + } + ], + "shape": "UploadListElement", + "actions": { + "Abort": { + "request": { + "operation": "AbortMultipartUpload", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "uploadId", "source": "identifier", "name": "Id" } + ] + } + }, + "Complete": { + "request": { + "operation": "CompleteMultipartUpload", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "uploadId", "source": "identifier", "name": "Id" } + ] + } + }, + "Parts": { + "request": { + "operation": "ListParts", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "uploadId", "source": "identifier", "name": "Id" } + ] + } + }, + "UploadPart": { + "request": { + "operation": "UploadMultipartPart", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" }, + { "target": "uploadId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + 
"Vault": { + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "Name", "source": "identifier", "name": "VaultName" } + ] + } + } + } + }, + "Notification": { + "identifiers": [ + { "name": "AccountId" }, + { "name": "VaultName" } + ], + "shape": "VaultNotificationConfig", + "load": { + "request": { + "operation": "GetVaultNotifications", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" } + ] + }, + "path": "vaultNotificationConfig" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteVaultNotifications", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" } + ] + } + }, + "Set": { + "request": { + "operation": "SetVaultNotifications", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "VaultName" } + ] + } + } + }, + "has": { + "Vault": { + "resource": { + "type": "Vault", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "Name", "source": "identifier", "name": "VaultName" } + ] + } + } + } + }, + "Vault": { + "identifiers": [ + { "name": "AccountId" }, + { + "name": "Name", + "memberName": "VaultName" + } + ], + "shape": "DescribeVaultOutput", + "load": { + "request": { + "operation": "DescribeVault", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "accountId", "source": "identifier", "name": "AccountId" } + ] + }, + "path": "@" + }, + "actions": { + "Create": { + "request": { + "operation": "CreateVault", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "accountId", "source": "identifier", "name": 
"AccountId" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteVault", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "accountId", "source": "identifier", "name": "AccountId" } + ] + } + }, + "InitiateInventoryRetrieval": { + "request": { + "operation": "InitiateJob", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "jobParameters.Type", "source": "string", "value": "inventory-retrieval" } + ] + }, + "resource": { + "type": "Job", + "identifiers": [ + { "target": "Id", "source": "response", "path": "jobId" }, + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" } + ] + } + }, + "InitiateMultipartUpload": { + "request": { + "operation": "InitiateMultipartUpload", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "accountId", "source": "identifier", "name": "AccountId" } + ] + }, + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "Id", "source": "response", "path": "uploadId" }, + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" } + ] + } + }, + "UploadArchive": { + "request": { + "operation": "UploadArchive", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "accountId", "source": "identifier", "name": "AccountId" } + ] + }, + "resource": { + "type": "Archive", + "identifiers": [ + { "target": "Id", "source": "response", "path": "archiveId" }, + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "has": { + "Account": { + "resource": { + "type": "Account", + "identifiers": [ + { 
"target": "Id", "source": "identifier", "name": "AccountId" } + ] + } + }, + "Archive": { + "resource": { + "type": "Archive", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "input" } + ] + } + }, + "Job": { + "resource": { + "type": "Job", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "input" } + ] + } + }, + "MultipartUpload": { + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "input" } + ] + } + }, + "Notification": { + "resource": { + "type": "Notification", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "hasMany": { + "CompletedJobs": { + "request": { + "operation": "ListJobs", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "completed", "source": "string", "value": "true" } + ] + }, + "resource": { + "type": "Job", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "JobList[].JobId" } + ], + "path": "JobList[]" + } + }, + "FailedJobs": { + "request": { + "operation": "ListJobs", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "statuscode", "source": "string", 
"value": "Failed" } + ] + }, + "resource": { + "type": "Job", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "JobList[].JobId" } + ], + "path": "JobList[]" + } + }, + "Jobs": { + "request": { + "operation": "ListJobs", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Job", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "JobList[].JobId" } + ], + "path": "JobList[]" + } + }, + "JobsInProgress": { + "request": { + "operation": "ListJobs", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "statuscode", "source": "string", "value": "InProgress" } + ] + }, + "resource": { + "type": "Job", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "JobList[].JobId" } + ], + "path": "JobList[]" + } + }, + "MultipartUplaods": { + "request": { + "operation": "ListMultipartUploads", + "params": [ + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "accountId", "source": "identifier", "name": "AccountId" } + ] + }, + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "UploadsList[].MultipartUploadId" } + ], + 
"path": "UploadsList[]" + } + }, + "SucceededJobs": { + "request": { + "operation": "ListJobs", + "params": [ + { "target": "accountId", "source": "identifier", "name": "AccountId" }, + { "target": "vaultName", "source": "identifier", "name": "Name" }, + { "target": "statuscode", "source": "string", "value": "Succeeded" } + ] + }, + "resource": { + "type": "Job", + "identifiers": [ + { "target": "AccountId", "source": "identifier", "name": "AccountId" }, + { "target": "VaultName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "JobList[].JobId" } + ], + "path": "JobList[]" + } + } + } + } + } +} diff --git a/boto3/data/iam/2010-05-08/resources-1.json b/boto3/data/iam/2010-05-08/resources-1.json new file mode 100644 index 0000000..59d1855 --- /dev/null +++ b/boto3/data/iam/2010-05-08/resources-1.json @@ -0,0 +1,1721 @@ +{ + "service": { + "actions": { + "ChangePassword": { + "request": { "operation": "ChangePassword" } + }, + "CreateAccountAlias": { + "request": { "operation": "CreateAccountAlias" } + }, + "CreateAccountPasswordPolicy": { + "request": { "operation": "UpdateAccountPasswordPolicy" }, + "resource": { + "type": "AccountPasswordPolicy", + "identifiers": [ ] + } + }, + "CreateGroup": { + "request": { "operation": "CreateGroup" }, + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "GroupName" } + ], + "path": "Group" + } + }, + "CreateInstanceProfile": { + "request": { "operation": "CreateInstanceProfile" }, + "resource": { + "type": "InstanceProfile", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "InstanceProfileName" } + ], + "path": "InstanceProfile" + } + }, + "CreatePolicy": { + "request": { "operation": "CreatePolicy" }, + "resource": { + "type": "Policy", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "Policy.Arn" } + ] + } + }, + "CreateRole": { + "request": { "operation": "CreateRole" 
}, + "resource": { + "type": "Role", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "RoleName" } + ], + "path": "Role" + } + }, + "CreateSamlProvider": { + "request": { "operation": "CreateSAMLProvider" }, + "resource": { + "type": "SamlProvider", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "SAMLProviderArn" } + ] + } + }, + "CreateServerCertificate": { + "request": { "operation": "UploadServerCertificate" }, + "resource": { + "type": "ServerCertificate", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "ServerCertificateName" } + ] + } + }, + "CreateSigningCertificate": { + "request": { "operation": "UploadSigningCertificate" }, + "resource": { + "type": "SigningCertificate", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Certificate.CertificateId" } + ], + "path": "Certificate" + } + }, + "CreateUser": { + "request": { "operation": "CreateUser" }, + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "UserName" } + ], + "path": "User" + } + }, + "CreateVirtualMfaDevice": { + "request": { "operation": "CreateVirtualMFADevice" }, + "resource": { + "type": "VirtualMfaDevice", + "identifiers": [ + { "target": "SerialNumber", "source": "response", "path": "VirtualMFADevice.SerialNumber" } + ], + "path": "VirtualMFADevice" + } + } + }, + "has": { + "AccountPasswordPolicy": { + "resource": { + "type": "AccountPasswordPolicy", + "identifiers": [ ] + } + }, + "AccountSummary": { + "resource": { + "type": "AccountSummary", + "identifiers": [ ] + } + }, + "CurrentUser": { + "resource": { + "type": "CurrentUser", + "identifiers": [ ] + } + }, + "Group": { + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "InstanceProfile": { + "resource": { + "type": "InstanceProfile", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + 
"Policy": { + "resource": { + "type": "Policy", + "identifiers": [ + { "target": "PolicyArn", "source": "input" } + ] + } + }, + "Role": { + "resource": { + "type": "Role", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "SamlProvider": { + "resource": { + "type": "SamlProvider", + "identifiers": [ + { "target": "Arn", "source": "input" } + ] + } + }, + "ServerCertificate": { + "resource": { + "type": "ServerCertificate", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "User": { + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + }, + "VirtualMfaDevice": { + "resource": { + "type": "VirtualMfaDevice", + "identifiers": [ + { "target": "SerialNumber", "source": "input" } + ] + } + } + }, + "hasMany": { + "Groups": { + "request": { "operation": "ListGroups" }, + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "response", "path": "Groups[].GroupName" } + ], + "path": "Groups[]" + } + }, + "InstanceProfiles": { + "request": { "operation": "ListInstanceProfiles" }, + "resource": { + "type": "InstanceProfile", + "identifiers": [ + { "target": "Name", "source": "response", "path": "InstanceProfiles[].InstanceProfileName" } + ], + "path": "InstanceProfiles[]" + } + }, + "Policies": { + "request": { "operation": "ListPolicies" }, + "resource": { + "type": "Policy", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "Policies[].Arn" } + ], + "path": "Policies[]" + } + }, + "Roles": { + "request": { "operation": "ListRoles" }, + "resource": { + "type": "Role", + "identifiers": [ + { "target": "Name", "source": "response", "path": "Roles[].RoleName" } + ], + "path": "Roles[]" + } + }, + "SamlProviders": { + "request": { "operation": "ListSAMLProviders" }, + "resource": { + "type": "SamlProvider", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "SAMLProviderList[].Arn" } + ] + } + }, + 
"ServerCertificates": { + "request": { "operation": "ListServerCertificates" }, + "resource": { + "type": "ServerCertificate", + "identifiers": [ + { "target": "Name", "source": "response", "path": "ServerCertificateMetadataList[].ServerCertificateName" } + ] + } + }, + "Users": { + "request": { "operation": "ListUsers" }, + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "response", "path": "Users[].UserName" } + ], + "path": "Users[]" + } + }, + "VirtualMfaDevices": { + "request": { "operation": "ListVirtualMFADevices" }, + "resource": { + "type": "VirtualMfaDevice", + "identifiers": [ + { "target": "SerialNumber", "source": "response", "path": "VirtualMFADevices[].SerialNumber" } + ], + "path": "VirtualMFADevices[]" + } + } + } + }, + "resources": { + "AccessKey": { + "identifiers": [ + { + "name": "UserName", + "memberName": "UserName" + }, + { + "name": "Id", + "memberName": "AccessKeyId" + } + ], + "shape": "AccessKeyMetadata", + "actions": { + "Activate": { + "request": { + "operation": "UpdateAccessKey", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "AccessKeyId", "source": "identifier", "name": "Id" }, + { "target": "Status", "source": "string", "value": "Active" } + ] + } + }, + "Deactivate": { + "request": { + "operation": "UpdateAccessKey", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "AccessKeyId", "source": "identifier", "name": "Id" }, + { "target": "Status", "source": "string", "value": "Inactive" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteAccessKey", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "AccessKeyId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "User": { + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "UserName" } + ] + } + } + } + }, + 
"AccessKeyPair": { + "identifiers": [ + { + "name": "UserName", + "memberName": "UserName" + }, + { + "name": "Id", + "memberName": "AccessKeyId" + }, + { + "name": "Secret", + "memberName": "SecretAccessKey" + } + ], + "shape": "AccessKey", + "actions": { + "Activate": { + "request": { + "operation": "UpdateAccessKey", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "AccessKeyId", "source": "identifier", "name": "Id" }, + { "target": "Status", "source": "string", "value": "Active" } + ] + } + }, + "Deactivate": { + "request": { + "operation": "UpdateAccessKey", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "AccessKeyId", "source": "identifier", "name": "Id" }, + { "target": "Status", "source": "string", "value": "Inactive" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteAccessKey", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "AccessKeyId", "source": "identifier", "name": "Id" } + ] + } + } + } + }, + "AccountPasswordPolicy": { + "identifiers": [ ], + "shape": "PasswordPolicy", + "load": { + "request": { "operation": "GetAccountPasswordPolicy" }, + "path": "PasswordPolicy" + }, + "actions": { + "Delete": { + "request": { "operation": "DeleteAccountPasswordPolicy" } + }, + "Update": { + "request": { "operation": "UpdateAccountPasswordPolicy" } + } + } + }, + "AccountSummary": { + "identifiers": [ ], + "shape": "GetAccountSummaryResponse", + "load": { + "request": { "operation": "GetAccountSummary" }, + "path": "@" + } + }, + "AssumeRolePolicy": { + "identifiers": [ + { "name": "RoleName" } + ], + "actions": { + "Update": { + "request": { + "operation": "UpdateAssumeRolePolicy", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "RoleName" } + ] + } + } + }, + "has": { + "Role": { + "resource": { + "type": "Role", + "identifiers": [ + { "target": "Name", "source": 
"identifier", "name": "RoleName" } + ] + } + } + } + }, + "CurrentUser": { + "identifiers": [ ], + "shape": "User", + "load": { + "request": { "operation": "GetUser" }, + "path": "User" + }, + "has": { + "User": { + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "data", "path": "UserName" } + ] + } + } + }, + "hasMany": { + "AccessKeys": { + "request": { "operation": "ListAccessKeys" }, + "resource": { + "type": "AccessKey", + "identifiers": [ + { "target": "UserName", "source": "response", "path": "AccessKeyMetadata[].UserName" }, + { "target": "Id", "source": "response", "path": "AccessKeyMetadata[].AccessKeyId" } + ], + "path": "AccessKeyMetadata[]" + } + }, + "MfaDevices": { + "request": { "operation": "ListMFADevices" }, + "resource": { + "type": "MfaDevice", + "identifiers": [ + { "target": "UserName", "source": "response", "path": "MFADevices[].UserName" }, + { "target": "SerialNumber", "source": "response", "path": "MFADevices[].SerialNumber" } + ], + "path": "MFADevices[]" + } + }, + "SigningCertificates": { + "request": { "operation": "ListSigningCertificates" }, + "resource": { + "type": "SigningCertificate", + "identifiers": [ + { "target": "UserName", "source": "response", "path": "Certificates[].UserName" }, + { "target": "Id", "source": "response", "path": "Certificates[].CertificateId" } + ], + "path": "Certificates[]" + } + } + } + }, + "Group": { + "identifiers": [ + { + "name": "Name", + "memberName": "GroupName" + } + ], + "shape": "Group", + "load": { + "request": { + "operation": "GetGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + }, + "path": "Group" + }, + "actions": { + "AddUser": { + "request": { + "operation": "AddUserToGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + }, + "AttachPolicy": { + "request": { + "operation": "AttachGroupPolicy", + "params": [ + { "target": "GroupName", "source": "identifier", 
"name": "Name" } + ] + } + }, + "Create": { + "request": { + "operation": "CreateGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "GroupName" } + ], + "path": "Group" + } + }, + "CreatePolicy": { + "request": { + "operation": "PutGroupPolicy", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "GroupPolicy", + "identifiers": [ + { "target": "GroupName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "requestParameter", "path": "PolicyName" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + }, + "DetachPolicy": { + "request": { + "operation": "DetachGroupPolicy", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + }, + "RemoveUser": { + "request": { + "operation": "RemoveUserFromGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + } + }, + "Update": { + "request": { + "operation": "UpdateGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "NewGroupName" } + ] + } + } + }, + "has": { + "Policy": { + "resource": { + "type": "GroupPolicy", + "identifiers": [ + { "target": "GroupName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "input" } + ] + } + } + }, + "hasMany": { + "AttachedPolicies": { + "request": { + "operation": "ListAttachedGroupPolicies", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Policy", + "identifiers": [ + { "target": "Arn", "source": "response", 
"path": "AttachedPolicies[].PolicyArn" } + ] + } + }, + "Policies": { + "request": { + "operation": "ListGroupPolicies", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "GroupPolicy", + "identifiers": [ + { "target": "GroupName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "response", "path": "PolicyNames[]" } + ] + } + }, + "Users": { + "request": { + "operation": "GetGroup", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "response", "path": "Users[].UserName" } + ], + "path": "Users[]" + } + } + } + }, + "GroupPolicy": { + "identifiers": [ + { + "name": "GroupName", + "memberName": "GroupName" + }, + { + "name": "Name", + "memberName": "PolicyName" + } + ], + "shape": "GetGroupPolicyResponse", + "load": { + "request": { + "operation": "GetGroupPolicy", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "GroupName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteGroupPolicy", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "GroupName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + } + }, + "Put": { + "request": { + "operation": "PutGroupPolicy", + "params": [ + { "target": "GroupName", "source": "identifier", "name": "GroupName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "has": { + "Group": { + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "GroupName" } + ] + } + } + } + }, + "InstanceProfile": { + "identifiers": [ + { + "name": "Name", + "memberName": "InstanceProfileName" + } + ], + "shape": "InstanceProfile", + "load": { + "request": { + 
"operation": "GetInstanceProfile", + "params": [ + { "target": "InstanceProfileName", "source": "identifier", "name": "Name" } + ] + }, + "path": "InstanceProfile" + }, + "actions": { + "AddRole": { + "request": { + "operation": "AddRoleToInstanceProfile", + "params": [ + { "target": "InstanceProfileName", "source": "identifier", "name": "Name" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteInstanceProfile", + "params": [ + { "target": "InstanceProfileName", "source": "identifier", "name": "Name" } + ] + } + }, + "RemoveRole": { + "request": { + "operation": "RemoveRoleFromInstanceProfile", + "params": [ + { "target": "InstanceProfileName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "has": { + "Roles": { + "resource": { + "type": "Role", + "identifiers": [ + { "target": "Name", "source": "data", "path": "Roles[].RoleName" } + ], + "path": "Roles[]" + } + } + } + }, + "LoginProfile": { + "identifiers": [ + { + "name": "UserName", + "memberName": "UserName" + } + ], + "shape": "LoginProfile", + "load": { + "request": { + "operation": "GetLoginProfile", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" } + ] + }, + "path": "LoginProfile" + }, + "actions": { + "Create": { + "request": { + "operation": "CreateLoginProfile", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" } + ] + }, + "resource": { + "type": "LoginProfile", + "identifiers": [ + { "target": "UserName", "source": "response", "path": "LoginProfile.UserName" } + ], + "path": "LoginProfile" + } + }, + "Delete": { + "request": { + "operation": "DeleteLoginProfile", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" } + ] + } + }, + "Update": { + "request": { + "operation": "UpdateLoginProfile", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" } + ] + } + } + }, + "has": { + "User": { + "resource": { + "type": "User", + "identifiers": [ + { 
"target": "Name", "source": "identifier", "name": "UserName" } + ] + } + } + } + }, + "MfaDevice": { + "identifiers": [ + { + "name": "UserName", + "memberName": "UserName" + }, + { + "name": "SerialNumber", + "memberName": "SerialNumber" + } + ], + "shape": "MFADevice", + "actions": { + "Associate": { + "request": { + "operation": "EnableMFADevice", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "SerialNumber", "source": "identifier", "name": "SerialNumber" } + ] + } + }, + "Disassociate": { + "request": { + "operation": "DeactivateMFADevice", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "SerialNumber", "source": "identifier", "name": "SerialNumber" } + ] + } + }, + "Resync": { + "request": { + "operation": "ResyncMFADevice", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "SerialNumber", "source": "identifier", "name": "SerialNumber" } + ] + } + } + }, + "has": { + "User": { + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "UserName" } + ] + } + } + } + }, + "Policy": { + "identifiers": [ + { + "name": "Arn", + "memberName": "Arn" + } + ], + "shape": "Policy", + "load": { + "request": { + "operation": "GetPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + }, + "path": "Policy" + }, + "actions": { + "AttachGroup": { + "request": { + "operation": "AttachGroupPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "AttachRole": { + "request": { + "operation": "AttachRolePolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "AttachUser": { + "request": { + "operation": "AttachUserPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "CreateVersion": { + 
"request": { + "operation": "CreatePolicyVersion", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + }, + "resource": { + "type": "PolicyVersion", + "identifiers": [ + { "target": "Arn", "source": "identifier", "name": "Arn" }, + { "target": "VersionId", "source": "response", "path": "PolicyVersion.VersionId" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeletePolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "DetachGroup": { + "request": { + "operation": "DetachGroupPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "DetachRole": { + "request": { + "operation": "DetachRolePolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "DetachUser": { + "request": { + "operation": "DetachUserPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + } + } + }, + "has": { + "DefaultVersion": { + "resource": { + "type": "PolicyVersion", + "identifiers": [ + { "target": "Arn", "source": "identifier", "name": "Arn" }, + { "target": "VersionId", "source": "data", "path": "DefaultVersionId" } + ] + } + } + }, + "hasMany": { + "AttachedGroups": { + "request": { + "operation": "ListEntitiesForPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" }, + { "target": "EntityFilter", "source": "string", "value": "Group" } + ] + }, + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "response", "path": "PolicyGroups[].GroupName" } + ] + } + }, + "AttachedRoles": { + "request": { + "operation": "ListEntitiesForPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" }, + { "target": "EntityFilter", "source": "string", "value": "Role" } + ] + }, + "resource": { + "type": "Role", + "identifiers": [ + { "target": "Name", "source": "response", "path": 
"PolicyRoles[].RoleName" } + ] + } + }, + "AttachedUsers": { + "request": { + "operation": "ListEntitiesForPolicy", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" }, + { "target": "EntityFilter", "source": "string", "value": "User" } + ] + }, + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "response", "path": "PolicyUsers[].UserName" } + ] + } + }, + "Versions": { + "request": { + "operation": "ListPolicyVersions", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" } + ] + }, + "resource": { + "type": "PolicyVersion", + "identifiers": [ + { "target": "Arn", "source": "identifier", "name": "Arn" }, + { "target": "VersionId", "source": "response", "path": "Versions[].VersionId" } + ], + "path": "Versions[]" + } + } + } + }, + "PolicyVersion": { + "identifiers": [ + { "name": "Arn" }, + { "name": "VersionId" } + ], + "shape": "PolicyVersion", + "load": { + "request": { + "operation": "GetPolicyVersion", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" }, + { "target": "VersionId", "source": "identifier", "name": "VersionId" } + ] + }, + "path": "PolicyVersion" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeletePolicyVersion", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" }, + { "target": "VersionId", "source": "identifier", "name": "VersionId" } + ] + } + }, + "SetAsDefault": { + "request": { + "operation": "SetDefaultPolicyVersion", + "params": [ + { "target": "PolicyArn", "source": "identifier", "name": "Arn" }, + { "target": "VersionId", "source": "identifier", "name": "VersionId" } + ] + } + } + } + }, + "Role": { + "identifiers": [ + { + "name": "Name", + "memberName": "RoleName" + } + ], + "shape": "Role", + "load": { + "request": { + "operation": "GetRole", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + }, + "path": "Role" + }, + "actions": { + 
"AttachPolicy": { + "request": { + "operation": "AttachRolePolicy", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteRole", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + } + }, + "DetachPolicy": { + "request": { + "operation": "DetachRolePolicy", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "has": { + "AssumeRolePolicy": { + "resource": { + "type": "AssumeRolePolicy", + "identifiers": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + } + }, + "Policy": { + "resource": { + "type": "RolePolicy", + "identifiers": [ + { "target": "RoleName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "input" } + ] + } + } + }, + "hasMany": { + "AttachedPolicies": { + "request": { + "operation": "ListAttachedRolePolicies", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Policy", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "AttachedPolicies[].PolicyArn" } + ] + } + }, + "InstanceProfiles": { + "request": { + "operation": "ListInstanceProfilesForRole", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "InstanceProfile", + "identifiers": [ + { "target": "Name", "source": "response", "path": "InstanceProfiles[].InstanceProfileName" } + ], + "path": "InstanceProfiles[]" + } + }, + "Policies": { + "request": { + "operation": "ListRolePolicies", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "RolePolicy", + "identifiers": [ + { "target": "RoleName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "response", "path": "PolicyNames[]" } + ] + } + } + } + }, + "RolePolicy": { + "identifiers": [ + { + 
"name": "RoleName", + "memberName": "RoleName" + }, + { + "name": "Name", + "memberName": "PolicyName" + } + ], + "shape": "GetRolePolicyResponse", + "load": { + "request": { + "operation": "GetRolePolicy", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "RoleName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteRolePolicy", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "RoleName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + } + }, + "Put": { + "request": { + "operation": "PutRolePolicy", + "params": [ + { "target": "RoleName", "source": "identifier", "name": "RoleName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "has": { + "Role": { + "resource": { + "type": "Role", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "RoleName" } + ] + } + } + } + }, + "SamlProvider": { + "identifiers": [ + { "name": "Arn" } + ], + "shape": "GetSAMLProviderResponse", + "load": { + "request": { + "operation": "GetSAMLProvider", + "params": [ + { "target": "SAMLProviderArn", "source": "identifier", "name": "Arn" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteSAMLProvider", + "params": [ + { "target": "SAMLProviderArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "Update": { + "request": { + "operation": "UpdateSAMLProvider", + "params": [ + { "target": "SAMLProviderArn", "source": "identifier", "name": "Arn" } + ] + } + } + } + }, + "ServerCertificate": { + "identifiers": [ + { "name": "Name" } + ], + "shape": "ServerCertificate", + "load": { + "request": { + "operation": "GetServerCertificate", + "params": [ + { "target": "ServerCertificateName", "source": "identifier", "name": "Name" } + ] + }, + "path": "ServerCertificate" + }, + "actions": { + "Delete": { 
+ "request": { + "operation": "DeleteServerCertificate", + "params": [ + { "target": "ServerCertificateName", "source": "identifier", "name": "Name" } + ] + } + }, + "Update": { + "request": { + "operation": "UpdateServerCertificate", + "params": [ + { "target": "ServerCertificateName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "ServerCertificate", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "NewServerCertificateName" } + ] + } + } + } + }, + "SigningCertificate": { + "identifiers": [ + { + "name": "UserName", + "memberName": "UserName" + }, + { + "name": "Id", + "memberName": "CertificateId" + } + ], + "shape": "SigningCertificate", + "actions": { + "Activate": { + "request": { + "operation": "UpdateSigningCertificate", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "CertificateId", "source": "identifier", "name": "Id" }, + { "target": "Status", "source": "string", "value": "Active" } + ] + } + }, + "Deactivate": { + "request": { + "operation": "UpdateSigningCertificate", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "CertificateId", "source": "identifier", "name": "Id" }, + { "target": "Status", "source": "string", "value": "Inactive" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteSigningCertificate", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "CertificateId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "User": { + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "UserName" } + ] + } + } + } + }, + "User": { + "identifiers": [ + { + "name": "Name", + "memberName": "UserName" + } + ], + "shape": "User", + "load": { + "request": { + "operation": "GetUser", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + 
] + }, + "path": "User" + }, + "actions": { + "AddGroup": { + "request": { + "operation": "AddUserToGroup", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + } + }, + "AttachPolicy": { + "request": { + "operation": "AttachUserPolicy", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + } + }, + "Create": { + "request": { + "operation": "CreateUser", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "UserName" } + ], + "path": "User" + } + }, + "CreateAccessKeyPair": { + "request": { + "operation": "CreateAccessKey", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "AccessKeyPair", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "AccessKey.AccessKeyId" }, + { "target": "Secret", "source": "response", "path": "AccessKey.SecretAccessKey" } + ], + "path": "AccessKey" + } + }, + "CreateLoginProfile": { + "request": { + "operation": "CreateLoginProfile", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "LoginProfile", + "identifiers": [ + { "target": "UserName", "source": "response", "path": "LoginProfile.UserName" } + ], + "path": "LoginProfile" + } + }, + "CreatePolicy": { + "request": { + "operation": "PutUserPolicy", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "UserPolicy", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "requestParameter", "path": "PolicyName" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteUser", + "params": [ + { "target": "UserName", "source": 
"identifier", "name": "Name" } + ] + } + }, + "DetachPolicy": { + "request": { + "operation": "DetachUserPolicy", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + } + }, + "EnableMfa": { + "request": { + "operation": "EnableMFADevice", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "MfaDevice", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "SerialNumber", "source": "requestParameter", "path": "SerialNumber" } + ] + } + }, + "RemoveGroup": { + "request": { + "operation": "RemoveUserFromGroup", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + } + }, + "Update": { + "request": { + "operation": "UpdateUser", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "NewUserName" } + ] + } + } + }, + "has": { + "AccessKey": { + "resource": { + "type": "AccessKey", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "input" } + ] + } + }, + "LoginProfile": { + "resource": { + "type": "LoginProfile", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + } + }, + "MfaDevice": { + "resource": { + "type": "MfaDevice", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "SerialNumber", "source": "input" } + ] + } + }, + "Policy": { + "resource": { + "type": "UserPolicy", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "input" } + ] + } + }, + "SigningCertificate": { + "resource": { + "type": "SigningCertificate", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Id", 
"source": "input" } + ] + } + } + }, + "hasMany": { + "AccessKeys": { + "request": { + "operation": "ListAccessKeys", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "AccessKey", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "AccessKeyMetadata[].AccessKeyId" } + ], + "path": "AccessKeyMetadata[]" + } + }, + "AttachedPolicies": { + "request": { + "operation": "ListAttachedUserPolicies", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Policy", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "AttachedPolicies[].PolicyArn" } + ] + } + }, + "Groups": { + "request": { + "operation": "ListGroupsForUser", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Group", + "identifiers": [ + { "target": "Name", "source": "response", "path": "Groups[].GroupName" } + ], + "path": "Groups[]" + } + }, + "MfaDevices": { + "request": { + "operation": "ListMFADevices", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "MfaDevice", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "SerialNumber", "source": "response", "path": "MFADevices[].SerialNumber" } + ], + "path": "MFADevices[]" + } + }, + "Policies": { + "request": { + "operation": "ListUserPolicies", + "params": [ + { "target": "UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "UserPolicy", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Name", "source": "response", "path": "PolicyNames[]" } + ] + } + }, + "SigningCertificates": { + "request": { + "operation": "ListSigningCertificates", + "params": [ + { "target": 
"UserName", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "SigningCertificate", + "identifiers": [ + { "target": "UserName", "source": "identifier", "name": "Name" }, + { "target": "Id", "source": "response", "path": "Certificates[].CertificateId" } + ], + "path": "Certificates[]" + } + } + } + }, + "UserPolicy": { + "identifiers": [ + { + "name": "UserName", + "memberName": "UserName" + }, + { + "name": "Name", + "memberName": "PolicyName" + } + ], + "shape": "GetUserPolicyResponse", + "load": { + "request": { + "operation": "GetUserPolicy", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteUserPolicy", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + } + }, + "Put": { + "request": { + "operation": "PutUserPolicy", + "params": [ + { "target": "UserName", "source": "identifier", "name": "UserName" }, + { "target": "PolicyName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "has": { + "User": { + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "UserName" } + ] + } + } + } + }, + "VirtualMfaDevice": { + "identifiers": [ + { + "name": "SerialNumber", + "memberName": "SerialNumber" + } + ], + "shape": "VirtualMFADevice", + "actions": { + "Delete": { + "request": { + "operation": "DeleteVirtualMFADevice", + "params": [ + { "target": "SerialNumber", "source": "identifier", "name": "SerialNumber" } + ] + } + } + }, + "has": { + "User": { + "resource": { + "type": "User", + "identifiers": [ + { "target": "Name", "source": "data", "path": "User.UserName" } + ] + } + } + } + } + } +} diff --git a/boto3/data/opsworks/2013-02-18/resources-1.json 
b/boto3/data/opsworks/2013-02-18/resources-1.json new file mode 100644 index 0000000..0435b13 --- /dev/null +++ b/boto3/data/opsworks/2013-02-18/resources-1.json @@ -0,0 +1,173 @@ +{ + "service": { + "actions": { + "CreateStack": { + "request": { "operation": "CreateStack" }, + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Id", "source": "response", "path": "StackId" } + ] + } + } + }, + "has": { + "Layer": { + "resource": { + "type": "Layer", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + }, + "Stack": { + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + } + }, + "hasMany": { + "Stacks": { + "request": { "operation": "DescribeStacks" }, + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Stacks[].StackId" } + ], + "path": "Stacks[]" + } + } + } + }, + "resources": { + "Layer": { + "identifiers": [ + { "name": "Id" } + ], + "shape": "Layer", + "load": { + "request": { + "operation": "DescribeLayers", + "params": [ + { "target": "LayerIds[]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Layers[0]" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteLayer", + "params": [ + { "target": "LayerId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Stack": { + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Id", "source": "data", "path": "StackId" } + ] + } + } + } + }, + "Stack": { + "identifiers": [ + { "name": "Id" } + ], + "shape": "Stack", + "load": { + "request": { + "operation": "DescribeStacks", + "params": [ + { "target": "StackIds[]", "source": "identifier", "name": "Id" } + ] + }, + "path": "Stacks[0]" + }, + "actions": { + "CreateLayer": { + "request": { + "operation": "CreateLayer", + "params": [ + { "target": "StackId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Layer", + "identifiers": [ + { "target": "Id", 
"source": "response", "path": "LayerId" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteStack", + "params": [ + { "target": "StackId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Summary": { + "resource": { + "type": "StackSummary", + "identifiers": [ + { "target": "StackId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "hasMany": { + "Layers": { + "request": { + "operation": "DescribeLayers", + "params": [ + { "target": "StackId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Layer", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Layers[].LayerId" } + ], + "path": "Layers[]" + } + } + } + }, + "StackSummary": { + "identifiers": [ + { "name": "StackId" } + ], + "shape": "StackSummary", + "load": { + "request": { + "operation": "DescribeStackSummary", + "params": [ + { "target": "StackId", "source": "identifier", "name": "StackId" } + ] + }, + "path": "StackSummary" + }, + "has": { + "Stack": { + "resource": { + "type": "Stack", + "identifiers": [ + { "target": "Id", "source": "identifier", "name": "StackId" } + ] + } + } + } + } + } +} diff --git a/boto3/data/s3/2006-03-01/resources-1.json b/boto3/data/s3/2006-03-01/resources-1.json new file mode 100644 index 0000000..1665c3a --- /dev/null +++ b/boto3/data/s3/2006-03-01/resources-1.json @@ -0,0 +1,1158 @@ +{ + "service": { + "actions": { + "CreateBucket": { + "request": { "operation": "CreateBucket" }, + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "requestParameter", "path": "Bucket" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "input" } + ] + } + } + }, + "hasMany": { + "Buckets": { + "request": { "operation": "ListBuckets" }, + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "response", "path": "Buckets[].Name" } + ], + "path": "Buckets[]" + } + 
} + } + }, + "resources": { + "Bucket": { + "identifiers": [ + { "name": "Name" } + ], + "shape": "Bucket", + "actions": { + "Create": { + "request": { + "operation": "CreateBucket", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteBucket", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + } + }, + "DeleteObjects": { + "request": { + "operation": "DeleteObjects", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + } + }, + "PutObject": { + "request": { + "operation": "PutObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "Object", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" }, + { "target": "Key", "source": "requestParameter", "path": "Key" } + ] + } + } + }, + "waiters": { + "Exists": { + "waiterName": "BucketExists", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + }, + "NotExists": { + "waiterName": "BucketNotExists", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + } + }, + "has": { + "Acl": { + "resource": { + "type": "BucketAcl", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "Cors": { + "resource": { + "type": "BucketCors", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "Lifecycle": { + "resource": { + "type": "BucketLifecycle", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "Logging": { + "resource": { + "type": "BucketLogging", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "Notification": { + "resource": { + "type": "BucketNotification", + "identifiers": [ + { "target": "BucketName", "source": 
"identifier", "name": "Name" } + ] + } + }, + "Object": { + "resource": { + "type": "Object", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" }, + { "target": "Key", "source": "input" } + ] + } + }, + "Policy": { + "resource": { + "type": "BucketPolicy", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "RequestPayment": { + "resource": { + "type": "BucketRequestPayment", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "Tagging": { + "resource": { + "type": "BucketTagging", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "Versioning": { + "resource": { + "type": "BucketVersioning", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + }, + "Website": { + "resource": { + "type": "BucketWebsite", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" } + ] + } + } + }, + "hasMany": { + "MultipartUploads": { + "request": { + "operation": "ListMultipartUploads", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" }, + { "target": "ObjectKey", "source": "response", "path": "Uploads[].Key" }, + { "target": "Id", "source": "response", "path": "Uploads[].UploadId" } + ], + "path": "Uploads[]" + } + }, + "ObjectVersions": { + "request": { + "operation": "ListObjectVersions", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "ObjectVersion", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" }, + { "target": "ObjectKey", "source": "response", "path": "[Versions,DeleteMarkers]|[].Key" }, + { "target": "Id", "source": "response", "path": 
"[Versions,DeleteMarkers]|[].VersionId" } + ], + "path": "[Versions,DeleteMarkers]|[]" + } + }, + "Objects": { + "request": { + "operation": "ListObjects", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "Name" } + ] + }, + "resource": { + "type": "ObjectSummary", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "Name" }, + { "target": "Key", "source": "response", "path": "Contents[].Key" } + ], + "path": "Contents[]" + } + } + } + }, + "BucketAcl": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketAclOutput", + "load": { + "request": { + "operation": "GetBucketAcl", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Put": { + "request": { + "operation": "PutBucketAcl", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketCors": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketCorsOutput", + "load": { + "request": { + "operation": "GetBucketCors", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteBucketCors", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + }, + "Put": { + "request": { + "operation": "PutBucketCors", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketLifecycle": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": 
"GetBucketLifecycleOutput", + "load": { + "request": { + "operation": "GetBucketLifecycle", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteBucketLifecycle", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + }, + "Put": { + "request": { + "operation": "PutBucketLifecycle", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketLogging": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketLoggingOutput", + "load": { + "request": { + "operation": "GetBucketLogging", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Put": { + "request": { + "operation": "PutBucketLogging", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketNotification": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "NotificationConfiguration", + "load": { + "request": { + "operation": "GetBucketNotificationConfiguration", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Put": { + "request": { + "operation": "PutBucketNotificationConfiguration", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": 
"BucketName" } + ] + } + } + } + }, + "BucketPolicy": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketPolicyOutput", + "load": { + "request": { + "operation": "GetBucketPolicy", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteBucketPolicy", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + }, + "Put": { + "request": { + "operation": "PutBucketPolicy", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketRequestPayment": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketRequestPaymentOutput", + "load": { + "request": { + "operation": "GetBucketRequestPayment", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Put": { + "request": { + "operation": "PutBucketRequestPayment", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketTagging": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketTaggingOutput", + "load": { + "request": { + "operation": "GetBucketTagging", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteBucketTagging", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + }, + "Put": { + "request": { + "operation": 
"PutBucketTagging", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketVersioning": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketVersioningOutput", + "load": { + "request": { + "operation": "GetBucketVersioning", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Enable": { + "request": { + "operation": "PutBucketVersioning", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "VersioningConfiguration.Status", "source": "string", "value": "Enabled" } + ] + } + }, + "Put": { + "request": { + "operation": "PutBucketVersioning", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + }, + "Suspend": { + "request": { + "operation": "PutBucketVersioning", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "VersioningConfiguration.Status", "source": "string", "value": "Suspended" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "BucketWebsite": { + "identifiers": [ + { "name": "BucketName" } + ], + "shape": "GetBucketWebsiteOutput", + "load": { + "request": { + "operation": "GetBucketWebsite", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteBucketWebsite", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + }, + "Put": { + "request": { + "operation": "PutBucketWebsite", + "params": [ + { 
"target": "Bucket", "source": "identifier", "name": "BucketName" } + ] + } + } + }, + "has": { + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + } + } + }, + "MultipartUpload": { + "identifiers": [ + { "name": "BucketName" }, + { "name": "ObjectKey" }, + { "name": "Id" } + ], + "shape": "MultipartUpload", + "actions": { + "Abort": { + "request": { + "operation": "AbortMultipartUpload", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "UploadId", "source": "identifier", "name": "Id" } + ] + } + }, + "Complete": { + "request": { + "operation": "CompleteMultipartUpload", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "UploadId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "Object", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" } + ] + } + } + }, + "has": { + "Object": { + "resource": { + "type": "Object", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" } + ] + } + }, + "Part": { + "resource": { + "type": "MultipartUploadPart", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "ObjectKey" }, + { "target": "MultipartUploadId", "source": "identifier", "name": "Id" }, + { "target": "PartNumber", "source": "input" } + ] + } + } + }, + "hasMany": { + "Parts": { + "request": { + "operation": "ListParts", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + 
{ "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "UploadId", "source": "identifier", "name": "Id" } + ] + }, + "resource": { + "type": "MultipartUploadPart", + "identifiers": [ + { "target": "BucketName", "source": "requestParameter", "path": "Bucket" }, + { "target": "ObjectKey", "source": "requestParameter", "path": "Key" }, + { "target": "MultipartUploadId", "source": "requestParameter", "path": "UploadId" }, + { "target": "PartNumber", "source": "response", "path": "Parts[].PartNumber" } + ], + "path": "Parts[]" + } + } + } + }, + "MultipartUploadPart": { + "identifiers": [ + { "name": "BucketName" }, + { "name": "ObjectKey" }, + { "name": "MultipartUploadId" }, + { + "name": "PartNumber", + "type": "integer", + "memberName": "PartNumber" + } + ], + "shape": "Part", + "actions": { + "CopyFrom": { + "request": { + "operation": "UploadPartCopy", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "UploadId", "source": "identifier", "name": "MultipartUploadId" }, + { "target": "PartNumber", "source": "identifier", "name": "PartNumber" } + ] + } + }, + "Upload": { + "request": { + "operation": "UploadPart", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "UploadId", "source": "identifier", "name": "MultipartUploadId" }, + { "target": "PartNumber", "source": "identifier", "name": "PartNumber" } + ] + } + } + }, + "has": { + "MultipartUpload": { + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "ObjectKey" }, + { "target": "Id", "source": "identifier", "name": "MultipartUploadId" } + ] + } + } + } + }, + "Object": { + "identifiers": [ + { "name": "BucketName" 
}, + { "name": "Key" } + ], + "shape": "HeadObjectOutput", + "load": { + "request": { + "operation": "HeadObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + }, + "path": "@" + }, + "actions": { + "CopyFrom": { + "request": { + "operation": "CopyObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "Get": { + "request": { + "operation": "GetObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "InitiateMultipartUpload": { + "request": { + "operation": "CreateMultipartUpload", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + }, + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" }, + { "target": "Id", "source": "response", "path": "UploadId" } + ] + } + }, + "Put": { + "request": { + "operation": "PutObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": "DeleteObjects", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Delete.Objects[].Key", "source": "identifier", "name": "Key" } + ] + } + } + }, + "waiters": { + "Exists": { + 
"waiterName": "ObjectExists", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + }, + "NotExists": { + "waiterName": "ObjectNotExists", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "has": { + "Acl": { + "resource": { + "type": "ObjectAcl", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" } + ] + } + }, + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + }, + "MultipartUpload": { + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" }, + { "target": "Id", "source": "input" } + ] + } + }, + "Version": { + "resource": { + "type": "ObjectVersion", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" }, + { "target": "Id", "source": "input" } + ] + } + } + } + }, + "ObjectAcl": { + "identifiers": [ + { "name": "BucketName" }, + { "name": "ObjectKey" } + ], + "shape": "GetObjectAclOutput", + "load": { + "request": { + "operation": "GetObjectAcl", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" } + ] + }, + "path": "@" + }, + "actions": { + "Put": { + "request": { + "operation": "PutObjectAcl", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" } + ] + } + } + }, + "has": { + "Object": { + "resource": 
{ + "type": "Object", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" } + ] + } + } + } + }, + "ObjectSummary": { + "identifiers": [ + { "name": "BucketName" }, + { "name": "Key" } + ], + "shape": "Object", + "actions": { + "CopyFrom": { + "request": { + "operation": "CopyObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "Get": { + "request": { + "operation": "GetObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "InitiateMultipartUpload": { + "request": { + "operation": "CreateMultipartUpload", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + }, + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" }, + { "target": "Id", "source": "response", "path": "UploadId" } + ] + } + }, + "Put": { + "request": { + "operation": "PutObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": "DeleteObjects", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Delete.Objects[].Key", "source": "identifier", "name": "Key" } + ] + } + } + }, + 
"waiters": { + "Exists": { + "waiterName": "ObjectExists", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + }, + "NotExists": { + "waiterName": "ObjectNotExists", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "has": { + "Acl": { + "resource": { + "type": "ObjectAcl", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" } + ] + } + }, + "Bucket": { + "resource": { + "type": "Bucket", + "identifiers": [ + { "target": "Name", "source": "identifier", "name": "BucketName" } + ] + } + }, + "MultipartUpload": { + "resource": { + "type": "MultipartUpload", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" }, + { "target": "Id", "source": "input" } + ] + } + }, + "Object": { + "resource": { + "type": "Object", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "Key" } + ] + } + }, + "Version": { + "resource": { + "type": "ObjectVersion", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "ObjectKey", "source": "identifier", "name": "Key" }, + { "target": "Id", "source": "input" } + ] + } + } + } + }, + "ObjectVersion": { + "identifiers": [ + { "name": "BucketName" }, + { "name": "ObjectKey" }, + { "name": "Id" } + ], + "shape": "ObjectVersion", + "actions": { + "Delete": { + "request": { + "operation": "DeleteObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": 
"VersionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Get": { + "request": { + "operation": "GetObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "VersionId", "source": "identifier", "name": "Id" } + ] + } + }, + "Head": { + "request": { + "operation": "HeadObject", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "VersionId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": "DeleteObjects", + "params": [ + { "target": "Bucket", "source": "identifier", "name": "BucketName" }, + { "target": "Delete.Objects[*].Key", "source": "identifier", "name": "ObjectKey" }, + { "target": "Delete.Objects[*].VersionId", "source": "identifier", "name": "Id" } + ] + } + } + }, + "has": { + "Object": { + "resource": { + "type": "Object", + "identifiers": [ + { "target": "BucketName", "source": "identifier", "name": "BucketName" }, + { "target": "Key", "source": "identifier", "name": "ObjectKey" } + ] + } + } + } + } + } +} diff --git a/boto3/data/sns/2010-03-31/resources-1.json b/boto3/data/sns/2010-03-31/resources-1.json new file mode 100644 index 0000000..cee300a --- /dev/null +++ b/boto3/data/sns/2010-03-31/resources-1.json @@ -0,0 +1,327 @@ +{ + "service": { + "actions": { + "CreatePlatformApplication": { + "request": { "operation": "CreatePlatformApplication" }, + "resource": { + "type": "PlatformApplication", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "PlatformApplicationArn" } + ] + } + }, + "CreateTopic": { + "request": { "operation": "CreateTopic" }, + "resource": { + "type": "Topic", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "TopicArn" } + ] + } + } + }, + "has": { + "PlatformApplication": { + 
"resource": { + "type": "PlatformApplication", + "identifiers": [ + { "target": "Arn", "source": "input" } + ] + } + }, + "PlatformEndpoint": { + "resource": { + "type": "PlatformEndpoint", + "identifiers": [ + { "target": "Arn", "source": "input" } + ] + } + }, + "Subscription": { + "resource": { + "type": "Subscription", + "identifiers": [ + { "target": "Arn", "source": "input" } + ] + } + }, + "Topic": { + "resource": { + "type": "Topic", + "identifiers": [ + { "target": "Arn", "source": "input" } + ] + } + } + }, + "hasMany": { + "PlatformApplications": { + "request": { "operation": "ListPlatformApplications" }, + "resource": { + "type": "PlatformApplication", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "PlatformApplications[].PlatformApplicationArn" } + ] + } + }, + "Subscriptions": { + "request": { "operation": "ListSubscriptions" }, + "resource": { + "type": "Subscription", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "Subscriptions[].SubscriptionArn" } + ] + } + }, + "Topics": { + "request": { "operation": "ListTopics" }, + "resource": { + "type": "Topic", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "Topics[].TopicArn" } + ] + } + } + } + }, + "resources": { + "PlatformApplication": { + "identifiers": [ + { "name": "Arn" } + ], + "shape": "GetPlatformApplicationAttributesResponse", + "load": { + "request": { + "operation": "GetPlatformApplicationAttributes", + "params": [ + { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } + ] + }, + "path": "@" + }, + "actions": { + "CreatePlatformEndpoint": { + "request": { + "operation": "CreatePlatformEndpoint", + "params": [ + { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } + ] + }, + "resource": { + "type": "PlatformEndpoint", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "EndpointArn" } + ] + } + }, + "Delete": { + "request": { + "operation": 
"DeletePlatformApplication", + "params": [ + { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "SetAttributes": { + "request": { + "operation": "SetPlatformApplicationAttributes", + "params": [ + { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } + ] + } + } + }, + "hasMany": { + "Endpoints": { + "request": { + "operation": "ListEndpointsByPlatformApplication", + "params": [ + { "target": "PlatformApplicationArn", "source": "identifier", "name": "Arn" } + ] + }, + "resource": { + "type": "PlatformEndpoint", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "Endpoints[].EndpointArn" } + ] + } + } + } + }, + "PlatformEndpoint": { + "identifiers": [ + { "name": "Arn" } + ], + "shape": "GetEndpointAttributesResponse", + "load": { + "request": { + "operation": "GetEndpointAttributes", + "params": [ + { "target": "EndpointArn", "source": "identifier", "name": "Arn" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "DeleteEndpoint", + "params": [ + { "target": "EndpointArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "Publish": { + "request": { + "operation": "Publish", + "params": [ + { "target": "TargetArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "SetAttributes": { + "request": { + "operation": "SetEndpointAttributes", + "params": [ + { "target": "EndpointArn", "source": "identifier", "name": "Arn" } + ] + } + } + } + }, + "Subscription": { + "identifiers": [ + { "name": "Arn" } + ], + "shape": "GetSubscriptionAttributesResponse", + "load": { + "request": { + "operation": "GetSubscriptionAttributes", + "params": [ + { "target": "SubscriptionArn", "source": "identifier", "name": "Arn" } + ] + }, + "path": "@" + }, + "actions": { + "Delete": { + "request": { + "operation": "Unsubscribe", + "params": [ + { "target": "SubscriptionArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "SetAttributes": { + 
"request": { + "operation": "SetSubscriptionAttributes", + "params": [ + { "target": "SubscriptionArn", "source": "identifier", "name": "Arn" } + ] + } + } + } + }, + "Topic": { + "identifiers": [ + { "name": "Arn" } + ], + "shape": "GetTopicAttributesResponse", + "load": { + "request": { + "operation": "GetTopicAttributes", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + }, + "path": "@" + }, + "actions": { + "AddPermission": { + "request": { + "operation": "AddPermission", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "ConfirmSubscription": { + "request": { + "operation": "ConfirmSubscription", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + }, + "resource": { + "type": "Subscription", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "SubscriptionArn" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteTopic", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "Publish": { + "request": { + "operation": "Publish", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "RemovePermission": { + "request": { + "operation": "RemovePermission", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "SetAttributes": { + "request": { + "operation": "SetTopicAttributes", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + } + }, + "Subscribe": { + "request": { + "operation": "Subscribe", + "params": [ + { "target": "TopicArn", "source": "identifier", "name": "Arn" } + ] + }, + "resource": { + "type": "Subscription", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "SubscriptionArn" } + ] + } + } + }, + "hasMany": { + "Subscriptions": { + "request": { + "operation": "ListSubscriptionsByTopic", + "params": [ + { "target": "TopicArn", 
"source": "identifier", "name": "Arn" } + ] + }, + "resource": { + "type": "Subscription", + "identifiers": [ + { "target": "Arn", "source": "response", "path": "Subscriptions[].SubscriptionArn" } + ] + } + } + } + } + } +} diff --git a/boto3/data/sqs/2012-11-05/resources-1.json b/boto3/data/sqs/2012-11-05/resources-1.json new file mode 100644 index 0000000..84b4528 --- /dev/null +++ b/boto3/data/sqs/2012-11-05/resources-1.json @@ -0,0 +1,232 @@ +{ + "service": { + "actions": { + "CreateQueue": { + "request": { "operation": "CreateQueue" }, + "resource": { + "type": "Queue", + "identifiers": [ + { "target": "Url", "source": "response", "path": "QueueUrl" } + ] + } + }, + "GetQueueByName": { + "request": { "operation": "GetQueueUrl" }, + "resource": { + "type": "Queue", + "identifiers": [ + { "target": "Url", "source": "response", "path": "QueueUrl" } + ] + } + } + }, + "has": { + "Queue": { + "resource": { + "type": "Queue", + "identifiers": [ + { "target": "Url", "source": "input" } + ] + } + } + }, + "hasMany": { + "Queues": { + "request": { "operation": "ListQueues" }, + "resource": { + "type": "Queue", + "identifiers": [ + { "target": "Url", "source": "response", "path": "QueueUrls[]" } + ] + } + } + } + }, + "resources": { + "Message": { + "identifiers": [ + { "name": "QueueUrl" }, + { + "name": "ReceiptHandle", + "memberName": "ReceiptHandle" + } + ], + "shape": "Message", + "actions": { + "ChangeVisibility": { + "request": { + "operation": "ChangeMessageVisibility", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" }, + { "target": "ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteMessage", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" }, + { "target": "ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" } + ] + } + } + }, + "batchActions": { + "Delete": { + "request": { + "operation": 
"DeleteMessageBatch", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "QueueUrl" }, + { "target": "Entries[*].Id", "source": "data", "path": "MessageId" }, + { "target": "Entries[*].ReceiptHandle", "source": "identifier", "name": "ReceiptHandle" } + ] + } + } + }, + "has": { + "Queue": { + "resource": { + "type": "Queue", + "identifiers": [ + { "target": "Url", "source": "identifier", "name": "QueueUrl" } + ] + } + } + } + }, + "Queue": { + "identifiers": [ + { "name": "Url" } + ], + "shape": "GetQueueAttributesResult", + "load": { + "request": { + "operation": "GetQueueAttributes", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" }, + { "target": "AttributeNames[]", "source": "string", "value": "All" } + ] + }, + "path": "@" + }, + "actions": { + "AddPermission": { + "request": { + "operation": "AddPermission", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "ChangeMessageVisibilityBatch": { + "request": { + "operation": "ChangeMessageVisibilityBatch", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "Delete": { + "request": { + "operation": "DeleteQueue", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "DeleteMessages": { + "request": { + "operation": "DeleteMessageBatch", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "Purge": { + "request": { + "operation": "PurgeQueue", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "ReceiveMessages": { + "request": { + "operation": "ReceiveMessage", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + }, + "resource": { + "type": "Message", + "identifiers": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" }, + { "target": "ReceiptHandle", "source": "response", "path": 
"Messages[].ReceiptHandle" } + ], + "path": "Messages[]" + } + }, + "RemovePermission": { + "request": { + "operation": "RemovePermission", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "SendMessage": { + "request": { + "operation": "SendMessage", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "SendMessages": { + "request": { + "operation": "SendMessageBatch", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + }, + "SetAttributes": { + "request": { + "operation": "SetQueueAttributes", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + } + } + }, + "has": { + "Message": { + "resource": { + "type": "Message", + "identifiers": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" }, + { "target": "ReceiptHandle", "source": "input" } + ] + } + } + }, + "hasMany": { + "DeadLetterSourceQueues": { + "request": { + "operation": "ListDeadLetterSourceQueues", + "params": [ + { "target": "QueueUrl", "source": "identifier", "name": "Url" } + ] + }, + "resource": { + "type": "Queue", + "identifiers": [ + { "target": "Url", "source": "response", "path": "QueueUrls[]" } + ] + } + } + } + } + } +} diff --git a/boto3/docs/__init__.py b/boto3/docs/__init__.py new file mode 100644 index 0000000..c18c86e --- /dev/null +++ b/boto3/docs/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+from boto3.docs.service import ServiceDocumenter
+
+
+def generate_docs(root_dir, session):
+    """Generates the reference documentation for botocore
+
+    This will go through every available AWS service and output ReSTructured
+    text files documenting each service.
+
+    :param root_dir: The directory to write the reference files to. Each
+        service's reference documentation is located at
+        root_dir/reference/services/service-name.rst
+
+    :param session: The boto3 session
+    """
+    services_doc_path = os.path.join(root_dir, 'reference', 'services')
+    if not os.path.exists(services_doc_path):
+        os.makedirs(services_doc_path)
+
+    for service_name in session.get_available_services():
+        docs = ServiceDocumenter(service_name, session).document_service()
+        service_doc_path = os.path.join(
+            services_doc_path, service_name + '.rst')
+        with open(service_doc_path, 'wb') as f:
+            f.write(docs)
diff --git a/boto3/docs/action.py b/boto3/docs/action.py
new file mode 100644
index 0000000..16d91b1
--- /dev/null
+++ b/boto3/docs/action.py
@@ -0,0 +1,146 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
from botocore import xform_name
from botocore.model import OperationModel
from botocore.utils import get_service_module_name
from botocore.docs.method import document_model_driven_method
from botocore.docs.method import document_custom_method

from boto3.docs.base import BaseDocumenter
from boto3.docs.method import document_model_driven_resource_method
from boto3.docs.utils import get_resource_ignore_params
from boto3.docs.utils import get_resource_public_actions
from boto3.docs.utils import add_resource_type_overview


class ActionDocumenter(BaseDocumenter):
    """Documents the actions belonging to a single resource."""

    def document_actions(self, section):
        """Document every public action exposed by the resource.

        Modeled actions and the special ``load``/``reload`` actions are
        documented from the resource model; any remaining public methods
        are documented as custom (hand-written) methods.

        :param section: The section to write the action documentation to
        """
        modeled_actions_list = self._resource_model.actions
        modeled_actions = {}
        for modeled_action in modeled_actions_list:
            modeled_actions[modeled_action.name] = modeled_action
        resource_actions = get_resource_public_actions(
            self._resource.__class__)
        self.member_map['actions'] = sorted(resource_actions)
        add_resource_type_overview(
            section=section,
            resource_type='Actions',
            description=(
                'Actions call operations on resources. They may '
                'automatically handle the passing in of arguments set '
                'from identifiers and some attributes.'),
            intro_link='actions_intro')
        for action_name in sorted(resource_actions):
            # Each action gets its own subsection so its docs can be
            # rendered (and lazily loaded) independently.
            action_section = section.add_new_section(action_name)
            if action_name in ['load', 'reload'] and self._resource_model.load:
                document_load_reload_action(
                    section=action_section,
                    action_name=action_name,
                    resource_name=self._resource_name,
                    event_emitter=self._resource.meta.client.meta.events,
                    load_model=self._resource_model.load,
                    service_model=self._service_model
                )
            elif action_name in modeled_actions:
                document_action(
                    section=action_section,
                    resource_name=self._resource_name,
                    event_emitter=self._resource.meta.client.meta.events,
                    action_model=modeled_actions[action_name],
                    service_model=self._service_model,
                )
            else:
                # BUG FIX: custom methods were previously written into the
                # parent ``section`` instead of their own ``action_section``,
                # so they were rendered outside the per-action subsection.
                document_custom_method(
                    action_section, action_name, resource_actions[action_name])


def document_action(section, resource_name, event_emitter, action_model,
                    service_model, include_signature=True):
    """Documents a resource action

    :param section: The section to write to

    :param resource_name: The name of the resource

    :param event_emitter: The event emitter to use to emit events

    :param action_model: The model of the action

    :param service_model: The model of the service

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    operation_model = service_model.operation_model(
        action_model.request.operation)
    # Parameters that are auto-filled from the resource's identifiers
    # must not appear in the documented call signature.
    ignore_params = get_resource_ignore_params(action_model.request.params)

    example_return_value = 'response'
    if action_model.resource:
        example_return_value = xform_name(action_model.resource.type)
    example_resource_name = xform_name(resource_name)
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    example_prefix = '%s = %s.%s' % (
        example_return_value, example_resource_name, action_model.name)
    document_model_driven_resource_method(
        section=section, method_name=action_model.name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        method_description=operation_model.documentation,
        example_prefix=example_prefix,
        exclude_input=ignore_params,
        resource_action_model=action_model,
        include_signature=include_signature
    )


def document_load_reload_action(section, action_name, resource_name,
                                event_emitter, load_model, service_model,
                                include_signature=True):
    """Documents the resource load action

    :param section: The section to write to

    :param action_name: The name of the loading action should be load or
        reload

    :param resource_name: The name of the resource

    :param event_emitter: The event emitter to use to emit events

    :param load_model: The model of the load action

    :param service_model: The model of the service

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    description = (
        'Calls :py:meth:`%s.Client.%s` to update the attributes of the'
        ' %s resource' % (
            get_service_module_name(service_model),
            xform_name(load_model.request.operation),
            resource_name)
    )
    example_resource_name = xform_name(resource_name)
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    example_prefix = '%s.%s' % (example_resource_name, action_name)
    # load/reload take no operation parameters, so document against an
    # empty operation model.
    document_model_driven_method(
        section=section, method_name=action_name,
        operation_model=OperationModel({}, service_model),
        event_emitter=event_emitter,
        method_description=description,
        example_prefix=example_prefix,
        include_signature=include_signature
    )
from botocore.docs.utils import py_type_name

from boto3.docs.utils import get_identifier_description


def document_attribute(section, attr_name, attr_model, include_signature=True):
    """Write the documentation for a single resource attribute."""
    if include_signature:
        section.style.start_sphinx_py_attr(attr_name)
    # Lead with the python type so it renders as ``*(type)*``.
    section.write('*(%s)* ' % py_type_name(attr_model.type_name))
    section.include_doc_string(attr_model.documentation)


def document_identifier(section, resource_name, identifier_model,
                        include_signature=True):
    """Write the documentation for a single resource identifier."""
    if include_signature:
        section.style.start_sphinx_py_attr(identifier_model.name)
    body = get_identifier_description(
        resource_name, identifier_model.name)
    # Identifiers are always strings.
    section.write('*(string)* ' + body)


def document_reference(section, reference_model, include_signature=True):
    """Write the documentation for a single resource reference."""
    if include_signature:
        section.style.start_sphinx_py_attr(reference_model.name)
    section.write('(:py:class:`%s`) ' % reference_model.resource.type)
    section.include_doc_string(
        'The related %s if set, otherwise ``None``.' % reference_model.name
    )
from botocore.compat import OrderedDict


class BaseDocumenter(object):
    """Shared state for the resource documenters.

    Pulls the commonly needed models and names off of a ``resource``
    instance once so subclasses can reference them directly.
    """

    def __init__(self, resource):
        self._resource = resource
        self._client = resource.meta.client
        self._resource_model = resource.meta.resource_model
        self._service_model = self._client.meta.service_model
        self._resource_name = self._resource_model.name
        self._service_name = self._service_model.service_name
        self._service_docs_name = self._client.__class__.__name__
        # Ordered so documented members render in insertion order.
        self.member_map = OrderedDict()
        # True when documenting the service resource itself (its name
        # matches the service name) rather than an individual resource.
        self.represents_service_resource = (
            self._service_name == self._resource_name)

    @property
    def class_name(self):
        """The fully qualified doc name, e.g. ``S3.Bucket``."""
        return '%s.%s' % (self._service_docs_name, self._resource_name)
from botocore.docs.client import ClientDocumenter


class Boto3ClientDocumenter(ClientDocumenter):
    """Documents a boto3 client, overriding the creation example so it
    shows ``boto3.client(...)`` instead of a raw botocore session."""

    def _add_client_creation_example(self, section):
        section.style.start_codeblock()
        section.style.new_line()
        section.write('import boto3')
        section.style.new_line()
        section.style.new_line()
        section.write(
            'client = boto3.client(\'{service}\')'.format(
                service=self._service_name)
        )
        section.style.end_codeblock()


# --- boto3/docs/collection.py ---
from botocore import xform_name
from botocore.docs.method import get_instance_public_methods
from botocore.docs.utils import DocumentedShape

from boto3.docs.base import BaseDocumenter
from boto3.docs.utils import get_resource_ignore_params
from boto3.docs.method import document_model_driven_resource_method
from boto3.docs.utils import add_resource_type_overview


class CollectionDocumenter(BaseDocumenter):
    """Documents the collections that belong to a resource."""

    def document_collections(self, section):
        """Documents every collection exposed by the resource.

        :param section: The section to write to
        """
        collections = self._resource.meta.resource_model.collections
        collections_list = []
        add_resource_type_overview(
            section=section,
            resource_type='Collections',
            description=(
                'Collections provide an interface to iterate over and '
                'manipulate groups of resources. '),
            intro_link='guide_collections')
        self.member_map['collections'] = collections_list
        for collection in collections:
            collection_section = section.add_new_section(collection.name)
            collections_list.append(collection.name)
            self._document_collection(collection_section, collection)

    def _document_collection(self, section, collection):
        """Documents one collection: the collection attribute itself,
        its batch actions, and its standard iteration methods."""
        methods = get_instance_public_methods(
            getattr(self._resource, collection.name))
        document_collection_object(section, collection)
        batch_actions = {}
        for batch_action in collection.batch_actions:
            batch_actions[batch_action.name] = batch_action

        for method in sorted(methods):
            method_section = section.add_new_section(method)
            if method in batch_actions:
                document_batch_action(
                    section=method_section,
                    resource_name=self._resource_name,
                    event_emitter=self._resource.meta.client.meta.events,
                    batch_action_model=batch_actions[method],
                    collection_model=collection,
                    service_model=self._resource.meta.client.meta.service_model
                )
            else:
                document_collection_method(
                    section=method_section,
                    resource_name=self._resource_name,
                    action_name=method,
                    event_emitter=self._resource.meta.client.meta.events,
                    collection_model=collection,
                    service_model=self._resource.meta.client.meta.service_model
                )


def document_collection_object(section, collection_model,
                               include_signature=True):
    """Documents a collection resource object

    :param section: The section to write to

    :param collection_model: The model of the collection

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    if include_signature:
        section.style.start_sphinx_py_attr(collection_model.name)
    section.include_doc_string(
        'A collection of %s resources' % collection_model.resource.type)


def document_batch_action(section, resource_name, event_emitter,
                          batch_action_model, service_model, collection_model,
                          include_signature=True):
    """Documents a collection's batch action

    DOC FIX: removed a stray ``:param action_name:`` entry that was copied
    from ``document_collection_method`` -- this function takes no such
    argument.

    :param section: The section to write to

    :param resource_name: The name of the resource

    :param event_emitter: The event emitter to use to emit events

    :param batch_action_model: The model of the batch action

    :param collection_model: The model of the collection

    :param service_model: The model of the service

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    operation_model = service_model.operation_model(
        batch_action_model.request.operation)
    ignore_params = get_resource_ignore_params(
        batch_action_model.request.params)

    example_return_value = 'response'
    if batch_action_model.resource:
        example_return_value = xform_name(batch_action_model.resource.type)

    example_resource_name = xform_name(resource_name)
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    example_prefix = '%s = %s.%s.%s' % (
        example_return_value, example_resource_name,
        collection_model.name, batch_action_model.name
    )
    document_model_driven_resource_method(
        section=section, method_name=batch_action_model.name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        method_description=operation_model.documentation,
        example_prefix=example_prefix,
        exclude_input=ignore_params,
        resource_action_model=batch_action_model,
        include_signature=include_signature
    )


def document_collection_method(section, resource_name, action_name,
                               event_emitter, collection_model, service_model,
                               include_signature=True):
    """Documents a collection method

    :param section: The section to write to

    :param resource_name: The name of the resource

    :param action_name: The name of collection action. Currently only
        can be all, filter, limit, or page_size

    :param event_emitter: The event emitter to use to emit events

    :param collection_model: The model of the collection

    :param service_model: The model of the service

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    operation_model = service_model.operation_model(
        collection_model.request.operation)

    # Parameters of the underlying operation are hidden for methods that
    # do not accept filtering kwargs.
    underlying_operation_members = []
    if operation_model.input_shape:
        underlying_operation_members = operation_model.input_shape.members

    example_resource_name = xform_name(resource_name)
    if service_model.service_name == resource_name:
        example_resource_name = resource_name

    custom_action_info_dict = {
        'all': {
            'method_description': (
                'Creates an iterable of all %s resources '
                'in the collection.' % collection_model.resource.type),
            'example_prefix': '%s_iterator = %s.%s.all' % (
                xform_name(collection_model.resource.type),
                example_resource_name, collection_model.name),
            'exclude_input': underlying_operation_members
        },
        'filter': {
            'method_description': (
                'Creates an iterable of all %s resources '
                'in the collection filtered by kwargs passed to '
                'method.' % collection_model.resource.type),
            'example_prefix': '%s_iterator = %s.%s.filter' % (
                xform_name(collection_model.resource.type),
                example_resource_name, collection_model.name),
            'exclude_input': get_resource_ignore_params(
                collection_model.request.params)
        },
        'limit': {
            'method_description': (
                'Creates an iterable up to a specified amount of '
                '%s resources in the collection.' %
                collection_model.resource.type),
            'example_prefix': '%s_iterator = %s.%s.limit' % (
                xform_name(collection_model.resource.type),
                example_resource_name, collection_model.name),
            'include_input': [
                DocumentedShape(
                    name='count', type_name='integer',
                    documentation=(
                        'The limit to the number of resources '
                        'in the iterable.'))],
            'exclude_input': underlying_operation_members
        },
        'page_size': {
            'method_description': (
                'Creates an iterable of all %s resources '
                'in the collection, but limits the number of '
                'items returned by each service call by the specified '
                'amount.' % collection_model.resource.type),
            'example_prefix': '%s_iterator = %s.%s.page_size' % (
                xform_name(collection_model.resource.type),
                example_resource_name, collection_model.name),
            'include_input': [
                DocumentedShape(
                    name='count', type_name='integer',
                    documentation=(
                        'The number of items returned by each '
                        'service call'))],
            'exclude_input': underlying_operation_members
        }
    }
    # Only the four standard collection methods are documented; anything
    # else is silently skipped by design.
    if action_name in custom_action_info_dict:
        action_info = custom_action_info_dict[action_name]
        document_model_driven_resource_method(
            section=section, method_name=action_name,
            operation_model=operation_model,
            event_emitter=event_emitter,
            resource_action_model=collection_model,
            include_signature=include_signature,
            **action_info
        )
# See the License for the specific
# language governing permissions and limitations under the License.
from botocore.docs.docstring import LazyLoadedDocstring

from boto3.docs.action import document_action
from boto3.docs.action import document_load_reload_action
from boto3.docs.subresource import document_sub_resource
from boto3.docs.attr import document_attribute
from boto3.docs.attr import document_identifier
from boto3.docs.attr import document_reference
from boto3.docs.collection import document_collection_object
from boto3.docs.collection import document_collection_method
from boto3.docs.collection import document_batch_action
from boto3.docs.waiter import document_resource_waiter


# Each docstring class below defers the (relatively expensive) rendering
# of its documentation until the docstring is actually requested, by
# delegating to the corresponding ``document_*`` writer.

class ActionDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_action(*args, **kwargs)


class LoadReloadDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_load_reload_action(*args, **kwargs)


class SubResourceDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_sub_resource(*args, **kwargs)


class AttributeDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_attribute(*args, **kwargs)


class IdentifierDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_identifier(*args, **kwargs)


class ReferenceDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_reference(*args, **kwargs)


class CollectionDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_collection_object(*args, **kwargs)


class CollectionMethodDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_collection_method(*args, **kwargs)


class BatchActionDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_batch_action(*args, **kwargs)


class ResourceWaiterDocstring(LazyLoadedDocstring):
    def _write_docstring(self, *args, **kwargs):
        document_resource_waiter(*args, **kwargs)


# --- boto3/docs/method.py ---
from botocore.docs.method import document_model_driven_method


def document_model_driven_resource_method(
        section, method_name, operation_model, event_emitter,
        method_description=None, example_prefix=None, include_input=None,
        include_output=None, exclude_input=None, exclude_output=None,
        document_output=True, resource_action_model=None,
        include_signature=True):
    """Documents a resource method driven by an operation model.

    Thin wrapper over botocore's ``document_model_driven_method`` that,
    when the action returns a resource, rewrites the generated
    ``:rtype:``/``:returns:`` section to reference the resource class
    instead of the raw service response.
    """
    document_model_driven_method(
        section=section, method_name=method_name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        method_description=method_description,
        example_prefix=example_prefix,
        include_input=include_input,
        include_output=include_output,
        exclude_input=exclude_input,
        exclude_output=exclude_output,
        document_output=document_output,
        include_signature=include_signature
    )

    # If this action returns a resource modify the return example to
    # appropriately reflect that.
    # ROBUSTNESS FIX: ``resource_action_model`` defaults to None, so guard
    # against None before dereferencing ``.resource``.
    if resource_action_model is not None and resource_action_model.resource:
        if 'return' in section.available_sections:
            section.delete_section('return')
        resource_type = resource_action_model.resource.type

        new_return_section = section.add_new_section('return')
        return_resource_type = '%s.%s' % (
            operation_model.service_model.service_name,
            resource_type)

        return_type = ':py:class:`%s`' % return_resource_type
        return_description = '%s resource' % (resource_type)

        if _method_returns_resource_list(resource_action_model.resource):
            return_type = 'list(%s)' % return_type
            return_description = 'A list of %s resources' % (
                resource_type)

        new_return_section.style.new_line()
        new_return_section.write(
            ':rtype: %s' % return_type)
        new_return_section.style.new_line()
        new_return_section.write(
            ':returns: %s' % return_description)
        new_return_section.style.new_line()


def _method_returns_resource_list(resource):
    """True if any identifier is extracted via a ``[]`` JMESPath, i.e. the
    action maps a list in the response to a list of resource instances."""
    for identifier in resource.identifiers:
        if identifier.path and '[]' in identifier.path:
            return True

    return False
from botocore import xform_name
from botocore.docs.utils import get_official_service_name

from boto3.docs.base import BaseDocumenter
from boto3.docs.action import ActionDocumenter
from boto3.docs.waiter import WaiterResourceDocumenter
from boto3.docs.collection import CollectionDocumenter
from boto3.docs.subresource import SubResourceDocumenter
from boto3.docs.attr import document_attribute
from boto3.docs.attr import document_identifier
from boto3.docs.attr import document_reference
from boto3.docs.utils import get_identifier_args_for_signature
from boto3.docs.utils import get_identifier_values_for_example
from boto3.docs.utils import get_identifier_description
from boto3.docs.utils import add_resource_type_overview


class ResourceDocumenter(BaseDocumenter):
    """Generates the documentation page section for a single resource."""

    def __init__(self, resource, botocore_session):
        super(ResourceDocumenter, self).__init__(resource)
        # Needed to load the service waiter model for resource waiters.
        self._botocore_session = botocore_session

    def document_resource(self, section):
        """Writes the full documentation for the resource.

        :param section: The section to write to
        """
        self._add_title(section)
        self._add_intro(section)
        # The member overview is written last (once member_map has been
        # populated by the member documenters below), but it must render
        # before the member sections, so reserve its spot now.
        overview_section = section.add_new_section('member-overview')
        self._add_identifiers(section)
        self._add_attributes(section)
        self._add_references(section)
        self._add_actions(section)
        self._add_sub_resources(section)
        self._add_collections(section)
        self._add_waiters(section)
        self._add_overview_of_members(overview_section)

    def _add_title(self, section):
        section.style.h2(self._resource_name)

    def _add_intro(self, section):
        identifier_names = []
        if self._resource_model.identifiers:
            for identifier in self._resource_model.identifiers:
                identifier_names.append(identifier.name)

        # Write out the class signature.
        class_args = get_identifier_args_for_signature(identifier_names)
        section.style.start_sphinx_py_class(
            class_name='%s(%s)' % (self.class_name, class_args))

        # Add a short description about the resource.
        description_section = section.add_new_section('description')
        self._add_description(description_section)

        # Add an example of how to instantiate the resource.
        example_section = section.add_new_section('example')
        self._add_example(example_section, identifier_names)

        # Add the description for the parameters to instantiate the
        # resource.
        param_section = section.add_new_section('params')
        self._add_params_description(param_section, identifier_names)

    def _add_description(self, section):
        official_service_name = get_official_service_name(
            self._service_model)
        section.write(
            'A resource representing an %s %s' % (
                official_service_name, self._resource_name))

    def _add_example(self, section, identifier_names):
        section.style.start_codeblock()
        section.style.new_line()
        section.write('import boto3')
        section.style.new_line()
        section.style.new_line()
        section.write(
            '%s = boto3.resource(\'%s\')' % (
                self._service_name, self._service_name)
        )
        section.style.new_line()
        example_values = get_identifier_values_for_example(identifier_names)
        section.write(
            '%s = %s.%s(%s)' % (
                xform_name(self._resource_name), self._service_name,
                self._resource_name, example_values))
        section.style.end_codeblock()

    def _add_params_description(self, section, identifier_names):
        for identifier_name in identifier_names:
            description = get_identifier_description(
                self._resource_name, identifier_name)
            section.write(':type %s: string' % identifier_name)
            section.style.new_line()
            section.write(':param %s: %s' % (
                identifier_name, description))
            section.style.new_line()

    def _add_overview_of_members(self, section):
        for resource_member_type in self.member_map:
            section.style.new_line()
            section.write('These are the resource\'s available %s:' % (
                resource_member_type))
            section.style.new_line()
            for member in self.member_map[resource_member_type]:
                # Attribute-like members link via :py:attr:, callables
                # via :py:meth:.
                if resource_member_type in ['identifiers', 'attributes',
                                            'references', 'collections']:
                    section.style.li(':py:attr:`%s`' % member)
                else:
                    section.style.li(':py:meth:`%s()`' % member)

    def _add_identifiers(self, section):
        identifiers = self._resource.meta.resource_model.identifiers
        section = section.add_new_section('identifiers')
        member_list = []
        if identifiers:
            self.member_map['identifiers'] = member_list
            add_resource_type_overview(
                section=section,
                resource_type='Identifiers',
                description=(
                    'Identifiers are properties of a resource that are '
                    # TYPO FIX: was 'instantation'.
                    'set upon instantiation of the resource.'),
                intro_link='identifiers_attributes_intro')
            for identifier in identifiers:
                identifier_section = section.add_new_section(identifier.name)
                member_list.append(identifier.name)
                document_identifier(
                    section=identifier_section,
                    resource_name=self._resource_name,
                    identifier_model=identifier
                )

    def _add_attributes(self, section):
        service_model = self._resource.meta.client.meta.service_model
        attributes = {}
        if self._resource.meta.resource_model.shape:
            shape = service_model.shape_for(
                self._resource.meta.resource_model.shape)
            attributes = self._resource.meta.resource_model.get_attributes(
                shape)
        section = section.add_new_section('attributes')
        attribute_list = []
        if attributes:
            add_resource_type_overview(
                section=section,
                resource_type='Attributes',
                description=(
                    'Attributes provide access'
                    ' to the properties of a resource. Attributes are lazy-'
                    'loaded the first time one is accessed via the'
                    ' :py:meth:`load` method.'),
                intro_link='identifiers_attributes_intro')
            self.member_map['attributes'] = attribute_list
            for attr_name in sorted(attributes):
                _, attr_shape = attributes[attr_name]
                attribute_section = section.add_new_section(attr_name)
                attribute_list.append(attr_name)
                document_attribute(
                    section=attribute_section,
                    attr_name=attr_name,
                    attr_model=attr_shape
                )

    def _add_references(self, section):
        section = section.add_new_section('references')
        references = self._resource.meta.resource_model.references
        reference_list = []
        if references:
            add_resource_type_overview(
                section=section,
                resource_type='References',
                description=(
                    'References are related resource instances that have '
                    'a belongs-to relationship.'),
                intro_link='references_intro')
            self.member_map['references'] = reference_list
            for reference in references:
                reference_section = section.add_new_section(reference.name)
                reference_list.append(reference.name)
                document_reference(
                    section=reference_section,
                    reference_model=reference
                )

    def _add_actions(self, section):
        section = section.add_new_section('actions')
        actions = self._resource.meta.resource_model.actions
        if actions:
            documenter = ActionDocumenter(self._resource)
            documenter.member_map = self.member_map
            documenter.document_actions(section)

    def _add_sub_resources(self, section):
        section = section.add_new_section('sub-resources')
        sub_resources = self._resource.meta.resource_model.subresources
        if sub_resources:
            documenter = SubResourceDocumenter(self._resource)
            documenter.member_map = self.member_map
            documenter.document_sub_resources(section)

    def _add_collections(self, section):
        section = section.add_new_section('collections')
        collections = self._resource.meta.resource_model.collections
        if collections:
            documenter = CollectionDocumenter(self._resource)
            documenter.member_map = self.member_map
            documenter.document_collections(section)

    def _add_waiters(self, section):
        section = section.add_new_section('waiters')
        waiters = self._resource.meta.resource_model.waiters
        if waiters:
            service_waiter_model = self._botocore_session.get_waiter_model(
                self._service_name)
            documenter = WaiterResourceDocumenter(
                self._resource, service_waiter_model)
            documenter.member_map = self.member_map
            documenter.document_resource_waiters(section)


class ServiceResourceDocumenter(ResourceDocumenter):
    """Documents the service resource (e.g. ``boto3.resource('s3')``)."""

    @property
    def class_name(self):
        return '%s.ServiceResource' % self._service_docs_name

    def _add_title(self, section):
        section.style.h2('Service Resource')

    def _add_description(self, section):
        official_service_name = get_official_service_name(
            self._service_model)
        section.write(
            'A resource representing %s' % official_service_name)

    def _add_example(self, section, identifier_names):
        # The service resource takes no identifiers, so the example is
        # just the resource() call.
        section.style.start_codeblock()
        section.style.new_line()
        section.write('import boto3')
        section.style.new_line()
        section.style.new_line()
        section.write(
            '%s = boto3.resource(\'%s\')' % (
                self._service_name, self._service_name))
        section.style.end_codeblock()
from botocore.exceptions import DataNotFoundError
from botocore.docs.paginator import PaginatorDocumenter
from botocore.docs.waiter import WaiterDocumenter
from botocore.docs.bcdoc.restdoc import DocumentStructure

from boto3.utils import ServiceContext
from boto3.docs.client import Boto3ClientDocumenter
from boto3.docs.resource import ResourceDocumenter
from boto3.docs.resource import ServiceResourceDocumenter


class ServiceDocumenter(object):
    """Generates the full reST documentation page for one service."""

    def __init__(self, service_name, session):
        self._service_name = service_name
        self._session = session
        # I know that this is an internal attribute, but the botocore session
        # is needed to load the paginator and waiter models.
        self._botocore_session = session._session
        self._client = self._session.client(service_name)
        self._service_resource = None
        if self._service_name in self._session.get_available_resources():
            self._service_resource = self._session.resource(service_name)
        self.sections = [
            'title',
            'table-of-contents',
            'client',
            'paginators',
            'waiters',
            'service-resource',
            'resources'
        ]

    def document_service(self):
        """Documents an entire service.

        :returns: The reStructured text of the documented service.
        """
        doc_structure = DocumentStructure(
            self._service_name, section_names=self.sections,
            target='html')
        self._document_title(doc_structure.get_section('title'))
        self._document_table_of_contents(
            doc_structure.get_section('table-of-contents'))
        self._document_client(doc_structure.get_section('client'))
        self._document_paginators(doc_structure.get_section('paginators'))
        self._document_waiters(doc_structure.get_section('waiters'))
        # Resource sections only apply to services that expose a
        # service resource.
        if self._service_resource:
            self._document_service_resource(
                doc_structure.get_section('service-resource'))
            self._document_resources(doc_structure.get_section('resources'))
        return doc_structure.flush_structure()

    def _document_title(self, section):
        # The client class name doubles as the page title (e.g. ``S3``).
        section.style.h1(self._client.__class__.__name__)

    def _document_table_of_contents(self, section):
        section.style.table_of_contents(title='Table of Contents', depth=2)

    def _document_client(self, section):
        Boto3ClientDocumenter(self._client).document_client(section)

    def _document_paginators(self, section):
        try:
            model = self._botocore_session.get_paginator_model(
                self._service_name)
        except DataNotFoundError:
            # Not every service ships paginators; skip the section.
            return
        documenter = PaginatorDocumenter(self._client, model)
        documenter.document_paginators(section)

    def _document_waiters(self, section):
        if self._client.waiter_names:
            model = self._botocore_session.get_waiter_model(
                self._service_name)
            documenter = WaiterDocumenter(self._client, model)
            documenter.document_waiters(section)

    def _document_service_resource(self, section):
        documenter = ServiceResourceDocumenter(
            self._service_resource, self._botocore_session)
        documenter.document_resource(section)

    def _document_resources(self, section):
        placeholder_identifier = 'foo'
        loader = self._botocore_session.get_component('data_loader')
        json_resource_model = loader.load_service_model(
            self._service_name, 'resources-1')
        service_model = self._service_resource.meta.client.meta.service_model
        resource_defs = json_resource_model['resources']
        for resource_name in resource_defs:
            resource_cls = self._session.resource_factory.load_from_definition(
                resource_name=resource_name,
                single_resource_json_definition=resource_defs[resource_name],
                service_context=ServiceContext(
                    service_name=self._service_name,
                    resource_json_definitions=resource_defs,
                    service_model=service_model,
                    service_waiter_model=None
                )
            )
            # Instantiate with placeholder identifiers purely so the
            # documenter has a concrete resource object to inspect.
            identifiers = resource_cls.meta.resource_model.identifiers
            args = [placeholder_identifier for _ in identifiers]
            resource = resource_cls(*args, client=self._client)
            documenter = ResourceDocumenter(
                resource, self._botocore_session)
            documenter.document_resource(
                section.add_new_section(resource.meta.resource_model.name))
from botocore import xform_name
from botocore.utils import get_service_module_name

from boto3.docs.base import BaseDocumenter
from boto3.docs.utils import get_identifier_args_for_signature
from boto3.docs.utils import get_identifier_values_for_example
from boto3.docs.utils import get_identifier_description
from boto3.docs.utils import add_resource_type_overview


class SubResourceDocumenter(BaseDocumenter):
    """Documents the sub-resource factory methods of a resource."""

    def document_sub_resources(self, section):
        """Writes documentation for every sub-resource, sorted by name."""
        add_resource_type_overview(
            section=section,
            resource_type='Sub-resources',
            description=(
                'Sub-resources are methods that create a new instance of a'
                ' child resource. This resource\'s identifiers get passed'
                ' along to the child.'),
            intro_link='subresources_intro')
        documented_names = []
        self.member_map['sub-resources'] = documented_names
        for sub_resource in sorted(
                self._resource.meta.resource_model.subresources,
                key=lambda model: model.name):
            documented_names.append(sub_resource.name)
            document_sub_resource(
                section=section.add_new_section(sub_resource.name),
                resource_name=self._resource_name,
                sub_resource_model=sub_resource,
                service_model=self._service_model
            )


def document_sub_resource(section, resource_name, sub_resource_model,
                          service_model, include_signature=True):
    """Documents a resource action

    :param section: The section to write to

    :param resource_name: The name of the resource

    :param sub_resource_model: The model of the subresource

    :param service_model: The model of the service

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    # Only identifiers sourced from input become method parameters; the
    # rest are inherited from the parent resource.
    identifiers_needed = [
        xform_name(identifier.target)
        for identifier in sub_resource_model.resource.identifiers
        if identifier.source == 'input'
    ]

    if include_signature:
        section.style.start_sphinx_py_method(
            sub_resource_model.name,
            get_identifier_args_for_signature(identifiers_needed))

    intro_section = section.add_new_section('method-intro')
    intro_section.include_doc_string(
        'Creates a %s resource.' % sub_resource_model.resource.type)

    example_section = section.add_new_section('example')
    # The service resource is referenced by its service name in examples;
    # all other resources are referenced by their snake_cased name.
    example_resource_name = xform_name(resource_name)
    if service_model.service_name == resource_name:
        example_resource_name = resource_name
    example_section.style.start_codeblock()
    example_section.write('%s = %s.%s(%s)' % (
        xform_name(sub_resource_model.resource.type),
        example_resource_name,
        sub_resource_model.name,
        get_identifier_values_for_example(identifiers_needed)
    ))
    example_section.style.end_codeblock()

    param_section = section.add_new_section('params')
    for identifier in identifiers_needed:
        param_section.write(':type %s: string' % identifier)
        param_section.style.new_line()
        param_section.write(':param %s: %s' % (
            identifier,
            get_identifier_description(sub_resource_model.name, identifier)))
        param_section.style.new_line()

    return_section = section.add_new_section('return')
    return_section.style.new_line()
    return_section.write(
        ':rtype: :py:class:`%s.%s`' % (
            get_service_module_name(service_model),
            sub_resource_model.resource.type))
    return_section.style.new_line()
    return_section.write(
        ':returns: A %s resource' % sub_resource_model.resource.type)
    return_section.style.new_line()
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import inspect

import jmespath

from botocore.compat import six


def get_resource_ignore_params(params):
    """Helper method to determine which parameters to ignore for actions

    :returns: A list of the parameter names that does not need to be
        included in a resource's method call for documentation purposes.
    """
    ignore_params = []
    for param in params:
        result = jmespath.compile(param.target)
        current = result.parsed
        # Use JMESPath to find the left most element in the target expression
        # which will be the parameter to ignore in the action call.
        while current['children']:
            current = current['children'][0]
        # Make sure the parameter we are about to ignore is a field.
        # If it is not, we should ignore the result to avoid false positives.
        if current['type'] == 'field':
            ignore_params.append(current['value'])
    return ignore_params


def is_resource_action(action_handle):
    """Return True if ``action_handle`` is a resource action.

    Resource actions are plain functions on Python 3 but methods on
    Python 2, so the inspect check differs per major version.
    """
    if six.PY3:
        return inspect.isfunction(action_handle)
    else:
        return inspect.ismethod(action_handle)


def get_resource_public_actions(resource_class):
    """Return a dict mapping action name to handle for a resource class.

    Skips private members (leading underscore), nested resource classes
    (capitalized names), and waiter methods (``wait_until`` prefix).
    """
    resource_methods = {}
    for name, member in inspect.getmembers(resource_class):
        # Flattened from a four-deep nested-if ladder; short-circuit
        # order is preserved so behavior is identical.
        if (not name.startswith('_') and
                not name[0].isupper() and
                not name.startswith('wait_until') and
                is_resource_action(member)):
            resource_methods[name] = member
    return resource_methods


def get_identifier_values_for_example(identifier_names):
    """Return quoted example values for identifiers, e.g. ``'a','b'``."""
    example_values = ['\'%s\'' % identifier for identifier in identifier_names]
    return ','.join(example_values)


def get_identifier_args_for_signature(identifier_names):
    """Return the comma-joined identifier names for a method signature."""
    return ','.join(identifier_names)


def get_identifier_description(resource_name, identifier_name):
    """Return the standard description for a required resource identifier."""
    return "The %s's %s identifier. This **must** be set." % (
        resource_name, identifier_name)


def add_resource_type_overview(section, resource_type, description,
                               intro_link=None):
    """Write an admonition-style overview for a resource type section.

    :param section: The documentation section to write to.
    :param resource_type: The display name of the resource type
        (e.g. ``'Waiters'``).
    :param description: A short description of the resource type.
    :param intro_link: Optional Sphinx reference label; when given, a
        "For more information" pointer is appended.
    """
    section.style.new_line()
    section.write('.. rst-class:: admonition-title')
    section.style.new_line()
    section.style.new_line()
    section.write(resource_type)
    section.style.new_line()
    section.style.new_line()
    section.write(description)
    section.style.new_line()
    if intro_link is not None:
        section.write('For more information about %s refer to the '
                      ':ref:`Resources Introduction Guide<%s>`.' % (
                          resource_type.lower(), intro_link))
        section.style.new_line()


class DocumentModifiedShape(object):
    """Rewrites generated docs for a shape whose runtime type is transformed.

    Used where boto3 customizations (e.g. DynamoDB attribute values)
    accept or return different types than the raw botocore shape, so the
    auto-generated type, description, and example must be replaced.
    """

    def __init__(self, shape_name, new_type, new_description,
                 new_example_value):
        """
        :param shape_name: The name of the shape whose docs to replace.
        :param new_type: The replacement type string.
        :param new_description: The replacement parameter description.
        :param new_example_value: The replacement example value string.
        """
        self._shape_name = shape_name
        self._new_type = new_type
        self._new_description = new_description
        self._new_example_value = new_example_value

    def replace_documentation_for_matching_shape(self, event_name, section,
                                                 **kwargs):
        """Event handler: recursively replace docs for matching shapes.

        Walks the section tree; whenever a (sub)section's context names
        the target shape, its documentation is rewritten in place.
        """
        if self._shape_name == section.context.get('shape'):
            self._replace_documentation(event_name, section)
        for section_name in section.available_sections:
            sub_section = section.get_section(section_name)
            if self._shape_name == sub_section.context.get('shape'):
                self._replace_documentation(event_name, sub_section)
            else:
                # Recurse: the shape may be nested arbitrarily deep.
                self.replace_documentation_for_matching_shape(
                    event_name, sub_section)

    def _replace_documentation(self, event_name, section):
        # Example sections: replace the whole rendered example value.
        if event_name.startswith('docs.request-example') or \
                event_name.startswith('docs.response-example'):
            section.remove_all_sections()
            section.clear_text()
            section.write(self._new_example_value)

        # Param sections: keep only the structural sub-sections and swap
        # in the new description and type.
        if event_name.startswith('docs.request-params') or \
                event_name.startswith('docs.response-params'):
            for section_name in section.available_sections:
                # Delete any extra members as a new shape is being
                # used.
                if section_name not in ['param-name', 'param-documentation',
                                        'end-structure', 'param-type',
                                        'end-param']:
                    section.delete_section(section_name)

            # Update the documentation
            description_section = section.get_section('param-documentation')
            description_section.clear_text()
            description_section.write(self._new_description)

            # Update the param type; request params use the ':type'
            # reST field while response params use the inline italics form.
            type_section = section.get_section('param-type')
            if type_section.getvalue().decode('utf-8').startswith(':type'):
                type_section.clear_text()
                type_section.write(':type %s: %s' % (
                    section.name, self._new_type))
            else:
                type_section.clear_text()
                type_section.style.italics('(%s) -- ' % self._new_type)
from botocore import xform_name
from botocore.utils import get_service_module_name
from botocore.docs.method import document_model_driven_method

from boto3.docs.base import BaseDocumenter
from boto3.docs.utils import get_resource_ignore_params
from boto3.docs.utils import add_resource_type_overview


class WaiterResourceDocumenter(BaseDocumenter):
    """Documents the waiter methods attached to a resource."""

    def __init__(self, resource, service_waiter_model):
        """
        :param resource: The resource whose waiters are being documented.
        :param service_waiter_model: The service's waiter model, used to
            look up polling/operation details for each resource waiter.
        """
        super(WaiterResourceDocumenter, self).__init__(resource)
        self._service_waiter_model = service_waiter_model

    def document_resource_waiters(self, section):
        """Writes documentation for every waiter on the resource."""
        waiters = self._resource.meta.resource_model.waiters
        add_resource_type_overview(
            section=section,
            resource_type='Waiters',
            description=(
                'Waiters provide an interface to wait for a resource'
                ' to reach a specific state.'),
            intro_link='waiters_intro')
        waiter_list = []
        self.member_map['waiters'] = waiter_list
        for waiter in waiters:
            waiter_section = section.add_new_section(waiter.name)
            waiter_list.append(waiter.name)
            document_resource_waiter(
                section=waiter_section,
                resource_name=self._resource_name,
                event_emitter=self._resource.meta.client.meta.events,
                service_model=self._service_model,
                resource_waiter_model=waiter,
                service_waiter_model=self._service_waiter_model
            )


def document_resource_waiter(section, resource_name, event_emitter,
                             service_model, resource_waiter_model,
                             service_waiter_model, include_signature=True):
    """Documents a single resource waiter method.

    :param section: The section to write to.
    :param resource_name: The name of the resource the waiter belongs to.
    :param event_emitter: The event emitter used when rendering the
        model-driven method documentation.
    :param service_model: The service model, used to resolve the waiter's
        underlying client operation.
    :param resource_waiter_model: The resource-level waiter model.
    :param service_waiter_model: The service-level waiter model.
    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    waiter_model = service_waiter_model.get_waiter(
        resource_waiter_model.waiter_name)
    operation_model = service_model.operation_model(
        waiter_model.operation)

    # Parameters supplied by the resource's identifiers are excluded
    # from the documented call signature.
    ignore_params = get_resource_ignore_params(resource_waiter_model.params)
    service_module_name = get_service_module_name(service_model)
    # BUGFIX: the original text read '...wait` which polls. :py:meth:`...`
    # every N seconds until...' -- a stray sentence break that produced a
    # broken sentence in every generated waiter doc. The period now ends
    # the sentence after 'state is reached'.
    description = (
        'Waits until this %s is %s. This method calls '
        ':py:meth:`%s.Waiter.%s.wait` which polls '
        ':py:meth:`%s.Client.%s` every %s seconds until '
        'a successful state is reached. An error is returned '
        'after %s failed checks.' % (
            resource_name, ' '.join(resource_waiter_model.name.split('_')[2:]),
            service_module_name,
            xform_name(resource_waiter_model.waiter_name),
            service_module_name,
            xform_name(waiter_model.operation),
            waiter_model.delay, waiter_model.max_attempts))
    example_prefix = '%s.%s' % (
        xform_name(resource_name), resource_waiter_model.name)
    document_model_driven_method(
        section=section, method_name=resource_waiter_model.name,
        operation_model=operation_model,
        event_emitter=event_emitter,
        example_prefix=example_prefix,
        method_description=description,
        exclude_input=ignore_params,
        include_signature=include_signature
    )
    if 'return' in section.available_sections:
        # Waiters do not return anything so we should remove
        # any sections that may document the underlying return
        # value of the client method.
        return_section = section.get_section('return')
        return_section.clear_text()
        return_section.remove_all_sections()
        return_section.write(':returns: None')
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from collections import namedtuple
import functools
import re

from boto3.exceptions import DynamoDBOperationNotSupportedError
from boto3.exceptions import DynamoDBNeedsConditionError
from boto3.exceptions import DynamoDBNeedsKeyConditionError


# Matches each component of an attribute name that needs its own name
# placeholder, i.e. everything except the '.' separators and '[n]'
# list-index suffixes.
ATTR_NAME_REGEX = re.compile(r'[^.\[\]]+(?![^\[]*\])')


class ConditionBase(object):
    """Base class for all DynamoDB condition expressions.

    Subclasses set ``expression_operator``/``expression_format`` to
    describe how the condition renders; conditions compose with
    ``&`` (AND), ``|`` (OR), and ``~`` (NOT).
    """

    expression_format = ''
    expression_operator = ''
    # True when the condition's values render as a parenthesized group
    # (e.g. the IN operator).
    has_grouped_values = False

    def __init__(self, *values):
        self._values = values

    def __and__(self, other):
        if not isinstance(other, ConditionBase):
            raise DynamoDBOperationNotSupportedError('AND', other)
        return And(self, other)

    def __or__(self, other):
        if not isinstance(other, ConditionBase):
            raise DynamoDBOperationNotSupportedError('OR', other)
        return Or(self, other)

    def __invert__(self):
        return Not(self)

    def get_expression(self):
        """Return the format string, operator, and values of the condition."""
        return {'format': self.expression_format,
                'operator': self.expression_operator,
                'values': self._values}

    def __eq__(self, other):
        if isinstance(other, type(self)):
            if self._values == other._values:
                return True
        return False

    def __ne__(self, other):
        return not self.__eq__(other)


class AttributeBase(object):
    """Base class for objects that reference a DynamoDB attribute by name."""

    def __init__(self, name):
        self.name = name

    def __and__(self, value):
        # Bare attributes are not conditions; they cannot be combined.
        raise DynamoDBOperationNotSupportedError('AND', self)

    def __or__(self, value):
        raise DynamoDBOperationNotSupportedError('OR', self)

    def __invert__(self):
        raise DynamoDBOperationNotSupportedError('NOT', self)

    def eq(self, value):
        """Creates a condition where the attribute is equal to the value.

        :param value: The value that the attribute is equal to.
        """
        return Equals(self, value)

    def lt(self, value):
        """Creates a condition where the attribute is less than the value.

        :param value: The value that the attribute is less than.
        """
        return LessThan(self, value)

    def lte(self, value):
        """Creates a condition where the attribute is less than or equal to
        the value.

        :param value: The value that the attribute is less than or equal to.
        """
        return LessThanEquals(self, value)

    def gt(self, value):
        """Creates a condition where the attribute is greater than the value.

        :param value: The value that the attribute is greater than.
        """
        return GreaterThan(self, value)

    def gte(self, value):
        """Creates a condition where the attribute is greater than or equal
        to the value.

        :param value: The value that the attribute is greater than or equal
            to.
        """
        return GreaterThanEquals(self, value)

    def begins_with(self, value):
        """Creates a condition where the attribute begins with the value.

        :param value: The value that the attribute begins with.
        """
        return BeginsWith(self, value)

    def between(self, low_value, high_value):
        """Creates a condition where the attribute is between the low value
        and the high value.

        :param low_value: The value that the attribute is greater than.
        :param high_value: The value that the attribute is less than.
        """
        return Between(self, low_value, high_value)


class ConditionAttributeBase(ConditionBase, AttributeBase):
    """This base class is for conditions that can have attribute methods.

    One example is the Size condition. To complete a condition, you need
    to apply another AttributeBase method like eq().
    """
    def __init__(self, *values):
        ConditionBase.__init__(self, *values)
        # This is assuming the first value to the condition is the attribute
        # in which can be used to generate its attribute base.
        AttributeBase.__init__(self, values[0].name)


class ComparisonCondition(ConditionBase):
    expression_format = '{0} {operator} {1}'


class Equals(ComparisonCondition):
    expression_operator = '='


class NotEquals(ComparisonCondition):
    expression_operator = '<>'


class LessThan(ComparisonCondition):
    expression_operator = '<'


class LessThanEquals(ComparisonCondition):
    expression_operator = '<='


class GreaterThan(ComparisonCondition):
    expression_operator = '>'


class GreaterThanEquals(ComparisonCondition):
    expression_operator = '>='


class In(ComparisonCondition):
    expression_operator = 'IN'
    has_grouped_values = True


class Between(ConditionBase):
    expression_operator = 'BETWEEN'
    expression_format = '{0} {operator} {1} AND {2}'


class BeginsWith(ConditionBase):
    expression_operator = 'begins_with'
    expression_format = '{operator}({0}, {1})'


class Contains(ConditionBase):
    expression_operator = 'contains'
    expression_format = '{operator}({0}, {1})'


class Size(ConditionAttributeBase):
    expression_operator = 'size'
    expression_format = '{operator}({0})'


class AttributeType(ConditionBase):
    expression_operator = 'attribute_type'
    expression_format = '{operator}({0}, {1})'


class AttributeExists(ConditionBase):
    expression_operator = 'attribute_exists'
    expression_format = '{operator}({0})'


class AttributeNotExists(ConditionBase):
    expression_operator = 'attribute_not_exists'
    expression_format = '{operator}({0})'


class And(ConditionBase):
    expression_operator = 'AND'
    expression_format = '({0} {operator} {1})'


class Or(ConditionBase):
    expression_operator = 'OR'
    expression_format = '({0} {operator} {1})'


class Not(ConditionBase):
    expression_operator = 'NOT'
    expression_format = '({operator} {0})'


class Key(AttributeBase):
    """Represents a DynamoDB key attribute for KeyConditionExpressions."""
    pass


class Attr(AttributeBase):
    """Represents a DynamoDB item's attribute."""
    def ne(self, value):
        """Creates a condition where the attribute is not equal to the value.

        :param value: The value that the attribute is not equal to.
        """
        return NotEquals(self, value)

    def is_in(self, value):
        """Creates a condition where the attribute is in the value.

        :type value: list
        :param value: The value that the attribute is in.
        """
        return In(self, value)

    def exists(self):
        """Creates a condition where the attribute exists."""
        return AttributeExists(self)

    def not_exists(self):
        """Creates a condition where the attribute does not exist."""
        return AttributeNotExists(self)

    def contains(self, value):
        """Creates a condition where the attribute contains the value.

        :param value: The value the attribute contains.
        """
        return Contains(self, value)

    def size(self):
        """Creates a condition for the attribute size.

        Note another AttributeBase method must be called on the returned
        size condition to be a valid DynamoDB condition.
        """
        return Size(self)

    def attribute_type(self, value):
        """Creates a condition for the attribute type.

        :param value: The type of the attribute.
        """
        return AttributeType(self, value)


BuiltConditionExpression = namedtuple(
    'BuiltConditionExpression',
    ['condition_expression', 'attribute_name_placeholders',
     'attribute_value_placeholders']
)


class ConditionExpressionBuilder(object):
    """This class is used to build condition expressions with placeholders"""
    def __init__(self):
        self._name_count = 0
        self._value_count = 0
        self._name_placeholder = 'n'
        self._value_placeholder = 'v'

    def _get_name_placeholder(self):
        return '#' + self._name_placeholder + str(self._name_count)

    def _get_value_placeholder(self):
        return ':' + self._value_placeholder + str(self._value_count)

    def reset(self):
        """Resets the placeholder name and values"""
        self._name_count = 0
        self._value_count = 0

    def build_expression(self, condition, is_key_condition=False):
        """Builds the condition expression and the dictionary of placeholders.

        :type condition: ConditionBase
        :param condition: A condition to be built into a condition expression
            string with any necessary placeholders.

        :type is_key_condition: Boolean
        :param is_key_condition: True if the expression is for a
            KeyConditionExpression. False otherwise.

        :rtype: (string, dict, dict)
        :returns: Will return a string representing the condition with
            placeholders inserted where necessary, a dictionary of
            placeholders for attribute names, and a dictionary of
            placeholders for attribute values. Here is a sample return value:

            ('#n0 = :v0', {'#n0': 'myattribute'}, {':v1': 'myvalue'})
        """
        if not isinstance(condition, ConditionBase):
            raise DynamoDBNeedsConditionError(condition)
        attribute_name_placeholders = {}
        attribute_value_placeholders = {}
        condition_expression = self._build_expression(
            condition, attribute_name_placeholders,
            attribute_value_placeholders, is_key_condition=is_key_condition)
        return BuiltConditionExpression(
            condition_expression=condition_expression,
            attribute_name_placeholders=attribute_name_placeholders,
            attribute_value_placeholders=attribute_value_placeholders
        )

    def _build_expression(self, condition, attribute_name_placeholders,
                          attribute_value_placeholders, is_key_condition):
        expression_dict = condition.get_expression()
        replaced_values = []
        for value in expression_dict['values']:
            # Build the necessary placeholders for that value.
            # Placeholders are built for both attribute names and values.
            replaced_value = self._build_expression_component(
                value, attribute_name_placeholders,
                attribute_value_placeholders, condition.has_grouped_values,
                is_key_condition)
            replaced_values.append(replaced_value)
        # Fill out the expression using the operator and the
        # values that have been replaced with placeholders.
        return expression_dict['format'].format(
            *replaced_values, operator=expression_dict['operator'])

    def _build_expression_component(self, value, attribute_name_placeholders,
                                    attribute_value_placeholders,
                                    has_grouped_values, is_key_condition):
        # Continue to recurse if the value is a ConditionBase in order
        # to extract out all parts of the expression.
        if isinstance(value, ConditionBase):
            return self._build_expression(
                value, attribute_name_placeholders,
                attribute_value_placeholders, is_key_condition)
        # If it is not a ConditionBase, we can recurse no further.
        # So we check if it is an attribute and add placeholders for
        # its name
        elif isinstance(value, AttributeBase):
            if is_key_condition and not isinstance(value, Key):
                raise DynamoDBNeedsKeyConditionError(
                    'Attribute object %s is of type %s. '
                    'KeyConditionExpression only supports Attribute objects '
                    'of type Key' % (value.name, type(value)))
            return self._build_name_placeholder(
                value, attribute_name_placeholders)
        # If it is anything else, we treat it as a value and thus placeholders
        # are needed for the value.
        else:
            return self._build_value_placeholder(
                value, attribute_value_placeholders, has_grouped_values)

    def _build_name_placeholder(self, value, attribute_name_placeholders):
        attribute_name = value.name
        # Figure out which parts of the attribute name that needs replacement.
        attribute_name_parts = ATTR_NAME_REGEX.findall(attribute_name)

        # Add a temporary placeholder for each of these parts.
        placeholder_format = ATTR_NAME_REGEX.sub('%s', attribute_name)
        str_format_args = []
        for part in attribute_name_parts:
            name_placeholder = self._get_name_placeholder()
            self._name_count += 1
            str_format_args.append(name_placeholder)
            # Add the placeholder and value to dictionary of name placeholders.
            attribute_name_placeholders[name_placeholder] = part
        # Replace the temporary placeholders with the designated placeholders.
        return placeholder_format % tuple(str_format_args)

    def _build_value_placeholder(self, value, attribute_value_placeholders,
                                 has_grouped_values=False):
        # If the values are grouped, we need to add a placeholder for
        # each element inside of the actual value.
        if has_grouped_values:
            placeholder_list = []
            for v in value:
                value_placeholder = self._get_value_placeholder()
                self._value_count += 1
                placeholder_list.append(value_placeholder)
                attribute_value_placeholders[value_placeholder] = v
            # Assuming the values are grouped by parenthesis.
            # IN is the currently the only one that uses this so it maybe
            # needed to be changed in future.
            return '(' + ', '.join(placeholder_list) + ')'
        # Otherwise, treat the value as a single value that needs only
        # one placeholder.
        else:
            value_placeholder = self._get_value_placeholder()
            self._value_count += 1
            attribute_value_placeholders[value_placeholder] = value
            return value_placeholder
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging


logger = logging.getLogger(__name__)


def register_table_methods(base_classes, **kwargs):
    """Event handler that mixes TableResource into the Table class."""
    base_classes.insert(0, TableResource)


# This class can be used to add any additional methods we want
# onto a table resource. Ideally to avoid creating a new
# base class for every method we can just update this
# class instead. Just be sure to move the bulk of the
# actual method implementation to another class.
class TableResource(object):
    def __init__(self, *args, **kwargs):
        super(TableResource, self).__init__(*args, **kwargs)

    def batch_writer(self):
        """Create a batch writer object.

        This method creates a context manager for writing
        objects to Amazon DynamoDB in batch.

        The batch writer will automatically handle buffering and sending items
        in batches. In addition, the batch writer will also automatically
        handle any unprocessed items and resend them as needed. All you need
        to do is call ``put_item`` for any items you want to add, and
        ``delete_item`` for any items you want to delete.

        Example usage::

            with table.batch_writer() as batch:
                for _ in xrange(1000000):
                    batch.put_item(Item={'HashKey': '...',
                                         'Otherstuff': '...'})
                # You can also delete_items in a batch.
                batch.delete_item(Key={'HashKey': 'SomeHashKey'})

        """
        return BatchWriter(self.name, self.meta.client)


class BatchWriter(object):
    """Automatically handle batch writes to DynamoDB for a single table."""
    def __init__(self, table_name, client, flush_amount=25):
        """

        :type table_name: str
        :param table_name: The name of the table.  The class handles
            batch writes to a single table.

        :type client: ``botocore.client.Client``
        :param client: A botocore client.  Note this client
            **must** have the dynamodb customizations applied
            to it for transforming AttributeValues into the
            wire protocol.  What this means in practice is that
            you need to use a client that comes from a DynamoDB
            resource if you're going to instantiate this class
            directly, i.e
            ``boto3.resource('dynamodb').Table('foo').meta.client``.

        :type flush_amount: int
        :param flush_amount: The number of items to keep in
            a local buffer before sending a batch_write_item
            request to DynamoDB.

        """
        self._table_name = table_name
        self._client = client
        self._items_buffer = []
        self._flush_amount = flush_amount

    def put_item(self, Item):
        """Buffer a PutRequest; flushes once ``flush_amount`` is reached."""
        self._items_buffer.append({'PutRequest': {'Item': Item}})
        self._flush_if_needed()

    def delete_item(self, Key):
        """Buffer a DeleteRequest; flushes once ``flush_amount`` is reached."""
        self._items_buffer.append({'DeleteRequest': {'Key': Key}})
        self._flush_if_needed()

    def _flush_if_needed(self):
        if len(self._items_buffer) >= self._flush_amount:
            self._flush()

    def _flush(self):
        # Capture the batch before sending so the debug log can report
        # the actual number of items sent.  (Previously this logged
        # ``flush_amount`` even for the final, partial flush from
        # ``__exit__``.)
        items_to_send = self._items_buffer
        response = self._client.batch_write_item(
            RequestItems={self._table_name: items_to_send})
        unprocessed_items = response['UnprocessedItems']

        if unprocessed_items and unprocessed_items[self._table_name]:
            # Any unprocessed_items are immediately added to the
            # next batch we send.
            self._items_buffer = unprocessed_items[self._table_name]
        else:
            self._items_buffer = []
        logger.debug("Batch write sent %s, unprocessed: %s",
                     len(items_to_send), len(self._items_buffer))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # When we exit, we need to keep flushing whatever's left
        # until there's nothing left in our items buffer.
        while self._items_buffer:
            self._flush()
+import copy +from collections import Mapping, MutableSequence + +from boto3.dynamodb.types import TypeSerializer, TypeDeserializer +from boto3.dynamodb.conditions import ConditionBase +from boto3.dynamodb.conditions import ConditionExpressionBuilder +from boto3.docs.utils import DocumentModifiedShape + + +def register_high_level_interface(base_classes, **kwargs): + base_classes.insert(0, DynamoDBHighLevelResource) + + +def copy_dynamodb_params(params, **kwargs): + return copy.deepcopy(params) + + +class DynamoDBHighLevelResource(object): + def __init__(self, *args, **kwargs): + super(DynamoDBHighLevelResource, self).__init__(*args, **kwargs) + + # Apply handler that creates a copy of the user provided dynamodb + # item such that it can be modified. + self.meta.client.meta.events.register( + 'provide-client-params.dynamodb', + copy_dynamodb_params, + unique_id='dynamodb-create-params-copy' + ) + + self._injector = TransformationInjector() + # Apply the handler that generates condition expressions including + # placeholders. + self.meta.client.meta.events.register( + 'before-parameter-build.dynamodb', + self._injector.inject_condition_expressions, + unique_id='dynamodb-condition-expression') + + # Apply the handler that serializes the request from python + # types to dynamodb types. + self.meta.client.meta.events.register( + 'before-parameter-build.dynamodb', + self._injector.inject_attribute_value_input, + unique_id='dynamodb-attr-value-input') + + # Apply the handler that deserializes the response from dynamodb + # types to python types. + self.meta.client.meta.events.register( + 'after-call.dynamodb', + self._injector.inject_attribute_value_output, + unique_id='dynamodb-attr-value-output') + + # Apply the documentation customizations to account for + # the transformations. + attr_value_shape_docs = DocumentModifiedShape( + 'AttributeValue', + new_type='valid DynamoDB type', + new_description=( + '- The value of the attribute. 
The valid value types are ' + 'listed in the ' + ':ref:`DynamoDB Reference Guide`.' + ), + new_example_value = ( + '\'string\'|123|Binary(b\'bytes\')|True|None|set([\'string\'])|' + 'set([123])|set([Binary(b\'bytes\')])|[]|{}') + ) + + key_expression_shape_docs = DocumentModifiedShape( + 'KeyExpression', + new_type=( + 'condition from :py:class:`boto3.dynamodb.conditions.Key` ' + 'method' + ), + new_description=( + 'The condition(s) a key(s) must meet. Valid conditions are ' + 'listed in the ' + ':ref:`DynamoDB Reference Guide`.' + ), + new_example_value='Key(\'mykey\').eq(\'myvalue\')' + ) + + cond_expression_shape_docs = DocumentModifiedShape( + 'ConditionExpression', + new_type=( + 'condition from :py:class:`boto3.dynamodb.conditions.Attr` ' + 'method' + ), + new_description=( + 'The condition(s) an attribute(s) must meet. Valid conditions ' + 'are listed in the ' + ':ref:`DynamoDB Reference Guide`.' + ), + new_example_value='Attr(\'myattribute\').eq(\'myvalue\')' + ) + + self.meta.client.meta.events.register( + 'docs.*.dynamodb.*.complete-section', + attr_value_shape_docs.replace_documentation_for_matching_shape, + unique_id='dynamodb-attr-value-docs') + + self.meta.client.meta.events.register( + 'docs.*.dynamodb.*.complete-section', + key_expression_shape_docs.replace_documentation_for_matching_shape, + unique_id='dynamodb-key-expression-docs') + + self.meta.client.meta.events.register( + 'docs.*.dynamodb.*.complete-section', + cond_expression_shape_docs.replace_documentation_for_matching_shape, + unique_id='dynamodb-cond-expression-docs') + + +class TransformationInjector(object): + """Injects the transformations into the user provided parameters.""" + def __init__(self, transformer=None, condition_builder=None, + serializer=None, deserializer=None): + self._transformer = transformer + if transformer is None: + self._transformer = ParameterTransformer() + + self._condition_builder = condition_builder + if condition_builder is None: + self._condition_builder = 
ConditionExpressionBuilder() + + self._serializer = serializer + if serializer is None: + self._serializer = TypeSerializer() + + self._deserializer = deserializer + if deserializer is None: + self._deserializer = TypeDeserializer() + + def inject_condition_expressions(self, params, model, **kwargs): + """Injects the condition expression transformation into the parameters + + This injection includes transformations for ConditionExpression shapes + and KeyExpression shapes. It also handles any placeholder names and + values that are generated when transforming the condition expressions. + """ + self._condition_builder.reset() + generated_names = {} + generated_values = {} + + # Create and apply the Condition Expression transformation. + transformation = ConditionExpressionTransformation( + self._condition_builder, + placeholder_names=generated_names, + placeholder_values=generated_values, + is_key_condition=False + ) + self._transformer.transform( + params, model.input_shape, transformation, + 'ConditionExpression') + + # Create and apply the Key Condition Expression transformation. + transformation = ConditionExpressionTransformation( + self._condition_builder, + placeholder_names=generated_names, + placeholder_values=generated_values, + is_key_condition=True + ) + self._transformer.transform( + params, model.input_shape, transformation, + 'KeyExpression') + + expr_attr_names_input = 'ExpressionAttributeNames' + expr_attr_values_input = 'ExpressionAttributeValues' + + # Now that all of the condition expression transformation are done, + # update the placeholder dictionaries in the request. 
+
+        if expr_attr_names_input in params:
+            params[expr_attr_names_input].update(generated_names)
+        else:
+            if generated_names:
+                params[expr_attr_names_input] = generated_names
+
+        if expr_attr_values_input in params:
+            params[expr_attr_values_input].update(generated_values)
+        else:
+            if generated_values:
+                params[expr_attr_values_input] = generated_values
+
+    def inject_attribute_value_input(self, params, model, **kwargs):
+        """Injects DynamoDB serialization into parameter input"""
+        self._transformer.transform(
+            params, model.input_shape, self._serializer.serialize,
+            'AttributeValue')
+
+    def inject_attribute_value_output(self, parsed, model, **kwargs):
+        """Injects DynamoDB deserialization into responses"""
+        self._transformer.transform(
+            parsed, model.output_shape, self._deserializer.deserialize,
+            'AttributeValue')
+
+
+class ConditionExpressionTransformation(object):
+    """Provides a transformation for condition expressions
+
+    The ``ParameterTransformer`` class can call this class directly
+    to transform the condition expressions in the parameters provided.
+    """
+    def __init__(self, condition_builder, placeholder_names,
+                 placeholder_values, is_key_condition=False):
+        self._condition_builder = condition_builder
+        self._placeholder_names = placeholder_names
+        self._placeholder_values = placeholder_values
+        self._is_key_condition = is_key_condition
+
+    def __call__(self, value):
+        if isinstance(value, ConditionBase):
+            # Create a conditional expression string with placeholders
+            # for the provided condition.
+            built_expression = self._condition_builder.build_expression(
+                value, is_key_condition=self._is_key_condition)
+
+            self._placeholder_names.update(
+                built_expression.attribute_name_placeholders)
+            self._placeholder_values.update(
+                built_expression.attribute_value_placeholders)
+
+            return built_expression.condition_expression
+        # Use the user provided value if it is not a ConditionBase object.
+ return value + + +class ParameterTransformer(object): + """Transforms the input to and output from botocore based on shape""" + + def transform(self, params, model, transformation, target_shape): + """Transforms the dynamodb input to or output from botocore + + It applies a specified transformation whenever a specific shape name + is encountered while traversing the parameters in the dictionary. + + :param params: The parameters structure to transform. + :param model: The operation model. + :param transformation: The function to apply the parameter + :param target_shape: The name of the shape to apply the + transformation to + """ + self._transform_parameters( + model, params, transformation, target_shape) + + def _transform_parameters(self, model, params, transformation, + target_shape): + type_name = model.type_name + if type_name in ['structure', 'map', 'list']: + getattr(self, '_transform_%s' % type_name)( + model, params, transformation, target_shape) + + def _transform_structure(self, model, params, transformation, + target_shape): + if not isinstance(params, Mapping): + return + for param in params: + if param in model.members: + member_model = model.members[param] + member_shape = member_model.name + if member_shape == target_shape: + params[param] = transformation(params[param]) + else: + self._transform_parameters( + member_model, params[param], transformation, + target_shape) + + def _transform_map(self, model, params, transformation, target_shape): + if not isinstance(params, Mapping): + return + value_model = model.value + value_shape = value_model.name + for key, value in params.items(): + if value_shape == target_shape: + params[key] = transformation(value) + else: + self._transform_parameters( + value_model, params[key], transformation, target_shape) + + def _transform_list(self, model, params, transformation, target_shape): + if not isinstance(params, MutableSequence): + return + member_model = model.member + member_shape = member_model.name + 
for i, item in enumerate(params): + if member_shape == target_shape: + params[i] = transformation(item) + else: + self._transform_parameters( + member_model, params[i], transformation, target_shape) diff --git a/boto3/dynamodb/types.py b/boto3/dynamodb/types.py new file mode 100644 index 0000000..56ee1f8 --- /dev/null +++ b/boto3/dynamodb/types.py @@ -0,0 +1,297 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from collections import Mapping, Set +from decimal import Decimal, Context, Clamped +from decimal import Overflow, Inexact, Underflow, Rounded + +from botocore.compat import six + + +STRING = 'S' +NUMBER = 'N' +BINARY = 'B' +STRING_SET = 'SS' +NUMBER_SET = 'NS' +BINARY_SET = 'BS' +NULL = 'NULL' +BOOLEAN = 'BOOL' +MAP = 'M' +LIST = 'L' + + +DYNAMODB_CONTEXT = Context( + Emin=-128, Emax=126, prec=38, + traps=[Clamped, Overflow, Inexact, Rounded, Underflow]) + + +BINARY_TYPES = (bytearray, six.binary_type) + + +class Binary(object): + """A class for representing Binary in dynamodb + + Especially for Python 2, use this class to explicitly specify + binary data for item in DynamoDB. It is essentially a wrapper around + binary. Unicode and Python 3 string types are not allowed. + """ + def __init__(self, value): + if not isinstance(value, BINARY_TYPES): + raise TypeError('Value must be of the following types: %s.' 
% + ', '.join([str(t) for t in BINARY_TYPES])) + self.value = value + + def __eq__(self, other): + if isinstance(other, Binary): + return self.value == other.value + return self.value == other + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return 'Binary(%r)' % self.value + + def __str__(self): + return self.value + + def __hash__(self): + return hash(self.value) + + +class TypeSerializer(object): + """This class serializes Python data types to DynamoDB types.""" + def serialize(self, value): + """The method to serialize the Python data types. + + :param value: A python value to be serialized to DynamoDB. Here are + the various conversions: + + Python DynamoDB + ------ -------- + None {'NULL': True} + True/False {'BOOL': True/False} + int/Decimal {'N': str(value)} + string {'S': string} + Binary/bytearray/bytes (py3 only) {'B': bytes} + set([int/Decimal]) {'NS': [str(value)]} + set([string]) {'SS': [string]) + set([Binary/bytearray/bytes]) {'BS': [bytes]} + list {'L': list} + dict {'M': dict} + + For types that involve numbers, it is recommended that ``Decimal`` + objects are used to be able to round-trip the Python type. + For types that involve binary, it is recommended that ``Binary`` + objects are used to be able to round-trip the Python type. + + :rtype: dict + :returns: A dictionary that represents a dynamoDB data type. These + dictionaries can be directly passed to botocore methods. 
+ """ + dynamodb_type = self._get_dynamodb_type(value) + serializer = getattr(self, '_serialize_%s' % dynamodb_type.lower()) + return {dynamodb_type: serializer(value)} + + def _get_dynamodb_type(self, value): + dynamodb_type = None + + if self._is_null(value): + dynamodb_type = NULL + + elif self._is_boolean(value): + dynamodb_type = BOOLEAN + + elif self._is_number(value): + dynamodb_type = NUMBER + + elif self._is_string(value): + dynamodb_type = STRING + + elif self._is_binary(value): + dynamodb_type = BINARY + + elif self._is_type_set(value, self._is_number): + dynamodb_type = NUMBER_SET + + elif self._is_type_set(value, self._is_string): + dynamodb_type = STRING_SET + + elif self._is_type_set(value, self._is_binary): + dynamodb_type = BINARY_SET + + elif self._is_map(value): + dynamodb_type = MAP + + elif self._is_list(value): + dynamodb_type = LIST + + else: + msg = 'Unsupported type "%s" for value "%s"' % (type(value), value) + raise TypeError(msg) + + return dynamodb_type + + def _is_null(self, value): + if value is None: + return True + return False + + def _is_boolean(self, value): + if isinstance(value, bool): + return True + return False + + def _is_number(self, value): + if isinstance(value, (six.integer_types, Decimal)): + return True + elif isinstance(value, float): + raise TypeError( + 'Float types are not supported. 
Use Decimal types instead.') + return False + + def _is_string(self, value): + if isinstance(value, six.string_types): + return True + return False + + def _is_binary(self, value): + if isinstance(value, Binary): + return True + elif isinstance(value, bytearray): + return True + elif six.PY3 and isinstance(value, six.binary_type): + return True + return False + + def _is_set(self, value): + if isinstance(value, Set): + return True + return False + + def _is_type_set(self, value, type_validator): + if self._is_set(value): + if False not in map(type_validator, value): + return True + return False + + def _is_map(self, value): + if isinstance(value, Mapping): + return True + return False + + def _is_list(self, value): + if isinstance(value, list): + return True + return False + + def _serialize_null(self, value): + return True + + def _serialize_bool(self, value): + return value + + def _serialize_n(self, value): + number = str(DYNAMODB_CONTEXT.create_decimal(value)) + if number in ['Infinity', 'NaN']: + raise TypeError('Infinity and NaN not supported') + return number + + def _serialize_s(self, value): + return value + + def _serialize_b(self, value): + if isinstance(value, Binary): + value = value.value + return value + + def _serialize_ss(self, value): + return [self._serialize_s(s) for s in value] + + def _serialize_ns(self, value): + return [self._serialize_n(n) for n in value] + + def _serialize_bs(self, value): + return [self._serialize_b(b) for b in value] + + def _serialize_l(self, value): + return [self.serialize(v) for v in value] + + def _serialize_m(self, value): + return dict([(k, self.serialize(v)) for k, v in value.items()]) + + +class TypeDeserializer(object): + """This class deserializes DynamoDB types to Python types.""" + def deserialize(self, value): + """The method to deserialize the DynamoDB data types. + + :param value: A DynamoDB value to be deserialized to a pythonic value. 
+ Here are the various conversions: + + DynamoDB Python + -------- ------ + {'NULL': True} None + {'BOOL': True/False} True/False + {'N': str(value)} Decimal(str(value)) + {'S': string} string + {'B': bytes} Binary(bytes) + {'NS': [str(value)]} set([Decimal(str(value))]) + {'SS': [string]} set([string]) + {'BS': [bytes]} set([bytes]) + {'L': list} list + {'M': dict} dict + + :returns: The pythonic value of the DynamoDB type. + """ + + if not value: + raise TypeError('Value must be a nonempty dictionary whose key ' + 'is a valid dynamodb type.') + dynamodb_type = list(value.keys())[0] + try: + deserializer = getattr( + self, '_deserialize_%s' % dynamodb_type.lower()) + except AttributeError: + raise TypeError( + 'Dynamodb type %s is not supported' % dynamodb_type) + return deserializer(value[dynamodb_type]) + + def _deserialize_null(self, value): + return None + + def _deserialize_bool(self, value): + return value + + def _deserialize_n(self, value): + return DYNAMODB_CONTEXT.create_decimal(value) + + def _deserialize_s(self, value): + return value + + def _deserialize_b(self, value): + return Binary(value) + + def _deserialize_ns(self, value): + return set(map(self._deserialize_n, value)) + + def _deserialize_ss(self, value): + return set(map(self._deserialize_s, value)) + + def _deserialize_bs(self, value): + return set(map(self._deserialize_b, value)) + + def _deserialize_l(self, value): + return [self.deserialize(v) for v in value] + + def _deserialize_m(self, value): + return dict([(k, self.deserialize(v)) for k, v in value.items()]) diff --git a/boto3/ec2/__init__.py b/boto3/ec2/__init__.py new file mode 100644 index 0000000..c89416d --- /dev/null +++ b/boto3/ec2/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/boto3/ec2/createtags.py b/boto3/ec2/createtags.py new file mode 100644 index 0000000..14e0971 --- /dev/null +++ b/boto3/ec2/createtags.py @@ -0,0 +1,40 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + + +def inject_create_tags(event_name, class_attributes, **kwargs): + """This injects a custom create_tags method onto the ec2 service resource + + This is needed because the resource model is not able to express + creating multiple tag resources based on the fact you can apply a set + of tags to multiple ec2 resources. + """ + class_attributes['create_tags'] = create_tags + + +def create_tags(self, **kwargs): + # Call the client method + self.meta.client.create_tags(**kwargs) + resources = kwargs.get('Resources', []) + tags = kwargs.get('Tags', []) + tag_resources = [] + + # Generate all of the tag resources that just were created with the + # preceding client call. + for resource in resources: + for tag in tags: + # Add each tag from the tag set for each resource to the list + # that is returned by the method. 
+
+            tag_resource = self.Tag(resource, tag['Key'], tag['Value'])
+            tag_resources.append(tag_resource)
+    return tag_resources
diff --git a/boto3/exceptions.py b/boto3/exceptions.py
new file mode 100644
index 0000000..b9825b7
--- /dev/null
+++ b/boto3/exceptions.py
@@ -0,0 +1,60 @@
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+class ResourceLoadException(Exception):
+    pass
+
+
+class NoVersionFound(Exception):
+    pass
+
+
+class RetriesExceededError(Exception):
+    def __init__(self, last_exception, msg='Max Retries Exceeded'):
+        super(RetriesExceededError, self).__init__(msg)
+        self.last_exception = last_exception
+
+
+class S3TransferFailedError(Exception):
+    pass
+
+
+class S3UploadFailedError(Exception):
+    pass
+
+
+class DynamoDBOperationNotSupportedError(Exception):
+    """Raised for operations that are not supported for an operand"""
+    def __init__(self, operation, value):
+        msg = (
+            '%s operation cannot be applied to value %s of type %s directly. '
+            'Must use AttributeBase object methods (i.e. Attr().eq()). to '
+            'generate ConditionBase instances first.' %
+            (operation, value, type(value)))
+        Exception.__init__(self, msg)
+
+# FIXME: Backward compatibility
+DynanmoDBOperationNotSupportedError = DynamoDBOperationNotSupportedError
+
+class DynamoDBNeedsConditionError(Exception):
+    """Raised when input is not a condition"""
+    def __init__(self, value):
+        msg = (
+            'Expecting a ConditionBase object. Got %s of type %s. 
' + 'Use AttributeBase object methods (i.e. Attr().eq()). to ' + 'generate ConditionBase instances.' % (value, type(value))) + Exception.__init__(self, msg) + + +class DynamoDBNeedsKeyConditionError(Exception): + pass diff --git a/boto3/resources/__init__.py b/boto3/resources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/boto3/resources/action.py b/boto3/resources/action.py new file mode 100644 index 0000000..f65dd0e --- /dev/null +++ b/boto3/resources/action.py @@ -0,0 +1,198 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import logging + +from botocore import xform_name + +from .params import create_request_parameters +from .response import RawHandler, ResourceHandler + + +logger = logging.getLogger(__name__) + + +class ServiceAction(object): + """ + A class representing a callable action on a resource, for example + ``sqs.get_queue_by_name(...)`` or ``s3.Bucket('foo').delete()``. + The action may construct parameters from existing resource identifiers + and may return either a raw response or a new resource instance. + + :type action_model: :py:class`~boto3.resources.model.Action` + :param action_model: The action model. + + :type factory: ResourceFactory + :param factory: The factory that created the resource class to which + this action is attached. 
+ + :type service_context: :py:class:`~boto3.utils.ServiceContext` + :param service_context: Context about the AWS service + """ + def __init__(self, action_model, factory=None, service_context=None): + self._action_model = action_model + + # In the simplest case we just return the response, but if a + # resource is defined, then we must create these before returning. + resource_response_model = action_model.resource + if resource_response_model: + self._response_handler = ResourceHandler( + search_path=resource_response_model.path, + factory=factory, resource_model=resource_response_model, + service_context=service_context, + operation_name=action_model.request.operation + ) + else: + self._response_handler = RawHandler(action_model.path) + + def __call__(self, parent, *args, **kwargs): + """ + Perform the action's request operation after building operation + parameters and build any defined resources from the response. + + :type parent: :py:class:`~boto3.resources.base.ServiceResource` + :param parent: The resource instance to which this action is attached. + :rtype: dict or ServiceResource or list(ServiceResource) + :return: The response, either as a raw dict or resource instance(s). + """ + operation_name = xform_name(self._action_model.request.operation) + + # First, build predefined params and then update with the + # user-supplied kwargs, which allows overriding the pre-built + # params if needed. + params = create_request_parameters(parent, self._action_model.request) + params.update(kwargs) + + logger.info('Calling %s:%s with %r', parent.meta.service_name, + operation_name, params) + + response = getattr(parent.meta.client, operation_name)(**params) + + logger.debug('Response: %r', response) + + return self._response_handler(parent, params, response) + + +class BatchAction(ServiceAction): + """ + An action which operates on a batch of items in a collection, typically + a single page of results from the collection's underlying service + operation call. 
For example, this allows you to delete up to 999 + S3 objects in a single operation rather than calling ``.delete()`` on + each one individually. + + :type action_model: :py:class`~boto3.resources.model.Action` + :param action_model: The action model. + + :type factory: ResourceFactory + :param factory: The factory that created the resource class to which + this action is attached. + + :type service_context: :py:class:`~boto3.utils.ServiceContext` + :param service_context: Context about the AWS service + """ + def __call__(self, parent, *args, **kwargs): + """ + Perform the batch action's operation on every page of results + from the collection. + + :type parent: :py:class:`~boto3.resources.collection.ResourceCollection` + :param parent: The collection iterator to which this action + is attached. + :rtype: list(dict) + :return: A list of low-level response dicts from each call. + """ + service_name = None + client = None + responses = [] + operation_name = xform_name(self._action_model.request.operation) + + # Unlike the simple action above, a batch action must operate + # on batches (or pages) of items. So we get each page, construct + # the necessary parameters and call the batch operation. + for page in parent.pages(): + params = {} + for resource in page: + # There is no public interface to get a service name + # or low-level client from a collection, so we get + # these from the first resource in the collection. + if service_name is None: + service_name = resource.meta.service_name + if client is None: + client = resource.meta.client + + create_request_parameters( + resource, self._action_model.request, params=params) + + if not params: + # There are no items, no need to make a call. 
+ break + + params.update(kwargs) + + logger.info('Calling %s:%s with %r', + service_name, operation_name, params) + + response = getattr(client, operation_name)(**params) + + logger.debug('Response: %r', response) + + responses.append( + self._response_handler(parent, params, response)) + + return responses + + +class WaiterAction(object): + """ + A class representing a callable waiter action on a resource, for example + ``s3.Bucket('foo').wait_until_bucket_exists()``. + The waiter action may construct parameters from existing resource + identifiers. + + :type waiter_model: :py:class`~boto3.resources.model.Waiter` + :param waiter_model: The action waiter. + :type waiter_resource_name: string + :param waiter_resource_name: The name of the waiter action for the + resource. It usually begins with a + ``wait_until_`` + """ + def __init__(self, waiter_model, waiter_resource_name): + self._waiter_model = waiter_model + self._waiter_resource_name = waiter_resource_name + + def __call__(self, parent, *args, **kwargs): + """ + Perform the wait operation after building operation + parameters. + + :type parent: :py:class:`~boto3.resources.base.ServiceResource` + :param parent: The resource instance to which this action is attached. + """ + client_waiter_name = xform_name(self._waiter_model.waiter_name) + + # First, build predefined params and then update with the + # user-supplied kwargs, which allows overriding the pre-built + # params if needed. 
class ResourceMeta(object):
    """
    An object containing metadata about a resource.

    :type service_name: string
    :param service_name: The service shortname, e.g. ``'s3'``
    :type identifiers: list
    :param identifiers: Identifier names defined for the resource
    :type client: :py:class:`~botocore.client.BaseClient`
    :param client: Low-level Botocore client
    :type data: dict
    :param data: Loaded resource data attributes, if any
    """
    def __init__(self, service_name, identifiers=None, client=None,
                 data=None, resource_model=None):
        #: (``string``) The service name, e.g. 's3'
        self.service_name = service_name

        if identifiers is None:
            identifiers = []
        #: (``list``) List of identifier names
        self.identifiers = identifiers

        #: (:py:class:`~botocore.client.BaseClient`) Low-level Botocore client
        self.client = client
        #: (``dict``) Loaded resource data attributes
        self.data = data

        # The resource model for that resource
        self.resource_model = resource_model

    def __repr__(self):
        return 'ResourceMeta(\'{0}\', identifiers={1})'.format(
            self.service_name, self.identifiers)

    def __eq__(self, other):
        # Two metas are equal if their components are all equal.
        # Classes are compared by name rather than with isinstance
        # because resource classes are generated dynamically at runtime.
        if other.__class__.__name__ != self.__class__.__name__:
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 (supported by this package) does not derive ``!=``
        # from ``__eq__``, so define it explicitly as the negation to
        # avoid falling back to identity comparison.
        return not self.__eq__(other)

    def copy(self):
        """
        Create a copy of this metadata object.
        """
        params = self.__dict__.copy()
        service_name = params.pop('service_name')
        return ResourceMeta(service_name, **params)


class ServiceResource(object):
    """
    A base class for resources.

    :type client: botocore.client
    :param client: A low-level Botocore client instance
    """

    meta = None
    """
    Stores metadata about this resource instance, such as the
    ``service_name``, the low-level ``client`` and any cached ``data``
    from when the instance was hydrated. For example::

        # Get a low-level client from a resource instance
        client = resource.meta.client
        response = client.operation(Param='foo')

        # Print the resource instance's service short name
        print(resource.meta.service_name)

    See :py:class:`ResourceMeta` for more information.
    """

    def __init__(self, *args, **kwargs):
        # Always work on a copy of meta, otherwise we would affect other
        # instances of the same subclass.
        self.meta = self.meta.copy()

        # Create a default client if none was passed
        if kwargs.get('client') is not None:
            self.meta.client = kwargs.get('client')
        else:
            self.meta.client = boto3.client(self.meta.service_name)

        # Allow setting identifiers as positional arguments in the order
        # in which they were defined in the ResourceJSON.
        for i, value in enumerate(args):
            setattr(self, '_' + self.meta.identifiers[i], value)

        # Allow setting identifiers via keyword arguments. Here we need
        # extra logic to ignore other keyword arguments like ``client``.
        for name, value in kwargs.items():
            if name == 'client':
                continue

            if name not in self.meta.identifiers:
                raise ValueError('Unknown keyword argument: {0}'.format(name))

            setattr(self, '_' + name, value)

        # Validate that all identifiers have been set. The identifier
        # properties (created by the resource factory) return ``None``
        # when the backing ``_name`` attribute was never assigned.
        for identifier in self.meta.identifiers:
            if getattr(self, identifier) is None:
                raise ValueError(
                    'Required parameter {0} not set'.format(identifier))

    def __repr__(self):
        identifiers = []
        for identifier in self.meta.identifiers:
            identifiers.append('{0}={1}'.format(
                identifier, repr(getattr(self, identifier))))
        return "{0}({1})".format(
            self.__class__.__name__,
            ', '.join(identifiers),
        )

    def __eq__(self, other):
        # Should be instances of the same resource class
        if other.__class__.__name__ != self.__class__.__name__:
            return False

        # Each of the identifiers should have the same value in both
        # instances, e.g. two buckets need the same name to be equal.
        for identifier in self.meta.identifiers:
            if getattr(self, identifier) != getattr(other, identifier):
                return False

        return True

    def __ne__(self, other):
        # Explicit negation for Python 2, which does not derive ``!=``
        # from ``__eq__``.
        return not self.__eq__(other)
class ResourceCollection(object):
    """
    Represents a collection of resources, which can be iterated through,
    optionally with filtering. Collections automatically handle pagination
    for you.

    See :ref:`guide_collections` for a high-level overview of collections,
    including when remote service requests are performed.

    :type model: :py:class:`~boto3.resources.model.Collection`
    :param model: Collection model
    :type parent: :py:class:`~boto3.resources.base.ServiceResource`
    :param parent: The collection's parent resource
    :type handler: :py:class:`~boto3.resources.response.ResourceHandler`
    :param handler: The resource response handler used to create resource
                    instances
    """
    def __init__(self, model, parent, handler, **kwargs):
        self._model = model
        self._parent = parent
        # Snake-cased client method name, e.g. ``ListObjects`` ->
        # ``list_objects``.
        self._py_operation_name = xform_name(
            model.request.operation)
        self._handler = handler
        # Accumulated query parameters; ``limit`` and ``page_size`` are
        # special keys consumed by ``pages()``, everything else is passed
        # through to the service operation.
        self._params = kwargs

    def __repr__(self):
        return '{0}({1}, {2})'.format(
            self.__class__.__name__,
            self._parent,
            '{0}.{1}'.format(
                self._parent.meta.service_name,
                self._model.resource.type
            )
        )

    def __iter__(self):
        """
        A generator which yields resource instances after doing the
        appropriate service operation calls and handling any pagination
        on your behalf.

        Page size, item limit, and filter parameters are applied
        if they have previously been set.

            >>> bucket = s3.Bucket('boto3')
            >>> for obj in bucket.objects.all():
            ...     print(obj.key)
            'key1'
            'key2'

        """
        limit = self._params.get('limit', None)

        count = 0
        for page in self.pages():
            for item in page:
                yield item

                # If the limit is set and has been reached, then
                # we stop processing items here. ``pages()`` applies the
                # same limit itself; this check simply ends iteration as
                # soon as the final item has been yielded.
                count += 1
                if limit is not None and count >= limit:
                    return

    def _clone(self, **kwargs):
        """
        Create a clone of this collection. This is used by the methods
        below to provide a chainable interface that returns copies
        rather than the original. This allows things like:

            >>> base = collection.filter(Param1=1)
            >>> query1 = base.filter(Param2=2)
            >>> query2 = base.filter(Param3=3)
            >>> query1.params
            {'Param1': 1, 'Param2': 2}
            >>> query2.params
            {'Param1': 1, 'Param3': 3}

        :rtype: :py:class:`ResourceCollection`
        :return: A clone of this resource collection
        """
        # Deep-copy so that mutating merged list/dict values in the clone
        # can never leak back into this instance's parameters.
        params = copy.deepcopy(self._params)
        merge_dicts(params, kwargs, append_lists=True)
        clone = self.__class__(self._model, self._parent,
                               self._handler, **params)
        return clone

    def pages(self):
        """
        A generator which yields pages of resource instances after
        doing the appropriate service operation calls and handling
        any pagination on your behalf. Non-paginated calls will
        return a single page of items.

        Page size, item limit, and filter parameters are applied
        if they have previously been set.

            >>> bucket = s3.Bucket('boto3')
            >>> for page in bucket.objects.pages():
            ...     for obj in page:
            ...         print(obj.key)
            'key1'
            'key2'

        :rtype: list(:py:class:`~boto3.resources.base.ServiceResource`)
        :return: List of resource instances
        """
        client = self._parent.meta.client
        # Work on a copy so popping the pagination-control keys does not
        # mutate ``self._params`` (the collection stays reusable).
        cleaned_params = self._params.copy()
        limit = cleaned_params.pop('limit', None)
        page_size = cleaned_params.pop('page_size', None)
        params = create_request_parameters(
            self._parent, self._model.request)
        merge_dicts(params, cleaned_params, append_lists=True)

        # Is this a paginated operation? If so, we need to get an
        # iterator for the various pages. If not, then we simply
        # call the operation and return the result as a single
        # page in a list. For non-paginated results, we just ignore
        # the page size parameter.
        if client.can_paginate(self._py_operation_name):
            logger.info('Calling paginated %s:%s with %r',
                        self._parent.meta.service_name,
                        self._py_operation_name, params)
            paginator = client.get_paginator(self._py_operation_name)
            pages = paginator.paginate(
                PaginationConfig={
                    'MaxItems': limit, 'PageSize': page_size}, **params)
        else:
            logger.info('Calling %s:%s with %r',
                        self._parent.meta.service_name,
                        self._py_operation_name, params)
            pages = [getattr(client, self._py_operation_name)(**params)]

        # Now that we have a page iterator or single page of results
        # we start processing and yielding individual items.
        count = 0
        for page in pages:
            page_items = []
            for item in self._handler(self._parent, params, page):
                page_items.append(item)

                # If the limit is set and has been reached, then
                # we stop processing items here. Note that ``break`` (not
                # ``return``) is used so the partially-filled page is
                # still yielded below.
                count += 1
                if limit is not None and count >= limit:
                    break

            yield page_items

            # Stop reading pages if we've reached our limit
            if limit is not None and count >= limit:
                break

    def all(self):
        """
        Get all items from the collection, optionally with a custom
        page size and item count limit.

        This method returns an iterable generator which yields
        individual resource instances. Example use::

            # Iterate through items
            >>> for queue in sqs.queues.all():
            ...     print(queue.url)
            'https://url1'
            'https://url2'

            # Convert to list
            >>> queues = list(sqs.queues.all())
            >>> len(queues)
            2
        """
        return self._clone()

    def filter(self, **kwargs):
        """
        Get items from the collection, passing keyword arguments along
        as parameters to the underlying service operation, which are
        typically used to filter the results.

        This method returns an iterable generator which yields
        individual resource instances. Example use::

            # Iterate through items
            >>> for queue in sqs.queues.filter(Param='foo'):
            ...     print(queue.url)
            'https://url1'
            'https://url2'

            # Convert to list
            >>> queues = list(sqs.queues.filter(Param='foo'))
            >>> len(queues)
            2

        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(**kwargs)

    def limit(self, count):
        """
        Return at most this many resources.

            >>> for bucket in s3.buckets.limit(5):
            ...     print(bucket.name)
            'bucket1'
            'bucket2'
            'bucket3'
            'bucket4'
            'bucket5'

        :type count: int
        :param count: Return no more than this many items
        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(limit=count)

    def page_size(self, count):
        """
        Fetch at most this many resources per service request.

            >>> for obj in s3.Bucket('boto3').objects.page_size(100):
            ...     print(obj.key)

        :type count: int
        :param count: Fetch this many items per request
        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(page_size=count)
class CollectionManager(object):
    """
    Provides access to resource collection instances, which can be
    iterated and filtered. For convenience, a manager also exposes the
    same chainable entry points found on resource collections, such as
    :py:meth:`~ResourceCollection.all` and
    :py:meth:`~ResourceCollection.filter`.

    Get all items::

        >>> for bucket in s3.buckets.all():
        ...     print(bucket.name)

    Get only some items via filtering::

        >>> for queue in sqs.queues.filter(QueueNamePrefix='AWS'):
        ...     print(queue.url)

    Get whole pages of items:

        >>> for page in s3.Bucket('boto3').objects.pages():
        ...     for obj in page:
        ...         print(obj.key)

    A collection manager is not iterable. You **must** call one of the
    methods that return a :py:class:`ResourceCollection` before trying
    to iterate, slice, or convert to a list.

    See the :ref:`guide_collections` guide for a high-level overview
    of collections, including when remote service requests are performed.

    :type collection_model: :py:class:`~boto3.resources.model.Collection`
    :param model: Collection model

    :type parent: :py:class:`~boto3.resources.base.ServiceResource`
    :param parent: The collection's parent resource

    :type factory: :py:class:`~boto3.resources.factory.ResourceFactory`
    :param factory: The resource factory to create new resources

    :type service_context: :py:class:`~boto3.utils.ServiceContext`
    :param service_context: Context about the AWS service
    """
    # Iterator class instantiated by ``iterator()``. Subclasses generated
    # by the collection factory replace this with a service-specific type.
    _collection_cls = ResourceCollection

    def __init__(self, collection_model, parent, factory, service_context):
        self._model = collection_model
        self._parent = parent
        # The handler turns raw operation responses into resource
        # instances for the collection's resource type.
        self._handler = ResourceHandler(
            search_path=collection_model.resource.path,
            factory=factory,
            resource_model=collection_model.resource,
            service_context=service_context,
            operation_name=collection_model.request.operation
        )

    def __repr__(self):
        qualified_type = '{0}.{1}'.format(
            self._parent.meta.service_name,
            self._model.resource.type)
        return '{0}({1}, {2})'.format(
            self.__class__.__name__, self._parent, qualified_type)

    def iterator(self, **kwargs):
        """
        Get a resource collection iterator from this manager.

        :rtype: :py:class:`ResourceCollection`
        :return: An iterable representing the collection of resources
        """
        return self._collection_cls(
            self._model, self._parent, self._handler, **kwargs)

    # The following methods proxy the collection's chainable entry points
    # through a fresh iterator; each borrows the docstring of the
    # collection method it forwards to.
    def all(self):
        return self.iterator()
    all.__doc__ = ResourceCollection.all.__doc__

    def filter(self, **kwargs):
        return self.iterator(**kwargs)
    filter.__doc__ = ResourceCollection.filter.__doc__

    def limit(self, count):
        return self.iterator(limit=count)
    limit.__doc__ = ResourceCollection.limit.__doc__

    def page_size(self, count):
        return self.iterator(page_size=count)
    page_size.__doc__ = ResourceCollection.page_size.__doc__

    def pages(self):
        return self.iterator().pages()
    pages.__doc__ = ResourceCollection.pages.__doc__
+ """ + attrs = {} + collection_name = collection_model.name + + # Create the batch actions for a collection + self._load_batch_actions( + attrs, resource_name, collection_model, + service_context.service_model, event_emitter) + self._load_documented_collection_methods( + attrs=attrs, resource_name=resource_name, + collection_model=collection_model, + service_model=service_context.service_model, + event_emitter=event_emitter) + + if service_context.service_name == resource_name: + cls_name = '{0}.{1}Collection'.format( + service_context.service_name, collection_name) + else: + cls_name = '{0}.{1}.{2}Collection'.format( + service_context.service_name, resource_name, collection_name) + + collection_cls = type(str(cls_name), (ResourceCollection,), + attrs) + + # Add the documentation to the collection methods + self._load_documented_collection_methods( + attrs=attrs, resource_name=resource_name, + collection_model=collection_model, + service_model=service_context.service_model, + event_emitter=event_emitter) + attrs['_collection_cls'] = collection_cls + cls_name += 'Manager' + + return type(str(cls_name), (CollectionManager,), attrs) + + def _load_batch_actions(self, attrs, resource_name, collection_model, + service_model, event_emitter): + """ + Batch actions on the collection become methods on both + the collection manager and iterators. + """ + for action_model in collection_model.batch_actions: + snake_cased = xform_name(action_model.name) + attrs[snake_cased] = self._create_batch_action( + resource_name, snake_cased, action_model, collection_model, + service_model, event_emitter) + + def _load_documented_collection_methods(factory_self, attrs, resource_name, + collection_model, service_model, + event_emitter): + # The CollectionManger already has these methods defined. However + # the docstrings are generic and not based for a particular service + # or resource. 
So we override these methods by proxying to the + # CollectionManager's builtin method and adding a docstring + # that pertains to the resource. + + # A collection's all() method. + def all(self): + return CollectionManager.all(self) + + all.__doc__ = docstring.CollectionMethodDocstring( + resource_name=resource_name, + action_name='all', + event_emitter=event_emitter, + collection_model=collection_model, + service_model=service_model, + include_signature=False + ) + attrs['all'] = all + + # The collection's filter() method. + def filter(self, **kwargs): + return CollectionManager.filter(self, **kwargs) + + filter.__doc__ = docstring.CollectionMethodDocstring( + resource_name=resource_name, + action_name='filter', + event_emitter=event_emitter, + collection_model=collection_model, + service_model=service_model, + include_signature=False + ) + attrs['filter'] = filter + + # The collection's limit method. + def limit(self, count): + return CollectionManager.limit(self, count) + + limit.__doc__ = docstring.CollectionMethodDocstring( + resource_name=resource_name, + action_name='limit', + event_emitter=event_emitter, + collection_model=collection_model, + service_model=service_model, + include_signature=False + ) + attrs['limit'] = limit + + # The collection's page_size method. + def page_size(self, count): + return CollectionManager.page_size(self, count) + + page_size.__doc__ = docstring.CollectionMethodDocstring( + resource_name=resource_name, + action_name='page_size', + event_emitter=event_emitter, + collection_model=collection_model, + service_model=service_model, + include_signature=False + ) + attrs['page_size'] = page_size + + def _create_batch_action(factory_self, resource_name, snake_cased, + action_model, collection_model, service_model, + event_emitter): + """ + Creates a new method which makes a batch operation request + to the underlying service API. 
+ """ + action = BatchAction(action_model) + + def batch_action(self, *args, **kwargs): + return action(self, *args, **kwargs) + + batch_action.__name__ = str(snake_cased) + batch_action.__doc__ = docstring.BatchActionDocstring( + resource_name=resource_name, + event_emitter=event_emitter, + batch_action_model=action_model, + service_model=service_model, + collection_model=collection_model, + include_signature=False + ) + return batch_action diff --git a/boto3/resources/factory.py b/boto3/resources/factory.py new file mode 100644 index 0000000..c1d6472 --- /dev/null +++ b/boto3/resources/factory.py @@ -0,0 +1,474 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import logging +from functools import partial + +from .action import ServiceAction +from .action import WaiterAction +from .base import ResourceMeta, ServiceResource +from .collection import CollectionFactory +from .model import ResourceModel +from .response import build_identifiers, ResourceHandler +from ..exceptions import ResourceLoadException +from ..docs import docstring + + +logger = logging.getLogger(__name__) + + +class ResourceFactory(object): + """ + A factory to create new :py:class:`~boto3.resources.base.ServiceResource` + classes from a :py:class:`~boto3.resources.model.ResourceModel`. There are + two types of lookups that can be done: one on the service itself (e.g. an + SQS resource) and another on models contained within the service (e.g. an + SQS Queue resource). 
+ """ + def __init__(self, emitter): + self._collection_factory = CollectionFactory() + self._emitter = emitter + + def load_from_definition(self, resource_name, + single_resource_json_definition, service_context): + """ + Loads a resource from a model, creating a new + :py:class:`~boto3.resources.base.ServiceResource` subclass + with the correct properties and methods, named based on the service + and resource name, e.g. EC2.Instance. + + :type resource_name: string + :param resource_name: Name of the resource to look up. For services, + this should match the ``service_name``. + + :type single_resource_json_definition: dict + :param single_resource_json_definition: + The loaded json of a single service resource or resource + definition. + + :type service_context: :py:class:`~boto3.utils.ServiceContext` + :param service_context: Context about the AWS service + + :rtype: Subclass of :py:class:`~boto3.resources.base.ServiceResource` + :return: The service or resource class. + """ + + + logger.debug('Loading %s:%s', service_context.service_name, + resource_name) + + # Using the loaded JSON create a ResourceModel object. + resource_model = ResourceModel( + resource_name, single_resource_json_definition, + service_context.resource_json_definitions + ) + + # Do some renaming of the shape if there was a naming collision + # that needed to be accounted for. + shape = None + if resource_model.shape: + shape = service_context.service_model.shape_for( + resource_model.shape) + resource_model.load_rename_map(shape) + + # Set some basic info + meta = ResourceMeta( + service_context.service_name, resource_model=resource_model) + attrs = { + 'meta': meta, + } + + # Create and load all of attributes of the resource class based + # on the models. 
+ + # Identifiers + self._load_identifiers( + attrs=attrs, meta=meta, resource_name=resource_name, + resource_model=resource_model + ) + + # Load/Reload actions + self._load_actions( + attrs=attrs, resource_name=resource_name, + resource_model=resource_model, service_context=service_context + ) + + # Attributes that get auto-loaded + self._load_attributes( + attrs=attrs, meta=meta, resource_model=resource_model, + service_context=service_context) + + # Collections and their corresponding methods + self._load_collections( + attrs=attrs, resource_model=resource_model, + service_context=service_context) + + #References and Subresources + self._load_has_relations( + attrs=attrs, resource_name=resource_name, + resource_model=resource_model, service_context=service_context + ) + + # Waiter resource actions + self._load_waiters( + attrs=attrs, resource_name=resource_name, + resource_model=resource_model, service_context=service_context + ) + + # Create the name based on the requested service and resource + cls_name = resource_name + if service_context.service_name == resource_name: + cls_name = 'ServiceResource' + cls_name = service_context.service_name + '.' + cls_name + + base_classes = [ServiceResource] + if self._emitter is not None: + self._emitter.emit('creating-resource-class.%s' % cls_name, + class_attributes=attrs, + base_classes=base_classes) + return type(str(cls_name), tuple(base_classes), attrs) + + def _load_identifiers(self, attrs, meta, resource_model, resource_name): + """ + Populate required identifiers. These are arguments without which + the resource cannot be used. Identifiers become arguments for + operations on the resource. 
+ """ + for identifier in resource_model.identifiers: + meta.identifiers.append(identifier.name) + attrs[identifier.name] = self._create_identifier( + identifier, resource_name) + + def _load_actions(self, attrs, resource_name, resource_model, + service_context): + """ + Actions on the resource become methods, with the ``load`` method + being a special case which sets internal data for attributes, and + ``reload`` is an alias for ``load``. + """ + if resource_model.load: + attrs['load'] = self._create_action( + action_model=resource_model.load, resource_name=resource_name, + service_context=service_context, is_load=True) + attrs['reload'] = attrs['load'] + + for action in resource_model.actions: + attrs[action.name] = self._create_action( + action_model=action, resource_name=resource_name, + service_context=service_context) + + def _load_attributes(self, attrs, meta, resource_model, service_context): + """ + Load resource attributes based on the resource shape. The shape + name is referenced in the resource JSON, but the shape itself + is defined in the Botocore service JSON, hence the need for + access to the ``service_model``. + """ + if resource_model.shape: + shape = service_context.service_model.shape_for( + resource_model.shape) + + attributes = resource_model.get_attributes(shape) + for name, (orig_name, member) in attributes.items(): + attrs[name] = self._create_autoload_property( + name=orig_name, snake_cased=name, member_model=member) + + def _load_collections(self, attrs, resource_model, service_context): + """ + Load resource collections from the model. Each collection becomes + a :py:class:`~boto3.resources.collection.CollectionManager` instance + on the resource instance, which allows you to iterate and filter + through the collection's items. 
+ """ + for collection_model in resource_model.collections: + attrs[collection_model.name] = self._create_collection( + resource_name=resource_model.name, + collection_model=collection_model, + service_context=service_context + ) + + def _load_has_relations(self, attrs, resource_name, resource_model, + service_context): + """ + Load related resources, which are defined via a ``has`` + relationship but conceptually come in two forms: + + 1. A reference, which is a related resource instance and can be + ``None``, such as an EC2 instance's ``vpc``. + 2. A subresource, which is a resource constructor that will always + return a resource instance which shares identifiers/data with + this resource, such as ``s3.Bucket('name').Object('key')``. + """ + for reference in resource_model.references: + # This is a dangling reference, i.e. we have all + # the data we need to create the resource, so + # this instance becomes an attribute on the class. + attrs[reference.name] = self._create_reference( + reference_model=reference, + resource_name=resource_name, + service_context=service_context + ) + + for subresource in resource_model.subresources: + # This is a sub-resource class you can create + # by passing in an identifier, e.g. s3.Bucket(name). + attrs[subresource.name] = self._create_class_partial( + subresource_model=subresource, + resource_name=resource_name, + service_context=service_context + ) + + def _load_waiters(self, attrs, resource_name, resource_model, + service_context): + """ + Load resource waiters from the model. Each waiter allows you to + wait until a resource reaches a specific state by polling the state + of the resource. + """ + for waiter in resource_model.waiters: + attrs[waiter.name] = self._create_waiter( + resource_waiter_model=waiter, + resource_name=resource_name, + service_context=service_context + ) + + def _create_identifier(factory_self, identifier, resource_name): + """ + Creates a read-only property for identifier attributes. 
+ """ + def get_identifier(self): + # The default value is set to ``None`` instead of + # raising an AttributeError because when resources are + # instantiated a check is made such that none of the + # identifiers have a value ``None``. If any are ``None``, + # a more informative user error than a generic AttributeError + # is raised. + return getattr(self, '_' + identifier.name, None) + + get_identifier.__name__ = str(identifier.name) + get_identifier.__doc__ = docstring.IdentifierDocstring( + resource_name=resource_name, + identifier_model=identifier, + include_signature=False + ) + + return property(get_identifier) + + def _create_autoload_property(factory_self, name, snake_cased, + member_model): + """ + Creates a new property on the resource to lazy-load its value + via the resource's ``load`` method (if it exists). + """ + # The property loader will check to see if this resource has already + # been loaded and return the cached value if possible. If not, then + # it first checks to see if it CAN be loaded (raise if not), then + # calls the load before returning the value. + def property_loader(self): + if self.meta.data is None: + if hasattr(self, 'load'): + self.load() + else: + raise ResourceLoadException( + '{0} has no load method'.format(self.__class__.__name__)) + + return self.meta.data.get(name) + + property_loader.__name__ = str(snake_cased) + property_loader.__doc__ = docstring.AttributeDocstring( + attr_name=snake_cased, + attr_model=member_model, + include_signature=False + ) + + return property(property_loader) + + def _create_waiter(factory_self, resource_waiter_model, resource_name, + service_context): + """ + Creates a new wait method for each resource where both a waiter and + resource model is defined. 
+ """ + waiter = WaiterAction(resource_waiter_model, + waiter_resource_name=resource_waiter_model.name) + def do_waiter(self, *args, **kwargs): + waiter(self, *args, **kwargs) + + do_waiter.__name__ = str(resource_waiter_model.name) + do_waiter.__doc__ = docstring.ResourceWaiterDocstring( + resource_name=resource_name, + event_emitter=factory_self._emitter, + service_model=service_context.service_model, + resource_waiter_model=resource_waiter_model, + service_waiter_model=service_context.service_waiter_model, + include_signature=False + ) + return do_waiter + + def _create_collection(factory_self, resource_name, collection_model, + service_context): + """ + Creates a new property on the resource to lazy-load a collection. + """ + cls = factory_self._collection_factory.load_from_definition( + resource_name=resource_name, collection_model=collection_model, + service_context=service_context, + event_emitter=factory_self._emitter) + + def get_collection(self): + return cls( + collection_model=collection_model, parent=self, + factory=factory_self, service_context=service_context) + + get_collection.__name__ = str(collection_model.name) + get_collection.__doc__ = docstring.CollectionDocstring( + collection_model=collection_model, include_signature=False) + return property(get_collection) + + def _create_reference(factory_self, reference_model, resource_name, + service_context): + """ + Creates a new property on the resource to lazy-load a reference. + """ + # References are essentially an action with no request + # or response, so we can re-use the response handlers to + # build up resources from identifiers and data members. + handler = ResourceHandler( + search_path=reference_model.resource.path, factory=factory_self, + resource_model=reference_model.resource, + service_context=service_context + ) + + # Are there any identifiers that need access to data members? + # This is important when building the resource below since + # it requires the data to be loaded. 
+ needs_data = any(i.source == 'data' for i in + reference_model.resource.identifiers) + + def get_reference(self): + # We need to lazy-evaluate the reference to handle circular + # references between resources. We do this by loading the class + # when first accessed. + # This is using a *response handler* so we need to make sure + # our data is loaded (if possible) and pass that data into + # the handler as if it were a response. This allows references + # to have their data loaded properly. + if needs_data and self.meta.data is None and hasattr(self, 'load'): + self.load() + return handler(self, {}, self.meta.data) + + get_reference.__name__ = str(reference_model.name) + get_reference.__doc__ = docstring.ReferenceDocstring( + reference_model=reference_model, + include_signature=False + ) + return property(get_reference) + + def _create_class_partial(factory_self, subresource_model, resource_name, + service_context): + """ + Creates a new method which acts as a functools.partial, passing + along the instance's low-level `client` to the new resource + class' constructor. + """ + name = subresource_model.resource.type + # We need a new method here because we want access to the + # instance's client. + def create_resource(self, *args, **kwargs): + positional_args = [] + + # We lazy-load the class to handle circular references. + json_def = service_context.resource_json_definitions.get(name, {}) + resource_cls = factory_self.load_from_definition( + resource_name=name, + single_resource_json_definition=json_def, + service_context=service_context + ) + + # Assumes that identifiers are in order, which lets you do + # e.g. ``sqs.Queue('foo').Message('bar')`` to create a new message + # linked with the ``foo`` queue and which has a ``bar`` receipt + # handle. If we did kwargs here then future positional arguments + # would lead to failure. 
+ identifiers = subresource_model.resource.identifiers
+ if identifiers is not None:
+ for identifier, value in build_identifiers(identifiers, self):
+ positional_args.append(value)
+
+ return partial(resource_cls, *positional_args,
+ client=self.meta.client)(*args, **kwargs)
+
+ create_resource.__name__ = str(name)
+ create_resource.__doc__ = docstring.SubResourceDocstring(
+ resource_name=resource_name,
+ sub_resource_model=subresource_model,
+ service_model=service_context.service_model,
+ include_signature=False
+ )
+ return create_resource
+
+ def _create_action(factory_self, action_model, resource_name,
+ service_context, is_load=False):
+ """
+ Creates a new method which makes a request to the underlying
+ AWS service.
+ """
+ # Create the action in this closure but before the ``do_action``
+ # method below is invoked, which allows instances of the resource
+ # to share the ServiceAction instance.
+ action = ServiceAction(
+ action_model, factory=factory_self,
+ service_context=service_context
+ )
+
+ # A resource's ``load`` method is special because it sets
+ # values on the resource instead of returning the response.
+ if is_load:
+ # We need a new method here because we want access to the
+ # instance via ``self``.
+ def do_action(self, *args, **kwargs):
+ response = action(self, *args, **kwargs)
+ self.meta.data = response
+ # Create the docstring for the load/reload methods.
+ lazy_docstring = docstring.LoadReloadDocstring(
+ action_name=action_model.name,
+ resource_name=resource_name,
+ event_emitter=factory_self._emitter,
+ load_model=action_model,
+ service_model=service_context.service_model,
+ include_signature=False
+ )
+ else:
+ # We need a new method here because we want access to the
+ # instance via ``self``.
+ def do_action(self, *args, **kwargs):
+ response = action(self, *args, **kwargs)
+
+ if hasattr(self, 'load'):
+ # Clear cached data. It will be reloaded the next
+ # time that an attribute is accessed. 
+ # TODO: Make this configurable in the future? + self.meta.data = None + + return response + lazy_docstring = docstring.ActionDocstring( + resource_name=resource_name, + event_emitter=factory_self._emitter, + action_model=action_model, + service_model=service_context.service_model, + include_signature=False + ) + + do_action.__name__ = str(action_model.name) + do_action.__doc__ = lazy_docstring + return do_action diff --git a/boto3/resources/model.py b/boto3/resources/model.py new file mode 100644 index 0000000..7061183 --- /dev/null +++ b/boto3/resources/model.py @@ -0,0 +1,619 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +""" +The models defined in this file represent the resource JSON description +format and provide a layer of abstraction from the raw JSON. The advantages +of this are: + +* Pythonic interface (e.g. ``action.request.operation``) +* Consumers need not change for minor JSON changes (e.g. renamed field) + +These models are used both by the resource factory to generate resource +classes as well as by the documentation generator. +""" + +import logging + +from botocore import xform_name + + +logger = logging.getLogger(__name__) + + +class Identifier(object): + """ + A resource identifier, given by its name. 
+ + :type name: string + :param name: The name of the identifier + """ + def __init__(self, name): + #: (``string``) The name of the identifier + self.name = name + + +class Action(object): + """ + A service operation action. + + :type name: string + :param name: The name of the action + :type definition: dict + :param definition: The JSON definition + :type resource_defs: dict + :param resource_defs: All resources defined in the service + """ + def __init__(self, name, definition, resource_defs): + self._definition = definition + + #: (``string``) The name of the action + self.name = name + #: (:py:class:`Request`) This action's request or ``None`` + self.request = None + if 'request' in definition: + self.request = Request(definition.get('request', {})) + #: (:py:class:`ResponseResource`) This action's resource or ``None`` + self.resource = None + if 'resource' in definition: + self.resource = ResponseResource(definition.get('resource', {}), + resource_defs) + #: (``string``) The JMESPath search path or ``None`` + self.path = definition.get('path') + + + +class DefinitionWithParams(object): + """ + An item which has parameters exposed via the ``params`` property. + A request has an operation and parameters, while a waiter has + a name, a low-level waiter name and parameters. + + :type definition: dict + :param definition: The JSON definition + """ + def __init__(self, definition): + self._definition = definition + + @property + def params(self): + """ + Get a list of auto-filled parameters for this request. + + :type: list(:py:class:`Parameter`) + """ + params = [] + + for item in self._definition.get('params', []): + params.append(Parameter(**item)) + + return params + + +class Parameter(object): + """ + An auto-filled parameter which has a source and target. For example, + the ``QueueUrl`` may be auto-filled from a resource's ``url`` identifier + when making calls to ``queue.receive_messages``. 
+ + :type target: string + :param target: The destination parameter name, e.g. ``QueueUrl`` + :type source_type: string + :param source_type: Where the source is defined. + :type source: string + :param source: The source name, e.g. ``Url`` + """ + def __init__(self, target, source, name=None, path=None, value=None, + **kwargs): + #: (``string``) The destination parameter name + self.target = target + #: (``string``) Where the source is defined + self.source = source + #: (``string``) The name of the source, if given + self.name = name + #: (``string``) The JMESPath query of the source + self.path = path + #: (``string|int|float|bool``) The source constant value + self.value = value + + # Complain if we encounter any unknown values. + if kwargs: + logger.warning('Unknown parameter options found: %s', kwargs) + + +class Request(DefinitionWithParams): + """ + A service operation action request. + + :type definition: dict + :param definition: The JSON definition + """ + def __init__(self, definition): + super(Request, self).__init__(definition) + + #: (``string``) The name of the low-level service operation + self.operation = definition.get('operation') + + +class Waiter(DefinitionWithParams): + """ + An event waiter specification. + + :type name: string + :param name: Name of the waiter + :type definition: dict + :param definition: The JSON definition + """ + PREFIX = 'WaitUntil' + + def __init__(self, name, definition): + super(Waiter, self).__init__(definition) + + #: (``string``) The name of this waiter + self.name = name + + #: (``string``) The name of the underlying event waiter + self.waiter_name = definition.get('waiterName') + + +class ResponseResource(object): + """ + A resource response to create after performing an action. 
+ + :type definition: dict + :param definition: The JSON definition + :type resource_defs: dict + :param resource_defs: All resources defined in the service + """ + def __init__(self, definition, resource_defs): + self._definition = definition + self._resource_defs = resource_defs + + #: (``string``) The name of the response resource type + self.type = definition.get('type') + + #: (``string``) The JMESPath search query or ``None`` + self.path = definition.get('path') + + @property + def identifiers(self): + """ + A list of resource identifiers. + + :type: list(:py:class:`Identifier`) + """ + identifiers = [] + + for item in self._definition.get('identifiers', []): + identifiers.append( + Parameter(**item)) + + return identifiers + + @property + def model(self): + """ + Get the resource model for the response resource. + + :type: :py:class:`ResourceModel` + """ + return ResourceModel(self.type, self._resource_defs[self.type], + self._resource_defs) + + +class Collection(Action): + """ + A group of resources. See :py:class:`Action`. + + :type name: string + :param name: The name of the collection + :type definition: dict + :param definition: The JSON definition + :type resource_defs: dict + :param resource_defs: All resources defined in the service + """ + @property + def batch_actions(self): + """ + Get a list of batch actions supported by the resource type + contained in this action. This is a shortcut for accessing + the same information through the resource model. + + :rtype: list(:py:class:`Action`) + """ + return self.resource.model.batch_actions + + +class ResourceModel(object): + """ + A model representing a resource, defined via a JSON description + format. A resource has identifiers, attributes, actions, + sub-resources, references and collections. For more information + on resources, see :ref:`guide_resources`. + + :type name: string + :param name: The name of this resource, e.g. 
``sqs`` or ``Queue`` + :type definition: dict + :param definition: The JSON definition + :type resource_defs: dict + :param resource_defs: All resources defined in the service + """ + def __init__(self, name, definition, resource_defs): + self._definition = definition + self._resource_defs = resource_defs + self._renamed = {} + + #: (``string``) The name of this resource + self.name = name + #: (``string``) The service shape name for this resource or ``None`` + self.shape = definition.get('shape') + + def load_rename_map(self, shape=None): + """ + Load a name translation map given a shape. This will set + up renamed values for any collisions, e.g. if the shape, + an action, and a subresource all are all named ``foo`` + then the resource will have an action ``foo``, a subresource + named ``Foo`` and a property named ``foo_attribute``. + This is the order of precedence, from most important to + least important: + + * Load action (resource.load) + * Identifiers + * Actions + * Subresources + * References + * Collections + * Waiters + * Attributes (shape members) + + Batch actions are only exposed on collections, so do not + get modified here. Subresources use upper camel casing, so + are unlikely to collide with anything but other subresources. + + Creates a structure like this:: + + renames = { + ('action', 'id'): 'id_action', + ('collection', 'id'): 'id_collection', + ('attribute', 'id'): 'id_attribute' + } + + # Get the final name for an action named 'id' + name = renames.get(('action', 'id'), 'id') + + :type shape: botocore.model.Shape + :param shape: The underlying shape for this resource. 
+ """ + # Meta is a reserved name for resources + names = set(['meta']) + self._renamed = {} + + if self._definition.get('load'): + names.add('load') + + for item in self._definition.get('identifiers', []): + self._load_name_with_category(names, item['name'], 'identifier') + + for name in self._definition.get('actions', {}): + self._load_name_with_category(names, name, 'action') + + for name, ref in self._get_has_definition().items(): + # Subresources require no data members, just typically + # identifiers and user input. + data_required = False + for identifier in ref['resource']['identifiers']: + if identifier['source'] == 'data': + data_required = True + break + + if not data_required: + self._load_name_with_category(names, name, 'subresource', + snake_case=False) + else: + self._load_name_with_category(names, name, 'reference') + + for name in self._definition.get('hasMany', {}): + self._load_name_with_category(names, name, 'collection') + + for name in self._definition.get('waiters', {}): + self._load_name_with_category(names, Waiter.PREFIX + name, + 'waiter') + + if shape is not None: + for name in shape.members.keys(): + self._load_name_with_category(names, name, 'attribute') + + def _load_name_with_category(self, names, name, category, + snake_case=True): + """ + Load a name with a given category, possibly renaming it + if that name is already in use. The name will be stored + in ``names`` and possibly be set up in ``self._renamed``. + + :type names: set + :param names: Existing names (Python attributes, properties, or + methods) on the resource. + :type name: string + :param name: The original name of the value. + :type category: string + :param category: The value type, such as 'identifier' or 'action' + :type snake_case: bool + :param snake_case: True (default) if the name should be snake cased. 
+ """ + if snake_case: + name = xform_name(name) + + if name in names: + logger.debug('Renaming %s %s %s' % (self.name, category, name)) + self._renamed[(category, name)] = name + '_' + category + name += '_' + category + + if name in names: + # This isn't good, let's raise instead of trying to keep + # renaming this value. + raise ValueError('Problem renaming {0} {1} to {2}!'.format( + self.name, category, name)) + + names.add(name) + + def _get_name(self, category, name, snake_case=True): + """ + Get a possibly renamed value given a category and name. This + uses the rename map set up in ``load_rename_map``, so that + method must be called once first. + + :type category: string + :param category: The value type, such as 'identifier' or 'action' + :type name: string + :param name: The original name of the value + :type snake_case: bool + :param snake_case: True (default) if the name should be snake cased. + :rtype: string + :return: Either the renamed value if it is set, otherwise the + original name. + """ + if snake_case: + name = xform_name(name) + + return self._renamed.get((category, name), name) + + def get_attributes(self, shape): + """ + Get a dictionary of attribute names to original name and shape + models that represent the attributes of this resource. Looks + like the following: + + { + 'some_name': ('SomeName', ) + } + + :type shape: botocore.model.Shape + :param shape: The underlying shape for this resource. + :rtype: dict + :return: Mapping of resource attributes. + """ + attributes = {} + identifier_names = [i.name for i in self.identifiers] + + for name, member in shape.members.items(): + snake_cased = xform_name(name) + if snake_cased in identifier_names: + # Skip identifiers, these are set through other means + continue + snake_cased = self._get_name('attribute', snake_cased, + snake_case=False) + attributes[snake_cased] = (name, member) + + return attributes + + @property + def identifiers(self): + """ + Get a list of resource identifiers. 
+ + :type: list(:py:class:`Identifier`) + """ + identifiers = [] + + for item in self._definition.get('identifiers', []): + name = self._get_name('identifier', item['name']) + identifiers.append(Identifier(name)) + + return identifiers + + @property + def load(self): + """ + Get the load action for this resource, if it is defined. + + :type: :py:class:`Action` or ``None`` + """ + action = self._definition.get('load') + + if action is not None: + action = Action('load', action, self._resource_defs) + + return action + + @property + def actions(self): + """ + Get a list of actions for this resource. + + :type: list(:py:class:`Action`) + """ + actions = [] + + for name, item in self._definition.get('actions', {}).items(): + name = self._get_name('action', name) + actions.append(Action(name, item, self._resource_defs)) + + return actions + + @property + def batch_actions(self): + """ + Get a list of batch actions for this resource. + + :type: list(:py:class:`Action`) + """ + actions = [] + + for name, item in self._definition.get('batchActions', {}).items(): + name = self._get_name('batch_action', name) + actions.append(Action(name, item, self._resource_defs)) + + return actions + + def _get_has_definition(self): + """ + Get a ``has`` relationship definition from a model, where the + service resource model is treated special in that it contains + a relationship to every resource defined for the service. This + allows things like ``s3.Object('bucket-name', 'key')`` to + work even though the JSON doesn't define it explicitly. + + :rtype: dict + :return: Mapping of names to subresource and reference + definitions. + """ + if self.name not in self._resource_defs: + # This is the service resource, so let us expose all of + # the defined resources as subresources. 
+ definition = {} + + for name, resource_def in self._resource_defs.items(): + # It's possible for the service to have renamed a + # resource or to have defined multiple names that + # point to the same resource type, so we need to + # take that into account. + found = False + has_items = self._definition.get('has', {}).items() + for has_name, has_def in has_items: + if has_def.get('resource', {}).get('type') == name: + definition[has_name] = has_def + found = True + + if not found: + # Create a relationship definition and attach it + # to the model, such that all identifiers must be + # supplied by the user. It will look something like: + # + # { + # 'resource': { + # 'type': 'ResourceName', + # 'identifiers': [ + # {'target': 'Name1', 'source': 'input'}, + # {'target': 'Name2', 'source': 'input'}, + # ... + # ] + # } + # } + # + fake_has = { + 'resource': { + 'type': name, + 'identifiers': [] + } + } + + for identifier in resource_def.get('identifiers', []): + fake_has['resource']['identifiers'].append({ + 'target': identifier['name'], 'source': 'input' + }) + + definition[name] = fake_has + else: + definition = self._definition.get('has', {}) + + return definition + + def _get_related_resources(self, subresources): + """ + Get a list of sub-resources or references. + + :type subresources: bool + :param subresources: ``True`` to get sub-resources, ``False`` to + get references. 
+ :rtype: list(:py:class:`ResponseResource`) + """ + resources = [] + + for name, definition in self._get_has_definition().items(): + if subresources: + name = self._get_name('subresource', name, snake_case=False) + else: + name = self._get_name('reference', name) + action = Action(name, definition, self._resource_defs) + + data_required = False + for identifier in action.resource.identifiers: + if identifier.source == 'data': + data_required = True + break + + if subresources and not data_required: + resources.append(action) + elif not subresources and data_required: + resources.append(action) + + return resources + + @property + def subresources(self): + """ + Get a list of sub-resources. + + :type: list(:py:class`ResponseResource`) + """ + return self._get_related_resources(True) + + @property + def references(self): + """ + Get a list of reference resources. + + :type: list(:py:class:`ResponseResource`) + """ + return self._get_related_resources(False) + + @property + def collections(self): + """ + Get a list of collections for this resource. + + :type: list(:py:class:`Collection`) + """ + collections = [] + + for name, item in self._definition.get('hasMany', {}).items(): + name = self._get_name('collection', name) + collections.append(Collection(name, item, self._resource_defs)) + + return collections + + @property + def waiters(self): + """ + Get a list of waiters for this resource. + + :type: list(:py:class:`Waiter`) + """ + waiters = [] + + for name, item in self._definition.get('waiters', {}).items(): + name = self._get_name('waiter', Waiter.PREFIX + name) + waiters.append(Waiter(name, item)) + + return waiters diff --git a/boto3/resources/params.py b/boto3/resources/params.py new file mode 100644 index 0000000..95e4660 --- /dev/null +++ b/boto3/resources/params.py @@ -0,0 +1,164 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). 
You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import re + +import jmespath +from botocore import xform_name + +from ..exceptions import ResourceLoadException + + +INDEX_RE = re.compile('\[(.*)\]$') + + +def get_data_member(parent, path): + """ + Get a data member from a parent using a JMESPath search query, + loading the parent if required. If the parent cannot be loaded + and no data is present then an exception is raised. + + :type parent: ServiceResource + :param parent: The resource instance to which contains data we + are interested in. + :type path: string + :param path: The JMESPath expression to query + :raises ResourceLoadException: When no data is present and the + resource cannot be loaded. + :returns: The queried data or ``None``. + """ + # Ensure the parent has its data loaded, if possible. + if parent.meta.data is None: + if hasattr(parent, 'load'): + parent.load() + else: + raise ResourceLoadException( + '{0} has no load method!'.format(parent.__class__.__name__)) + + return jmespath.search(path, parent.meta.data) + + +def create_request_parameters(parent, request_model, params=None): + """ + Handle request parameters that can be filled in from identifiers, + resource data members or constants. + + By passing ``params``, you can invoke this method multiple times and + build up a parameter dict over time, which is particularly useful + for reverse JMESPath expressions that append to lists. + + :type parent: ServiceResource + :param parent: The resource instance to which this action is attached. 
+ :type request_model: :py:class:`~boto3.resources.model.Request`
+ :param request_model: The action request model.
+ :type params: dict
+ :param params: If set, then add to this existing dict. It is both
+ edited in-place and returned.
+ :rtype: dict
+ :return: Pre-filled parameters to be sent to the request operation.
+ """
+ if params is None:
+ params = {}
+
+ for param in request_model.params:
+ source = param.source
+ target = param.target
+
+ if source == 'identifier':
+ # Resource identifier, e.g. queue.url
+ value = getattr(parent, xform_name(param.name))
+ elif source == 'data':
+ # If this is a data member then it may incur a load
+ # action before returning the value.
+ value = get_data_member(parent, param.path)
+ elif source in ['string', 'integer', 'boolean']:
+ # These are hard-coded values in the definition
+ value = param.value
+ elif source == 'input':
+ # This is provided by the user, so ignore it here
+ continue
+ else:
+ raise NotImplementedError(
+ 'Unsupported source type: {0}'.format(source))
+
+ build_param_structure(params, target, value)
+
+ return params
+
+def build_param_structure(params, target, value):
+ """
+ This method provides a basic reverse JMESPath implementation that
+ lets you go from a JMESPath-like string to a possibly deeply nested
+ object. The ``params`` are mutated in-place, so subsequent calls
+ can modify the same element by its index.
+
+ >>> build_param_structure(params, 'test[0]', 1)
+ >>> print(params)
+ {'test': [1]}
+
+ >>> build_param_structure(params, 'foo.bar[0].baz', 'hello world')
+ >>> print(params)
+ {'test': [1], 'foo': {'bar': [{'baz': 'hello world'}]}}
+
+ """
+ pos = params
+ parts = target.split('.')
+
+ # First, split into parts like 'foo', 'bar[0]', 'baz' and process
+ # each piece. It can either be a list or a dict, depending on if
+ # an index like `[0]` is present. 
We detect this via a regular + # expression, and keep track of where we are in params via the + # pos variable, walking down to the last item. Once there, we + # set the value. + for i, part in enumerate(parts): + # Is it indexing an array? + result = INDEX_RE.search(part) + if result: + if result.group(1): + # We have an explicit index + index = int(result.group(1)) + + # Strip index off part name + part = part[:-len(str(index) + '[]')] + else: + # Index will be set after we know the proper part + # name and that it's a list instance. + index = None + part = part[:-2] + + if part not in pos or not isinstance(pos[part], list): + pos[part] = [] + + # This means we should append, e.g. 'foo[]' + if index is None: + index = len(pos[part]) + + while len(pos[part]) <= index: + # Assume it's a dict until we set the final value below + pos[part].append({}) + + # Last item? Set the value, otherwise set the new position + if i == len(parts) - 1: + pos[part][index] = value + else: + # The new pos is the *item* in the array, not the array! + pos = pos[part][index] + else: + if part not in pos: + pos[part] = {} + + # Last item? Set the value, otherwise set the new position + if i == len(parts) - 1: + pos[part] = value + else: + pos = pos[part] diff --git a/boto3/resources/response.py b/boto3/resources/response.py new file mode 100644 index 0000000..fc50d5a --- /dev/null +++ b/boto3/resources/response.py @@ -0,0 +1,299 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. + +import jmespath +from botocore import xform_name + +from ..exceptions import ResourceLoadException +from .params import get_data_member + + +def all_not_none(iterable): + """ + Return True if all elements of the iterable are not None (or if the + iterable is empty). This is like the built-in ``all``, except checks + against None, so 0 and False are allowable values. + """ + for element in iterable: + if element is None: + return False + return True + + +def build_identifiers(identifiers, parent, params=None, raw_response=None): + """ + Builds a mapping of identifier names to values based on the + identifier source location, type, and target. Identifier + values may be scalars or lists depending on the source type + and location. + + :type identifiers: list + :param identifiers: List of :py:class:`~boto3.resources.model.Parameter` + definitions + :type parent: ServiceResource + :param parent: The resource instance to which this action is attached. + :type params: dict + :param params: Request parameters sent to the service. + :type raw_response: dict + :param raw_response: Low-level operation response. + :rtype: list + :return: An ordered list of ``(name, value)`` identifier tuples. + """ + results = [] + + for identifier in identifiers: + source = identifier.source + target = identifier.target + + if source == 'response': + value = jmespath.search(identifier.path, raw_response) + elif source == 'requestParameter': + value = jmespath.search(identifier.path, params) + elif source == 'identifier': + value = getattr(parent, xform_name(identifier.name)) + elif source == 'data': + # If this is a data member then it may incur a load + # action before returning the value. 
+ value = get_data_member(parent, identifier.path) + elif source == 'input': + # This value is set by the user, so ignore it here + continue + else: + raise NotImplementedError( + 'Unsupported source type: {0}'.format(source)) + + results.append((xform_name(target), value)) + + return results + + +def build_empty_response(search_path, operation_name, service_model): + """ + Creates an appropriate empty response for the type that is expected, + based on the service model's shape type. For example, a value that + is normally a list would then return an empty list. A structure would + return an empty dict, and a number would return None. + + :type search_path: string + :param search_path: JMESPath expression to search in the response + :type operation_name: string + :param operation_name: Name of the underlying service operation. + :type service_model: :ref:`botocore.model.ServiceModel` + :param service_model: The Botocore service model + :rtype: dict, list, or None + :return: An appropriate empty value + """ + response = None + + operation_model = service_model.operation_model(operation_name) + shape = operation_model.output_shape + + if search_path: + # Walk the search path and find the final shape. For example, given + # a path of ``foo.bar[0].baz``, we first find the shape for ``foo``, + # then the shape for ``bar`` (ignoring the indexing), and finally + # the shape for ``baz``. 
+ for item in search_path.split('.'): + item = item.strip('[0123456789]$') + + if shape.type_name == 'structure': + shape = shape.members[item] + elif shape.type_name == 'list': + shape = shape.member + else: + raise NotImplementedError( + 'Search path hits shape type {0} from {1}'.format( + shape.type_name, item)) + + # Anything not handled here is set to None + if shape.type_name == 'structure': + response = {} + elif shape.type_name == 'list': + response = [] + elif shape.type_name == 'map': + response = {} + + return response + + +class RawHandler(object): + """ + A raw action response handler. This passed through the response + dictionary, optionally after performing a JMESPath search if one + has been defined for the action. + + :type search_path: string + :param search_path: JMESPath expression to search in the response + :rtype: dict + :return: Service response + """ + def __init__(self, search_path): + self.search_path = search_path + + def __call__(self, parent, params, response): + """ + :type parent: ServiceResource + :param parent: The resource instance to which this action is attached. + :type params: dict + :param params: Request parameters sent to the service. + :type response: dict + :param response: Low-level operation response. + """ + # TODO: Remove the '$' check after JMESPath supports it + if self.search_path and self.search_path != '$': + response = jmespath.search(self.search_path, response) + + return response + + +class ResourceHandler(object): + """ + Creates a new resource or list of new resources from the low-level + response based on the given response resource definition. + + :type search_path: string + :param search_path: JMESPath expression to search in the response + + :type factory: ResourceFactory + :param factory: The factory that created the resource class to which + this action is attached. + + :type resource_model: :py:class:`~boto3.resources.model.ResponseResource` + :param resource_model: Response resource model. 
+ + :type service_context: :py:class:`~boto3.utils.ServiceContext` + :param service_context: Context about the AWS service + + :type operation_name: string + :param operation_name: Name of the underlying service operation, if it + exists. + + :rtype: ServiceResource or list + :return: New resource instance(s). + """ + def __init__(self, search_path, factory, resource_model, + service_context, operation_name=None): + self.search_path = search_path + self.factory = factory + self.resource_model = resource_model + self.operation_name = operation_name + self.service_context = service_context + + def __call__(self, parent, params, response): + """ + :type parent: ServiceResource + :param parent: The resource instance to which this action is attached. + :type params: dict + :param params: Request parameters sent to the service. + :type response: dict + :param response: Low-level operation response. + """ + resource_name = self.resource_model.type + json_definition = self.service_context.resource_json_definitions.get( + resource_name) + + # Load the new resource class that will result from this action. + resource_cls = self.factory.load_from_definition( + resource_name=resource_name, + single_resource_json_definition=json_definition, + service_context=self.service_context + ) + raw_response = response + search_response = None + + # Anytime a path is defined, it means the response contains the + # resource's attributes, so resource_data gets set here. It + # eventually ends up in resource.meta.data, which is where + # the attribute properties look for data. + if self.search_path: + search_response = jmespath.search(self.search_path, raw_response) + + # First, we parse all the identifiers, then create the individual + # response resources using them. Any identifiers that are lists + # will have one item consumed from the front of the list for each + # resource that is instantiated. Items which are not a list will + # be set as the same value on each new resource instance. 
+ identifiers = dict(build_identifiers( + self.resource_model.identifiers, parent, params, + raw_response)) + + # If any of the identifiers is a list, then the response is plural + plural = [v for v in identifiers.values() if isinstance(v, list)] + + if plural: + response = [] + + # The number of items in an identifier that is a list will + # determine how many resource instances to create. + for i in range(len(plural[0])): + # Response item data is *only* available if a search path + # was given. This prevents accidentally loading unrelated + # data that may be in the response. + response_item = None + if search_response: + response_item = search_response[i] + response.append(self.handle_response_item(resource_cls, + parent, identifiers, response_item)) + elif all_not_none(identifiers.values()): + # All identifiers must always exist, otherwise the resource + # cannot be instantiated. + response = self.handle_response_item(resource_cls, + parent, identifiers, search_response) + else: + # The response should be empty, but that may mean an + # empty dict, list, or None based on whether we make + # a remote service call and what shape it is expected + # to return. + response = None + if self.operation_name is not None: + # A remote service call was made, so try and determine + # its shape. + response = build_empty_response(self.search_path, + self.operation_name, self.service_context.service_model) + + return response + + def handle_response_item(self, resource_cls, parent, identifiers, + resource_data): + """ + Handles the creation of a single response item by setting + parameters and creating the appropriate resource instance. + + :type resource_cls: ServiceResource subclass + :param resource_cls: The resource class to instantiate. + :type parent: ServiceResource + :param parent: The resource instance to which this action is attached. + :type identifiers: dict + :param identifiers: Map of identifier names to value or values. 
+ :type resource_data: dict or None + :param resource_data: Data for resource attributes. + :rtype: ServiceResource + :return: New resource instance. + """ + kwargs = { + 'client': parent.meta.client, + } + + for name, value in identifiers.items(): + # If value is a list, then consume the next item + if isinstance(value, list): + value = value.pop(0) + + kwargs[name] = value + + resource = resource_cls(**kwargs) + + if resource_data is not None: + resource.meta.data = resource_data + + return resource diff --git a/boto3/s3/__init__.py b/boto3/s3/__init__.py new file mode 100644 index 0000000..c89416d --- /dev/null +++ b/boto3/s3/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/boto3/s3/inject.py b/boto3/s3/inject.py new file mode 100644 index 0000000..02a7a68 --- /dev/null +++ b/boto3/s3/inject.py @@ -0,0 +1,167 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from boto3.s3.transfer import S3Transfer +from boto3 import utils + +from botocore.exceptions import ClientError + + +def inject_s3_transfer_methods(class_attributes, **kwargs): + utils.inject_attribute(class_attributes, 'upload_file', upload_file) + utils.inject_attribute(class_attributes, 'download_file', download_file) + + +def inject_bucket_methods(class_attributes, **kwargs): + utils.inject_attribute(class_attributes, 'load', bucket_load) + utils.inject_attribute(class_attributes, 'upload_file', bucket_upload_file) + utils.inject_attribute( + class_attributes, 'download_file', bucket_download_file) + + +def inject_object_methods(class_attributes, **kwargs): + utils.inject_attribute(class_attributes, 'upload_file', object_upload_file) + utils.inject_attribute( + class_attributes, 'download_file', object_download_file) + + +def bucket_load(self, *args, **kwargs): + """Calls s3.Client.list_buckets() to update the attributes of the Bucket resource.""" + # The docstring above is phrased this way to match what the autogenerated + # docs produce. + + # We can't actually get the bucket's attributes from a HeadBucket, + # so we need to use a ListBuckets and search for our bucket. + response = self.meta.client.list_buckets() + for bucket_data in response['Buckets']: + if bucket_data['Name'] == self.name: + self.meta.data = bucket_data + break + else: + raise ClientError({'Error': {'Code': '404', 'Message': 'NotFound'}}, + 'ListBuckets') + + +def upload_file(self, Filename, Bucket, Key, ExtraArgs=None, + Callback=None, Config=None): + """Upload a file to an S3 object. + + Usage:: + + import boto3 + s3 = boto3.resource('s3') + s3.meta.client.upload_file('/tmp/hello.txt', 'mybucket', 'hello.txt') + + Similar behavior as S3Transfer's upload_file() method, + except that parameters are capitalized. Detailed examples can be found at + :ref:`S3Transfer's Usage `. 
+ """ + transfer = S3Transfer(self, Config) + return transfer.upload_file( + filename=Filename, bucket=Bucket, key=Key, + extra_args=ExtraArgs, callback=Callback) + + +def download_file(self, Bucket, Key, Filename, ExtraArgs=None, + Callback=None, Config=None): + """Download an S3 object to a file. + + Usage:: + + import boto3 + s3 = boto3.resource('s3') + s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt') + + Similar behavior as S3Transfer's download_file() method, + except that parameters are capitalized. Detailed examples can be found at + :ref:`S3Transfer's Usage `. + """ + transfer = S3Transfer(self, Config) + return transfer.download_file( + bucket=Bucket, key=Key, filename=Filename, + extra_args=ExtraArgs, callback=Callback) + + +def bucket_upload_file(self, Filename, Key, + ExtraArgs=None, Callback=None, Config=None): + """Upload a file to an S3 object. + + Usage:: + + import boto3 + s3 = boto3.resource('s3') + s3.Bucket('mybucket').upload_file('/tmp/hello.txt', 'hello.txt') + + Similar behavior as S3Transfer's upload_file() method, + except that parameters are capitalized. Detailed examples can be found at + :ref:`S3Transfer's Usage `. + """ + return self.meta.client.upload_file( + Filename=Filename, Bucket=self.name, Key=Key, + ExtraArgs=ExtraArgs, Callback=Callback, Config=Config) + + +def bucket_download_file(self, Key, Filename, + ExtraArgs=None, Callback=None, Config=None): + """Download an S3 object to a file. + + Usage:: + + import boto3 + s3 = boto3.resource('s3') + s3.Bucket('mybucket').download_file('hello.txt', '/tmp/hello.txt') + + Similar behavior as S3Transfer's download_file() method, + except that parameters are capitalized. Detailed examples can be found at + :ref:`S3Transfer's Usage `. 
+ """ + return self.meta.client.download_file( + Bucket=self.name, Key=Key, Filename=Filename, + ExtraArgs=ExtraArgs, Callback=Callback, Config=Config) + + +def object_upload_file(self, Filename, + ExtraArgs=None, Callback=None, Config=None): + """Upload a file to an S3 object. + + Usage:: + + import boto3 + s3 = boto3.resource('s3') + s3.Object('mybucket', 'hello.txt').upload_file('/tmp/hello.txt') + + Similar behavior as S3Transfer's upload_file() method, + except that parameters are capitalized. Detailed examples can be found at + :ref:`S3Transfer's Usage `. + """ + return self.meta.client.upload_file( + Filename=Filename, Bucket=self.bucket_name, Key=self.key, + ExtraArgs=ExtraArgs, Callback=Callback, Config=Config) + + +def object_download_file(self, Filename, + ExtraArgs=None, Callback=None, Config=None): + """Download an S3 object to a file. + + Usage:: + + import boto3 + s3 = boto3.resource('s3') + s3.Object('mybucket', 'hello.txt').download_file('/tmp/hello.txt') + + Similar behavior as S3Transfer's download_file() method, + except that parameters are capitalized. Detailed examples can be found at + :ref:`S3Transfer's Usage `. + """ + return self.meta.client.download_file( + Bucket=self.bucket_name, Key=self.key, Filename=Filename, + ExtraArgs=ExtraArgs, Callback=Callback, Config=Config) diff --git a/boto3/s3/transfer.py b/boto3/s3/transfer.py new file mode 100644 index 0000000..6ec5ad9 --- /dev/null +++ b/boto3/s3/transfer.py @@ -0,0 +1,725 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. +"""Abstractions over S3's upload/download operations. + +This module provides high level abstractions for efficient +uploads/downloads. It handles several things for the user: + +* Automatically switching to multipart transfers when + a file is over a specific size threshold +* Uploading/downloading a file in parallel +* Throttling based on max bandwidth +* Progress callbacks to monitor transfers +* Retries. While botocore handles retries for streaming uploads, + it is not possible for it to handle retries for streaming + downloads. This module handles retries for both cases so + you don't need to implement any retry logic yourself. + +This module has a reasonable set of defaults. It also allows you +to configure many aspects of the transfer process including: + +* Multipart threshold size +* Max parallel downloads +* Max bandwidth +* Socket timeouts +* Retry amounts + +There is no support for s3->s3 multipart copies at this +time. + + +.. _ref_s3transfer_usage: + +Usage +===== + +The simplest way to use this module is: + +.. code-block:: python + + client = boto3.client('s3', 'us-west-2') + transfer = S3Transfer(client) + # Upload /tmp/myfile to s3://bucket/key + transfer.upload_file('/tmp/myfile', 'bucket', 'key') + + # Download s3://bucket/key to /tmp/myfile + transfer.download_file('bucket', 'key', '/tmp/myfile') + +The ``upload_file`` and ``download_file`` methods also accept +``**kwargs``, which will be forwarded through to the corresponding +client operation. 
Here are a few examples using ``upload_file``::
+
+    # Making the object public
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         extra_args={'ACL': 'public-read'})
+
+    # Setting metadata
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         extra_args={'Metadata': {'a': 'b', 'c': 'd'}})
+
+    # Setting content type
+    transfer.upload_file('/tmp/myfile.json', 'bucket', 'key',
+                         extra_args={'ContentType': "application/json"})
+
+
+The ``S3Transfer`` class also supports progress callbacks so you can
+provide transfer progress to users.  Both the ``upload_file`` and
+``download_file`` methods take an optional ``callback`` parameter.
+Here's an example of how to print a simple progress percentage
+to the user:
+
+.. code-block:: python
+
+    class ProgressPercentage(object):
+        def __init__(self, filename):
+            self._filename = filename
+            self._size = float(os.path.getsize(filename))
+            self._seen_so_far = 0
+            self._lock = threading.Lock()
+
+        def __call__(self, bytes_amount):
+            # To simplify we'll assume this is hooked up
+            # to a single filename.
+            with self._lock:
+                self._seen_so_far += bytes_amount
+                percentage = (self._seen_so_far / self._size) * 100
+                sys.stdout.write(
+                    "\r%s  %s / %s  (%.2f%%)" % (self._filename, self._seen_so_far,
+                                                 self._size, percentage))
+                sys.stdout.flush()
+
+
+    transfer = S3Transfer(boto3.client('s3', 'us-west-2'))
+    # Upload /tmp/myfile to s3://bucket/key and print upload progress.
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         callback=ProgressPercentage('/tmp/myfile'))
+
+
+
+You can also provide a TransferConfig object to the S3Transfer
+object that gives you more fine grained control over the
+transfer.  For example:
+
+..
code-block:: python + + client = boto3.client('s3', 'us-west-2') + config = TransferConfig( + multipart_threshold=8 * 1024 * 1024, + max_concurrency=10, + num_download_attempts=10, + ) + transfer = S3Transfer(client, config) + transfer.upload_file('/tmp/foo', 'bucket', 'key') + + +""" +import os +import math +import functools +import logging +import socket +import threading +import random +import string +import boto3 +from concurrent import futures + +from botocore.compat import six +from botocore.vendored.requests.packages.urllib3.exceptions import \ + ReadTimeoutError +from botocore.exceptions import IncompleteReadError + +import boto3.compat +from boto3.exceptions import RetriesExceededError, S3UploadFailedError + + +logger = logging.getLogger(__name__) +queue = six.moves.queue + +MB = 1024 * 1024 +SHUTDOWN_SENTINEL = object() + + +def random_file_extension(num_digits=8): + return ''.join(random.choice(string.hexdigits) for _ in range(num_digits)) + + +def disable_upload_callbacks(request, operation_name, **kwargs): + if operation_name in ['PutObject', 'UploadPart'] and \ + hasattr(request.body, 'disable_callback'): + request.body.disable_callback() + + +def enable_upload_callbacks(request, operation_name, **kwargs): + if operation_name in ['PutObject', 'UploadPart'] and \ + hasattr(request.body, 'enable_callback'): + request.body.enable_callback() + + +class QueueShutdownError(Exception): + pass + + +class ReadFileChunk(object): + def __init__(self, fileobj, start_byte, chunk_size, full_file_size, + callback=None, enable_callback=True): + """ + + Given a file object shown below: + + |___________________________________________________| + 0 | | full_file_size + |----chunk_size---| + start_byte + + :type fileobj: file + :param fileobj: File like object + + :type start_byte: int + :param start_byte: The first byte from which to start reading. + + :type chunk_size: int + :param chunk_size: The max chunk size to read. 
Trying to read
+        past the end of the chunk size will behave like you've
+        reached the end of the file.
+
+    :type full_file_size: int
+    :param full_file_size: The entire content length associated
+        with ``fileobj``.
+
+    :type callback: function(amount_read)
+    :param callback: Called whenever data is read from this object.
+
+    """
+    self._fileobj = fileobj
+    self._start_byte = start_byte
+    self._size = self._calculate_file_size(
+        self._fileobj, requested_size=chunk_size,
+        start_byte=start_byte, actual_file_size=full_file_size)
+    self._fileobj.seek(self._start_byte)
+    self._amount_read = 0
+    self._callback = callback
+    self._callback_enabled = enable_callback
+
+    @classmethod
+    def from_filename(cls, filename, start_byte, chunk_size, callback=None,
+                      enable_callback=True):
+        """Convenience factory function to create from a filename.
+
+        :type start_byte: int
+        :param start_byte: The first byte from which to start reading.
+
+        :type chunk_size: int
+        :param chunk_size: The max chunk size to read.  Trying to read
+            past the end of the chunk size will behave like you've
+            reached the end of the file.
+
+        :type full_file_size: int
+        :param full_file_size: The entire content length associated
+            with ``fileobj``.
+
+        :type callback: function(amount_read)
+        :param callback: Called whenever data is read from this object.
+
+        :type enable_callback: bool
+        :param enable_callback: Indicate whether to invoke callback
+            during read() calls.
+ + :rtype: ``ReadFileChunk`` + :return: A new instance of ``ReadFileChunk`` + + """ + f = open(filename, 'rb') + file_size = os.fstat(f.fileno()).st_size + return cls(f, start_byte, chunk_size, file_size, callback, + enable_callback) + + def _calculate_file_size(self, fileobj, requested_size, start_byte, + actual_file_size): + max_chunk_size = actual_file_size - start_byte + return min(max_chunk_size, requested_size) + + def read(self, amount=None): + if amount is None: + amount_to_read = self._size - self._amount_read + else: + amount_to_read = min(self._size - self._amount_read, amount) + data = self._fileobj.read(amount_to_read) + self._amount_read += len(data) + if self._callback is not None and self._callback_enabled: + self._callback(len(data)) + return data + + def enable_callback(self): + self._callback_enabled = True + + def disable_callback(self): + self._callback_enabled = False + + def seek(self, where): + self._fileobj.seek(self._start_byte + where) + self._amount_read = where + + def close(self): + self._fileobj.close() + + def tell(self): + return self._amount_read + + def __len__(self): + # __len__ is defined because requests will try to determine the length + # of the stream to set a content length. In the normal case + # of the file it will just stat the file, but we need to change that + # behavior. By providing a __len__, requests will use that instead + # of stat'ing the file. + return self._size + + def __enter__(self): + return self + + def __exit__(self, *args, **kwargs): + self.close() + + def __iter__(self): + # This is a workaround for http://bugs.python.org/issue17575 + # Basically httplib will try to iterate over the contents, even + # if its a file like object. This wasn't noticed because we've + # already exhausted the stream so iterating over the file immediately + # stops, which is what we're simulating here. 
+ return iter([]) + + +class StreamReaderProgress(object): + """Wrapper for a read only stream that adds progress callbacks.""" + def __init__(self, stream, callback=None): + self._stream = stream + self._callback = callback + + def read(self, *args, **kwargs): + value = self._stream.read(*args, **kwargs) + if self._callback is not None: + self._callback(len(value)) + return value + + +class OSUtils(object): + def get_file_size(self, filename): + return os.path.getsize(filename) + + def open_file_chunk_reader(self, filename, start_byte, size, callback): + return ReadFileChunk.from_filename(filename, start_byte, + size, callback, + enable_callback=False) + + def open(self, filename, mode): + return open(filename, mode) + + def remove_file(self, filename): + """Remove a file, noop if file does not exist.""" + # Unlike os.remove, if the file does not exist, + # then this method does nothing. + try: + os.remove(filename) + except OSError: + pass + + def rename_file(self, current_filename, new_filename): + boto3.compat.rename_file(current_filename, new_filename) + + +class MultipartUploader(object): + # These are the extra_args that need to be forwarded onto + # subsequent upload_parts. + UPLOAD_PART_ARGS = [ + 'SSECustomerKey', + 'SSECustomerAlgorithm', + 'SSECustomerKeyMD5', + 'RequestPayer', + ] + + def __init__(self, client, config, osutil, + executor_cls=futures.ThreadPoolExecutor): + self._client = client + self._config = config + self._os = osutil + self._executor_cls = executor_cls + + def _extra_upload_part_args(self, extra_args): + # Only the args in UPLOAD_PART_ARGS actually need to be passed + # onto the upload_part calls. 
+ upload_parts_args = {} + for key, value in extra_args.items(): + if key in self.UPLOAD_PART_ARGS: + upload_parts_args[key] = value + return upload_parts_args + + def upload_file(self, filename, bucket, key, callback, extra_args): + response = self._client.create_multipart_upload(Bucket=bucket, + Key=key, **extra_args) + upload_id = response['UploadId'] + try: + parts = self._upload_parts(upload_id, filename, bucket, key, + callback, extra_args) + except Exception as e: + logger.debug("Exception raised while uploading parts, " + "aborting multipart upload.", exc_info=True) + self._client.abort_multipart_upload( + Bucket=bucket, Key=key, UploadId=upload_id) + raise S3UploadFailedError( + "Failed to upload %s to %s: %s" % ( + filename, '/'.join([bucket, key]), e)) + self._client.complete_multipart_upload( + Bucket=bucket, Key=key, UploadId=upload_id, + MultipartUpload={'Parts': parts}) + + def _upload_parts(self, upload_id, filename, bucket, key, callback, + extra_args): + upload_parts_extra_args = self._extra_upload_part_args(extra_args) + parts = [] + part_size = self._config.multipart_chunksize + num_parts = int( + math.ceil(self._os.get_file_size(filename) / float(part_size))) + max_workers = self._config.max_concurrency + with self._executor_cls(max_workers=max_workers) as executor: + upload_partial = functools.partial( + self._upload_one_part, filename, bucket, key, upload_id, + part_size, upload_parts_extra_args, callback) + for part in executor.map(upload_partial, range(1, num_parts + 1)): + parts.append(part) + return parts + + def _upload_one_part(self, filename, bucket, key, + upload_id, part_size, extra_args, + callback, part_number): + open_chunk_reader = self._os.open_file_chunk_reader + with open_chunk_reader(filename, part_size * (part_number - 1), + part_size, callback) as body: + response = self._client.upload_part( + Bucket=bucket, Key=key, + UploadId=upload_id, PartNumber=part_number, Body=body, + **extra_args) + etag = response['ETag'] + return 
{'ETag': etag, 'PartNumber': part_number} + + +class ShutdownQueue(queue.Queue): + """A queue implementation that can be shutdown. + + Shutting down a queue means that this class adds a + trigger_shutdown method that will trigger all subsequent + calls to put() to fail with a ``QueueShutdownError``. + + It purposefully deviates from queue.Queue, and is *not* meant + to be a drop in replacement for ``queue.Queue``. + + """ + def _init(self, maxsize): + self._shutdown = False + self._shutdown_lock = threading.Lock() + # queue.Queue is an old style class so we don't use super(). + return queue.Queue._init(self, maxsize) + + def trigger_shutdown(self): + with self._shutdown_lock: + self._shutdown = True + logger.debug("The IO queue is now shutdown.") + + def put(self, item): + # Note: this is not sufficient, it's still possible to deadlock! + # Need to hook into the condition vars used by this class. + with self._shutdown_lock: + if self._shutdown: + raise QueueShutdownError("Cannot put item to queue when " + "queue has been shutdown.") + return queue.Queue.put(self, item) + + +class MultipartDownloader(object): + def __init__(self, client, config, osutil, + executor_cls=futures.ThreadPoolExecutor): + self._client = client + self._config = config + self._os = osutil + self._executor_cls = executor_cls + self._ioqueue = ShutdownQueue(self._config.max_io_queue) + + def download_file(self, bucket, key, filename, object_size, + extra_args, callback=None): + with self._executor_cls(max_workers=2) as controller: + # 1 thread for the future that manages the uploading of files + # 1 thread for the future that manages IO writes. 
+ download_parts_handler = functools.partial( + self._download_file_as_future, + bucket, key, filename, object_size, callback) + parts_future = controller.submit(download_parts_handler) + + io_writes_handler = functools.partial( + self._perform_io_writes, filename) + io_future = controller.submit(io_writes_handler) + results = futures.wait([parts_future, io_future], + return_when=futures.FIRST_EXCEPTION) + self._process_future_results(results) + + def _process_future_results(self, futures): + finished, unfinished = futures + for future in finished: + future.result() + + def _download_file_as_future(self, bucket, key, filename, object_size, + callback): + part_size = self._config.multipart_chunksize + num_parts = int(math.ceil(object_size / float(part_size))) + max_workers = self._config.max_concurrency + download_partial = functools.partial( + self._download_range, bucket, key, filename, + part_size, num_parts, callback) + try: + with self._executor_cls(max_workers=max_workers) as executor: + list(executor.map(download_partial, range(num_parts))) + finally: + self._ioqueue.put(SHUTDOWN_SENTINEL) + + def _calculate_range_param(self, part_size, part_index, num_parts): + start_range = part_index * part_size + if part_index == num_parts - 1: + end_range = '' + else: + end_range = start_range + part_size - 1 + range_param = 'bytes=%s-%s' % (start_range, end_range) + return range_param + + def _download_range(self, bucket, key, filename, + part_size, num_parts, callback, part_index): + try: + range_param = self._calculate_range_param( + part_size, part_index, num_parts) + + max_attempts = self._config.num_download_attempts + last_exception = None + for i in range(max_attempts): + try: + logger.debug("Making get_object call.") + response = self._client.get_object( + Bucket=bucket, Key=key, Range=range_param) + streaming_body = StreamReaderProgress( + response['Body'], callback) + buffer_size = 1024 * 16 + current_index = part_size * part_index + for chunk in iter(lambda: 
streaming_body.read(buffer_size), + b''): + self._ioqueue.put((current_index, chunk)) + current_index += len(chunk) + return + except (socket.timeout, socket.error, + ReadTimeoutError, IncompleteReadError) as e: + logger.debug("Retrying exception caught (%s), " + "retrying request, (attempt %s / %s)", e, i, + max_attempts, exc_info=True) + last_exception = e + continue + raise RetriesExceededError(last_exception) + finally: + logger.debug("EXITING _download_range for part: %s", part_index) + + def _perform_io_writes(self, filename): + with self._os.open(filename, 'wb') as f: + while True: + task = self._ioqueue.get() + if task is SHUTDOWN_SENTINEL: + logger.debug("Shutdown sentinel received in IO handler, " + "shutting down IO handler.") + return + else: + try: + offset, data = task + f.seek(offset) + f.write(data) + except Exception as e: + logger.debug("Caught exception in IO thread: %s", + e, exc_info=True) + self._ioqueue.trigger_shutdown() + raise + + +class TransferConfig(object): + def __init__(self, + multipart_threshold=8 * MB, + max_concurrency=10, + multipart_chunksize=8 * MB, + num_download_attempts=5, + max_io_queue=100): + self.multipart_threshold = multipart_threshold + self.max_concurrency = max_concurrency + self.multipart_chunksize = multipart_chunksize + self.num_download_attempts = num_download_attempts + self.max_io_queue = max_io_queue + + +class S3Transfer(object): + + ALLOWED_DOWNLOAD_ARGS = [ + 'VersionId', + 'SSECustomerAlgorithm', + 'SSECustomerKey', + 'SSECustomerKeyMD5', + 'RequestPayer', + ] + + ALLOWED_UPLOAD_ARGS = [ + 'ACL', + 'CacheControl', + 'ContentDisposition', + 'ContentEncoding', + 'ContentLanguage', + 'ContentType', + 'Expires', + 'GrantFullControl', + 'GrantRead', + 'GrantReadACP', + 'GrantWriteACL', + 'Metadata', + 'RequestPayer', + 'ServerSideEncryption', + 'StorageClass', + 'SSECustomerAlgorithm', + 'SSECustomerKey', + 'SSECustomerKeyMD5', + 'SSEKMSKeyId', + ] + + def __init__(self, client, config=None, osutil=None): + 
self._client = client + if config is None: + config = TransferConfig() + self._config = config + if osutil is None: + osutil = OSUtils() + self._osutil = osutil + + def upload_file(self, filename, bucket, key, + callback=None, extra_args=None): + """Upload a file to an S3 object. + + Variants have also been injected into S3 client, Bucket and Object. + You don't have to use S3Transfer.upload_file() directly. + """ + if extra_args is None: + extra_args = {} + self._validate_all_known_args(extra_args, self.ALLOWED_UPLOAD_ARGS) + events = self._client.meta.events + events.register_first('request-created.s3', + disable_upload_callbacks, + unique_id='s3upload-callback-disable') + events.register_last('request-created.s3', + enable_upload_callbacks, + unique_id='s3upload-callback-enable') + if self._osutil.get_file_size(filename) >= \ + self._config.multipart_threshold: + self._multipart_upload(filename, bucket, key, callback, extra_args) + else: + self._put_object(filename, bucket, key, callback, extra_args) + + def _put_object(self, filename, bucket, key, callback, extra_args): + # We're using open_file_chunk_reader so we can take advantage of the + # progress callback functionality. + open_chunk_reader = self._osutil.open_file_chunk_reader + with open_chunk_reader(filename, 0, + self._osutil.get_file_size(filename), + callback=callback) as body: + self._client.put_object(Bucket=bucket, Key=key, Body=body, + **extra_args) + + def download_file(self, bucket, key, filename, extra_args=None, + callback=None): + """Download an S3 object to a file. + + Variants have also been injected into S3 client, Bucket and Object. + You don't have to use S3Transfer.download_file() directly. + """ + # This method will issue a ``head_object`` request to determine + # the size of the S3 object. This is used to determine if the + # object is downloaded in parallel. 
+ if extra_args is None: + extra_args = {} + self._validate_all_known_args(extra_args, self.ALLOWED_DOWNLOAD_ARGS) + object_size = self._object_size(bucket, key, extra_args) + temp_filename = filename + os.extsep + random_file_extension() + try: + self._download_file(bucket, key, temp_filename, object_size, + extra_args, callback) + except Exception: + logger.debug("Exception caught in download_file, removing partial " + "file: %s", temp_filename, exc_info=True) + self._osutil.remove_file(temp_filename) + raise + else: + self._osutil.rename_file(temp_filename, filename) + + def _download_file(self, bucket, key, filename, object_size, + extra_args, callback): + if object_size >= self._config.multipart_threshold: + self._ranged_download(bucket, key, filename, object_size, + extra_args, callback) + else: + self._get_object(bucket, key, filename, extra_args, callback) + + def _validate_all_known_args(self, actual, allowed): + for kwarg in actual: + if kwarg not in allowed: + raise ValueError( + "Invalid extra_args key '%s', " + "must be one of: %s" % ( + kwarg, ', '.join(allowed))) + + def _ranged_download(self, bucket, key, filename, object_size, + extra_args, callback): + downloader = MultipartDownloader(self._client, self._config, + self._osutil) + downloader.download_file(bucket, key, filename, object_size, + extra_args, callback) + + def _get_object(self, bucket, key, filename, extra_args, callback): + # precondition: num_download_attempts > 0 + max_attempts = self._config.num_download_attempts + last_exception = None + for i in range(max_attempts): + try: + return self._do_get_object(bucket, key, filename, + extra_args, callback) + except (socket.timeout, socket.error, + ReadTimeoutError, IncompleteReadError) as e: + # TODO: we need a way to reset the callback if the + # download failed. 
+ logger.debug("Retrying exception caught (%s), " + "retrying request, (attempt %s / %s)", e, i, + max_attempts, exc_info=True) + last_exception = e + continue + raise RetriesExceededError(last_exception) + + def _do_get_object(self, bucket, key, filename, extra_args, callback): + response = self._client.get_object(Bucket=bucket, Key=key, + **extra_args) + streaming_body = StreamReaderProgress( + response['Body'], callback) + with self._osutil.open(filename, 'wb') as f: + for chunk in iter(lambda: streaming_body.read(8192), b''): + f.write(chunk) + + def _object_size(self, bucket, key, extra_args): + return self._client.head_object( + Bucket=bucket, Key=key, **extra_args)['ContentLength'] + + def _multipart_upload(self, filename, bucket, key, callback, extra_args): + uploader = MultipartUploader(self._client, self._config, self._osutil) + uploader.upload_file(filename, bucket, key, callback, extra_args) diff --git a/boto3/session.py b/boto3/session.py new file mode 100644 index 0000000..8f1c7f4 --- /dev/null +++ b/boto3/session.py @@ -0,0 +1,345 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import copy +import os + +import botocore.session +from botocore.client import Config + +import boto3 +import boto3.utils + +from .resources.factory import ResourceFactory + + +class Session(object): + """ + A session stores configuration state and allows you to create service + clients and resources. 
+ + :type aws_access_key_id: string + :param aws_access_key_id: AWS access key ID + :type aws_secret_access_key: string + :param aws_secret_access_key: AWS secret access key + :type aws_session_token: string + :param aws_session_token: AWS temporary session token + :type region_name: string + :param region_name: Default region when creating new connections + :type botocore_session: botocore.session.Session + :param botocore_session: Use this Botocore session instead of creating + a new default one. + :type profile_name: string + :param profile_name: The name of a profile to use. If not given, then + the default profile is used. + """ + def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, + aws_session_token=None, region_name=None, + botocore_session=None, profile_name=None): + if botocore_session is not None: + self._session = botocore_session + else: + # Create a new default session + self._session = botocore.session.get_session() + + # Setup custom user-agent string if it isn't already customized + if self._session.user_agent_name == 'Botocore': + botocore_info = 'Botocore/{0}'.format( + self._session.user_agent_version) + if self._session.user_agent_extra: + self._session.user_agent_extra += ' ' + botocore_info + else: + self._session.user_agent_extra = botocore_info + self._session.user_agent_name = 'Boto3' + self._session.user_agent_version = boto3.__version__ + + if profile_name is not None: + self._session.set_config_variable('profile', profile_name) + + if aws_access_key_id or aws_secret_access_key or aws_session_token: + self._session.set_credentials(aws_access_key_id, + aws_secret_access_key, aws_session_token) + + if region_name is not None: + self._session.set_config_variable('region', region_name) + + self.resource_factory = ResourceFactory( + self._session.get_component('event_emitter')) + self._setup_loader() + self._register_default_handlers() + + def __repr__(self): + return 'Session(region={0})'.format( + 
repr(self._session.get_config_variable('region'))) + + @property + def profile_name(self): + """ + The **read-only** profile name. + """ + return self._session.profile or 'default' + + @property + def events(self): + """ + The event emitter for a session + """ + return self._session.get_component('event_emitter') + + def _setup_loader(self): + """ + Setup loader paths so that we can load resources. + """ + self._loader = self._session.get_component('data_loader') + self._loader.search_paths.append( + os.path.join(os.path.dirname(__file__), 'data')) + + def get_available_services(self): + """ + Get a list of available services that can be loaded as low-level + clients via :py:meth:`Session.client`. + + :rtype: list + :return: List of service names + """ + return self._session.get_available_services() + + def get_available_resources(self): + """ + Get a list of available services that can be loaded as resource + clients via :py:meth:`Session.resource`. + + :rtype: list + :return: List of service names + """ + return self._loader.list_available_services(type_name='resources-1') + + def client(self, service_name, region_name=None, api_version=None, + use_ssl=True, verify=None, endpoint_url=None, + aws_access_key_id=None, aws_secret_access_key=None, + aws_session_token=None, config=None): + """ + Create a low-level service client by name. + + :type service_name: string + :param service_name: The name of a service, e.g. 's3' or 'ec2'. You + can get a list of available services via + :py:meth:`get_available_services`. + + :type region_name: string + :param region_name: The name of the region associated with the client. + A client is associated with a single region. + + :type api_version: string + :param api_version: The API version to use. By default, botocore will + use the latest API version when creating a client. You only need + to specify this parameter if you want to use a previous API version + of the client. 
+ + :type use_ssl: boolean + :param use_ssl: Whether or not to use SSL. By default, SSL is used. Note that + not all services support non-ssl connections. + + :type verify: boolean/string + :param verify: Whether or not to verify SSL certificates. By default SSL certificates + are verified. You can provide the following values: + + * False - do not validate SSL certificates. SSL will still be + used (unless use_ssl is False), but SSL certificates + will not be verified. + * path/to/cert/bundle.pem - A filename of the CA cert bundle to + use. You can specify this argument if you want to use a different + CA cert bundle than the one used by botocore. + + :type endpoint_url: string + :param endpoint_url: The complete URL to use for the constructed client. + Normally, botocore will automatically construct the appropriate URL + to use when communicating with a service. You can specify a + complete URL (including the "http/https" scheme) to override this + behavior. If this value is provided, then ``use_ssl`` is ignored. + + :type aws_access_key_id: string + :param aws_access_key_id: The access key to use when creating + the client. This is entirely optional, and if not provided, + the credentials configured for the session will automatically + be used. You only need to provide this argument if you want + to override the credentials used for this specific client. + + :type aws_secret_access_key: string + :param aws_secret_access_key: The secret key to use when creating + the client. Same semantics as aws_access_key_id above. + + :type aws_session_token: string + :param aws_session_token: The session token to use when creating + the client. Same semantics as aws_access_key_id above. + + :type config: botocore.client.Config + :param config: Advanced client configuration options. 
If region_name + is specified in the client config, its value will take precedence + over environment variables and configuration values, but not over + a region_name value passed explicitly to the method. + + :return: Service client instance + + """ + return self._session.create_client( + service_name, region_name=region_name, api_version=api_version, + use_ssl=use_ssl, verify=verify, endpoint_url=endpoint_url, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, config=config) + + def resource(self, service_name, region_name=None, api_version=None, + use_ssl=True, verify=None, endpoint_url=None, + aws_access_key_id=None, aws_secret_access_key=None, + aws_session_token=None, config=None): + """ + Create a resource service client by name. + + :type service_name: string + :param service_name: The name of a service, e.g. 's3' or 'ec2'. You + can get a list of available services via + :py:meth:`get_available_resources`. + + :type region_name: string + :param region_name: The name of the region associated with the client. + A client is associated with a single region. + + :type api_version: string + :param api_version: The API version to use. By default, botocore will + use the latest API version when creating a client. You only need + to specify this parameter if you want to use a previous API version + of the client. + + :type use_ssl: boolean + :param use_ssl: Whether or not to use SSL. By default, SSL is used. Note that + not all services support non-ssl connections. + + :type verify: boolean/string + :param verify: Whether or not to verify SSL certificates. By default SSL certificates + are verified. You can provide the following values: + + * False - do not validate SSL certificates. SSL will still be + used (unless use_ssl is False), but SSL certificates + will not be verified. + * path/to/cert/bundle.pem - A filename of the CA cert bundle to + use. 
You can specify this argument if you want to use a different + CA cert bundle than the one used by botocore. + + :type endpoint_url: string + :param endpoint_url: The complete URL to use for the constructed client. + Normally, botocore will automatically construct the appropriate URL + to use when communicating with a service. You can specify a + complete URL (including the "http/https" scheme) to override this + behavior. If this value is provided, then ``use_ssl`` is ignored. + + :type aws_access_key_id: string + :param aws_access_key_id: The access key to use when creating + the client. This is entirely optional, and if not provided, + the credentials configured for the session will automatically + be used. You only need to provide this argument if you want + to override the credentials used for this specific client. + + :type aws_secret_access_key: string + :param aws_secret_access_key: The secret key to use when creating + the client. Same semantics as aws_access_key_id above. + + :type aws_session_token: string + :param aws_session_token: The session token to use when creating + the client. Same semantics as aws_access_key_id above. + + :type config: botocore.client.Config + :param config: Advanced client configuration options. If region_name + is specified in the client config, its value will take precedence + over environment variables and configuration values, but not over + a region_name value passed explicitly to the method. If + user_agent_extra is specified in the client config, it overrides + the default user_agent_extra provided by the resource API. 
+ + :return: Subclass of :py:class:`~boto3.resources.base.ServiceResource` + """ + if api_version is None: + api_version = self._loader.determine_latest_version( + service_name, 'resources-1') + resource_model = self._loader.load_service_model( + service_name, 'resources-1', api_version) + + # Creating a new resource instance requires the low-level client + # and service model, the resource version and resource JSON data. + # We pass these to the factory and get back a class, which is + # instantiated on top of the low-level client. + if config is not None: + if config.user_agent_extra is None: + config = copy.deepcopy(config) + config.user_agent_extra = 'Resource' + else: + config = Config(user_agent_extra='Resource') + client = self.client( + service_name, region_name=region_name, api_version=api_version, + use_ssl=use_ssl, verify=verify, endpoint_url=endpoint_url, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, config=config) + service_model = client.meta.service_model + + # Create a ServiceContext object to serve as a reference to + # important read-only information about the general service. + service_context = boto3.utils.ServiceContext( + service_name=service_name, service_model=service_model, + resource_json_definitions=resource_model['resources'], + service_waiter_model=boto3.utils.LazyLoadedWaiterModel( + self._session, service_name, api_version) + ) + + # Create the service resource class. 
+ cls = self.resource_factory.load_from_definition( + resource_name=service_name, + single_resource_json_definition=resource_model['service'], + service_context=service_context + ) + + return cls(client=client) + + def _register_default_handlers(self): + + # S3 customizations + self._session.register( + 'creating-client-class.s3', + boto3.utils.lazy_call( + 'boto3.s3.inject.inject_s3_transfer_methods')) + self._session.register( + 'creating-resource-class.s3.Bucket', + boto3.utils.lazy_call( + 'boto3.s3.inject.inject_bucket_methods')) + self._session.register( + 'creating-resource-class.s3.Object', + boto3.utils.lazy_call( + 'boto3.s3.inject.inject_object_methods')) + + # DynamoDb customizations + self._session.register( + 'creating-resource-class.dynamodb', + boto3.utils.lazy_call( + 'boto3.dynamodb.transform.register_high_level_interface'), + unique_id='high-level-dynamodb') + self._session.register( + 'creating-resource-class.dynamodb.Table', + boto3.utils.lazy_call( + 'boto3.dynamodb.table.register_table_methods'), + unique_id='high-level-dynamodb-table') + + # EC2 Customizations + self._session.register( + 'creating-resource-class.ec2.ServiceResource', + boto3.utils.lazy_call( + 'boto3.ec2.createtags.inject_create_tags')) diff --git a/boto3/utils.py b/boto3/utils.py new file mode 100644 index 0000000..58fff79 --- /dev/null +++ b/boto3/utils.py @@ -0,0 +1,89 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+import sys +from collections import namedtuple + + +_ServiceContext = namedtuple( + 'ServiceContext', + ['service_name', 'service_model', 'service_waiter_model', + 'resource_json_definitions'] +) + + +class ServiceContext(_ServiceContext): + """Provides important service-wide, read-only information about a service + + :type service_name: str + :param service_name: The name of the service + + :type service_model: :py:class:`botocore.model.ServiceModel` + :param service_model: The model of the service. + + :type service_waiter_model: :py:class:`botocore.waiter.WaiterModel` or + a waiter model-like object such as + :py:class:`boto3.utils.LazyLoadedWaiterModel` + :param service_waiter_model: The waiter model of the service. + + :type resource_json_definitions: dict + :param resource_json_definitions: The loaded json models of all resource + shapes for a service. It is the equivalent of loading a + ``resource-1.json`` and retrieving the value at the key "resources". + """ + pass + + +def import_module(name): + """Import module given a name. + + Does not support relative imports. + + """ + __import__(name) + return sys.modules[name] + + +def lazy_call(full_name): + def _handler(**kwargs): + module, function_name = full_name.rsplit('.', 1) + module = import_module(module) + return getattr(module, function_name)(**kwargs) + return _handler + + +def inject_attribute(class_attributes, name, value): + if name in class_attributes: + raise RuntimeError( + 'Cannot inject class attribute "%s", attribute ' + 'already exists in class dict.' % name) + else: + class_attributes[name] = value + + +class LazyLoadedWaiterModel(object): + """A lazily loaded waiter model + + This does not load the service waiter model until an attempt is made + to retrieve the waiter model for a specific waiter. 
This is helpful + in docstring generation where we do not actually need to grab + the waiter-2.json until it is accessed through a ``get_waiter`` call + when the docstring is generated/accessed. + """ + def __init__(self, bc_session, service_name, api_version): + self._session = bc_session + self._service_name = service_name + self._api_version = api_version + + def get_waiter(self, waiter_name): + return self._session.get_waiter_model( + self._service_name, self._api_version).get_waiter(waiter_name) diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..9a2d005 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,153 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man 
to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Boto3.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Boto3.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/Boto3" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Boto3" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." 
+ +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..be24259 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,190 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source +set I18NSPHINXOPTS=%SPHINXOPTS% source +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. linkcheck to check all external links for integrity + echo. 
doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Boto3.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Boto3.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. 
+ goto end +) + +:end diff --git a/docs/source/_templates/globaltoc.html b/docs/source/_templates/globaltoc.html new file mode 100644 index 0000000..2099880 --- /dev/null +++ b/docs/source/_templates/globaltoc.html @@ -0,0 +1,13 @@ + diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..7a3ea24 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,273 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# Boto3 documentation build configuration file, created by +# sphinx-quickstart on Wed Sep 3 11:11:30 2014. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os + +import boto3 +import boto3.session +from boto3.docs import generate_docs + + +session = boto3.session.Session(region_name='us-east-1') +generate_docs(os.path.dirname(os.path.abspath(__file__)), session) + + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. 
+#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'Boto 3 Docs' +copyright = '2014, Amazon.com, Inc.' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = boto3.__version__ +# The full version, including alpha/beta/rc tags. +release = boto3.__version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. 
+#modindex_common_prefix = [] + + +import guzzle_sphinx_theme + +extensions.append("guzzle_sphinx_theme") +html_translator_class = 'guzzle_sphinx_theme.HTMLTranslator' +html_theme_path = guzzle_sphinx_theme.html_theme_path() +html_theme = 'guzzle_sphinx_theme' +# Guzzle theme options (see theme.conf for more information) + +html_theme_options = { + # hack to add tracking + "google_analytics_account": os.getenv('TRACKING', False), + "base_url": "http://docs.aws.amazon.com/aws-sdk-php/guide/latest/" +} + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +#html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +html_show_sourcelink = False +html_sidebars = { + '**': ['logo-text.html', + 'globaltoc.html', + 'searchbox.html'] +} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Boto3doc' + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). 
+latex_documents = [ + ('index', 'Boto3.tex', 'Boto3 Documentation', + 'Amazon.com, Inc.', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'boto3', 'Boto3 Documentation', + ['Amazon.com, Inc.'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'Boto3', 'Boto3 Documentation', + 'Amazon.com, Inc.', 'Boto3', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +autoclass_content = 'both' diff --git a/docs/source/guide/clients.rst b/docs/source/guide/clients.rst new file mode 100644 index 0000000..2e2feda --- /dev/null +++ b/docs/source/guide/clients.rst @@ -0,0 +1,109 @@ +.. 
_guide_clients: + +Low-level Clients +================= +Clients provide a low-level interface to AWS whose methods map close to 1:1 +with service APIs. All service operations are supported by clients. Clients +are generated from a JSON service definition file. + +Creating Clients +---------------- +Clients are created in a similar fashion to resources:: + + import boto3 + + # Create a low-level client with the service name + sqs = boto3.client('sqs') + +It is also possible to access the low-level client from an existing +resource:: + + # Create the resource + sqs_resource = boto3.resource('sqs') + + # Get the client from the resource + sqs = sqs_resource.meta.client + +Service Operations +------------------ +Service operations map to client methods of the same name and provide +access to the same operation parameters via keyword arguments:: + + # Make a call using the low-level client + response = sqs.send_message(QueueUrl='...', MessageBody='...') + +As can be seen above, the method arguments map directly to the associated +`SQS API `_. + +.. note:: + + The method names have been snake-cased for better looking Python code. + + Parameters **must** be sent as keyword arguments. They will not work + as positional arguments. + +Handling Responses +------------------ +Responses are returned as python dictionaries. It is up to you to traverse +or otherwise process the response for the data you need, keeping in mind +that responses may not always include all of the expected data. In the +example below, ``response.get('QueueUrls', [])`` is used to ensure that a +list is always returned, even when the response has no key ``'QueueUrls'``:: + + # List all your queues + response = sqs.list_queues() + for url in response.get('QueueUrls', []): + print(url) + +The ``response`` in the example above looks something like this: + +.. 
code-block:: json + + { + "QueueUrls": [ + "http://url1", + "http://url2", + "http://url3" + ] + } + +Waiters +------- +Waiters use a client's service operations to poll the status of an AWS resource +and suspend execution until the AWS resource reaches the state that the +waiter is polling for or a failure occurs while polling. +Using clients, you can learn the name of each waiter that a client has access +to:: + + import boto3 + + s3 = boto3.client('s3') + sqs = boto3.client('sqs') + + # List all of the possible waiters for both clients + print("s3 waiters:") + s3.waiter_names + + print("sqs waiters:") + sqs.waiter_names + +Note if a client does not have any waiters, it will return an empty list when +accessing its ``waiter_names`` attribute:: + + s3 waiters: + [u'bucket_exists', u'bucket_not_exists', u'object_exists', u'object_not_exists'] + sqs waiters: + [] + +Using a client's ``get_waiter()`` method, you can obtain a specific waiter +from its list of possible waiters:: + + # Retrieve waiter instance that will wait till a specified + # S3 bucket exists + s3_bucket_exists_waiter = s3.get_waiter('bucket_exists') + +Then to actually start waiting, you must call the waiter's ``wait()`` method +with the method's appropriate parameters passed in:: + + # Begin waiting for the S3 bucket, mybucket, to exist + s3_bucket_exists_waiter.wait(Bucket='mybucket') diff --git a/docs/source/guide/collections.rst b/docs/source/guide/collections.rst new file mode 100644 index 0000000..6179299 --- /dev/null +++ b/docs/source/guide/collections.rst @@ -0,0 +1,134 @@ +.. _guide_collections: + +Collections +=========== + +Overview +-------- +A collection provides an iterable interface to a group of resources. +Collections behave similarly to +`Django QuerySets `_ +and expose a similar API. A collection seamlessly handles pagination for +you, making it possible to easily iterate over all items from all pages of +data. 
Example of a collection:: + + # SQS list all queues + sqs = boto3.resource('sqs') + for queue in sqs.queues.all(): + print(queue.url) + +When Collections Make Requests +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Collections can be created and manipulated without any request being made +to the underlying service. A collection makes a remote service request under +the following conditions: + +* **Iteration**:: + + for bucket in s3.buckets.all(): + print(bucket.name) + +* **Conversion to list()**:: + + buckets = list(s3.buckets.all()) + +* **Batch actions (see below)**:: + + s3.Bucket('my-bucket').objects.delete() + +Filtering +--------- +Some collections support extra arguments to filter the returned data set, +which are passed into the underlying service operation. Use the +:py:meth:`~boto3.resources.collection.Collection.filter` method to filter +the results:: + + # S3 list all keys with the prefix '/photos' + s3 = boto3.resource('s3') + for bucket in s3.buckets.all(): + for obj in bucket.objects.filter(Prefix='/photos'): + print('{0}:{1}'.format(bucket.name, obj.key)) + +.. warning:: + + Behind the scenes, the above example will call ``ListBuckets``, + ``ListObjects``, and ``HeadObject`` many times. If you have a large + number of S3 objects then this could incur a significant cost. + +Chainability +------------ +Collection methods are chainable. They return copies of the collection +rather than modifying the collection, including a deep copy of any +associated operation parameters. For example, this allows you +to build up multiple collections from a base which they all have +in common:: + + # EC2 find instances + ec2 = boto3.resource('ec2') + base = ec2.instances.filter(InstanceIds=['id1', 'id2', 'id3']) + + filters = [{ + 'name': 'tenancy', + 'value': 'dedicated' + }] + filtered1 = base.filter(Filters=filters) + + # Note, this does NOT modify the filters in ``filtered1``! 
+    filters.append({'name': 'instance-type', 'value': 't1.micro'})
+    filtered2 = base.filter(Filters=filters)
+
+    print('All instances:')
+    for instance in base:
+        print(instance.id)
+
+    print('Dedicated instances:')
+    for instance in filtered1:
+        print(instance.id)
+
+    print('Dedicated micro instances:')
+    for instance in filtered2:
+        print(instance.id)
+
+Limiting Results
+----------------
+It is possible to limit the number of items returned from a collection
+by using the
+:py:meth:`~boto3.resources.collection.ResourceCollection.limit` method::
+
+    # S3 iterate over first ten buckets
+    for bucket in s3.buckets.limit(10):
+        print(bucket.name)
+
+Up to 10 items total will be returned. If you do not
+have 10 buckets, then all of your buckets will be returned.
+
+Controlling Page Size
+---------------------
+Collections automatically handle paging through results, but you may want
+to control the number of items returned from a single service operation
+call. You can do so using the
+:py:meth:`~boto3.resources.collection.ResourceCollection.page_size` method::
+
+    # S3 iterate over all objects 100 at a time
+    for obj in bucket.objects.page_size(100):
+        print(obj.key)
+
+
+By default, S3 will return 1000 objects at a time, so the above code
+would let you process the items in smaller batches, which could be
+beneficial for slow or unreliable internet connections.
+
+Batch Actions
+-------------
+Some collections support batch actions, which are actions that operate
+on an entire page of results at a time. They will automatically handle
+pagination::
+
+    # S3 delete everything in `my-bucket`
+    s3 = boto3.resource('s3')
+    s3.Bucket('my-bucket').objects.delete()
+
+.. danger::
+
+    The above example will **completely erase all data** in the ``my-bucket``
+    bucket! Please be careful with batch actions.
diff --git a/docs/source/guide/configuration.rst b/docs/source/guide/configuration.rst new file mode 100644 index 0000000..2114856 --- /dev/null +++ b/docs/source/guide/configuration.rst @@ -0,0 +1,117 @@ +.. _guide_configuration: + +Configuration +============= +Boto can be configured in multiple ways. Regardless of the source or sources +that you choose, you **must** have AWS credentials and a region set in +order to make requests. + +Interactive Configuration +------------------------- +If you have the `AWS CLI `_, then you can use +its interactive ``configure`` command to set up your credentials and +default region:: + + aws configure + +Follow the prompts and it will generate configuration files in the +correct locations for you. + +Configuration Sources +--------------------- +There are multiple sources from which configuration data can be loaded. +The general order in which they are checked is as follows: + +1. Method parameters +2. Environment variables +3. Configuration files +4. EC2 Instance metadata + +If a configuration value is set in multiple places, then the first +will be used according the the order above. For example, if I have +set a default region in both my environment variables and configuration +file, then the environment variable is used. + +Available Options +----------------- +The available options for various configuration sources are listed below. + +Method Parameters +~~~~~~~~~~~~~~~~~ +When creating a session, client, or resource you can pass in credential +and configuration options:: + + from boto3.session import Session + + session = Session(aws_access_key_id='', + aws_secret_access_key='', + region_name='') + + ec2 = session.resource('ec2') + ec2_us_west_2 = session.resource('ec2', region_name='us-west-2') + + # List all of my EC2 instances in my default region. + print('Default region:') + for instance in ec2.instances.all(): + print(instance.id) + + # List all of my EC2 instances in us-west-2. 
+ print('US West 2 region:') + for instance in ec2_us_west_2.instances.all(): + print(instance.id) + +For a list of all options, look at the :py:class:`~boto3.session.Session` +documentation. + +Environment Variables +~~~~~~~~~~~~~~~~~~~~~ + +``AWS_ACCESS_KEY_ID`` + The access key for your AWS account. + +``AWS_SECRET_ACCESS_KEY`` + The secret key for your AWS account. + +``AWS_DEFAULT_REGION`` + The default region to use, e.g. `us-east-1`. + +``AWS_PROFILE`` + The default credential and configuration profile to use, if any. + + +Configuration Files +~~~~~~~~~~~~~~~~~~~ +There are two configuration files that Boto checks. The first is the +shared credential file, which holds only credentials and is shared between +various SDKs and tools like Boto and the AWS CLI. By default, this +file is located at ``~/.aws/credentials``:: + + [default] + # The access key for your AWS account + aws_access_key_id= + + # The secret key for your AWS account + aws_secret_access_key= + +Credentials can also be set for individual profiles:: + + [dev-profile] + # The access key for your dev-profile account + aws_access_key_id= + + # The secret key for your dev-profile account + aws_secret_access_key= + +The second configuration file stores all settings which are not +credentials. Its default location is ``~/.aws/config``:: + + [default] + # The default region when making requests + region= + +It also supports profiles, but these are prefixed with the word +``profile`` because this file supports sections other than profiles:: + + [profile dev-profile] + # The default region when using the dev-profile account + region= diff --git a/docs/source/guide/dynamodb.rst b/docs/source/guide/dynamodb.rst new file mode 100644 index 0000000..49516f8 --- /dev/null +++ b/docs/source/guide/dynamodb.rst @@ -0,0 +1,406 @@ +.. 
_dynamodb_guide: + +DynamoDB +======== +By following this guide, you will learn how to use the +:py:class:`DynamoDB.ServiceResource` and :py:class:`DynamoDB.Table` +resources in order to create tables, write items to tables, modify existing +items, retrieve items, and query/filter the items in the table. + + +Creating a New Table +-------------------- + +In order to create a new table, use the +:py:meth:`DynamoDB.ServiceResource.create_table` method:: + + import boto3 + + # Get the service resource. + dynamodb = boto3.resource('dynamodb') + + # Create the DynamoDB table. + table = dynamodb.create_table( + TableName='users', + KeySchema=[ + { + 'AttributeName': 'username', + 'KeyType': 'HASH' + }, + { + 'AttributeName': 'last_name', + 'KeyType': 'RANGE' + } + ], + AttributeDefinitions=[ + { + 'AttributeName': 'username', + 'AttributeType': 'S' + }, + { + 'AttributeName': 'last_name', + 'AttributeType': 'S' + }, + + ], + ProvisionedThroughput={ + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + ) + + # Wait until the table exists. + table.meta.client.get_waiter('table_exists').wait(TableName='users') + + # Print out some data about the table. + print(table.item_count) + +Expected Output:: + + 0 + +This creates a table named ``users`` that respectively has the hash and +range primary keys ``username`` and ``last_name``. +This method will return a :py:class:`DynamoDB.Table` resource to call +additional methods on the created table. + + +Using an Existing Table +----------------------- +It is also possible to create a :py:class:`DynamoDB.Table` resource from +an existing table:: + + import boto3 + + # Get the service resource. + dynamodb = boto3.resource('dynamodb') + + # Instantiate a table resource object without actually + # creating a DynamoDB table. 
Note that the attributes of this table
+    # are lazy-loaded: a request is not made nor are the attribute
+    # values populated until the attributes
+    # on the table resource are accessed or its load() method is called.
+    table = dynamodb.Table('users')
+
+    # Print out some data about the table.
+    # This will cause a request to be made to DynamoDB and its attribute
+    # values will be set based on the response.
+    print(table.creation_date_time)
+
+Expected Output (Please note that the actual times will probably not match up)::
+
+    2015-06-26 12:42:45.149000-07:00
+
+
+Creating a New Item
+-------------------
+
+Once you have a :py:class:`DynamoDB.Table` resource you can add new items
+to the table using :py:meth:`DynamoDB.Table.put_item`::
+
+    table.put_item(
+        Item={
+            'username': 'janedoe',
+            'first_name': 'Jane',
+            'last_name': 'Doe',
+            'age': 25,
+            'account_type': 'standard_user',
+        }
+    )
+
+For all of the valid types that can be used for an item, refer to
+:ref:`ref_valid_dynamodb_types`. 
+ + +Getting an Item +--------------- +You can then retrieve the object using :py:meth:`DynamoDB.Table.get_item`:: + + response = table.get_item( + Key={ + 'username': 'janedoe', + 'last_name': 'Doe' + } + ) + item = response['Item'] + print(item) + + +Expected Output:: + + {u'username': u'janedoe', + u'first_name': u'Jane', + u'last_name': u'Doe', + u'account_type': u'standard_user', + u'age': Decimal('25')} + + +Updating Item +------------- + +Using the retrieved item, you can update attributes of the item in the table:: + + item['age'] = 26 + table.put_item(Item=item) + +Then if you retrieve the item again, it will be updated appropriately:: + + response = table.get_item( + Key={ + 'username': 'janedoe', + 'last_name': 'Doe' + } + ) + item = response['Item'] + print(item) + + +Expected Output:: + + {u'username': u'janedoe', + u'first_name': u'Jane', + u'last_name': u'Doe', + u'account_type': u'standard_user', + u'age': Decimal('26')} + + +Deleting Item +------------- +You can also delete the item using :py:meth:`DynamoDB.Table.delete_item`:: + + table.delete_item( + Key={ + 'username': 'janedoe', + 'last_name': 'Doe' + } + ) + + +Batch Writing +------------- +If you are loading a lot of data at a time, you can make use of +:py:meth:`DyanmoDB.Table.batch_writer` so you can both speed up the process and +reduce the number of write requests made to the service. + +This method returns a handle to a batch writer object that will automatically +handle buffering and sending items in batches. In addition, the +batch writer will also automatically handle any unprocessed items and +resend them as needed. 
All you need to do is call ``put_item`` for any +items you want to add, and ``delete_item`` for any items you want to delete:: + + with table.batch_writer() as batch: + batch.put_item( + Item={ + 'account_type': 'standard_user', + 'username': 'johndoe', + 'first_name': 'John', + 'last_name': 'Doe', + 'age': 25, + 'address': { + 'road': '1 Jefferson Street', + 'city': 'Los Angeles', + 'state': 'CA', + 'zipcode': 90001 + } + } + ) + batch.put_item( + Item={ + 'account_type': 'super_user', + 'username': 'janedoering', + 'first_name': 'Jane', + 'last_name': 'Doering', + 'age': 40, + 'address': { + 'road': '2 Washington Avenue', + 'city': 'Seattle', + 'state': 'WA', + 'zipcode': 98109 + } + } + ) + batch.put_item( + Item={ + 'account_type': 'standard_user', + 'username': 'bobsmith', + 'first_name': 'Bob', + 'last_name': 'Smith', + 'age': 18, + 'address': { + 'road': '3 Madison Lane', + 'city': 'Louisville', + 'state': 'KY', + 'zipcode': 40213 + } + } + ) + batch.put_item( + Item={ + 'account_type': 'super_user', + 'username': 'alicedoe', + 'first_name': 'Alice', + 'last_name': 'Doe', + 'age': 27, + 'address': { + 'road': '1 Jefferson Street', + 'city': 'Los Angeles', + 'state': 'CA', + 'zipcode': 90001 + } + } + ) + +The batch writer is even able to handle a very large amount of writes to the +table. + +:: + + with table.batch_writer() as batch: + for i in range(50): + batch.put_item( + Item={ + 'account_type': 'anonymous', + 'username': 'user' + str(i), + 'first_name': 'unknown', + 'last_name': 'unknown' + } + ) + + +Querying and Scanning +--------------------- + +With the table full of items, you can then query or scan the items in the table +using the :py:meth:`DynamoDB.Table.query` or :py:meth:`DynamoDB.Table.scan` +methods respectively. To add conditions to scanning and querying the table, +you will need to import the :py:class:`boto3.dynamodb.conditions.Key` and +:py:class:`boto3.dynamodb.conditions.Attr` classes. 
The +:py:class:`boto3.dynamodb.conditions.Key` should be used when the +condition is related to the key of the item. +The :py:class:`boto3.dynamodb.conditions.Attr` should be used when the +condition is related to an attribute of the item:: + + from boto3.dynamodb.conditions import Key, Attr + + +This queries for all of the users whose ``username`` key equals ``johndoe``:: + + response = table.query( + KeyConditionExpression=Key('username').eq('johndoe') + ) + items = response['Items'] + print(items) + + +Expected Output:: + + [{u'username': u'johndoe', + u'first_name': u'John', + u'last_name': u'Doe', + u'account_type': u'standard_user', + u'age': Decimal('25'), + u'address': {u'city': u'Los Angeles', + u'state': u'CA', + u'zipcode': Decimal('90001'), + u'road': u'1 Jefferson Street'}}] + + +Similiarly you can scan the table based on attributes of the items. For +example, this scans for all the users whose ``age`` is less than ``27``:: + + response = table.scan( + FilterExpression=Attr('age').lt(27) + ) + items = response['Items'] + print(items) + + +Expected Output:: + + [{u'username': u'johndoe', + u'first_name': u'John', + u'last_name': u'Doe', + u'account_type': u'standard_user', + u'age': Decimal('25'), + u'address': {u'city': u'Los Angeles', + u'state': u'CA', + u'zipcode': Decimal('90001'), + u'road': u'1 Jefferson Street'}}, + {u'username': u'bobsmith', + u'first_name': u'Bob', + u'last_name': u'Smith', + u'account_type': u'standard_user', + u'age': Decimal('18'), + u'address': {u'city': u'Louisville', + u'state': u'KY', + u'zipcode': Decimal('40213'), + u'road': u'3 Madison Lane'}}] + + +You are also able to chain conditions together using the logical operators: +``&`` (and), ``|`` (or), and ``~`` (not). 
For example, this scans for all +users whose ``first_name`` starts with ``J`` and whose ``account_type`` is +``super_user``:: + + response = table.scan( + FilterExpression=Attr('first_name').begins_with('J') & Attr('account_type').eq('super_user') + ) + items = response['Items'] + print(items) + + +Expected Output:: + + [{u'username': u'janedoering', + u'first_name': u'Jane', + u'last_name': u'Doering', + u'account_type': u'super_user', + u'age': Decimal('40'), + u'address': {u'city': u'Seattle', + u'state': u'WA', + u'zipcode': Decimal('98109'), + u'road': u'2 Washington Avenue'}}] + + +You can even scan based on conditions of a nested attribute. For example this +scans for all users whose ``state`` in their ``address`` is ``CA``:: + + response = table.scan( + FilterExpression=Attr('address.state').eq('CA') + ) + items = response['Items'] + print(items) + + +Expected Output:: + + [{u'username': u'johndoe', + u'first_name': u'John', + u'last_name': u'Doe', + u'account_type': u'standard_user', + u'age': Decimal('25'), + u'address': {u'city': u'Los Angeles', + u'state': u'CA', + u'zipcode': Decimal('90001'), + u'road': u'1 Jefferson Street'}}, + {u'username': u'alicedoe', + u'first_name': u'Alice', + u'last_name': u'Doe', + u'account_type': u'super_user', + u'age': Decimal('27'), + u'address': {u'city': u'Los Angeles', + u'state': u'CA', + u'zipcode': Decimal('90001'), + u'road': u'1 Jefferson Street'}}] + + +For more information on the various conditions you can use for queries and +scans, refer to :ref:`ref_dynamodb_conditions`. + + +Deleting a Table +---------------- +Finally, if you want to delete your table call +:py:meth:`DynamoDB.Table.delete`:: + + table.delete() diff --git a/docs/source/guide/events.rst b/docs/source/guide/events.rst new file mode 100644 index 0000000..895572f --- /dev/null +++ b/docs/source/guide/events.rst @@ -0,0 +1,423 @@ +Extensibility Guide +=================== + +All of Boto3's resource and client classes are generated at runtime. 
+This means that you cannot directly inherit and then extend the
+functionality of these classes because they do not exist until the
+program actually starts running.
+
+
+However it is still possible to extend the functionality of classes through
+Boto3's event system.
+
+
+An Introduction to the Event System
+-----------------------------------
+
+Boto3's event system allows users to register a function to
+a specific event. Then once the running program reaches a line that
+emits that specific event, Boto3 will call every function
+registered to the event in the order in which they were registered.
+When Boto3 calls each of these registered functions,
+it will call each of them with a specific set of
+keyword arguments that are associated with that event.
+Then once the registered function
+is called, the function may modify the keyword arguments passed to that
+function or return a value.
+Here is an example of how the event system works::
+
+    import boto3
+
+    s3 = boto3.client('s3')
+
+    # Access the event system on the S3 client
+    event_system = s3.meta.events
+
+    # Create a function
+    def add_my_bucket(params, **kwargs):
+        # Add the name of the bucket you want to default to.
+        if 'Bucket' not in params:
+            params['Bucket'] = 'mybucket'
+
+    # Register the function to an event
+    event_system.register('provide-client-params.s3.ListObjects', add_my_bucket)
+
+    response = s3.list_objects()
+
+In this example, the handler ``add_my_bucket``
+is registered such that the handler will inject the
+value ``'mybucket'`` for the ``Bucket`` parameter whenever the
+``list_objects`` client call is made without the ``Bucket`` parameter. Note
+that if the same ``list_objects`` call is made without the ``Bucket``
+parameter and the registered handler, it will result in a validation error.
+
+Here are the takeaways from this example:
+
+* All clients have their own event system that you can use to fire events
+  and register functions. 
You can access the event system through the
+  ``meta.events`` attribute on the client.
+* All functions registered to the event system must have ``**kwargs`` in
+  the function signature. This is because emitting an event can have any
+  number of keyword arguments emitted alongside it, and so if your
+  function is called without ``**kwargs``, its signature will have to
+  match every keyword argument emitted by the event. This also allows for
+  more keyword arguments to be added to the emitted event in the future
+  without breaking existing handlers.
+* To register a function to an event, call the ``register`` method on the
+  event system with the name of the event you want to register the
+  function to and the function handle. Note that if you register the event
+  after the event is emitted, the function will not be called unless the
+  event is emitted again. In the example, the ``add_my_bucket`` handler
+  was registered to the ``'provide-client-params.s3.ListObjects'`` event,
+  which is an event that can be used to inject and modify parameters passed
+  in by the client method. To read more about the event refer to
+  `provide-client-params`_
+
+
+A Hierarchical Structure
+------------------------
+
+The event system also provides a hierarchy for registering events such that
+you can register a function to a set of events depending on the event name
+hierarchy.
+
+An event name can have its own hierarchy by specifying ``.`` in its name. For
+example, take the event name ``'general.specific.more_specific'``. When
+this event is emitted, the registered functions will be called in the order
+from most specific to least specific registration. 
So in this example, the +functions will be called in the following order: + +1) Functions registered to ``'general.specific.more_specific'`` +2) Functions registered to ``'general.specific'`` +3) Functions registered to ``'general'`` + +Here is a deeper example of how the event system works with respect to +its hierarchial structure:: + + import boto3 + + s3 = boto3.client('s3') + + # Access the event system on the S3 client + event_system = s3.meta.events + + def add_my_general_bucket(params, **kwargs): + if 'Bucket' not in params: + params['Bucket'] = 'mybucket' + + def add_my_specific_bucket(params, **kwargs): + if 'Bucket' not in params: + params['Bucket'] = 'myspecificbucket' + + event_system.register('provide-client-params.s3', add_my_general_bucket) + event_system.register('provide-client-params.s3.ListObjects', add_my_specific_bucket) + + list_obj_response = s3.list_objects() + put_obj_response = s3.put_object(Key='mykey', Body=b'my body') + +In this example, the ``list_objects`` method call will use the +``'myspecificbucket'`` for the bucket instead of ``'mybucket'`` because +the ``add_my_specific_bucket`` method was registered to the +``'provide-client-params.s3.ListObjects'`` event which is more specific than +the ``'provide-client-params.s3'`` event. Thus, the +``add_my_specific_bucket`` function is called before the +``add_my_general_bucket`` function is called when the event is emitted. + +However for the ``put_object`` call, the bucket used is ``'mybucket'``. This +is because the event emitted for the ``put_object`` client call is +``'provide-client-params.s3.PutObject'`` and the ``add_my_general_bucket`` +method is called via its registration to ``'provide-client-params.s3'``. The +``'provide-client-params.s3.ListObjects'`` event is never emitted so the +registered ``add_my_specific_bucket`` function is never called. 
+
+
+Wildcard Matching
+-----------------
+
+Another aspect of Boto3's event system is that it has the capability
+to do wildcard matching using the ``'*'`` notation. Here is an example
+of using wildcards in the event system::
+
+    import boto3
+
+    s3 = boto3.client('s3')
+
+    # Access the event system on the S3 client
+    event_system = s3.meta.events
+
+    def add_my_wildcard_bucket(params, **kwargs):
+        if 'Bucket' not in params:
+            params['Bucket'] = 'mybucket'
+
+    event_system.register('provide-client-params.s3.*', add_my_wildcard_bucket)
+    response = s3.list_objects()
+
+
+The ``'*'`` allows you to register to a group of events without having to
+know the actual name of the event. This is useful when you have to apply
+the same handler in multiple places. Also note that if the wildcard is used,
+it must be isolated. It does not handle globbing with additional characters.
+So in the previous example, if the ``add_my_wildcard_bucket`` function was
+registered to ``'provide-client-params.s3.*objects'``, the handler would not be
+called because it will consider ``'provide-client-params.s3.*objects'`` to be
+a specific event.
+
+The wildcard also respects the hierarchical structure of the event system.
+If another handler was registered to the ``'provide-client-params.s3'`` event,
+the ``add_my_wildcard_bucket`` would be called first because it is registered
+to ``'provide-client-params.s3.*'`` which is more specific than the event
+``'provide-client-params.s3'``.
For example if a +handler is registered to one client's event system, it will not be registered +to another client's event system:: + + import boto3 + + client1 = boto3.client('s3') + client2 = boto3.client('s3') + + def add_my_bucket(params, **kwargs): + if 'Bucket' not in params: + params['Bucket'] = 'mybucket' + + def add_my_other_bucket(params, **kwargs): + if 'Bucket' not in params: + params['Bucket'] = 'myotherbucket' + + client1.meta.events.register( + 'provide-client-params.s3.ListObjects', add_my_bucket) + client2.meta.events.register( + 'provide-client-params.s3.ListObjects', add_my_other_bucket) + + client1_response = client1.list_objects() + client2_response = client2.list_objects() + + +Thanks to the isolation of clients' event systems, ``client1`` will inject +``'mybucket'`` for its ``list_objects`` method call while ``client2`` will +inject ``'myotherbucket'`` for its ``list_objects`` method call because +``add_my_bucket`` was registered to ``client1`` while ``add_my_other_bucket`` +was registered to ``client2``. + + +Boto3 Specific Events +--------------------- + +Boto3 emits a set of events that users can register to +customize clients or resources and modify the behavior of method calls. + +Here is the list of events that users of boto3 can register handlers to: + +* ``'creating-client-class`` +* ``'creating-resource-class`` +* ``'provide-client-params'`` + + +creating-client-class +~~~~~~~~~~~~~~~~~~~~~ + +:Full Event Name: + ``'creating-client-class.service-name'`` + + Note: ``service-name`` refers to the value used to instantiate a client i.e. + ``boto3.client('service-name')`` + +:Description: + This event is emitted upon creation of the client class for a service. The + client class for a service is not created until the first instantiation of + the client class. Use this event for adding methods to the client class + or adding classes for the client class to inherit from. 
+ +:Keyword Arguments Emitted: + + :type class_attributes: dict + :param class_attributes: A dictionary where the keys are the names of the + attributes of the class and the values are the actual attributes of + the class. + + :type base_classes: list + :param base_classes: A list of classes that the client class will inherit + from where the order of inheritance is the same as the order of the list. + +:Expected Return Value: Do not return anything. + +:Example: + Here is an example of how to add a method to the client class:: + + from boto3.session import Session + + def custom_method(self): + print('This is my custom method') + + def add_custom_method(class_attributes, **kwargs): + class_attributes['my_method'] = custom_method + + session = Session() + session.events.register('creating-client-class.s3', add_custom_method) + + client = session.client('s3') + client.my_method() + + This should output:: + + This is my custom method + + + Here is an example of how to add a new class for the client class to + inherit from:: + + from boto3.session import Session + + class MyClass(object): + def __init__(self, *args, **kwargs): + super(MyClass, self).__init__(*args, **kwargs) + print('Client instantiated!') + + def add_custom_class(base_classes, **kwargs): + base_classes.insert(0, MyClass) + + session = Session() + session.events.register('creating-client-class.s3', add_custom_class) + + client = session.client('s3') + + This should output:: + + Client instantiated! + + +creating-resource-class +~~~~~~~~~~~~~~~~~~~~~~~ + +:Full Event Name: + ``'creating-resource-class.service-name.resource-name'`` + + Note: ``service-name`` refers to the value used to instantiate a service + resource i.e. ``boto3.resource('service-name')`` and ``resource-name`` + refers to the name of the resource class. + +:Description: + This event is emitted upon creation of the resource class. The + resource class is not created until the first instantiation of + the resource class. 
Use this event for adding methods to the resource + class or adding classes for the resource class to inherit from. + +:Keyword Arguments Emitted: + + :type class_attributes: dict + :param class_attributes: A dictionary where the keys are the names of the + attributes of the class and the values are the actual attributes of + the class. + + :type base_classes: list + :param base_classes: A list of classes that the resource class will inherit + from where the order of inheritance is the same as the order of the list. + +:Expected Return Value: Do not return anything. + +:Example: + Here is an example of how to add a method to a resource class:: + + from boto3.session import Session + + def custom_method(self): + print('This is my custom method') + + def add_custom_method(class_attributes, **kwargs): + class_attributes['my_method'] = custom_method + + session = Session() + session.events.register('creating-resource-class.s3.ServiceResource', + add_custom_method) + + resource = session.resource('s3') + resource.my_method() + + This should output:: + + This is my custom method + + + Here is an example of how to add a new class for a resource class to + inherit from:: + + from boto3.session import Session + + class MyClass(object): + def __init__(self, *args, **kwargs): + super(MyClass, self).__init__(*args, **kwargs) + print('Resource instantiated!') + + def add_custom_class(base_classes, **kwargs): + base_classes.insert(0, MyClass) + + session = Session() + session.events.register('creating-resource-class.s3.ServiceResource', + add_custom_class) + + resource = session.resource('s3') + + This should output:: + + Resource instantiated! + + +provide-client-params +~~~~~~~~~~~~~~~~~~~~~ + +:Full Event Name: + ``'provide-client.service-name.operation-name'`` + + Note: ``service-name`` refers to the value used to instantiate a client i.e. + ``boto3.client('service-name')``. ``operation-name`` refers to the + underlying API operation of the corresponding client method. 
To access + the operation API name, retrieve the value from the + ``client.meta.method_to_api_mapping`` dictionary using the name of the + desired client method as the key. + +:Description: + This event is emitted before validation of the parameters passed to + client method. Use this event to inject or modify parameters prior + to the parameters being validated and built into a request that is sent + over the wire. + +:Keyword Arguments Emitted: + + :type params: dict + :param params: A dictionary where the keys are the names of the + parameters passed through the client method and the values are the values + of those parameters. + + :type model: ``botocore.model.OperationModel`` + :param model: A model representing the underlying API operation of the + client method. + +:Expected Return Value: Do not return anything or return a new dictionary of + parameters to use when making the request. + +:Example: + Here is an example of how to inject a parameter using the event:: + + import boto3 + + s3 = boto3.client('s3') + + # Access the event system on the S3 client + event_system = s3.meta.events + + # Create a function + def add_my_bucket(params, **kwargs): + # Add the name of the bucket you want to default to. + if 'Bucket' not in params: + params['Bucket'] = 'mybucket' + + # Register the function to an event + event_system.register('provide-client-params.s3.ListObjects', add_my_bucket) + + response = s3.list_objects() diff --git a/docs/source/guide/index.rst b/docs/source/guide/index.rst new file mode 100644 index 0000000..e71c2fb --- /dev/null +++ b/docs/source/guide/index.rst @@ -0,0 +1,33 @@ +.. _user_guides: + ++++++++++++ +User Guides ++++++++++++ + +Migration Guides +================ +.. toctree:: + + new + migration + + +General Feature Guides +====================== + +.. toctree:: + + resources + collections + clients + session + configuration + events + + +Service Feature Guides +====================== + +.. 
toctree:: + + dynamodb diff --git a/docs/source/guide/migration.rst b/docs/source/guide/migration.rst new file mode 100644 index 0000000..3ad16ca --- /dev/null +++ b/docs/source/guide/migration.rst @@ -0,0 +1,64 @@ +.. _guide_migration: + +Migrating from Boto 2.x +======================= +Current Boto users can begin using Boto 3 right away. The two modules can +live side-by-side in the same project, which means that a piecemeal +approach can be used. New features can be written in Boto 3, or existing +code can be migrated over as needed, piece by piece. + +High Level Concepts +------------------- +Boto 2.x modules are typically split into two categories, those which include a high-level object-oriented interface and those which include only a low-level interface which matches the underlying Amazon Web Services API. Some modules are completely high-level (like Amazon S3 or EC2), some include high-level code on top of a low-level connection (like Amazon DynamoDB), and others are 100% low-level (like Amazon Elastic Transcoder). + +In Boto 3 this general low-level and high-level concept hasn't changed much, but there are two important points to understand. + +Data Driven +~~~~~~~~~~~ +First, in Boto 3 classes are created at runtime from JSON data files that describe AWS APIs and organizational structures built atop of them. These data files are loaded at runtime and can be modified and updated without the need of installing an entirely new SDK release. + +A side effect of having all the services generated from JSON files is that there is now consistency between all AWS service modules. One important change is that *all* API call parameters must now be passed as **keyword arguments**, and these keyword arguments take the form defined by the upstream service. Though there are exceptions, this typically means ``UpperCamelCasing`` parameter names. You will see this in the service-specific migration guides linked to below. 
+ +Resource Objects +~~~~~~~~~~~~~~~~ +Second, while every service now uses the runtime-generated low-level client, some services additionally have high-level generated objects that we refer to as ``Resources``. The lower-level is comparable to Boto 2.x layer 1 connection objects in that they provide a one to one mapping of API operations and return low-level responses. The higher level is comparable to the high-level customizations from Boto 2.x: an S3 ``Key``, an EC2 ``Instance``, and a DynamoDB ``Table`` are all considered resources in Boto 3. Just like a Boto 2.x ``S3Connection``'s ``list_buckets`` will return ``Bucket`` objects, the Boto 3 resource interface provides actions and collections that return resources. Some services may also have hand-written customizations built on top of the runtime-generated high-level resources (such as utilities for working with S3 multipart uploads). + +:: + + import boto, boto3 + + # Low-level connections + conn = boto.connect_elastictranscoder() + client = boto3.client('elastictranscoder') + + # High-level connections & resource objects + from boto.s3.bucket import Bucket + s3_conn = boto.connect_s3() + boto2_bucket = Bucket('mybucket') + + s3 = boto3.resource('s3') + boto3_bucket = s3.Bucket('mybucket') + +Installation & Configuration +---------------------------- +The :ref:`guide_quickstart` guide provides instructions for installing Boto 3. You can also follow the instructions there to set up new credential files, or you can continue to use your existing Boto 2.x credentials. Please note that Boto 3, the AWS CLI, and several other SDKs all use the shared credentials file (usually at ``~/.aws/credentials``). + +Once configured, you may begin using Boto 3:: + + import boto3 + + for bucket in boto3.resource('s3').buckets.all(): + print(bucket.name) + +See the :ref:`tutorial_list` and `Boto 3 Documentation `__ for more information. 
+ +The rest of this document will describe specific common usage scenarios of Boto 2 code and how to accomplish the same tasks with Boto 3. + +Services +-------- + +.. toctree:: + :maxdepth: 2 + + migrations3 + migrationec2 diff --git a/docs/source/guide/migrationec2.rst b/docs/source/guide/migrationec2.rst new file mode 100644 index 0000000..07a5217 --- /dev/null +++ b/docs/source/guide/migrationec2.rst @@ -0,0 +1,125 @@ +.. _guide_migration_ec2: + +Amazon EC2 +========== +Boto 2.x contains a number of customizations to make working with Amazon EC2 instances, storage and networks easy. Boto 3 exposes these same objects through its resources interface in a unified and consistent way. + +Creating the Connection +----------------------- +Boto 3 has both low-level clients and higher-level resources. For Amazon EC2, the higher-level resources are the most similar to Boto 2.x's ``ec2`` and ``vpc`` modules:: + + # Boto 2.x + import boto + ec2_connection = boto.connect_ec2() + vpc_connection = boto.connect_vpc() + + # Boto 3 + import boto3 + ec2 = boto3.resource('ec2') + +Launching New Instances +----------------------- +Launching new instances requires an image ID and the number of instances to launch. It can also take several optional parameters, such as the instance type and security group:: + + # Boto 2.x + ec2_connection.run_instances('') + + # Boto 3 + ec2.create_instances(ImageId='', MinCount=1, MaxCount=5) + +Stopping & Terminating Instances +-------------------------------- +Stopping and terminating multiple instances given a list of instance IDs uses Boto 3 collection filtering:: + + ids = ['instance-id-1', 'instance-id-2', ...] 
+ + # Boto 2.x + ec2_connection.stop_instances(instance_ids=ids) + ec2_connection.terminate_instances(instance_ids=ids) + + # Boto 3 + ec2.instances.filter(InstanceIds=ids).stop() + ec2.instances.filter(InstanceIds=ids).terminate() + +Checking What Instances Are Running +----------------------------------- +Boto 3 collections come in handy when listing all your running instances as well. Every collection exposes a ``filter`` method that allows you to pass additional parameters to the underlying service API operation. The EC2 instances collection takes a parameter called ``Filters`` which is a list of names and values, for example:: + + # Boto 2.x + reservations = ec2_connection.get_all_reservations( + filters={'instance-state-name': 'running'}) + for reservation in reservations: + for instance in reservation.instances: + print(instance.instance_id, instance.instance_type) + + # Boto 3 + # Use the filter() method of the instances collection to retrieve + # all running EC2 instances. + instances = ec2.instances.filter( + Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) + for instance in instances: + print(instance.id, instance.instance_type) + +Checking Health Status Of Instances +----------------------------------- +It is possible to get scheduled maintenance information for your running instances. 
At the time of this writing Boto 3 does not have a status resource, so you must drop down to the low-level client via ``ec2.meta.client``:: + + # Boto 2.x + for status in ec2_connection.get_all_instance_statuses(): + print(status) + + # Boto 3 + for status in ec2.meta.client.describe_instance_status()['InstanceStatuses']: + print(status) + +Working with EBS Snapshots +-------------------------- +Snapshots provide a way to create a copy of an EBS volume, as well as make new volumes from the snapshot which can be attached to an instance:: + + # Boto 2.x + snapshot = ec2_connection.create_snapshot('volume-id', 'Description') + volume = snapshot.create_volume('us-west-2') + ec2_connection.attach_volume(volume.id, 'instance-id', '/dev/sdy') + ec2_connection.delete_snapshot(snapshot.id) + + # Boto 3 + snapshot = ec2.create_snapshot(VolumeId='volume-id', Description='description') + volume = ec2.create_volume(SnapshotId=snapshot.id, AvailabilityZone='us-west-2a') + ec2.Instance('instance-id').attach_volume(VolumeId=volume.id, Device='/dev/sdy') + snapshot.delete() + +Creating a VPC, Subnet, and Gateway +----------------------------------- +Creating VPC resources in Boto 3 is very similar to Boto 2.x:: + + # Boto 2.x + vpc = vpc_connection.create_vpc('10.0.0.0/24') + subnet = vpc_connection.create_subnet(vpc.id, '10.0.0.0/25') + gateway = vpc_connection.create_internet_gateway() + + # Boto 3 + vpc = ec2.create_vpc(CidrBlock='10.0.0.0/24') + subnet = vpc.create_subnet(CidrBlock='10.0.0.0/25') + gateway = ec2.create_internet_gateway() + +Attaching and Detaching an Elastic IP and Gateway +------------------------------------------------- +Elastic IPs and gateways provide a way for instances inside of a VPC to communicate with the outside world:: + + # Boto 2.x + ec2_connection.attach_internet_gateway(gateway.id, vpc.id) + ec2_connection.detach_internet_gateway(gateway.id, vpc.id) + + from boto.ec2.address import Address + address = Address() + address.allocation_id = 
'eipalloc-35cf685d' + address.associate('i-71b2f60b') + address.disassociate() + + # Boto 3 + gateway.attach_to_vpc(VpcId=vpc.id) + gateway.detach_from_vpc(VpcId=vpc.id) + + address = ec2.VpcAddress('eipalloc-35cf685d') + address.associate('i-71b2f60b') + address.association.delete() diff --git a/docs/source/guide/migrations3.rst b/docs/source/guide/migrations3.rst new file mode 100644 index 0000000..81b625c --- /dev/null +++ b/docs/source/guide/migrations3.rst @@ -0,0 +1,164 @@ +.. _guide_migration_s3: + +Amazon S3 +========= +Boto 2.x contains a number of customizations to make working with Amazon S3 buckets and keys easy. Boto 3 exposes these same objects through its resources interface in a unified and consistent way. + +Creating the Connection +----------------------- +Boto 3 has both low-level clients and higher-level resources. For Amazon S3, the higher-level resources are the most similar to Boto 2.x's ``s3`` module:: + + # Boto 2.x + import boto + s3_connection = boto.connect_s3() + + # Boto 3 + import boto3 + s3 = boto3.resource('s3') + +Creating a Bucket +----------------- +Creating a bucket in Boto 2 and Boto 3 is very similar, except that in Boto 3 all action parameters must be passed via keyword arguments and a bucket configuration must be specified manually:: + + # Boto 2.x + s3_connection.create_bucket('mybucket') + s3_connection.create_bucket('mybucket', location=Location.USWest) + + # Boto 3 + s3.create_bucket(Bucket='mybucket') + s3.create_bucket(Bucket='mybucket', CreateBucketConfiguration={ + 'LocationConstraint': 'us-west-1'}) + +Storing Data +------------ +Storing data from a file, stream, or string is easy:: + + # Boto 2.x + from boto.s3.key import Key + key = Key('hello.txt') + key.set_contents_from_file('/tmp/hello.txt') + + # Boto 3 + s3.Object('mybucket', 'hello.txt').put(Body=open('/tmp/hello.txt', 'rb')) + + +Accessing a Bucket +------------------ +Getting a bucket is easy with Boto 3's resources, however these do not automatically 
validate whether a bucket exists:: + + # Boto 2.x + bucket = s3_connection.get_bucket('mybucket', validate=False) + exists = s3_connection.lookup('mybucket') + + # Boto 3 + import botocore + bucket = s3.Bucket('mybucket') + exists = True + try: + s3.meta.client.head_bucket(Bucket='mybucket') + except botocore.exceptions.ClientError as e: + # If a client error is thrown, then check that it was a 404 error. + # If it was a 404 error, then the bucket does not exist. + error_code = int(e.response['Error']['Code']) + if error_code == 404: + exists = False + +Deleting a Bucket +----------------- +All of the keys in a bucket must be deleted before the bucket itself can be deleted:: + + # Boto 2.x + for key in bucket: + key.delete() + bucket.delete() + + # Boto 3 + for key in bucket.objects.all(): + key.delete() + bucket.delete() + +Iteration of Buckets and Keys +----------------------------- +Bucket and key objects are no longer iterable, but now provide collection attributes which can be iterated:: + + # Boto 2.x + for bucket in s3_connection: + for key in bucket: + print(key.name) + + # Boto 3 + for bucket in s3.buckets.all(): + for key in bucket.objects.all(): + print(key.key) + +Access Controls +--------------- +Getting and setting canned access control values in Boto 3 operates on an ``ACL`` resource object:: + + # Boto 2.x + bucket.set_acl('public-read') + key.set_acl('public-read') + + # Boto 3 + bucket.Acl().put(ACL='public-read') + obj.put(ACL='public-read') + +It's also possible to retrieve the policy grant information:: + + # Boto 2.x + acp = bucket.get_acl() + for grant in acp.acl.grants: + print(grant.display_name, grant.permission) + + # Boto 3 + acl = bucket.Acl() + for grant in acl.grants: + print(grant['DisplayName'], grant['Permission']) + +Boto 3 lacks the grant shortcut methods present in Boto 2.x, but it is still fairly simple to add grantees:: + + # Boto 2.x + bucket.add_email_grant('READ', 'user@domain.tld') + + # Boto 3 + 
bucket.Acl().put(GrantRead='emailAddress=user@domain.tld')
+ +Major Features +-------------- +Boto 3 consists of the following major features: + +* **Resources**: a high level, object oriented interface +* **Collections**: a tool to iterate and manipulate groups of resources +* **Clients**: low level service connections +* **Paginators**: automatic paging of responses +* **Waiters**: a way to block until a certain state has been reached + +Along with these major features, Boto 3 also provides *sessions* and +per-session *credentials* & *configuration*, as well as basic +components like *authentication*, *parameter* & *response* handling, +an *event system* for customizations and logic to *retry* failed +requests. + +Botocore +~~~~~~~~ +Boto 3 is built atop of a library called +`Botocore `_, which is shared by the +`AWS CLI `_. Botocore provides the low level +clients, session, and credential & configuration data. Boto 3 builds on top +of Botocore by providing its own session, resources and collections. diff --git a/docs/source/guide/quickstart.rst b/docs/source/guide/quickstart.rst new file mode 100644 index 0000000..278a6e2 --- /dev/null +++ b/docs/source/guide/quickstart.rst @@ -0,0 +1,84 @@ +.. _guide_quickstart: + +Quickstart +========== +Getting started with Boto 3 is easy, but requires a few steps. + +Installation +------------ +Install the latest Boto 3 release via :command:`pip`:: + + pip install boto3 + +You may also install a specific version:: + + pip install boto3==1.0.0 + +.. note:: + + The latest development version can always be found on + `GitHub `_. + +Configuration +------------- +Before you can begin using Boto 3, you should set up authentication +credentials. Credentials for your AWS account can be found in the +`IAM Console `_. You can +create or use an existing user. Go to manage access keys and +generate a new set of keys. + +If you have the `AWS CLI `_ +installed, then you can use it to configure your credentials file:: + + aws configure + +Alternatively, you can create the credential file yourself. 
By default, +its location is at ``~/.aws/credentials``:: + + [default] + aws_access_key_id = YOUR_ACCESS_KEY + aws_secret_access_key = YOUR_SECRET_KEY + +You may also want to set a default region. This can be done in the +configuration file. By default, its location is at ``~/.aws/config``:: + + [default] + region=us-east-1 + +Alternatively, you can pass a ``region_name`` when creating clients +and resources. + +This sets up credentials for the default profile as well as a default +region to use when creating connections. See +:ref:`guide_configuration` for in-depth configuration sources and +options. + +Using Boto 3 +------------ +To use Boto 3, you must first import it and tell it what service you are +going to use:: + + import boto3 + + # Let's use Amazon S3 + s3 = boto3.resource('s3') + +Now that you have an ``s3`` resource, you can make requests and process +responses from the service. The following uses the ``buckets`` collection +to print out all bucket names:: + + # Print out bucket names + for bucket in s3.buckets.all(): + print(bucket.name) + +It's also easy to upload and download binary data. For example, the +following uploads a new file to S3. It assumes that the bucket ``my-bucket`` +already exists:: + + # Upload a new file + data = open('test.jpg', 'rb') + s3.Bucket('my-bucket').put_object(Key='test.jpg', Body=data) + +:ref:`guide_resources` and :ref:`guide_collections` will be covered in more +detail in the following sections, so don't worry if you do not completely +understand the examples. diff --git a/docs/source/guide/resources.rst b/docs/source/guide/resources.rst new file mode 100644 index 0000000..b68937a --- /dev/null +++ b/docs/source/guide/resources.rst @@ -0,0 +1,215 @@ +.. _guide_resources: + +Resources +========= + +Overview +-------- +Resources represent an object-oriented interface to Amazon Web Services (AWS). +They provide a higher-level abstraction than the raw, low-level calls made by +service clients. 
To use resources, you invoke the +:py:meth:`~boto3.session.Session.resource` method of a +:py:class:`~boto3.session.Session` and pass in a service name:: + + # Get resources from the default session + sqs = boto3.resource('sqs') + s3 = boto3.resource('s3') + +Every resource instance has a number of attributes and methods. These can +conceptually be split up into identifiers, attributes, actions, references, +sub-resources, and collections. Each of these is described in further detail +below and in the following section. + +Resources themselves can also be conceptually split into service resources +(like ``sqs``, ``s3``, ``ec2``, etc) and individual resources (like +``sqs.Queue`` or ``s3.Bucket``). Service resources *do not* have +identifiers or attributes. The two share the same components otherwise. + + +.. _identifiers_attributes_intro: + +Identifiers & Attributes +------------------------ +An identifier is a unique value that is used to call actions on the resource. +Resources **must** have at least one identifier, except for the top-level +service resources (e.g. ``sqs`` or ``s3``). An identifier is set at instance +creation-time, and failing to provide all necessary identifiers during +instantiation will result in an exception. Examples of identifiers:: + + # SQS Queue (url is an identifier) + queue = sqs.Queue(url='http://...') + print(queue.url) + + # S3 Object (bucket_name and key are identifiers) + obj = s3.Object(bucket_name='boto3', key='test.py') + print(obj.bucket_name) + print(obj.key) + + # Raises exception, missing identifier: key! + obj = s3.Object(bucket_name='boto3') + +Identifiers may also be passed as positional arguments:: + + # SQS Queue + queue = sqs.Queue('http://...') + + # S3 Object + obj = s3.Object('boto3', 'test.py') + + # Raises exception, missing key! + obj = s3.Object('boto3') + +Identifiers also play a role in resource instance equality. 
For two +instances of a resource to be considered equal, their identifiers must +be equal:: + + >>> bucket1 = s3.Bucket('boto3') + >>> bucket2 = s3.Bucket('boto3') + >>> bucket3 = s3.Bucket('some-other-bucket') + + >>> bucket1 == bucket2 + True + >>> bucket1 == bucket3 + False + +.. note:: + + Only identifiers are taken into account for instance equality. Region, + account ID and other data members are not considered. When using temporary + credentials or multiple regions in your code please keep this in mind. + +Resources may also have attributes, which are *lazy-loaded* properties on the +instance. They may be set at creation time from the response of an action on +another resource, or they may be set when accessed or via an explicit call to +the ``load`` or ``reload`` action. Examples of attributes:: + + # SQS Message + message.body + + # S3 Object + obj.last_modified + obj.md5 + +.. warning:: + + Attributes may incur a load action when first accessed. If latency is + a concern, then manually calling ``load`` will allow you to control + exactly when the load action (and thus latency) is invoked. The + documentation for each resource explicitly lists its attributes. + + Additionally, attributes may be reloaded after an action has been + performed on the resource. For example, if the ``last_modified`` + attribute of an S3 object is loaded and then a ``put`` action is + called, then the next time you access ``last_modified`` it will + reload the object's metadata. + +.. _actions_intro: + +Actions +------- +An action is a method which makes a call to the service. Actions may return a +low-level response, a new resource instance or a list of new resource +instances. Actions automatically set the resource identifiers as parameters, +but allow you to pass additional parameters via keyword arguments. 
Examples +of actions:: + + # SQS Queue + messages = queue.receive_messages() + + # SQS Message + for message in messages: + message.delete() + + # S3 Object + obj = s3.Object(bucket_name='boto3', key='test.py') + response = obj.get() + data = response['Body'].read() + +Examples of sending additional parameters:: + + # SQS Service + queue = sqs.get_queue_by_name(QueueName='test') + + # SQS Queue + queue.send_message(MessageBody='hello') + +.. note:: + + Parameters **must** be passed as keyword arguments. They will not work + as positional arguments. + +.. _references_intro: + +References +---------- +A reference is an attribute which may be ``None`` or a related resource +instance. The resource instance does not share identifiers with its +reference resource, that is, it is not a strict parent to child relationship. +In relational terms, these can be considered many-to-one or one-to-one. +Examples of references:: + + # EC2 Instance + instance.subnet + instance.vpc + +In the above example, an EC2 instance may have exactly one associated +subnet, and may have exactly one associated VPC. The subnet does not +require the instance ID to exist, hence it is not a parent to child +relationship. + +.. _subresources_intro: + +Sub-resources +------------- +A sub-resource is similar to a reference, but is a related class rather than +an instance. Sub-resources, when instantiated, share identifiers with their +parent. It is a strict parent-child relationship. In relational terms, these +can be considered one-to-many. Examples of sub-resources:: + + # SQS + queue = sqs.Queue(url='...') + message = queue.Message(receipt_handle='...') + print(queue.url == message.queue_url) + print(message.receipt_handle) + + # S3 + obj = bucket.Object(key='new_file.txt') + print(obj.bucket_name) + print(obj.key) + +Because an SQS message cannot exist without a queue, and an S3 object cannot +exist without a bucket, these are parent to child relationships. + +.. 
_waiters_intro: + +Waiters +------- +A waiter is similiar to an action. A waiter will poll the status of a +resource and suspend execution until the resource reaches the state that is +being polling for or a failure occurs while polling. +Waiters automatically set the resource +identifiers as parameters, but allow you to pass additional parameters via +keyword arguments. Examples of waiters include:: + + # S3: Wait for a bucket to exist. + bucket.wait_until_exists() + + # EC2: Wait for an instance to reach the running state. + instance.wait_until_running() + + +Multithreading +-------------- +It is recommended to create a resource instance for each thread in a multithreaded application rather than sharing a single instance among the threads. For example:: + + import boto3 + import boto3.session + import threading + + class MyTask(threading.Thread): + def run(self): + session = boto3.session.Session() + s3 = session.resource('s3') + # ... do some work with S3 ... + +In the example above, each thread would have its own Boto 3 session and its own instance of the S3 resource. This is a good idea because resources contain shared data when loaded and calling actions, accessing properties, or manually loading or reloading the resource can modify this data. diff --git a/docs/source/guide/session.rst b/docs/source/guide/session.rst new file mode 100644 index 0000000..5fa21b5 --- /dev/null +++ b/docs/source/guide/session.rst @@ -0,0 +1,32 @@ +.. _guide_session: + +Session +======= +A session manages state about a particular configuration. By default a +session is created for you when needed. However it is possible and +recommended to maintain your own session(s) in some scenarios. Sessions +typically store: + +* Credentials +* Region +* Other configurations + +Default Session +--------------- +The ``boto3`` module acts as a proxy to the default session, which is +created automatically when needed. 
Example default session use:: + + # Using the default session + sqs = boto3.client('sqs') + s3 = boto3.resource('s3') + +Custom Session +-------------- +It is also possible to manage your own session and create clients or +resources from it:: + + # Creating your own session + session = boto3.session.Session() + + sqs = session.client('sqs') + s3 = session.resource('s3') diff --git a/docs/source/guide/sqs.rst b/docs/source/guide/sqs.rst new file mode 100644 index 0000000..f9d32c5 --- /dev/null +++ b/docs/source/guide/sqs.rst @@ -0,0 +1,158 @@ +.. _sample_tutorial: + +A Sample Tutorial +================= +This tutorial will show you how to use Boto3 with an AWS service. In this +sample tutorial, you will learn how to use Boto3 with +`Amazon Simple Queue Service (SQS) `_ + +SQS +--- +SQS allows you to queue and then process messages. This tutorial covers how to +create a new queue, get and use an existing queue, push new messages onto the +queue, and process messages from the queue by using +:ref:`guide_resources` and :ref:`guide_collections`. + +Creating a Queue +---------------- +Queues are created with a name. You may also optionally set queue +attributes, such as the number of seconds to wait before an item may be +processed. The examples below will use the queue name ``test``. +Before creating a queue, you must first get the SQS service resource:: + + # Get the service resource + sqs = boto3.resource('sqs') + + # Create the queue. This returns an SQS.Queue instance + queue = sqs.create_queue(QueueName='test', Attributes={'DelaySeconds': '5'}) + + # You can now access identifiers and attributes + print(queue.url) + print(queue.attributes.get('DelaySeconds')) + +Reference: :py:meth:`SQS.ServiceResource.create_queue` + +.. warning:: + + The code above may throw an exception if you already have a queue named + ``test``. + +Using an Existing Queue +----------------------- +It is possible to look up a queue by its name. 
If the queue does not exist, +then an exception will be thrown:: + + # Get the service resource + sqs = boto3.resource('sqs') + + # Get the queue. This returns an SQS.Queue instance + queue = sqs.get_queue_by_name(QueueName='test') + + # You can now access identifiers and attributes + print(queue.url) + print(queue.attributes.get('DelaySeconds')) + +It is also possible to list all of your existing queues:: + + # Print out each queue name, which is part of its ARN + for queue in sqs.queues.all(): + print(queue.url) + +.. note:: + + To get the name from a queue, you must use its ARN, which is available + in the queue's ``attributes`` attribute. Using + ``queue.attributes['QueueArn'].split(':')[-1]`` will return its name. + +Reference: :py:meth:`SQS.ServiceResource.get_queue_by_name`, +:py:attr:`SQS.ServiceResource.queues` + +Sending Messages +---------------- +Sending a message adds it to the end of the queue:: + + # Get the service resource + sqs = boto3.resource('sqs') + + # Get the queue + queue = sqs.get_queue_by_name(QueueName='test') + + # Create a new message + response = queue.send_message(MessageBody='world') + + # The response is NOT a resource, but gives you a message ID and MD5 + print(response.get('MessageId')) + print(response.get('MD5OfMessageBody')) + +You can also create messages with custom attributes:: + + queue.send_message(MessageBody='boto3', MessageAttributes={ + 'Author': { + 'StringValue': 'Daniel', + 'DataType': 'String' + } + }) + +Messages can also be sent in batches. 
For example, sending the two messages +described above in a single request would look like the following:: + + response = queue.send_messages(Entries=[ + { + 'Id': '1', + 'MessageBody': 'world' + }, + { + 'Id': '2', + 'MessageBody': 'boto3', + 'MessageAttributes': { + 'Author': { + 'StringValue': 'Daniel', + 'DataType': 'String' + } + } + } + ]) + + # Print out any failures + print(response.get('Failed')) + +In this case, the response contains lists of ``Successful`` and ``Failed`` +messages, so you can retry failures if needed. + +Reference: :py:meth:`SQS.Queue.send_message`, +:py:meth:`SQS.Queue.send_messages` + +Processing Messages +------------------- +Messages are processed in batches:: + + # Get the service resource + sqs = boto3.resource('sqs') + + # Get the queue + queue = sqs.get_queue_by_name(QueueName='test') + + # Process messages by printing out body and optional author name + for message in queue.receive_messages(MessageAttributeNames=['Author']): + # Get the custom author message attribute if it was set + author_text = '' + if message.message_attributes is not None: + author_name = message.message_attributes.get('Author').get('StringValue') + if author_name: + author_text = ' ({0})'.format(author_name) + + # Print out the body and author (if set) + print('Hello, {0}!{1}'.format(message.body, author_text)) + + # Let the queue know that the message is processed + message.delete() + +Given *only* the messages that were sent in a batch with +:py:meth:`SQS.Queue.send_messages` in the previous section, the above code +will print out:: + + Hello, world! + Hello, boto3! (Daniel) + +Reference: :py:meth:`SQS.Queue.receive_messages`, +:py:meth:`SQS.Message.delete` diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..b6606dc --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,64 @@ +.. Boto3 documentation master file, created by + sphinx-quickstart on Wed Sep 3 11:11:30 2014. 
+ You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Boto 3 Documentation +==================== +Boto is the Amazon Web Services (AWS) SDK for Python, which allows Python +developers to write software that makes use of Amazon services like S3 and +EC2. Boto provides an easy to use, object-oriented API as well as low-level +direct service access. + + +Quickstart +---------- + +.. toctree:: + :maxdepth: 2 + + guide/quickstart + guide/sqs + +User Guide +---------- + +.. toctree:: + + guide/index + + + +API Reference +------------- + +Services +~~~~~~~~ + +.. toctree:: + :maxdepth: 3 + + reference/services/index + +Core +~~~~ + +.. toctree:: + :maxdepth: 3 + + reference/core/index + +Customizations +~~~~~~~~~~~~~~ + +.. toctree:: + :maxdepth: 3 + + reference/customizations/index + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/reference/core/boto3.rst b/docs/source/reference/core/boto3.rst new file mode 100644 index 0000000..5a8dfcb --- /dev/null +++ b/docs/source/reference/core/boto3.rst @@ -0,0 +1,9 @@ +.. _ref_core_init: + +=============== +Boto3 Reference +=============== + +.. automodule:: boto3 + :members: + :undoc-members: diff --git a/docs/source/reference/core/collections.rst b/docs/source/reference/core/collections.rst new file mode 100644 index 0000000..1998bf7 --- /dev/null +++ b/docs/source/reference/core/collections.rst @@ -0,0 +1,9 @@ +.. _ref_core_collections: + +===================== +Collections Reference +===================== + +.. automodule:: boto3.resources.collection + :members: + :undoc-members: diff --git a/docs/source/reference/core/index.rst b/docs/source/reference/core/index.rst new file mode 100644 index 0000000..72ed1ab --- /dev/null +++ b/docs/source/reference/core/index.rst @@ -0,0 +1,9 @@ +Core References +=============== + +.. 
toctree:: + :maxdepth: 2 + :titlesonly: + :glob: + + * diff --git a/docs/source/reference/core/resources.rst b/docs/source/reference/core/resources.rst new file mode 100644 index 0000000..85a9595 --- /dev/null +++ b/docs/source/reference/core/resources.rst @@ -0,0 +1,48 @@ +.. _ref_core_resources: + +=================== +Resources Reference +=================== + +Resource Model +-------------- + +.. automodule:: boto3.resources.model + :members: + :undoc-members: + :inherited-members: + +Request Parameters +------------------ + +.. automodule:: boto3.resources.params + :members: + :undoc-members: + +Response Handlers +----------------- + +.. automodule:: boto3.resources.response + :members: + :undoc-members: + +Resource Actions +---------------- + +.. automodule:: boto3.resources.action + :members: + :undoc-members: + +Resource Base +------------- + +.. automodule:: boto3.resources.base + :members: + :undoc-members: + +Resource Factory +---------------- + +.. automodule:: boto3.resources.factory + :members: + :undoc-members: diff --git a/docs/source/reference/core/session.rst b/docs/source/reference/core/session.rst new file mode 100644 index 0000000..2eedbcc --- /dev/null +++ b/docs/source/reference/core/session.rst @@ -0,0 +1,9 @@ +.. _ref_core_session: + +================= +Session Reference +================= + +.. automodule:: boto3.session + :members: + :undoc-members: diff --git a/docs/source/reference/customizations/dynamodb.rst b/docs/source/reference/customizations/dynamodb.rst new file mode 100644 index 0000000..99b20ca --- /dev/null +++ b/docs/source/reference/customizations/dynamodb.rst @@ -0,0 +1,64 @@ +.. _ref_custom_dynamodb: + +================================ +DynamoDB Customization Reference +================================ + +.. 
_ref_valid_dynamodb_types: + +Valid DynamoDB Types +-------------------- + +These are the valid item types to use with Boto3 Table Resource (:py:class:`dynamodb.Table`) and DynamoDB: + ++----------------------------------------------+-----------------------------+ +| Python Type | DynamoDB Type | ++==============================================+=============================+ +| string | String (S) | ++----------------------------------------------+-----------------------------+ +| integer | Number (N) | ++----------------------------------------------+-----------------------------+ +| :py:class:`decimal.Decimal` | Number (N) | ++----------------------------------------------+-----------------------------+ +| :py:class:`boto3.dynamodb.types.Binary` | Binary (B) | ++----------------------------------------------+-----------------------------+ +| boolean | Boolean (BOOL) | ++----------------------------------------------+-----------------------------+ +| ``None`` | Null (NULL) | ++----------------------------------------------+-----------------------------+ +| string set | String Set (SS) | ++----------------------------------------------+-----------------------------+ +| integer set | Number Set (NS) | ++----------------------------------------------+-----------------------------+ +| :py:class:`decimal.Decimal` set | Number Set (NS) | ++----------------------------------------------+-----------------------------+ +| :py:class:`boto3.dynamodb.types.Binary` set | Binary Set (BS) | ++----------------------------------------------+-----------------------------+ +| list | List (L) | ++----------------------------------------------+-----------------------------+ +| dict | Map (M) | ++----------------------------------------------+-----------------------------+ + + +Custom Boto3 Types +------------------ + + +.. autoclass:: boto3.dynamodb.types.Binary + :members: + :undoc-members: + +.. _ref_dynamodb_conditions: + +DynamoDB Conditions +------------------- + +.. 
autoclass:: boto3.dynamodb.conditions.Key + :members: + :undoc-members: + :inherited-members: + +.. autoclass:: boto3.dynamodb.conditions.Attr + :members: + :undoc-members: + :inherited-members: diff --git a/docs/source/reference/customizations/index.rst b/docs/source/reference/customizations/index.rst new file mode 100644 index 0000000..e95def6 --- /dev/null +++ b/docs/source/reference/customizations/index.rst @@ -0,0 +1,9 @@ +Customization References +======================== + +.. toctree:: + :maxdepth: 2 + :titlesonly: + :glob: + + * diff --git a/docs/source/reference/customizations/s3.rst b/docs/source/reference/customizations/s3.rst new file mode 100644 index 0000000..115cf87 --- /dev/null +++ b/docs/source/reference/customizations/s3.rst @@ -0,0 +1,20 @@ +.. _ref_custom_s3: + +========================== +S3 Customization Reference +========================== + +S3 Command Injection +-------------------- + +.. automodule:: boto3.s3.inject + :members: + :undoc-members: + :inherited-members: + +S3 Transfers +------------ + +.. automodule:: boto3.s3.transfer + :members: + :undoc-members: diff --git a/docs/source/reference/services/index.rst b/docs/source/reference/services/index.rst new file mode 100644 index 0000000..dd45427 --- /dev/null +++ b/docs/source/reference/services/index.rst @@ -0,0 +1,8 @@ +Available Services +================== + +.. 
toctree:: + :maxdepth: 2 + :glob: + + * diff --git a/requirements-docs.txt b/requirements-docs.txt new file mode 100644 index 0000000..7befed0 --- /dev/null +++ b/requirements-docs.txt @@ -0,0 +1,3 @@ +Sphinx>=1.1.3,<1.3 +guzzle_sphinx_theme>=0.7.10,<0.8 +-rrequirements.txt diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..dadb2d1 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +-e git://github.com/boto/botocore.git@develop#egg=botocore +-e git://github.com/boto/jmespath.git@develop#egg=jmespath +nose==1.3.3 +mock==1.0.1 +wheel==0.24.0 diff --git a/requirements26.txt b/requirements26.txt new file mode 100644 index 0000000..9696b3b --- /dev/null +++ b/requirements26.txt @@ -0,0 +1 @@ +unittest2==0.5.1 diff --git a/scripts/ci/install b/scripts/ci/install new file mode 100755 index 0000000..62846cb --- /dev/null +++ b/scripts/ci/install @@ -0,0 +1,32 @@ +#!/usr/bin/env python +import os +import sys +from subprocess import check_call +import shutil + +_dname = os.path.dirname + +REPO_ROOT = _dname(_dname(_dname(os.path.abspath(__file__)))) +os.chdir(REPO_ROOT) + + +def run(command): + return check_call(command, shell=True) + + +try: + # Has the form "major.minor" + python_version = os.environ['PYTHON_VERSION'] +except KeyError: + python_version = '.'.join([str(i) for i in sys.version_info[:2]]) + +if python_version == '2.6': + run('pip install -r requirements26.txt') + +run('pip install -r requirements.txt') +run('pip install coverage') +if os.path.isdir('dist') and os.listdir('dist'): + shutil.rmtree('dist') +run('python setup.py bdist_wheel') +wheel_dist = os.listdir('dist')[0] +run('pip install %s' % (os.path.join('dist', wheel_dist))) diff --git a/scripts/ci/run-integ-tests b/scripts/ci/run-integ-tests new file mode 100755 index 0000000..cdb0ef3 --- /dev/null +++ b/scripts/ci/run-integ-tests @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# Don't run tests from the root repo dir. 
+# We want to ensure we're importing from the installed +# binary package not from the CWD. + +import os +from subprocess import check_call + +_dname = os.path.dirname + +REPO_ROOT = _dname(_dname(_dname(os.path.abspath(__file__)))) +os.chdir(os.path.join(REPO_ROOT, 'tests')) + + +def run(command): + return check_call(command, shell=True) + + +run('nosetests --with-xunit --cover-erase --with-coverage ' + '--cover-package boto3 --cover-xml -v integration') diff --git a/scripts/ci/run-tests b/scripts/ci/run-tests new file mode 100755 index 0000000..ef09340 --- /dev/null +++ b/scripts/ci/run-tests @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# Don't run tests from the root repo dir. +# We want to ensure we're importing from the installed +# binary package not from the CWD. + +import os +from subprocess import check_call + +_dname = os.path.dirname + +REPO_ROOT = _dname(_dname(_dname(os.path.abspath(__file__)))) +os.chdir(os.path.join(REPO_ROOT, 'tests')) + + +def run(command): + return check_call(command, shell=True) + + +run('nosetests --with-coverage --cover-erase --cover-package boto3 ' + '--with-xunit --cover-xml -v unit/ functional/') diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..4713472 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,8 @@ +[wheel] +universal = 1 + +[metadata] +requires-dist = + botocore>=1.3.0,<1.4.0 + jmespath>=0.7.1,<1.0.0 + futures==2.2.0; python_version=="2.6" or python_version=="2.7" diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..191c842 --- /dev/null +++ b/setup.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +""" +distutils/setuptools install script. 
+""" +import os +import re +import sys + +from setuptools import setup, find_packages + + +ROOT = os.path.dirname(__file__) +VERSION_RE = re.compile(r'''__version__ = ['"]([0-9.]+)['"]''') + + +requires = [ + 'botocore>=1.3.0,<1.4.0', + 'jmespath>=0.7.1,<1.0.0', +] + + +if sys.version_info[0] == 2: + # concurrent.futures is only in python3, so for + # python2 we need to install the backport. + requires.append('futures>=2.2.0,<4.0.0') + + +def get_version(): + init = open(os.path.join(ROOT, 'boto3', '__init__.py')).read() + return VERSION_RE.search(init).group(1) + + +setup( + name='boto3', + version=get_version(), + description='The AWS SDK for Python', + long_description=open('README.rst').read(), + author='Amazon Web Services', + url='https://github.com/boto/boto3', + scripts=[], + packages=find_packages(exclude=['tests*']), + package_data={ + 'boto3': [ + 'data/aws/resources/*.json', + ] + }, + include_package_data=True, + install_requires=requires, + extras_require={ + ':python_version=="2.6" or python_version=="2.7"': ['futures==2.2.0'] + }, + license="Apache License 2.0", + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'Natural Language :: English', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + ], +) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..09f262e --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,77 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import random +import sys +import time + +from botocore.compat import six + + +# The unittest module got a significant overhaul +# in 2.7, so if we're in 2.6 we can use the backported +# version unittest2. +if sys.version_info[:2] == (2, 6): + import unittest2 as unittest +else: + import unittest + + +# Python 3 includes mocking, while 2 requires an extra module. +if sys.version_info[0] == 2: + import mock +else: + from unittest import mock + + +# In python 3, order matters when calling assertEqual to +# compare lists and dictionaries with lists. Therefore, +# assertItemsEqual needs to be used but it is renamed to +# assertCountEqual in python 3. +if six.PY2: + unittest.TestCase.assertCountEqual = unittest.TestCase.assertItemsEqual + + +def unique_id(name): + """ + Generate a unique ID that includes the given name, + a timestamp and a random number. This helps when running + integration tests in parallel that must create remote + resources. + """ + return '{0}-{1}-{2}'.format(name, int(time.time()), + random.randint(0, 10000)) + + +class BaseTestCase(unittest.TestCase): + """ + A base test case which mocks out the low-level session to prevent + any actual calls to Botocore. + """ + def setUp(self): + self.bc_session_patch = mock.patch('botocore.session.Session') + self.bc_session_cls = self.bc_session_patch.start() + + loader = self.bc_session_cls.return_value.get_component.return_value + loader.data_path = '' + self.loader = loader + + # We also need to patch the global default session. + # Otherwise it could be a cached real session came from previous + # "functional" or "integration" tests. 
+ patch_global_session = mock.patch('boto3.DEFAULT_SESSION') + patch_global_session.start() + self.addCleanup(patch_global_session.stop) + + def tearDown(self): + self.bc_session_patch.stop() diff --git a/tests/functional/__init__.py b/tests/functional/__init__.py new file mode 100644 index 0000000..c89416d --- /dev/null +++ b/tests/functional/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/tests/functional/docs/__init__.py b/tests/functional/docs/__init__.py new file mode 100644 index 0000000..1524eb0 --- /dev/null +++ b/tests/functional/docs/__init__.py @@ -0,0 +1,79 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from tests import unittest + + +class BaseDocsFunctionalTests(unittest.TestCase): + def assert_contains_lines_in_order(self, lines, contents): + for line in lines: + self.assertIn(line, contents) + beginning = contents.find(line) + contents = contents[(beginning + len(line)):] + + def get_class_document_block(self, class_name, contents): + start_class_document = '.. py:class:: %s' % class_name + start_index = contents.find(start_class_document) + self.assertNotEqual(start_index, -1, 'Class is not found in contents') + contents = contents[start_index:] + end_index = contents.find( + ' .. py:class::', len(start_class_document)) + return contents[:end_index] + + def get_method_document_block(self, method_name, contents): + start_method_document = ' .. py:method:: %s(' % method_name + start_index = contents.find(start_method_document) + self.assertNotEqual(start_index, -1, 'Method is not found in contents') + contents = contents[start_index:] + end_index = contents.find( + ' .. py:method::', len(start_method_document)) + return contents[:end_index] + + def get_request_syntax_document_block(self, contents): + start_marker = '**Request Syntax**' + start_index = contents.find(start_marker) + self.assertNotEqual( + start_index, -1, 'There is no request syntax section') + contents = contents[start_index:] + end_index = contents.find( + ':type', len(start_marker)) + return contents[:end_index] + + def get_response_syntax_document_block(self, contents): + start_marker = '**Response Syntax**' + start_index = contents.find(start_marker) + self.assertNotEqual( + start_index, -1, 'There is no response syntax section') + contents = contents[start_index:] + end_index = contents.find( + '**Response Structure**', len(start_marker)) + return contents[:end_index] + + def get_request_parameter_document_block(self, param_name, contents): + start_param_document = ' :type %s:' % param_name + start_index = contents.find(start_param_document) + self.assertNotEqual(start_index, -1, 'Param is 
not found in contents') + contents = contents[start_index:] + end_index = contents.find(' :type', len(start_param_document)) + return contents[:end_index] + + def get_response_parameter_document_block(self, param_name, contents): + start_param_document = ' **Response Structure**' + start_index = contents.find(start_param_document) + self.assertNotEqual(start_index, -1, 'There is no response structure') + + start_param_document = ' - **%s**' % param_name + start_index = contents.find(start_param_document) + self.assertNotEqual(start_index, -1, 'Param is not found in contents') + contents = contents[start_index:] + end_index = contents.find(' - **', len(start_param_document)) + return contents[:end_index] diff --git a/tests/functional/docs/test_dynamodb.py b/tests/functional/docs/test_dynamodb.py new file mode 100644 index 0000000..62752a2 --- /dev/null +++ b/tests/functional/docs/test_dynamodb.py @@ -0,0 +1,128 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from tests.functional.docs import BaseDocsFunctionalTests + +from boto3.session import Session +from boto3.docs.service import ServiceDocumenter + + +class TestDynamoDBCustomizations(BaseDocsFunctionalTests): + def setUp(self): + self.documenter = ServiceDocumenter( + 'dynamodb', session=Session(region_name='us-east-1')) + self.generated_contents = self.documenter.document_service() + self.generated_contents = self.generated_contents.decode('utf-8') + + def test_batch_writer_is_documented(self): + self.assert_contains_lines_in_order([ + '.. py:class:: DynamoDB.Table(name)', + ' * :py:meth:`batch_writer()`', + ' .. py:method:: batch_writer()'], + self.generated_contents + ) + + def test_document_interface_is_documented(self): + contents = self.get_class_document_block( + 'DynamoDB.Table', self.generated_contents) + + # Take an arbitrary method that uses the customization. + method_contents = self.get_method_document_block('put_item', contents) + + # Make sure the request syntax is as expected. + request_syntax_contents = self.get_request_syntax_document_block( + method_contents) + self.assert_contains_lines_in_order([ + ' response = table.put_item(', + ' Item={', + (' \'string\': \'string\'|123|Binary(b\'bytes\')' + '|True|None|set([\'string\'])|set([123])|' + 'set([Binary(b\'bytes\')])|[]|{}'), + ' },', + ' Expected={', + ' \'string\': {', + (' \'Value\': \'string\'|123' + '|Binary(b\'bytes\')|True|None|set([\'string\'])' + '|set([123])|set([Binary(b\'bytes\')])|[]|{},'), + ' \'AttributeValueList\': [', + (' \'string\'|123|Binary(b\'bytes\')' + '|True|None|set([\'string\'])|set([123])|' + 'set([Binary(b\'bytes\')])|[]|{},')], + request_syntax_contents) + + # Make sure the response syntax is as expected. 
+ response_syntax_contents = self.get_response_syntax_document_block( + method_contents) + self.assert_contains_lines_in_order([ + ' {', + ' \'Attributes\': {', + (' \'string\': \'string\'|123|' + 'Binary(b\'bytes\')|True|None|set([\'string\'])|' + 'set([123])|set([Binary(b\'bytes\')])|[]|{}'), + ' },'], + response_syntax_contents) + + # Make sure the request parameter is documented correctly. + request_param_contents = self.get_request_parameter_document_block( + 'Item', method_contents) + self.assert_contains_lines_in_order([ + ' :type Item: dict', + ' :param Item: **[REQUIRED]**', + ' - *(string) --*', + (' - *(valid DynamoDB type) --* - The value of the ' + 'attribute. The valid value types are listed in the ' + ':ref:`DynamoDB Reference Guide`.')], + request_param_contents + ) + + # Make sure the response parameter is documented correctly. + response_param_contents = self.get_response_parameter_document_block( + 'Attributes', method_contents) + self.assert_contains_lines_in_order([ + ' - **Attributes** *(dict) --*', + ' - *(string) --*', + (' - *(valid DynamoDB type) --* - The value of ' + 'the attribute. The valid value types are listed in the ' + ':ref:`DynamoDB Reference Guide`.')], + response_param_contents) + + def test_conditions_is_documented(self): + contents = self.get_class_document_block( + 'DynamoDB.Table', self.generated_contents) + + # Take an arbitrary method that uses the customization. + method_contents = self.get_method_document_block('query', contents) + + # Make sure the request syntax is as expected. + request_syntax_contents = self.get_request_syntax_document_block( + method_contents) + self.assert_contains_lines_in_order([ + ' response = table.query(', + (' FilterExpression=Attr(\'myattribute\').' + 'eq(\'myvalue\'),'), + (' KeyConditionExpression=Key(\'mykey\')' + '.eq(\'myvalue\'),')], + request_syntax_contents) + + # Make sure the request parameter is documented correctly. 
+ self.assert_contains_lines_in_order([ + (' :type FilterExpression: condition from ' + ':py:class:`boto3.dynamodb.conditions.Attr` method'), + (' :param FilterExpression: The condition(s) an ' + 'attribute(s) must meet. Valid conditions are listed in ' + 'the :ref:`DynamoDB Reference Guide`.'), + (' :type KeyConditionExpression: condition from ' + ':py:class:`boto3.dynamodb.conditions.Key` method'), + (' :param KeyConditionExpression: The condition(s) a ' + 'key(s) must meet. Valid conditions are listed in the ' + ':ref:`DynamoDB Reference Guide`.')], + method_contents) diff --git a/tests/functional/docs/test_s3.py b/tests/functional/docs/test_s3.py new file mode 100644 index 0000000..2208f10 --- /dev/null +++ b/tests/functional/docs/test_s3.py @@ -0,0 +1,34 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests.functional.docs import BaseDocsFunctionalTests + +from boto3.session import Session +from boto3.docs.service import ServiceDocumenter + + +class TestS3Customizations(BaseDocsFunctionalTests): + def setUp(self): + self.documenter = ServiceDocumenter( + 's3', session=Session(region_name='us-east-1')) + self.generated_contents = self.documenter.document_service() + self.generated_contents = self.generated_contents.decode('utf-8') + + def test_file_transfer_methods_are_documented(self): + self.assert_contains_lines_in_order([ + '.. py:class:: S3.Client', + ' * :py:meth:`download_file`', + ' * :py:meth:`upload_file`', + ' .. 
py:method:: download_file(', + ' .. py:method:: upload_file('], + self.generated_contents + ) diff --git a/tests/functional/docs/test_smoke.py b/tests/functional/docs/test_smoke.py new file mode 100644 index 0000000..386fcf4 --- /dev/null +++ b/tests/functional/docs/test_smoke.py @@ -0,0 +1,157 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from nose.tools import assert_true +import botocore.session +from botocore import xform_name +from botocore.exceptions import DataNotFoundError + +import boto3 +from boto3.docs.service import ServiceDocumenter + + +def test_docs_generated(): + """Verify we can generate the appropriate docs for all services""" + botocore_session = botocore.session.get_session() + session = boto3.Session(region_name='us-east-1') + for service_name in session.get_available_services(): + generated_docs = ServiceDocumenter( + service_name, session=session).document_service() + generated_docs = generated_docs.decode('utf-8') + client = boto3.client(service_name, 'us-east-1') + + # Check that all of the services have the appropriate title + yield (_assert_has_title, generated_docs, client) + + # Check that all services have the client documented. + yield (_assert_has_client_documentation, generated_docs, service_name, + client) + + # If the client can paginate, make sure the paginators are documented. 
+ try: + paginator_model = botocore_session.get_paginator_model( + service_name) + yield (_assert_has_paginator_documentation, generated_docs, + service_name, client, + sorted(paginator_model._paginator_config)) + except DataNotFoundError: + pass + + # If the client has waiters, make sure the waiters are documented + if client.waiter_names: + waiter_model = botocore_session.get_waiter_model(service_name) + yield (_assert_has_waiter_documentation, generated_docs, + service_name, client, waiter_model) + + # If the service has resources, make sure the service resource + # is at least documented. + if service_name in session.get_available_resources(): + resource = boto3.resource(service_name, 'us-east-1') + yield (_assert_has_resource_documentation, generated_docs, + service_name, resource) + + +def _assert_contains_lines_in_order(lines, contents): + for line in lines: + assert_true(line in contents) + beginning = contents.find(line) + contents = contents[(beginning + len(line)):] + + +def _assert_has_title(generated_docs, client): + title = client.__class__.__name__ + ref_lines = [ + '*' * len(title), + title, + '*' * len(title) + ] + _assert_contains_lines_in_order(ref_lines, generated_docs) + + +def _assert_has_client_documentation(generated_docs, service_name, client): + ref_lines = [ + '======', + 'Client', + '======', + '.. py:class:: %s.Client' % client.__class__.__name__, + ' A low-level client representing', + ' import boto3', + ' client = boto3.client(\'%s\')' % service_name, + ' These are the available methods:', + ' * :py:meth:`get_paginator`', + ' * :py:meth:`get_waiter`', + ' .. py:method:: get_paginator(operation_name)', + ' .. 
py:method:: get_waiter(waiter_name)', + ] + _assert_contains_lines_in_order(ref_lines, generated_docs) + + +def _assert_has_paginator_documentation(generated_docs, service_name, client, + paginator_names): + ref_lines = [ + '==========', + 'Paginators', + '==========', + 'The available paginators are:' + ] + for paginator_name in paginator_names: + ref_lines.append( + '* :py:class:`%s.Paginator.%s`' % ( + client.__class__.__name__, paginator_name)) + + for paginator_name in paginator_names: + ref_lines.append( + '.. py:class:: %s.Paginator.%s' % ( + client.__class__.__name__, paginator_name)) + ref_lines.append( + ' .. py:method:: paginate(') + + _assert_contains_lines_in_order(ref_lines, generated_docs) + + +def _assert_has_waiter_documentation(generated_docs, service_name, client, + waiter_model): + ref_lines = [ + '=======', + 'Waiters', + '=======', + 'The available waiters are:' + ] + for waiter_name in waiter_model.waiter_names: + ref_lines.append( + '* :py:class:`%s.Waiter.%s`' % ( + client.__class__.__name__, waiter_name)) + + for waiter_name in waiter_model.waiter_names: + ref_lines.append( + '.. py:class:: %s.Waiter.%s' % ( + client.__class__.__name__, waiter_name)) + ref_lines.append( + ' waiter = client.get_waiter(\'%s\')' % xform_name(waiter_name)) + ref_lines.append( + ' .. py:method:: wait(') + + _assert_contains_lines_in_order(ref_lines, generated_docs) + + +def _assert_has_resource_documentation(generated_docs, service_name, resource): + ref_lines = [ + '================', + 'Service Resource', + '================', + '.. 
py:class:: %s.ServiceResource' % ( + resource.meta.client.__class__.__name__), + ' A resource representing', + ' import boto3', + ' %s = boto3.resource(\'%s\')' % (service_name, service_name), + ] + _assert_contains_lines_in_order(ref_lines, generated_docs) diff --git a/tests/functional/dynamodb/__init__.py b/tests/functional/dynamodb/__init__.py new file mode 100644 index 0000000..c89416d --- /dev/null +++ b/tests/functional/dynamodb/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/tests/functional/dynamodb/test_table.py b/tests/functional/dynamodb/test_table.py new file mode 100644 index 0000000..974072f --- /dev/null +++ b/tests/functional/dynamodb/test_table.py @@ -0,0 +1,27 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
from tests import unittest, mock

import boto3


class TestTableResourceCustomizations(unittest.TestCase):
    """Verify boto3 customizations are present on the dynamodb Table."""

    maxDiff = None

    def setUp(self):
        self.resource = boto3.resource('dynamodb', 'us-east-1')

    def test_resource_has_batch_writer_added(self):
        # The generated model does not define batch_writer; its presence
        # shows the dynamodb table customization hooks ran.
        my_table = self.resource.Table('mytable')
        self.assertTrue(hasattr(my_table, 'batch_writer'))
import json
from tests import unittest, mock

from botocore.vendored import requests

from boto3.session import Session
from boto3.dynamodb.conditions import Attr


class TestDynamoDB(unittest.TestCase):
    """Compare the wire payload produced by the resource vs. the client.

    ``Endpoint.make_request`` is patched out, so nothing is actually sent;
    assertions inspect the serialized request body captured by the mock.
    """

    def setUp(self):
        self.http_response = requests.models.Response()
        self.http_response.status_code = 200
        self.parsed_response = {}
        self.make_request_patch = mock.patch(
            'botocore.endpoint.Endpoint.make_request')
        self.make_request_mock = self.make_request_patch.start()
        self.make_request_mock.return_value = (
            self.http_response, self.parsed_response)
        self.session = Session(
            aws_access_key_id='dummy',
            aws_secret_access_key='dummy',
            region_name='us-east-1')

    def tearDown(self):
        self.make_request_patch.stop()

    def _sent_request_params(self):
        # Positional call args of make_request are (operation_model,
        # request_dict); the request dict's body is the JSON payload.
        request = self.make_request_mock.call_args_list[0][0][1]
        return json.loads(request['body'].decode('utf-8'))

    def test_resource(self):
        table = self.session.resource('dynamodb').Table('MyTable')
        # The high level interface accepts condition objects and is
        # expected to translate them into expression strings.
        table.scan(FilterExpression=Attr('mykey').eq('myvalue'))
        self.assertEqual(
            self._sent_request_params(),
            {'TableName': 'MyTable',
             'FilterExpression': '#n0 = :v0',
             'ExpressionAttributeNames': {'#n0': 'mykey'},
             'ExpressionAttributeValues': {':v0': {'S': 'myvalue'}}}
        )

    def test_client(self):
        client = self.session.client('dynamodb')
        # The plain client takes expression strings and passes them through.
        client.scan(
            TableName='MyTable',
            FilterExpression='#n0 = :v0',
            ExpressionAttributeNames={'#n0': 'mykey'},
            ExpressionAttributeValues={':v0': {'S': 'myvalue'}}
        )
        self.assertEqual(
            self._sent_request_params(),
            {'TableName': 'MyTable',
             'FilterExpression': '#n0 = :v0',
             'ExpressionAttributeNames': {'#n0': 'mykey'},
             'ExpressionAttributeValues': {':v0': {'S': 'myvalue'}}}
        )
b/tests/functional/test_resource.py new file mode 100644 index 0000000..30988e8 --- /dev/null +++ b/tests/functional/test_resource.py @@ -0,0 +1,38 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import boto3 + +import botocore.session +from tests import unittest + + +def identity(self, x): + return x + + +class TestResourceCustomization(unittest.TestCase): + def setUp(self): + self.botocore_session = botocore.session.get_session() + + def add_new_method(self, name): + def handler(class_attributes, **kwargs): + class_attributes[name] = identity + return handler + + def test_can_inject_method_onto_resource(self): + session = boto3.Session(botocore_session=self.botocore_session) + self.botocore_session.register('creating-resource-class.s3', + self.add_new_method(name='my_method')) + resource = session.resource('s3') + self.assertTrue(hasattr(resource, 'my_method')) + self.assertEqual(resource.my_method('anything'), 'anything') diff --git a/tests/functional/test_s3.py b/tests/functional/test_s3.py new file mode 100644 index 0000000..31c565a --- /dev/null +++ b/tests/functional/test_s3.py @@ -0,0 +1,45 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. 
from tests import unittest

import boto3.session


class TestS3MethodInjection(unittest.TestCase):
    """The S3 customizations bolt extra methods onto generated classes."""

    def _assert_injected(self, obj, method_name, target):
        # Build the same failure message the individual asserts used.
        self.assertTrue(
            hasattr(obj, method_name),
            '%s was not injected onto %s' % (method_name, target))

    def test_transfer_methods_injected_to_client(self):
        client = boto3.session.Session(region_name='us-west-2').client('s3')
        self._assert_injected(client, 'upload_file', 'S3 client')
        self._assert_injected(client, 'download_file', 'S3 client')

    def test_bucket_resource_has_load_method(self):
        session = boto3.session.Session(region_name='us-west-2')
        bucket = session.resource('s3').Bucket('fakebucket')
        self.assertTrue(hasattr(bucket, 'load'),
                        'load() was not injected onto S3 Bucket resource.')

    def test_transfer_methods_injected_to_bucket(self):
        bucket = boto3.resource('s3').Bucket('my_bucket')
        self._assert_injected(bucket, 'upload_file', 'S3 bucket')
        self._assert_injected(bucket, 'download_file', 'S3 bucket')

    def test_transfer_methods_injected_to_object(self):
        obj = boto3.resource('s3').Object('my_bucket', 'my_key')
        self._assert_injected(obj, 'upload_file', 'S3 object')
        self._assert_injected(obj, 'download_file', 'S3 object')
from tests import unittest

import boto3.session


class TestSession(unittest.TestCase):
    """Functional checks on boto3.session.Session's public surface."""

    def setUp(self):
        self.session = boto3.session.Session(region_name='us-west-2')

    def test_events_attribute(self):
        # ``session.events`` exposes the underlying event emitter, so a
        # handler registered through it must fire when the event is emitted.
        calls = []

        def my_handler(my_list, **kwargs):
            return my_list.append('my_handler called')

        self.session.events.register('myevent', my_handler)
        self.session.events.emit('myevent', my_list=calls)
        self.assertEqual(calls, ['my_handler called'])
from nose.tools import assert_true

from boto3.session import Session
import botocore.session


def create_session():
    """Return a Session with dummy credentials pinned to us-east-1."""
    return Session(aws_access_key_id='dummy',
                   aws_secret_access_key='dummy',
                   region_name='us-east-1')


def test_can_create_all_resources():
    """Verify we can create all existing resources."""
    session = create_session()
    for service_name in session.get_available_resources():
        yield _test_create_resource, session, service_name


def _test_create_resource(session, service_name):
    # Having a "meta" attribute is an arbitrary sanity check that the
    # resource object was actually constructed.
    assert_true(hasattr(session.resource(service_name), 'meta'))


def test_can_create_all_clients():
    """Verify we can create a client for every available service."""
    session = create_session()
    for service_name in session.get_available_services():
        yield _test_create_client, session, service_name


def _test_create_client(session, service_name):
    assert_true(hasattr(session.client(service_name), 'meta'))


def test_api_versions_synced_with_botocore():
    """Every resource's API version must match botocore's latest."""
    botocore_session = botocore.session.get_session()
    boto3_session = create_session()
    for service_name in boto3_session.get_available_resources():
        yield (_assert_same_api_versions, service_name,
               botocore_session, boto3_session)


def _assert_same_api_versions(service_name, botocore_session, boto3_session):
    resource = boto3_session.resource(service_name)
    boto3_api_version = resource.meta.client.meta.service_model.api_version
    client = botocore_session.create_client(service_name,
                                            region_name='us-east-1',
                                            aws_access_key_id='foo',
                                            aws_secret_access_key='bar')
    botocore_api_version = client.meta.service_model.api_version
    if botocore_api_version != boto3_api_version:
        raise AssertionError(
            "Different latest API versions found for %s: "
            "%s (botocore), %s (boto3)\n" % (service_name,
                                             botocore_api_version,
                                             boto3_api_version))
from tests import unittest

import botocore.session


from boto3 import utils
import boto3.session


class TestUtils(unittest.TestCase):
    def test_runtime_error_raised_when_shadowing_client_method(self):
        """inject_attribute must refuse to clobber an existing client method."""
        botocore_session = botocore.session.get_session()
        session = boto3.session.Session(region_name='us-west-2',
                                        botocore_session=botocore_session)

        def shadows_put_object(class_attributes, **kwargs):
            # Try to replace the real put_object with a junk value.
            utils.inject_attribute(class_attributes, 'put_object', 'invalid')

        botocore_session.register('creating-client-class', shadows_put_object)

        # Building the client runs the handler above, which must blow up
        # because put_object already exists on the class being created.
        with self.assertRaises(RuntimeError):
            session.client('s3')
import boto3.session

from boto3.resources.collection import CollectionManager


# A map of services to regions that cannot use us-west-2
# for the integration tests.
REGION_MAP = {
    'opsworks': 'us-east-1'
}

# A list of collections to ignore.  They require parameters
# or are very slow to run.
BLACKLIST = {
    'ec2': ['images'],
    'iam': ['signing_certificates'],
    'sqs': ['dead_letter_source_queues']
}


def test_all_collections():
    """Yield a test for every non-blacklisted collection on every resource."""
    session = boto3.session.Session()
    for service_name in session.get_available_resources():
        resource = session.resource(
            service_name,
            region_name=REGION_MAP.get(service_name, 'us-west-2'))

        for key in dir(resource):
            if key in BLACKLIST.get(service_name, []):
                continue

            value = getattr(resource, key)
            if isinstance(value, CollectionManager):
                yield _test_collection, service_name, key, value


def _test_collection(service_name, collection_name, collection):
    """Fetch the first page of *collection*.

    This exercises making a remote request, parsing the response, and
    constructing resource objects from it.
    """
    list(collection.limit(1))
import collections
from decimal import Decimal

import boto3.session
from boto3.dynamodb.types import Binary
from boto3.dynamodb.conditions import Attr, Key
from tests import unittest, unique_id

# ``collections.Mapping`` is a deprecated alias that was removed in
# Python 3.10; import the ABC from collections.abc, falling back to the
# collections module itself on Python 2.
try:
    from collections import abc as collections_abc
except ImportError:  # Python 2
    collections_abc = collections


class BaseDynamoDBTest(unittest.TestCase):
    """Create one real DynamoDB table per test class.

    ``item_data`` exercises every attribute type the document model
    supports (null, bool, string, number, binary, the three set types,
    list and map).
    """

    @classmethod
    def setUpClass(cls):
        cls.session = boto3.session.Session(region_name='us-west-2')
        cls.dynamodb = cls.session.resource('dynamodb')
        cls.table_name = unique_id('boto3db')
        cls.item_data = {
            'MyHashKey': 'mykey',
            'MyNull': None,
            'MyBool': True,
            'MyString': 'mystring',
            'MyNumber': Decimal('1.25'),
            'MyBinary': Binary(b'\x01'),
            'MyStringSet': set(['foo']),
            'MyNumberSet': set([Decimal('1.25')]),
            'MyBinarySet': set([Binary(b'\x01')]),
            'MyList': ['foo'],
            'MyMap': {'foo': 'bar'}
        }
        cls.table = cls.dynamodb.create_table(
            TableName=cls.table_name,
            ProvisionedThroughput={"ReadCapacityUnits": 5,
                                   "WriteCapacityUnits": 5},
            KeySchema=[{"AttributeName": "MyHashKey", "KeyType": "HASH"}],
            AttributeDefinitions=[{"AttributeName": "MyHashKey",
                                   "AttributeType": "S"}])
        # Table creation is asynchronous; block until it is usable.
        waiter = cls.dynamodb.meta.client.get_waiter('table_exists')
        waiter.wait(TableName=cls.table_name)

    @classmethod
    def tearDownClass(cls):
        cls.table.delete()


class TestDynamoDBTypes(BaseDynamoDBTest):
    def test_put_get_item(self):
        # Round-trip the item and make sure every type deserializes back
        # to the exact Python value it was written as.
        self.table.put_item(Item=self.item_data)
        self.addCleanup(self.table.delete_item, Key={'MyHashKey': 'mykey'})
        response = self.table.get_item(Key={'MyHashKey': 'mykey'},
                                       ConsistentRead=True)
        self.assertEqual(response['Item'], self.item_data)


class TestDynamoDBConditions(BaseDynamoDBTest):
    """Each test scans/queries with one condition type and checks the
    returned item actually satisfies it."""

    @classmethod
    def setUpClass(cls):
        super(TestDynamoDBConditions, cls).setUpClass()
        cls.table.put_item(Item=cls.item_data)

    @classmethod
    def tearDownClass(cls):
        cls.table.delete_item(Key={'MyHashKey': 'mykey'})
        super(TestDynamoDBConditions, cls).tearDownClass()

    def test_filter_expression(self):
        r = self.table.scan(
            FilterExpression=Attr('MyHashKey').eq('mykey'))
        self.assertEqual(r['Items'][0]['MyHashKey'], 'mykey')

    def test_key_condition_expression(self):
        r = self.table.query(
            KeyConditionExpression=Key('MyHashKey').eq('mykey'))
        self.assertEqual(r['Items'][0]['MyHashKey'], 'mykey')

    def test_key_condition_with_filter_condition_expression(self):
        r = self.table.query(
            KeyConditionExpression=Key('MyHashKey').eq('mykey'),
            FilterExpression=Attr('MyString').eq('mystring'))
        self.assertEqual(r['Items'][0]['MyString'], 'mystring')

    def test_condition_less_than(self):
        r = self.table.scan(
            FilterExpression=Attr('MyNumber').lt(Decimal('1.26')))
        self.assertTrue(r['Items'][0]['MyNumber'] < Decimal('1.26'))

    def test_condition_less_than_equal(self):
        r = self.table.scan(
            FilterExpression=Attr('MyNumber').lte(Decimal('1.26')))
        self.assertTrue(r['Items'][0]['MyNumber'] <= Decimal('1.26'))

    def test_condition_greater_than(self):
        r = self.table.scan(
            FilterExpression=Attr('MyNumber').gt(Decimal('1.24')))
        self.assertTrue(r['Items'][0]['MyNumber'] > Decimal('1.24'))

    def test_condition_greater_than_equal(self):
        r = self.table.scan(
            FilterExpression=Attr('MyNumber').gte(Decimal('1.24')))
        self.assertTrue(r['Items'][0]['MyNumber'] >= Decimal('1.24'))

    def test_condition_begins_with(self):
        r = self.table.scan(
            FilterExpression=Attr('MyString').begins_with('my'))
        self.assertTrue(r['Items'][0]['MyString'].startswith('my'))

    def test_condition_between(self):
        r = self.table.scan(
            FilterExpression=Attr('MyNumber').between(
                Decimal('1.24'), Decimal('1.26')))
        self.assertTrue(r['Items'][0]['MyNumber'] > Decimal('1.24'))
        self.assertTrue(r['Items'][0]['MyNumber'] < Decimal('1.26'))

    def test_condition_not_equal(self):
        r = self.table.scan(
            FilterExpression=Attr('MyHashKey').ne('notmykey'))
        self.assertNotEqual(r['Items'][0]['MyHashKey'], 'notmykey')

    def test_condition_in(self):
        r = self.table.scan(
            FilterExpression=Attr('MyHashKey').is_in(['notmykey', 'mykey']))
        self.assertIn(r['Items'][0]['MyHashKey'], ['notmykey', 'mykey'])

    def test_condition_exists(self):
        r = self.table.scan(
            FilterExpression=Attr('MyString').exists())
        self.assertIn('MyString', r['Items'][0])

    def test_condition_not_exists(self):
        r = self.table.scan(
            FilterExpression=Attr('MyFakeKey').not_exists())
        self.assertNotIn('MyFakeKey', r['Items'][0])

    def test_condition_contains(self):
        r = self.table.scan(
            FilterExpression=Attr('MyString').contains('my'))
        self.assertIn('my', r['Items'][0]['MyString'])

    def test_condition_size(self):
        r = self.table.scan(
            FilterExpression=Attr('MyString').size().eq(len('mystring')))
        self.assertEqual(len(r['Items'][0]['MyString']), len('mystring'))

    def test_condition_attribute_type(self):
        r = self.table.scan(
            FilterExpression=Attr('MyMap').attribute_type('M'))
        # Maps deserialize to dict-like objects; check against the ABC.
        self.assertIsInstance(r['Items'][0]['MyMap'], collections_abc.Mapping)

    def test_condition_and(self):
        r = self.table.scan(
            FilterExpression=(Attr('MyHashKey').eq('mykey') &
                              Attr('MyString').eq('mystring')))
        item = r['Items'][0]
        self.assertTrue(
            item['MyHashKey'] == 'mykey' and item['MyString'] == 'mystring')

    def test_condition_or(self):
        r = self.table.scan(
            FilterExpression=(Attr('MyHashKey').eq('mykey2') |
                              Attr('MyString').eq('mystring')))
        item = r['Items'][0]
        self.assertTrue(
            item['MyHashKey'] == 'mykey2' or item['MyString'] == 'mystring')

    def test_condition_not(self):
        r = self.table.scan(
            FilterExpression=(~Attr('MyHashKey').eq('mykey2')))
        item = r['Items'][0]
        self.assertTrue(item['MyHashKey'] != 'mykey2')

    def test_condition_in_map(self):
        r = self.table.scan(
            FilterExpression=Attr('MyMap.foo').eq('bar'))
        self.assertEqual(r['Items'][0]['MyMap']['foo'], 'bar')

    def test_condition_in_list(self):
        r = self.table.scan(
            FilterExpression=Attr('MyList[0]').eq('foo'))
        self.assertEqual(r['Items'][0]['MyList'][0], 'foo')


class TestDynamodbBatchWrite(BaseDynamoDBTest):
    def test_batch_write_items(self):
        num_elements = 1000
        items = []
        for i in range(num_elements):
            items.append({'MyHashKey': 'foo%s' % i,
                          'OtherKey': 'bar%s' % i})
        with self.table.batch_writer() as batch:
            for item in items:
                batch.put_item(Item=item)

        # Verify everything that comes back was one of the items we wrote.
        # NOTE(review): a single scan() page is assumed to be enough here;
        # the assertion direction (scanned item in items) stays correct
        # even if the scan were paginated.
        for obj in self.table.scan()['Items']:
            self.assertIn(obj, items)
import os
import threading
import math
import time
import random
import tempfile
import shutil
import hashlib
import string
import datetime


def assert_files_equal(first, second):
    """Raise AssertionError unless the two files have identical contents.

    The sizes are compared first as a cheap screen; an MD5 digest of each
    file settles the rest.
    """
    if os.path.getsize(first) != os.path.getsize(second):
        raise AssertionError("Files are not equal: %s, %s" % (first, second))
    first_md5 = md5_checksum(first)
    second_md5 = md5_checksum(second)
    if first_md5 != second_md5:
        raise AssertionError(
            "Files are not equal: %s(md5=%s) != %s(md5=%s)" % (
                first, first_md5, second, second_md5))


def md5_checksum(filename):
    """Return the hex MD5 digest of *filename*, read in 8 KiB chunks."""
    checksum = hashlib.md5()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            checksum.update(chunk)
    return checksum.hexdigest()


def random_bucket_name(prefix='boto3-transfer', num_chars=10):
    """Return *prefix* followed by *num_chars* random [a-z0-9] characters.

    os.urandom is used so that concurrent test runs are unlikely to
    collide on bucket names.
    """
    base = string.ascii_lowercase + string.digits
    random_bytes = bytearray(os.urandom(num_chars))
    return prefix + ''.join([base[b % len(base)] for b in random_bytes])


class FileCreator(object):
    """Create scratch files under a private temp dir.

    Call ``remove_all()`` when finished to delete everything created.
    """

    def __init__(self):
        self.rootdir = tempfile.mkdtemp()

    def remove_all(self):
        """Delete the temp dir and every file created inside it."""
        shutil.rmtree(self.rootdir)

    def create_file(self, filename, contents, mode='w'):
        """Creates a file in a tmpdir

        ``filename`` should be a relative path, e.g. "foo/bar/baz.txt"
        It will be translated into a full path in a tmp dir.

        ``mode`` is the mode the file should be opened in, either
        ``w`` or ``wb``.

        Returns the full path to the file.
        """
        full_path = os.path.join(self.rootdir, filename)
        if not os.path.isdir(os.path.dirname(full_path)):
            os.makedirs(os.path.dirname(full_path))
        with open(full_path, mode) as f:
            f.write(contents)
        return full_path

    def create_file_with_size(self, filename, filesize):
        """Create *filename* containing exactly *filesize* bytes of ``a``.

        Returns the full path to the file.  (The previous implementation
        rounded the size up to a whole number of 8 KiB chunks, so the
        file could be up to 8191 bytes larger than requested.)
        """
        filename = self.create_file(filename, contents='')
        chunksize = 8192
        remaining = filesize
        with open(filename, 'wb') as f:
            while remaining > 0:
                # Never write past the requested size on the final chunk.
                n = min(chunksize, remaining)
                f.write(b'a' * n)
                remaining -= n
        return filename

    def append_file(self, filename, contents):
        """Append contents to a file

        ``filename`` should be a relative path, e.g. "foo/bar/baz.txt"
        It will be translated into a full path in a tmp dir.

        Returns the full path to the file.
        """
        full_path = os.path.join(self.rootdir, filename)
        if not os.path.isdir(os.path.dirname(full_path)):
            os.makedirs(os.path.dirname(full_path))
        with open(full_path, 'a') as f:
            f.write(contents)
        return full_path

    def full_path(self, filename):
        """Translate relative path to full path in temp dir.

        f.full_path('foo/bar.txt') -> /tmp/asdfasd/foo/bar.txt
        """
        return os.path.join(self.rootdir, filename)
obj.put( + Body='hello, world') + self.addCleanup(obj.delete) + + # Wait till the bucket exists + obj.wait_until_exists() + + # List objects and make sure ours is present + self.assertIn('test.txt', [o.key for o in bucket.objects.all()]) + + def test_can_create_object_directly(self): + obj = self.s3.Object(self.bucket_name, 'test.txt') + + self.assertEqual(obj.bucket_name, self.bucket_name) + self.assertEqual(obj.key, 'test.txt') + + def test_s3_multipart(self): + # Create the bucket + bucket = self.create_bucket_resource(self.bucket_name) + bucket.wait_until_exists() + + # Create the multipart upload + mpu = bucket.Object('mp-test.txt').initiate_multipart_upload() + self.addCleanup(mpu.abort) + + # Create and upload a part + part = mpu.Part(1) + response = part.upload(Body='hello, world!') + + # Complete the upload, which requires info on all of the parts + part_info = { + 'Parts': [ + { + 'PartNumber': 1, + 'ETag': response['ETag'] + } + ] + } + + mpu.complete(MultipartUpload=part_info) + self.addCleanup(bucket.Object('mp-test.txt').delete) + + contents = bucket.Object('mp-test.txt').get()['Body'].read() + self.assertEqual(contents, b'hello, world!') + + +class TestS3Transfers(unittest.TestCase): + """Tests for the high level boto3.s3.transfer module.""" + + @classmethod + def setUpClass(cls): + cls.region = 'us-west-2' + cls.session = boto3.session.Session(region_name=cls.region) + cls.client = cls.session.client('s3', cls.region) + cls.bucket_name = random_bucket_name() + cls.client.create_bucket( + Bucket=cls.bucket_name, + CreateBucketConfiguration={'LocationConstraint': cls.region}) + + def setUp(self): + self.files = FileCreator() + + def tearDown(self): + self.files.remove_all() + + @classmethod + def tearDownClass(cls): + cls.client.delete_bucket(Bucket=cls.bucket_name) + + def delete_object(self, key): + self.client.delete_object( + Bucket=self.bucket_name, + Key=key) + + def object_exists(self, key): + self.client.head_object(Bucket=self.bucket_name, + 
Key=key) + return True + + def create_s3_transfer(self, config=None): + return boto3.s3.transfer.S3Transfer(self.client, + config=config) + + def assert_has_public_read_acl(self, response): + grants = response['Grants'] + public_read = [g['Grantee'].get('URI', '') for g in grants + if g['Permission'] == 'READ'] + self.assertIn('groups/global/AllUsers', public_read[0]) + + def test_upload_below_threshold(self): + config = boto3.s3.transfer.TransferConfig( + multipart_threshold=2 * 1024 * 1024) + transfer = self.create_s3_transfer(config) + filename = self.files.create_file_with_size( + 'foo.txt', filesize=1024 * 1024) + transfer.upload_file(filename, self.bucket_name, + 'foo.txt') + self.addCleanup(self.delete_object, 'foo.txt') + + self.assertTrue(self.object_exists('foo.txt')) + + def test_upload_above_threshold(self): + config = boto3.s3.transfer.TransferConfig( + multipart_threshold=2 * 1024 * 1024) + transfer = self.create_s3_transfer(config) + filename = self.files.create_file_with_size( + '20mb.txt', filesize=20 * 1024 * 1024) + transfer.upload_file(filename, self.bucket_name, + '20mb.txt') + self.addCleanup(self.delete_object, '20mb.txt') + self.assertTrue(self.object_exists('20mb.txt')) + + def test_upload_file_above_threshold_with_acl(self): + config = boto3.s3.transfer.TransferConfig( + multipart_threshold=5 * 1024 * 1024) + transfer = self.create_s3_transfer(config) + filename = self.files.create_file_with_size( + '6mb.txt', filesize=6 * 1024 * 1024) + extra_args = {'ACL': 'public-read'} + transfer.upload_file(filename, self.bucket_name, + '6mb.txt', extra_args=extra_args) + self.addCleanup(self.delete_object, '6mb.txt') + + self.assertTrue(self.object_exists('6mb.txt')) + response = self.client.get_object_acl( + Bucket=self.bucket_name, Key='6mb.txt') + self.assert_has_public_read_acl(response) + + def test_upload_file_above_threshold_with_ssec(self): + key_bytes = os.urandom(32) + extra_args = { + 'SSECustomerKey': key_bytes, + 'SSECustomerAlgorithm': 
'AES256', + } + config = boto3.s3.transfer.TransferConfig( + multipart_threshold=5 * 1024 * 1024) + transfer = self.create_s3_transfer(config) + filename = self.files.create_file_with_size( + '6mb.txt', filesize=6 * 1024 * 1024) + transfer.upload_file(filename, self.bucket_name, + '6mb.txt', extra_args=extra_args) + self.addCleanup(self.delete_object, '6mb.txt') + # A head object will fail if it has a customer key + # associated with it and it's not provided in the HeadObject + # request so we can use this to verify our functionality. + response = self.client.head_object( + Bucket=self.bucket_name, + Key='6mb.txt', **extra_args) + self.assertEqual(response['SSECustomerAlgorithm'], 'AES256') + + def test_progress_callback_on_upload(self): + self.amount_seen = 0 + lock = threading.Lock() + + def progress_callback(amount): + with lock: + self.amount_seen += amount + + transfer = self.create_s3_transfer() + filename = self.files.create_file_with_size( + '20mb.txt', filesize=20 * 1024 * 1024) + transfer.upload_file(filename, self.bucket_name, + '20mb.txt', callback=progress_callback) + self.addCleanup(self.delete_object, '20mb.txt') + + # The callback should have been called enough times such that + # the total amount of bytes we've seen (via the "amount" + # arg to the callback function) should be the size + # of the file we uploaded. + self.assertEqual(self.amount_seen, 20 * 1024 * 1024) + + def test_callback_called_once_with_sigv4(self): + # Verify #98, where the callback was being invoked + # twice when using signature version 4. 
+ self.amount_seen = 0 + lock = threading.Lock() + def progress_callback(amount): + with lock: + self.amount_seen += amount + + client = self.session.client( + 's3', self.region, + config=Config(signature_version='s3v4')) + transfer = boto3.s3.transfer.S3Transfer(client) + filename = self.files.create_file_with_size( + '10mb.txt', filesize=10 * 1024 * 1024) + transfer.upload_file(filename, self.bucket_name, + '10mb.txt', callback=progress_callback) + self.addCleanup(self.delete_object, '10mb.txt') + + self.assertEqual(self.amount_seen, 10 * 1024 * 1024) + + def test_can_send_extra_params_on_upload(self): + transfer = self.create_s3_transfer() + filename = self.files.create_file_with_size('foo.txt', filesize=1024) + transfer.upload_file(filename, self.bucket_name, + 'foo.txt', extra_args={'ACL': 'public-read'}) + self.addCleanup(self.delete_object, 'foo.txt') + + response = self.client.get_object_acl( + Bucket=self.bucket_name, Key='foo.txt') + self.assert_has_public_read_acl(response) + + def test_can_configure_threshold(self): + config = boto3.s3.transfer.TransferConfig( + multipart_threshold=6 * 1024 * 1024 + ) + transfer = self.create_s3_transfer(config) + filename = self.files.create_file_with_size( + 'foo.txt', filesize=8 * 1024 * 1024) + transfer.upload_file(filename, self.bucket_name, + 'foo.txt') + self.addCleanup(self.delete_object, 'foo.txt') + + self.assertTrue(self.object_exists('foo.txt')) + + def test_can_send_extra_params_on_download(self): + # We're picking the customer provided sse feature + # of S3 to test the extra_args functionality of + # S3. 
+ key_bytes = os.urandom(32) + extra_args = { + 'SSECustomerKey': key_bytes, + 'SSECustomerAlgorithm': 'AES256', + } + self.client.put_object(Bucket=self.bucket_name, + Key='foo.txt', + Body=b'hello world', + **extra_args) + self.addCleanup(self.delete_object, 'foo.txt') + transfer = self.create_s3_transfer() + + download_path = os.path.join(self.files.rootdir, 'downloaded.txt') + transfer.download_file(self.bucket_name, 'foo.txt', + download_path, extra_args=extra_args) + with open(download_path, 'rb') as f: + self.assertEqual(f.read(), b'hello world') + + def test_progress_callback_on_download(self): + self.amount_seen = 0 + lock = threading.Lock() + + def progress_callback(amount): + with lock: + self.amount_seen += amount + + transfer = self.create_s3_transfer() + filename = self.files.create_file_with_size( + '20mb.txt', filesize=20 * 1024 * 1024) + with open(filename, 'rb') as f: + self.client.put_object(Bucket=self.bucket_name, + Key='20mb.txt', Body=f) + self.addCleanup(self.delete_object, '20mb.txt') + + download_path = os.path.join(self.files.rootdir, 'downloaded.txt') + transfer.download_file(self.bucket_name, '20mb.txt', + download_path, callback=progress_callback) + + self.assertEqual(self.amount_seen, 20 * 1024 * 1024) + + def test_download_below_threshold(self): + transfer = self.create_s3_transfer() + + filename = self.files.create_file_with_size( + 'foo.txt', filesize=1024 * 1024) + with open(filename, 'rb') as f: + self.client.put_object(Bucket=self.bucket_name, + Key='foo.txt', + Body=f) + self.addCleanup(self.delete_object, 'foo.txt') + + download_path = os.path.join(self.files.rootdir, 'downloaded.txt') + transfer.download_file(self.bucket_name, 'foo.txt', + download_path) + assert_files_equal(filename, download_path) + + def test_download_above_threshold(self): + transfer = self.create_s3_transfer() + + filename = self.files.create_file_with_size( + 'foo.txt', filesize=20 * 1024 * 1024) + with open(filename, 'rb') as f: + 
self.client.put_object(Bucket=self.bucket_name, + Key='foo.txt', + Body=f) + self.addCleanup(self.delete_object, 'foo.txt') + + download_path = os.path.join(self.files.rootdir, 'downloaded.txt') + transfer.download_file(self.bucket_name, 'foo.txt', + download_path) + assert_files_equal(filename, download_path) + + def test_transfer_methods_through_client(self): + # This is really just a sanity check to ensure that the interface + # from the clients work. We're not exhaustively testing through + # this client interface. + filename = self.files.create_file_with_size( + 'foo.txt', filesize=1024 * 1024) + self.client.upload_file(Filename=filename, + Bucket=self.bucket_name, + Key='foo.txt') + self.addCleanup(self.delete_object, 'foo.txt') + + download_path = os.path.join(self.files.rootdir, 'downloaded.txt') + self.client.download_file(Bucket=self.bucket_name, + Key='foo.txt', + Filename=download_path) + assert_files_equal(filename, download_path) + + def test_transfer_methods_through_bucket(self): + # This is just a sanity check to ensure that the bucket interface work. + key = 'bucket.txt' + bucket = self.session.resource('s3').Bucket(self.bucket_name) + filename = self.files.create_file_with_size(key, 1024*1024) + bucket.upload_file(Filename=filename, Key=key) + self.addCleanup(self.delete_object, key) + download_path = os.path.join(self.files.rootdir, unique_id('foo')) + bucket.download_file(Key=key, Filename=download_path) + assert_files_equal(filename, download_path) + + def test_transfer_methods_through_object(self): + # This is just a sanity check to ensure that the object interface work. 
+ key = 'object.txt' + obj = self.session.resource('s3').Object(self.bucket_name, key) + filename = self.files.create_file_with_size(key, 1024*1024) + obj.upload_file(Filename=filename) + self.addCleanup(self.delete_object, key) + download_path = os.path.join(self.files.rootdir, unique_id('foo')) + obj.download_file(Filename=download_path) + assert_files_equal(filename, download_path) + + +class TestCustomS3BucketLoad(unittest.TestCase): + def setUp(self): + self.region = 'us-west-2' + self.session = boto3.session.Session(region_name=self.region) + self.s3 = self.session.resource('s3') + self.bucket_name = unique_id('boto3-test') + + def create_bucket_resource(self, bucket_name, region=None): + if region is None: + region = self.region + kwargs = {'Bucket': bucket_name} + if region != 'us-east-1': + kwargs['CreateBucketConfiguration'] = { + 'LocationConstraint': region + } + bucket = self.s3.create_bucket(**kwargs) + self.addCleanup(bucket.delete) + return bucket + + def test_can_access_buckets_creation_date(self): + bucket = self.create_bucket_resource(random_bucket_name()) + self.assertIsInstance(bucket.creation_date, datetime.datetime) diff --git a/tests/integration/test_session.py b/tests/integration/test_session.py new file mode 100644 index 0000000..8e79f3c --- /dev/null +++ b/tests/integration/test_session.py @@ -0,0 +1,46 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from tests import unittest, unique_id +import botocore.session + +import boto3.session + + +class TestUserAgentCustomizations(unittest.TestCase): + def setUp(self): + self.botocore_session = botocore.session.get_session() + self.session = boto3.session.Session( + region_name='us-west-2', botocore_session=self.botocore_session) + self.actual_user_agent = None + self.botocore_session.register('request-created', + self.record_user_agent) + + def record_user_agent(self, request, **kwargs): + self.actual_user_agent = request.headers['User-Agent'] + + def test_client_user_agent(self): + client = self.session.client('s3') + client.list_buckets() + self.assertIn('Boto3', self.actual_user_agent) + self.assertIn('Botocore', self.actual_user_agent) + self.assertIn('Python', self.actual_user_agent) + # We should *not* have any mention of resource + # when using clients directly. + self.assertNotIn('Resource', self.actual_user_agent) + + def test_resource_user_agent_has_customization(self): + resource = self.session.resource('s3') + list(resource.buckets.all()) + # We should have customized the user agent for + # resource calls with "Resource". + self.assertTrue(self.actual_user_agent.endswith(' Resource')) diff --git a/tests/integration/test_sqs.py b/tests/integration/test_sqs.py new file mode 100644 index 0000000..eb43d2a --- /dev/null +++ b/tests/integration/test_sqs.py @@ -0,0 +1,40 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +import boto3.session + +from tests import unittest, unique_id + + +class TestSQSResource(unittest.TestCase): + def setUp(self): + self.session = boto3.session.Session(region_name='us-west-2') + self.sqs = self.session.resource('sqs') + self.queue_name = unique_id('boto3-test') + + def test_sqs(self): + # Create a new resource + queue = self.sqs.create_queue(QueueName=self.queue_name) + self.addCleanup(queue.delete) + + # Call an action + queue.send_message(MessageBody='test') + + # Get pre-populated resources and access attributes + messages = queue.receive_messages(WaitTimeSeconds=1) + + self.assertEqual(len(messages), 1) + self.addCleanup(messages[0].delete) + + self.assertEqual(queue.url, messages[0].queue_url) + self.assertEqual('test', messages[0].body) diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/data/todo/2015-04-01/resources-1.json b/tests/unit/data/todo/2015-04-01/resources-1.json new file mode 100644 index 0000000..39a22ef --- /dev/null +++ b/tests/unit/data/todo/2015-04-01/resources-1.json @@ -0,0 +1,72 @@ +{ + "service": { + "actions": { + "CreateToDo": { + "request": { "operation": "CreateToDo" }, + "resource": { + "type": "ToDo", + "identifiers": [ + { "target": "Id", "source": "response", "path": "Id" } + ] + } + } + }, + "has": { + "ToDo": { + "resource": { + "type": "ToDo", + "identifiers": [ + { "target": "Id", "source": "input" } + ] + } + } + }, + "hasMany": { + "ToDos": { + "request": { "operation": "DescribeToDos" }, + "resource": { + "type": "ToDo", + "identifiers": [ + { "target": "Id", "source": "response", "path": "ToDoList[].Id" } + ] + } + } + } + }, + "resources": { + "ToDo": { + "identifiers": [ + { "name": "Id" } + ], + "shape": "ToDoItem", + "actions": { + "Delete": { + "request": { + "operation": "DeleteToDo", + "params": [ + { "target": "Id", "source": "identifier", "name": "Id" } + ] + } + } + }, + "waiters": { + "Ready": { + "waiterName": 
"ToDoReady", + "params": [ + { "target": "Id", "source": "identifier", "name": "Id" } + ] + } + }, + "has": { + "MySelf": { + "resource": { + "type": "ToDo", + "identifiers": [ + { "target": "Id", "source": "data", "path": "Id" } + ] + } + } + } + } + } +} diff --git a/tests/unit/data/todo/2015-04-01/service-2.json b/tests/unit/data/todo/2015-04-01/service-2.json new file mode 100644 index 0000000..1d71b40 --- /dev/null +++ b/tests/unit/data/todo/2015-04-01/service-2.json @@ -0,0 +1,185 @@ +{ + "metadata":{ + "apiVersion":"2015-04-01", + "endpointPrefix":"todo", + "jsonVersion":"1.1", + "serviceFullName":"AWS ToDo Sample API for Tasks", + "serviceAbbreviation":"AWS ToDo Tasks", + "signatureVersion":"v4", + "protocol":"json" + }, + "documentation":"

AWS sample API that tracks to-do items.

    ", + "operations":{ + "CreateToDo":{ + "name":"CreateToDo", + "http":{ + "method":"POST", + "requestUri":"/todos" + }, + "input":{ + "shape":"CreateToDoInput", + "documentation":"

    Container of the newly created to-do's values.

    " + }, + "output":{ + "shape":"ToDoItem", + "documentation":"

    A single ToDo item.

    " + }, + "errors":[ + { + "shape":"ToDoServerException", + "exception":true, + "documentation":"

    A server-side error occurred during the API call. The error message will contain additional details about the cause.

    " + }, + { + "shape":"ToDoClientException", + "exception":true, + "documentation":"

    The API was called with invalid parameters. The error message will contain additional details about the cause.

    " + } + ], + "documentation":"

    Create a new to-do item.

    " + }, + "DescribeToDos": { + "name":"DescribeToDos", + "http":{ + "method":"GET", + "requestUri":"/todos" + }, + "output":{ + "shape":"ToDoList", + "documentation":"

    List of to-do items.

    " + }, + "errors":[ + { + "shape":"ToDoServerException", + "exception":true, + "documentation":"

    A server-side error occurred during the API call. The error message will contain additional details about the cause.

    " + } + ], + "documentation":"

    List existing to-do items.

    " + }, + "GetToDo":{ + "name":"GetToDo", + "http":{ + "method":"GET", + "requestUri":"/todos/{Id+}" + }, + "input":{ + "shape":"ToDoInput" + }, + "output":{ + "shape":"ToDoItem" + }, + "errors":[ + { + "shape":"ToDoServerException", + "exception":true, + "documentation":"

    A server-side error occurred during the API call. The error message will contain additional details about the cause.

    " + }, + { + "shape":"ToDoClientException", + "exception":true, + "documentation":"

    The API was called with invalid parameters. The error message will contain additional details about the cause.

    " + } + ], + "documentation":"

    Get an existing to-do item.

    " + }, + "DeleteToDo":{ + "name":"DeleteToDo", + "http":{ + "method":"DELETE", + "requestUri":"/todos/{Id+}" + }, + "input":{ + "shape":"ToDoInput" + }, + "errors":[ + { + "shape":"ToDoServerException", + "exception":true, + "documentation":"

    A server-side error occurred during the API call. The error message will contain additional details about the cause.

    " + }, + { + "shape":"ToDoClientException", + "exception":true, + "documentation":"

    The API was called with invalid parameters. The error message will contain additional details about the cause.

    " + } + ], + "documentation":"

    Delete an existing to-do item.

    " + } + }, + "shapes":{ + "CreateToDoInput":{ + "type":"structure", + "required":[ + "Title" + ], + "members":{ + "Title":{ + "shape":"String", + "documentation":"The title of the to-do item." + } + }, + "documentation":"

    Container for to-do values.

    " + }, + "String":{ + "type":"string" + }, + "ToDoClientException":{ + "type":"structure", + "members":{ + "message":{"shape":"ErrorMessage"} + }, + "exception":true, + "documentation":"

    The API was called with invalid parameters. The error message will contain additional details about the cause.

    " + }, + "ToDoInput":{ + "type":"structure", + "required":[ + "Id" + ], + "members":{ + "Id":{ + "shape":"String", + "location":"uri", + "locationName":"Id" + } + } + }, + "ToDoItem":{ + "type":"structure", + "members":{ + "Id":{ + "shape":"String", + "documentation":"Unique identifier" + }, + "Title":{ + "shape":"String", + "documentation":"The title of the to-do item." + }, + "Status":{ + "shape":"String", + "documentation":"The status of the to-do item. Either CREATING, READY, or DONE." + } + }, + "documentation":"A single to-do item." + }, + "ToDoList":{ + "type":"list", + "member":{ + "shape":"ToDoItem", + "documentation":"List of to-do items." + } + }, + "ToDoServerException":{ + "type":"structure", + "members":{ + "message":{"shape":"ErrorMessage"} + }, + "exception":true, + "documentation":"

    A server-side error occurred during the API call. The error message will contain additional details about the cause.

    " + }, + "ErrorMessage":{ + "type":"string" + } + } +} diff --git a/tests/unit/data/todo/2015-04-01/waiters-2.json b/tests/unit/data/todo/2015-04-01/waiters-2.json new file mode 100644 index 0000000..9f90c97 --- /dev/null +++ b/tests/unit/data/todo/2015-04-01/waiters-2.json @@ -0,0 +1,24 @@ +{ + "version": 2, + "waiters": { + "ToDoReady": { + "delay": 20, + "operation": "GetToDo", + "maxAttempts": 25, + "acceptors": [ + { + "expected": "READY", + "matcher": "path", + "state": "success", + "argument": "ToDo.Status" + }, + { + "expected": "DONE", + "matcher": "path", + "state": "success", + "argument": "ToDo.Status" + } + ] + } + } +} diff --git a/tests/unit/docs/__init__.py b/tests/unit/docs/__init__.py new file mode 100644 index 0000000..c015363 --- /dev/null +++ b/tests/unit/docs/__init__.py @@ -0,0 +1,272 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+import os +import json +import tempfile +import shutil +from tests import unittest + +import botocore.session +from botocore.compat import OrderedDict +from botocore.loaders import Loader +from botocore.docs.bcdoc.restdoc import DocumentStructure + +from boto3.session import Session + + +class BaseDocsTest(unittest.TestCase): + def setUp(self): + self.root_dir = tempfile.mkdtemp() + self.version_dirs = os.path.join( + self.root_dir, 'myservice', '2014-01-01') + os.makedirs(self.version_dirs) + + self.model_file = os.path.join(self.version_dirs, 'service-2.json') + self.waiter_model_file = os.path.join( + self.version_dirs, 'waiters-2.json') + self.paginator_model_file = os.path.join( + self.version_dirs, 'paginators-1.json') + self.resource_model_file = os.path.join( + self.version_dirs, 'resources-1.json') + + self.json_model = {} + self.waiter_json_model = {} + self.paginator_json_model = {} + self.resource_json_model = {} + self._setup_models() + self._write_models() + + self.doc_name = 'MyDoc' + self.doc_structure = DocumentStructure(self.doc_name) + + self.loader = Loader(extra_search_paths=[self.root_dir]) + self.botocore_session = botocore.session.get_session() + self.botocore_session.register_component('data_loader', self.loader) + self.session = Session( + botocore_session=self.botocore_session, region_name='us-east-1') + self.client = self.session.client('myservice', 'us-east-1') + self.resource = self.session.resource('myservice', 'us-east-1') + + def tearDown(self): + shutil.rmtree(self.root_dir) + + def _setup_models(self): + self.json_model = { + 'metadata': { + 'apiVersion': '2014-01-01', + 'endpointPrefix': 'myservice', + 'signatureVersion': 'v4', + 'serviceFullName': 'AWS MyService', + 'protocol': 'query' + }, + 'operations': { + 'SampleOperation': { + 'name': 'SampleOperation', + 'input': {'shape': 'SampleOperationInputOutput'}, + 'output': {'shape': 'SampleOperationInputOutput'} + } + }, + 'shapes': { + 'SampleOperationInputOutput': { + 'type': 
'structure', + 'members': OrderedDict([ + ('Foo', { + 'shape': 'String', + 'documentation': 'Documents Foo'}), + ('Bar', { + 'shape': 'String', + 'documentation': 'Documents Bar'}), + ]) + }, + 'String': { + 'type': 'string' + } + } + } + + self.waiter_json_model = { + "version": 2, + "waiters": { + "SampleOperationComplete": { + "delay": 15, + "operation": "SampleOperation", + "maxAttempts": 40, + "acceptors": [ + {"expected": "complete", + "matcher": "pathAll", + "state": "success", + "argument": "Biz"}, + {"expected": "failed", + "matcher": "pathAny", + "state": "failure", + "argument": "Biz"} + ] + } + } + } + + self.paginator_json_model = { + "pagination": { + "SampleOperation": { + "input_token": "NextResult", + "output_token": "NextResult", + "limit_key": "MaxResults", + "result_key": "Biz" + } + } + } + + self.resource_json_model = { + "service": { + "actions": OrderedDict([ + ("SampleOperation", { + "request": {"operation": "SampleOperation"} + }), + ("SampleListReturnOperation", { + "request": {"operation": "SampleOperation"}, + "resource": { + "type": "Sample", + "identifiers": [ + {"target": "Name", "source": "response", + "path": "Samples[].Name"} + ], + "path": "Samples[]" + } + }) + ]), + "has": { + "Sample": { + "resource": { + "type": "Sample", + "identifiers": [ + {"target": "Name", "source": "input"} + ] + } + } + }, + "hasMany": { + "Samples": { + "request": {"operation": "SampleOperation"}, + "resource": { + "type": "Sample", + "identifiers": [ + {"target": "Name", "source": "response", + "path": "Samples[].Foo"} + ] + } + } + } + }, + "resources": { + "Sample": { + "identifiers": [ + {"name": "Name", "memberName": "Foo"} + ], + "shape": "SampleOperationInputOutput", + "load": { + "request": { + "operation": "SampleOperation", + "params": [ + {"target": "Foo", "source": "identifier", + "name": "Name"} + ] + } + }, + "actions": { + "Operate": { + "request": { + "operation": "SampleOperation", + "params": [ + {"target": "Foo", "source": 
"identifier", + "name": "Name"} + ] + } + } + }, + "batchActions": { + "Operate": { + "request": { + "operation": "SampleOperation", + "params": [ + {"target": "Samples[].Foo", + "source": "identifier", "name": "Name"} + ] + } + } + }, + "has": { + "RelatedSample": { + "resource": { + "type": "Sample", + "identifiers": [ + {"target": "Name", "source": "data", + "path": "Foo"} + ] + } + } + }, + "waiters": { + "Complete": { + "waiterName": "SampleOperationComplete", + "params": [ + {"target": "Foo", "source": "identifier", + "name": "Name"} + ] + } + } + } + } + } + + def _write_models(self): + with open(self.resource_model_file, 'w') as f: + json.dump(self.resource_json_model, f) + + with open(self.waiter_model_file, 'w') as f: + json.dump(self.waiter_json_model, f) + + with open(self.paginator_model_file, 'w') as f: + json.dump(self.paginator_json_model, f) + + with open(self.model_file, 'w') as f: + json.dump(self.json_model, f) + + def add_shape(self, shape): + shape_name = list(shape.keys())[0] + self.json_model['shapes'][shape_name] = shape[shape_name] + + def add_shape_to_params(self, param_name, shape_name, documentation=None, + is_required=False): + params_shape = self.json_model['shapes']['SampleOperationInputOutput'] + member = {'shape': shape_name} + if documentation is not None: + member['documentation'] = documentation + params_shape['members'][param_name] = member + + if is_required: + required_list = params_shape.get('required', []) + required_list.append(param_name) + params_shape['required'] = required_list + + def assert_contains_lines_in_order(self, lines, contents=None): + if contents is None: + contents = self.doc_structure.flush_structure().decode('utf-8') + for line in lines: + self.assertIn(line, contents) + beginning = contents.find(line) + contents = contents[(beginning + len(line)):] + + def assert_not_contains_lines(self, lines): + contents = self.doc_structure.flush_structure().decode('utf-8') + for line in lines: + 
self.assertNotIn(line, contents) diff --git a/tests/unit/docs/test_action.py b/tests/unit/docs/test_action.py new file mode 100644 index 0000000..6a35f9a --- /dev/null +++ b/tests/unit/docs/test_action.py @@ -0,0 +1,88 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests.unit.docs import BaseDocsTest + +from boto3.docs.action import ActionDocumenter + + +class TestActionDocumenter(BaseDocsTest): + def test_document_service_resource_actions(self): + action_documenter = ActionDocumenter(self.resource) + action_documenter.document_actions(self.doc_structure) + self.assert_contains_lines_in_order([ + '.. py:method:: sample_operation(**kwargs)', + ' **Request Syntax**', + ' ::', + ' response = myservice.sample_operation(', + ' Foo=\'string\',', + ' Bar=\'string\'', + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns:', + ' **Response Syntax**', + ' ::', + ' {', + ' \'Foo\': \'string\',', + ' \'Bar\': \'string\'', + ' }', + ' **Response Structure**', + ' - *(dict) --*', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar' + ]) + + def test_document_nonservice_resource_actions(self): + subresource = self.resource.Sample('mysample') + action_documenter = ActionDocumenter(subresource) + action_documenter.document_actions(self.doc_structure) + self.assert_contains_lines_in_order([ + '.. 
py:method:: load()', + (' Calls :py:meth:`MyService.Client.sample_operation` to update ' + 'the attributes of the Sample resource'), + ' **Request Syntax** ', + ' ::', + ' sample.load()', + ' :returns: None', + '.. py:method:: operate(**kwargs)', + ' **Request Syntax** ', + ' ::', + ' response = sample.operate(', + " Bar='string'", + ' )', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns: ', + ' ', + ' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string',", + " 'Bar': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar', + '.. py:method:: reload()', + (' Calls :py:meth:`MyService.Client.sample_operation` to update ' + 'the attributes of the Sample resource'), + ' **Request Syntax** ', + ' ::', + ' sample.reload()', + ' :returns: None' + ]) diff --git a/tests/unit/docs/test_client.py b/tests/unit/docs/test_client.py new file mode 100644 index 0000000..c2774e8 --- /dev/null +++ b/tests/unit/docs/test_client.py @@ -0,0 +1,66 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from tests.unit.docs import BaseDocsTest + +from boto3.docs.client import Boto3ClientDocumenter + + +class TestBoto3ClientDocumenter(BaseDocsTest): + def setUp(self): + super(TestBoto3ClientDocumenter, self).setUp() + self.client_documenter = Boto3ClientDocumenter(self.client) + + def test_document_client(self): + self.client_documenter.document_client(self.doc_structure) + self.assert_contains_lines_in_order([ + '======', + 'Client', + '======', + '.. py:class:: MyService.Client', + ' A low-level client representing AWS MyService::', + ' import boto3', + ' client = boto3.client(\'myservice\')', + ' These are the available methods:', + ' * :py:meth:`can_paginate`', + ' * :py:meth:`get_paginator`', + ' * :py:meth:`get_waiter`', + ' * :py:meth:`sample_operation`', + ' .. py:method:: can_paginate(operation_name)', + ' .. py:method:: get_paginator(operation_name)', + ' .. py:method:: get_waiter(waiter_name)', + ' .. py:method:: sample_operation(**kwargs)', + ' **Request Syntax**', + ' ::', + ' response = client.sample_operation(', + ' Foo=\'string\'', + ' Bar=\'string\'', + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns:', + ' **Response Syntax**', + ' ::', + ' {', + ' \'Foo\': \'string\'', + ' \'Bar\': \'string\'', + + ' }', + ' **Response Structure**', + ' - *(dict) --*', + ' - **Foo** *(string) --*', + ' - **Bar** *(string) --*' + + ]) diff --git a/tests/unit/docs/test_collection.py b/tests/unit/docs/test_collection.py new file mode 100644 index 0000000..5ed7bc2 --- /dev/null +++ b/tests/unit/docs/test_collection.py @@ -0,0 +1,99 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests.unit.docs import BaseDocsTest + +from boto3.docs.collection import CollectionDocumenter + + +class TestCollectionDocumenter(BaseDocsTest): + def test_document_collections(self): + collection_documenter = CollectionDocumenter(self.resource) + collection_documenter.document_collections(self.doc_structure) + self.assert_contains_lines_in_order([ + '.. py:attribute:: samples', + ' A collection of Sample resources', + ' .. py:method:: all()', + (' Creates an iterable of all Sample resources in the ' + 'collection.'), + ' **Request Syntax** ', + ' ::', + ' sample_iterator = myservice.samples.all()', + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ' .. py:method:: filter(**kwargs)', + (' Creates an iterable of all Sample resources in ' + 'the collection filtered by kwargs passed to method.'), + ' **Request Syntax** ', + ' ::', + ' sample_iterator = myservice.samples.filter(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ' .. py:method:: limit(**kwargs)', + (' Creates an iterable up to a specified amount of ' + 'Sample resources in the collection.'), + ' **Request Syntax** ', + ' ::', + ' sample_iterator = myservice.samples.limit(', + ' count=123', + ' )', + ' :type count: integer', + (' :param count: The limit to the number of resources ' + 'in the iterable.'), + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ' .. 
py:method:: operate(**kwargs)', + ' **Request Syntax** ', + ' response = myservice.samples.operate(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns: ', + ' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string',", + " 'Bar': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar', + ' .. py:method:: page_size(**kwargs)', + (' Creates an iterable of all Sample resources in the ' + 'collection, but limits the number of items returned by ' + 'each service call by the specified amount.'), + ' **Request Syntax** ', + ' ::', + '', + ' sample_iterator = myservice.samples.page_size(', + ' count=123', + ' )', + ' :type count: integer', + (' :param count: The number of items returned by ' + 'each service call'), + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ' ' + ]) diff --git a/tests/unit/docs/test_docstring.py b/tests/unit/docs/test_docstring.py new file mode 100644 index 0000000..76868c9 --- /dev/null +++ b/tests/unit/docs/test_docstring.py @@ -0,0 +1,225 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+import mock +from botocore.compat import six + +from tests.unit.docs import BaseDocsTest + + +class TestResourceDocstrings(BaseDocsTest): + def test_action_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.sample_operation) + action_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' **Request Syntax**', + ' ::', + ' response = myservice.sample_operation(', + ' Foo=\'string\',', + ' Bar=\'string\'', + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns:', + ' **Response Syntax**', + ' ::', + ' {', + ' \'Foo\': \'string\',', + ' \'Bar\': \'string\'', + ' }', + ' **Response Structure**', + ' - *(dict) --*', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar' + ], action_docstring) + + def test_load_help(self): + sub_resource = self.resource.Sample('Id') + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(sub_resource.load) + load_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + (' Calls :py:meth:`MyService.Client.sample_operation` to update ' + 'the attributes of the Sample resource'), + ' **Request Syntax** ', + ' ::', + ' sample.load()', + ' :returns: None', + ], load_docstring) + + def test_sub_resource_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.Sample) + sub_resource_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' Creates a Sample resource.::', + " sample = myservice.Sample('name')", + ' :type name: string', + " :param name: The Sample's name identifier.", + ' :rtype: :py:class:`MyService.Sample`', + ' :returns: A Sample resource', + ], sub_resource_docstring) + + def test_attribute_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.Sample('id').__class__.foo) + attribute_docstring = 
mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' *(string)* Documents Foo' + ], attribute_docstring) + + def test_identifier_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.Sample('id').__class__.name) + identifier_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + " *(string)* The Sample's name identifier. This " + "**must** be set." + ], identifier_docstring) + + def test_reference_help(self): + sample_resource = self.resource.Sample('id') + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(sample_resource.__class__.related_sample) + reference_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + " (:py:class:`Sample`) The related related_sample " + "if set, otherwise ``None``." + ], reference_docstring) + + def test_collection_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.__class__.samples) + collection_method_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' A collection of Sample resources' + ], collection_method_docstring) + + def test_collection_all_method_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.samples.all) + collection_method_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + (' Creates an iterable of all Sample resources in the ' + 'collection.'), + ' **Request Syntax** ', + ' ::', + ' sample_iterator = myservice.samples.all()', + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ], collection_method_docstring) + + def test_collection_filter_method_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.samples.filter) + collection_method_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' **Request Syntax** ', + ' ::', + ' sample_iterator = 
myservice.samples.filter(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ], collection_method_docstring) + + def test_collection_limit_method_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.samples.limit) + collection_method_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' **Request Syntax** ', + ' ::', + ' sample_iterator = myservice.samples.limit(', + ' count=123', + ' )', + ' :type count: integer', + (' :param count: The limit to the number of resources ' + 'in the iterable.'), + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ], collection_method_docstring) + + def test_collection_page_size_method_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.samples.page_size) + collection_method_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' **Request Syntax** ', + ' ::', + ' sample_iterator = myservice.samples.page_size(', + ' count=123', + ' )', + ' :type count: integer', + (' :param count: The number of items returned by ' + 'each service call'), + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resources', + ], collection_method_docstring) + + def test_batch_action_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.samples.operate) + batch_action_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + ' **Request Syntax** ', + ' ::', + ' response = myservice.samples.operate(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns: ', + 
' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string',", + " 'Bar': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar', + ], batch_action_docstring) + + def test_resource_waiter_help(self): + with mock.patch('sys.stdout', six.StringIO()) as mock_stdout: + help(self.resource.Sample('id').wait_until_complete) + resource_waiter_docstring = mock_stdout.getvalue() + self.assert_contains_lines_in_order([ + (' Waits until this Sample is complete. This method calls ' + ':py:meth:`MyService.Waiter.sample_operation_complete.wait` ' + 'which polls. :py:meth:`MyService.Client.sample_operation` every ' + '15 seconds until a successful state is reached. An error ' + 'is returned after 40 failed checks.'), + ' **Request Syntax** ', + ' ::', + ' sample.wait_until_complete(', + " Bar='string'", + ' )', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :returns: None', + ], resource_waiter_docstring) diff --git a/tests/unit/docs/test_method.py b/tests/unit/docs/test_method.py new file mode 100644 index 0000000..e35e4d5 --- /dev/null +++ b/tests/unit/docs/test_method.py @@ -0,0 +1,283 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from botocore.hooks import HierarchicalEmitter +from botocore.docs.utils import DocumentedShape + +from tests.unit.docs import BaseDocsTest +from boto3.resources.model import ResponseResource +from boto3.docs.method import document_model_driven_resource_method + + +class TestDocumentModelDrivenResourceMethod(BaseDocsTest): + def setUp(self): + super(TestDocumentModelDrivenResourceMethod, self).setUp() + self.event_emitter = HierarchicalEmitter() + self.service_model = self.client.meta.service_model + self.operation_model = self.service_model.operation_model( + 'SampleOperation') + self.service_resource_model = self.resource.meta.resource_model + + def test_default(self): + document_model_driven_resource_method( + self.doc_structure, 'foo', self.operation_model, + event_emitter=self.event_emitter, + method_description='This describes the foo method.', + example_prefix='response = myservice.foo', + resource_action_model=self.service_resource_model.actions[0] + ) + self.assert_contains_lines_in_order([ + '.. py:method:: foo(**kwargs)', + ' This describes the foo method.', + ' **Request Syntax** ', + ' ::', + ' response = myservice.foo(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns: ', + ' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string',", + " 'Bar': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar' + ]) + + def test_returns_resource(self): + resource_action = self.service_resource_model.actions[0] + # Override the return type of the action to be a resource + # instead of the regular dictionary. 
+ resource_action.resource = ResponseResource( + {'type': 'Sample', + 'identifiers': [{ + 'target': 'Name', 'source': 'requestParameter', + 'path': 'Foo'}]}, + self.resource_json_model) + document_model_driven_resource_method( + self.doc_structure, 'foo', self.operation_model, + event_emitter=self.event_emitter, + method_description='This describes the foo method.', + example_prefix='sample = myservice.foo', + resource_action_model=resource_action + ) + self.assert_contains_lines_in_order([ + '.. py:method:: foo(**kwargs)', + ' This describes the foo method.', + ' **Request Syntax** ', + ' ::', + ' sample = myservice.foo(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: :py:class:`myservice.Sample`', + ' :returns: Sample resource' + ]) + + def test_returns_list_of_resource(self): + resource_action = self.service_resource_model.actions[1] + document_model_driven_resource_method( + self.doc_structure, 'foo', self.operation_model, + event_emitter=self.event_emitter, + method_description='This describes the foo method.', + example_prefix='samples = myservice.foo', + resource_action_model=resource_action + ) + self.assert_contains_lines_in_order([ + '.. 
py:method:: foo(**kwargs)', + ' This describes the foo method.', + ' **Request Syntax** ', + ' ::', + ' samples = myservice.foo(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: list(:py:class:`myservice.Sample`)', + ' :returns: A list of Sample resource' + ]) + + def test_include_input(self): + include_params = [ + DocumentedShape( + name='Biz', type_name='string', documentation='biz docs') + ] + document_model_driven_resource_method( + self.doc_structure, 'foo', self.operation_model, + event_emitter=self.event_emitter, + method_description='This describes the foo method.', + example_prefix='response = myservice.foo', + include_input=include_params, + resource_action_model=self.service_resource_model.actions[0] + ) + self.assert_contains_lines_in_order([ + '.. py:method:: foo(**kwargs)', + ' This describes the foo method.', + ' **Request Syntax** ', + ' ::', + ' response = myservice.foo(', + " Foo='string',", + " Bar='string',", + " Biz='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :type Biz: string', + ' :param Biz: biz docs', + ' :rtype: dict', + ' :returns: ', + ' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string',", + " 'Bar': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar' + ]) + + def test_include_output(self): + include_params = [ + DocumentedShape( + name='Biz', type_name='string', documentation='biz docs') + ] + document_model_driven_resource_method( + self.doc_structure, 'foo', self.operation_model, + event_emitter=self.event_emitter, + method_description='This describes the foo method.', + example_prefix='response = myservice.foo', + include_output=include_params, + resource_action_model=self.service_resource_model.actions[0] + 
) + self.assert_contains_lines_in_order([ + '.. py:method:: foo(**kwargs)', + ' This describes the foo method.', + ' **Request Syntax** ', + ' ::', + ' response = myservice.foo(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns: ', + ' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string',", + " 'Bar': 'string',", + " 'Biz': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar', + ' - **Biz** *(string) --* biz docs' + ]) + + def test_exclude_input(self): + document_model_driven_resource_method( + self.doc_structure, 'foo', self.operation_model, + event_emitter=self.event_emitter, + method_description='This describes the foo method.', + example_prefix='response = myservice.foo', + exclude_input=['Bar'], + resource_action_model=self.service_resource_model.actions[0] + ) + self.assert_contains_lines_in_order([ + '.. 
py:method:: foo(**kwargs)', + ' This describes the foo method.', + ' **Request Syntax** ', + ' ::', + ' response = myservice.foo(', + " Foo='string',", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :rtype: dict', + ' :returns: ', + ' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string',", + " 'Bar': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ' - **Bar** *(string) --* Documents Bar' + ]) + self.assert_not_contains_lines([ + ':param Bar: string', + 'Bar=\'string\'' + ]) + + def test_exclude_output(self): + document_model_driven_resource_method( + self.doc_structure, 'foo', self.operation_model, + event_emitter=self.event_emitter, + method_description='This describes the foo method.', + example_prefix='response = myservice.foo', + exclude_output=['Bar'], + resource_action_model=self.service_resource_model.actions[0] + ) + self.assert_contains_lines_in_order([ + '.. py:method:: foo(**kwargs)', + ' This describes the foo method.', + ' **Request Syntax** ', + ' ::', + ' response = myservice.foo(', + " Foo='string',", + " Bar='string'", + ' )', + ' :type Foo: string', + ' :param Foo: Documents Foo', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :rtype: dict', + ' :returns: ', + ' **Response Syntax** ', + ' ::', + ' {', + " 'Foo': 'string'", + ' }', + ' **Response Structure** ', + ' - *(dict) --* ', + ' - **Foo** *(string) --* Documents Foo', + ]) + self.assert_not_contains_lines([ + '\'Bar\': \'string\'', + '- **Bar** *(string) --*', + ]) diff --git a/tests/unit/docs/test_resource.py b/tests/unit/docs/test_resource.py new file mode 100644 index 0000000..a956c77 --- /dev/null +++ b/tests/unit/docs/test_resource.py @@ -0,0 +1,97 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests.unit.docs import BaseDocsTest + +from boto3.docs.resource import ResourceDocumenter +from boto3.docs.resource import ServiceResourceDocumenter + + +class TestResourceDocumenter(BaseDocsTest): + def test_document_resource(self): + resource = self.resource.Sample('mysample') + resource_documenter = ResourceDocumenter( + resource, self.botocore_session) + resource_documenter.document_resource(self.doc_structure) + self.assert_contains_lines_in_order([ + '======', + 'Sample', + '======', + '.. py:class:: MyService.Sample(name)', + ' A resource representing an AWS MyService Sample::', + ' import boto3', + " myservice = boto3.resource('myservice')", + " sample = myservice.Sample('name')", + " These are the resource's available identifiers:", + ' * :py:attr:`name`', + " These are the resource's available attributes:", + ' * :py:attr:`bar`', + ' * :py:attr:`foo`', + " These are the resource's available actions:", + ' * :py:meth:`load()`', + ' * :py:meth:`operate()`', + ' * :py:meth:`reload()`', + " These are the resource's available waiters:", + ' * :py:meth:`wait_until_complete()`', + ' .. rst-class:: admonition-title', + ' Identifiers', + ' .. py:attribute:: name', + ' .. rst-class:: admonition-title', + ' Attributes', + ' .. py:attribute:: bar', + ' *(string)* Documents Bar', + ' .. py:attribute:: foo', + ' *(string)* Documents Foo', + ' .. rst-class:: admonition-title', + ' Actions', + ' .. py:method:: load()', + ' .. py:method:: operate(**kwargs)', + ' .. py:method:: reload()', + ' .. rst-class:: admonition-title', + ' Waiters', + ' .. 
py:method:: wait_until_complete(**kwargs)', + ]) + + +class TestServiceResourceDocumenter(BaseDocsTest): + def test_document_resource(self): + resource_documenter = ServiceResourceDocumenter( + self.resource, self.botocore_session) + resource_documenter.document_resource(self.doc_structure) + self.assert_contains_lines_in_order([ + '================', + 'Service Resource', + '================', + '.. py:class:: MyService.ServiceResource()', + ' A resource representing AWS MyService::', + ' import boto3', + " myservice = boto3.resource('myservice')", + " These are the resource's available actions:", + ' * :py:meth:`sample_operation()`', + " These are the resource's available sub-resources:", + ' * :py:meth:`Sample()`', + " These are the resource's available collections:", + ' * :py:attr:`samples`', + ' .. rst-class:: admonition-title', + ' Actions', + ' .. py:method:: sample_operation(**kwargs)', + ' .. rst-class:: admonition-title', + ' Sub-resources', + ' .. py:method:: Sample(name)', + ' .. rst-class:: admonition-title', + ' Collections', + ' .. py:attribute:: samples', + ' .. py:method:: all()', + ' .. py:method:: filter(**kwargs)', + ' .. py:method:: limit(**kwargs)', + ' .. py:method:: page_size(**kwargs)', + ]) diff --git a/tests/unit/docs/test_service.py b/tests/unit/docs/test_service.py new file mode 100644 index 0000000..802e8c7 --- /dev/null +++ b/tests/unit/docs/test_service.py @@ -0,0 +1,113 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. +import os + +from tests.unit.docs import BaseDocsTest +from boto3.docs.service import ServiceDocumenter + + +class TestServiceDocumenter(BaseDocsTest): + def test_document_service(self): + service_documenter = ServiceDocumenter('myservice', self.session) + contents = service_documenter.document_service().decode('utf-8') + lines = [ + '*********', + 'MyService', + '*********', + '.. contents:: Table of Contents', + ' :depth: 2', + '======', + 'Client', + '======', + '.. py:class:: MyService.Client', + ' These are the available methods:', + ' * :py:meth:`sample_operation`', + '==========', + 'Paginators', + '==========', + 'The available paginators are:', + '* :py:class:`MyService.Paginator.SampleOperation`', + '.. py:class:: MyService.Paginator.SampleOperation', + ' .. py:method:: paginate(**kwargs)', + '=======', + 'Waiters', + '=======', + 'The available waiters are:', + '* :py:class:`MyService.Waiter.SampleOperationComplete`', + '.. py:class:: MyService.Waiter.SampleOperationComplete', + ' .. py:method:: wait(**kwargs)', + '================', + 'Service Resource', + '================', + '.. py:class:: MyService.ServiceResource()', + " These are the resource's available actions:", + ' * :py:meth:`sample_operation()`', + " These are the resource's available sub-resources:", + ' * :py:meth:`Sample()`', + " These are the resource's available collections:", + ' * :py:attr:`samples`', + ' .. py:method:: sample_operation(**kwargs)', + ' .. py:method:: Sample(name)', + ' .. py:attribute:: samples', + ' .. py:method:: all()', + ' .. py:method:: filter(**kwargs)', + ' .. py:method:: limit(**kwargs)', + ' .. py:method:: page_size(**kwargs)', + '======', + 'Sample', + '======', + '.. 
py:class:: MyService.Sample(name)', + " These are the resource's available identifiers:", + ' * :py:attr:`name`', + " These are the resource's available attributes:", + ' * :py:attr:`bar`', + ' * :py:attr:`foo`', + " These are the resource's available actions:", + ' * :py:meth:`load()`', + ' * :py:meth:`operate()`', + ' * :py:meth:`reload()`', + " These are the resource's available waiters:", + ' * :py:meth:`wait_until_complete()`', + ' .. py:attribute:: name', + ' .. py:attribute:: bar', + ' .. py:attribute:: foo', + ' .. py:method:: load()', + ' .. py:method:: operate(**kwargs)', + ' .. py:method:: reload()', + ' .. py:method:: wait_until_complete(**kwargs)', + ] + self.assert_contains_lines_in_order(lines, contents) + + def test_document_service_no_resource(self): + os.remove(self.resource_model_file) + service_documenter = ServiceDocumenter('myservice', self.session) + contents = service_documenter.document_service().decode('utf-8') + self.assertNotIn('Service Resource', contents) + + def test_document_service_no_paginators(self): + # Delete the resource model so that the resource is not documented + # as it may try to look at the paginator model during documentation. + os.remove(self.resource_model_file) + os.remove(self.paginator_model_file) + service_documenter = ServiceDocumenter('myservice', self.session) + contents = service_documenter.document_service().decode('utf-8') + self.assertNotIn('Paginators', contents) + + def test_document_service_no_waiter(self): + # Delete the resource model so that the resource is not documented + # as it may try to look at the waiter model during documentation. 
+ os.remove(self.resource_model_file) + os.remove(self.waiter_model_file) + service_documenter = ServiceDocumenter('myservice', self.session) + contents = service_documenter.document_service().decode('utf-8') + self.assertNotIn('Waiters', contents) diff --git a/tests/unit/docs/test_subresource.py b/tests/unit/docs/test_subresource.py new file mode 100644 index 0000000..0b4211d --- /dev/null +++ b/tests/unit/docs/test_subresource.py @@ -0,0 +1,30 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests.unit.docs import BaseDocsTest + +from boto3.docs.subresource import SubResourceDocumenter + + +class TestSubResourceDocumenter(BaseDocsTest): + def test_document_sub_resources(self): + sub_resource_documentor = SubResourceDocumenter(self.resource) + sub_resource_documentor.document_sub_resources(self.doc_structure) + self.assert_contains_lines_in_order([ + '.. py:method:: Sample(name)', + ' Creates a Sample resource.::', + " sample = myservice.Sample('name')", + ' :type name: string', + " :param name: The Sample's name identifier.", + ' :rtype: :py:class:`MyService.Sample`', + ' :returns: A Sample resource', + ]) diff --git a/tests/unit/docs/test_utils.py b/tests/unit/docs/test_utils.py new file mode 100644 index 0000000..24498b4 --- /dev/null +++ b/tests/unit/docs/test_utils.py @@ -0,0 +1,42 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). 
You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests import unittest + +from boto3.resources.model import Parameter +from boto3.docs.utils import get_resource_ignore_params + + +class TestGetResourceIgnoreParams(unittest.TestCase): + def test_target_is_single_resource(self): + param = Parameter('InstanceId', 'response') + ignore_params = get_resource_ignore_params([param]) + self.assertEqual(ignore_params, ['InstanceId']) + + def test_target_is_multiple_resources(self): + param = Parameter('InstanceIds[]', 'response') + ignore_params = get_resource_ignore_params([param]) + self.assertEqual(ignore_params, ['InstanceIds']) + + def test_target_is_element_of_multiple_resources(self): + param = Parameter('InstanceIds[0]', 'response') + ignore_params = get_resource_ignore_params([param]) + self.assertEqual(ignore_params, ['InstanceIds']) + + def test_target_is_nested_param(self): + param = Parameter('Filters[0].Name', 'response') + ignore_params = get_resource_ignore_params([param]) + self.assertEqual(ignore_params, ['Filters']) + + param = Parameter('Filters[0].Values[0]', 'response') + ignore_params = get_resource_ignore_params([param]) + self.assertEqual(ignore_params, ['Filters']) diff --git a/tests/unit/docs/test_waiter.py b/tests/unit/docs/test_waiter.py new file mode 100644 index 0000000..4885ba8 --- /dev/null +++ b/tests/unit/docs/test_waiter.py @@ -0,0 +1,41 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests.unit.docs import BaseDocsTest + +from boto3.docs.waiter import WaiterResourceDocumenter + + +class TestWaiterResourceDocumenter(BaseDocsTest): + def test_document_resource_waiters(self): + service_waiter_model = self.botocore_session.get_waiter_model( + 'myservice') + subresource = self.resource.Sample('mysample') + waiter_documenter = WaiterResourceDocumenter( + subresource, service_waiter_model) + waiter_documenter.document_resource_waiters(self.doc_structure) + self.assert_contains_lines_in_order([ + '.. py:method:: wait_until_complete(**kwargs)', + (' Waits until this Sample is complete. This method calls ' + ':py:meth:`MyService.Waiter.sample_operation_complete.wait` ' + 'which polls. :py:meth:`MyService.Client.sample_operation` ' + 'every 15 seconds until a successful state is reached. An ' + 'error is returned after 40 failed checks.'), + ' **Request Syntax** ', + ' ::', + ' sample.wait_until_complete(', + " Bar='string'", + ' )', + ' :type Bar: string', + ' :param Bar: Documents Bar', + ' :returns: None' + ]) diff --git a/tests/unit/dynamodb/__init__.py b/tests/unit/dynamodb/__init__.py new file mode 100644 index 0000000..c89416d --- /dev/null +++ b/tests/unit/dynamodb/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. 
This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/tests/unit/dynamodb/test_conditions.py b/tests/unit/dynamodb/test_conditions.py new file mode 100644 index 0000000..b5cbae3 --- /dev/null +++ b/tests/unit/dynamodb/test_conditions.py @@ -0,0 +1,473 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from tests import unittest + +from boto3.exceptions import DynamoDBOperationNotSupportedError +from boto3.exceptions import DynamoDBNeedsConditionError +from boto3.exceptions import DynamoDBNeedsKeyConditionError +from boto3.dynamodb.conditions import Attr, Key +from boto3.dynamodb.conditions import And, Or, Not, Equals, LessThan +from boto3.dynamodb.conditions import LessThanEquals, GreaterThan +from boto3.dynamodb.conditions import GreaterThanEquals, BeginsWith, Between +from boto3.dynamodb.conditions import NotEquals, In, AttributeExists +from boto3.dynamodb.conditions import AttributeNotExists, Contains, Size +from boto3.dynamodb.conditions import AttributeType +from boto3.dynamodb.conditions import ConditionExpressionBuilder + + +class TestK(unittest.TestCase): + def setUp(self): + self.attr = Key('mykey') + self.attr2 = Key('myotherkey') + self.value = 'foo' + self.value2 = 'foo2' + + def test_and(self): + with self.assertRaisesRegexp( + DynamoDBOperationNotSupportedError, 'AND'): + self.attr & self.attr2 + + def test_or(self): + with self.assertRaisesRegexp( + DynamoDBOperationNotSupportedError, 'OR'): + self.attr | self.attr2 + + def test_not(self): + with self.assertRaisesRegexp( + DynamoDBOperationNotSupportedError, 'NOT'): + ~self.attr + + def test_eq(self): + self.assertEqual( + self.attr.eq(self.value), Equals(self.attr, self.value)) + + def test_lt(self): + self.assertEqual( + self.attr.lt(self.value), LessThan(self.attr, self.value)) + + def test_lte(self): + self.assertEqual( + self.attr.lte(self.value), LessThanEquals(self.attr, self.value)) + + def test_gt(self): + self.assertEqual( + self.attr.gt(self.value), GreaterThan(self.attr, self.value)) + + def test_gte(self): + self.assertEqual( + self.attr.gte(self.value), + GreaterThanEquals(self.attr, self.value)) + + def test_begins_with(self): + self.assertEqual(self.attr.begins_with(self.value), + BeginsWith(self.attr, self.value)) + + def test_between(self): + 
self.assertEqual(self.attr.between(self.value, self.value2), + Between(self.attr, self.value, self.value2)) + + +class TestA(TestK): + def setUp(self): + self.attr = Attr('mykey') + self.attr2 = Attr('myotherkey') + self.value = 'foo' + self.value2 = 'foo2' + + def test_ne(self): + self.assertEqual(self.attr.ne(self.value), + NotEquals(self.attr, self.value)) + + def test_is_in(self): + self.assertEqual(self.attr.is_in([self.value]), + In(self.attr, [self.value])) + + def test_exists(self): + self.assertEqual(self.attr.exists(), AttributeExists(self.attr)) + + def test_not_exists(self): + self.assertEqual(self.attr.not_exists(), AttributeNotExists(self.attr)) + + def test_contains(self): + self.assertEqual(self.attr.contains(self.value), + Contains(self.attr, self.value)) + + def test_size(self): + self.assertEqual(self.attr.size(), Size(self.attr)) + + def test_attribute_type(self): + self.assertEqual(self.attr.attribute_type(self.value), + AttributeType(self.attr, self.value)) + + +class TestConditions(unittest.TestCase): + def setUp(self): + self.value = Attr('mykey') + self.value2 = 'foo' + + def build_and_assert_expression(self, condition, + reference_expression_dict): + expression_dict = condition.get_expression() + self.assertDictEqual(expression_dict, reference_expression_dict) + + def test_equal_operator(self): + cond1 = Equals(self.value, self.value2) + cond2 = Equals(self.value, self.value2) + self.assertTrue(cond1 == cond2) + + def test_equal_operator_type(self): + cond1 = Equals(self.value, self.value2) + cond2 = NotEquals(self.value, self.value2) + self.assertFalse(cond1 == cond2) + + def test_equal_operator_value(self): + cond1 = Equals(self.value, self.value2) + cond2 = Equals(self.value, self.value) + self.assertFalse(cond1 == cond2) + + def test_not_equal_operator(self): + cond1 = Equals(self.value, self.value2) + cond2 = NotEquals(self.value, self.value) + self.assertTrue(cond1 != cond2) + + def test_and_operator(self): + cond1 = 
Equals(self.value, self.value2) + cond2 = Equals(self.value, self.value2) + self.assertEqual(cond1 & cond2, And(cond1, cond2)) + + def test_and_operator_throws_excepetion(self): + cond1 = Equals(self.value, self.value2) + with self.assertRaisesRegexp( + DynamoDBOperationNotSupportedError, 'AND'): + cond1 & self.value2 + + def test_or_operator(self): + cond1 = Equals(self.value, self.value2) + cond2 = Equals(self.value, self.value2) + self.assertEqual(cond1 | cond2, Or(cond1, cond2)) + + def test_or_operator_throws_excepetion(self): + cond1 = Equals(self.value, self.value2) + with self.assertRaisesRegexp( + DynamoDBOperationNotSupportedError, 'OR'): + cond1 | self.value2 + + def test_not_operator(self): + cond1 = Equals(self.value, self.value2) + self.assertEqual(~cond1, Not(cond1)) + + def test_eq(self): + self.build_and_assert_expression( + Equals(self.value, self.value2), + {'format': '{0} {operator} {1}', + 'operator': '=', 'values': (self.value, self.value2)}) + + def test_ne(self): + self.build_and_assert_expression( + NotEquals(self.value, self.value2), + {'format': '{0} {operator} {1}', + 'operator': '<>', 'values': (self.value, self.value2)}) + + def test_lt(self): + self.build_and_assert_expression( + LessThan(self.value, self.value2), + {'format': '{0} {operator} {1}', + 'operator': '<', 'values': (self.value, self.value2)}) + + def test_lte(self): + self.build_and_assert_expression( + LessThanEquals(self.value, self.value2), + {'format': '{0} {operator} {1}', + 'operator': '<=', 'values': (self.value, self.value2)}) + + def test_gt(self): + self.build_and_assert_expression( + GreaterThan(self.value, self.value2), + {'format': '{0} {operator} {1}', + 'operator': '>', 'values': (self.value, self.value2)}) + + def test_gte(self): + self.build_and_assert_expression( + GreaterThanEquals(self.value, self.value2), + {'format': '{0} {operator} {1}', + 'operator': '>=', 'values': (self.value, self.value2)}) + + def test_in(self): + cond = In(self.value, 
(self.value2)) + self.build_and_assert_expression( + cond, + {'format': '{0} {operator} {1}', + 'operator': 'IN', 'values': (self.value, (self.value2))}) + self.assertTrue(cond.has_grouped_values) + + def test_bet(self): + self.build_and_assert_expression( + Between(self.value, self.value2, 'foo2'), + {'format': '{0} {operator} {1} AND {2}', + 'operator': 'BETWEEN', + 'values': (self.value, self.value2, 'foo2')}) + + def test_beg(self): + self.build_and_assert_expression( + BeginsWith(self.value, self.value2), + {'format': '{operator}({0}, {1})', + 'operator': 'begins_with', 'values': (self.value, self.value2)}) + + def test_cont(self): + self.build_and_assert_expression( + Contains(self.value, self.value2), + {'format': '{operator}({0}, {1})', + 'operator': 'contains', 'values': (self.value, self.value2)}) + + def test_ae(self): + self.build_and_assert_expression( + AttributeExists(self.value), + {'format': '{operator}({0})', + 'operator': 'attribute_exists', 'values': (self.value,)}) + + def test_ane(self): + self.build_and_assert_expression( + AttributeNotExists(self.value), + {'format': '{operator}({0})', + 'operator': 'attribute_not_exists', 'values': (self.value,)}) + + def test_size(self): + self.build_and_assert_expression( + Size(self.value), + {'format': '{operator}({0})', + 'operator': 'size', 'values': (self.value,)}) + + def test_size_can_use_attr_methods(self): + size = Size(self.value) + self.build_and_assert_expression( + size.eq(self.value), + {'format': '{0} {operator} {1}', + 'operator': '=', 'values': (size, self.value)}) + + def test_size_can_use_and(self): + size = Size(self.value) + ae = AttributeExists(self.value) + self.build_and_assert_expression( + size & ae, + {'format': '({0} {operator} {1})', + 'operator': 'AND', 'values': (size, ae)}) + + def test_attribute_type(self): + self.build_and_assert_expression( + AttributeType(self.value, self.value2), + {'format': '{operator}({0}, {1})', + 'operator': 'attribute_type', + 'values': 
(self.value, self.value2)}) + + def test_and(self): + cond1 = Equals(self.value, self.value2) + cond2 = Equals(self.value, self.value2) + and_cond = And(cond1, cond2) + self.build_and_assert_expression( + and_cond, + {'format': '({0} {operator} {1})', + 'operator': 'AND', 'values': (cond1, cond2)}) + + def test_or(self): + cond1 = Equals(self.value, self.value2) + cond2 = Equals(self.value, self.value2) + or_cond = Or(cond1, cond2) + self.build_and_assert_expression( + or_cond, + {'format': '({0} {operator} {1})', + 'operator': 'OR', 'values': (cond1, cond2)}) + + def test_not(self): + cond = Equals(self.value, self.value2) + not_cond = Not(cond) + self.build_and_assert_expression( + not_cond, + {'format': '({operator} {0})', + 'operator': 'NOT', 'values': (cond,)}) + + +class TestConditionExpressionBuilder(unittest.TestCase): + def setUp(self): + self.builder = ConditionExpressionBuilder() + + def assert_condition_expression_build( + self, condition, ref_string, ref_names, ref_values, + is_key_condition=False): + exp_string, names, values = self.builder.build_expression( + condition, is_key_condition=is_key_condition) + self.assertEqual(exp_string, ref_string) + self.assertEqual(names, ref_names) + self.assertEqual(values, ref_values) + + def test_bad_input(self): + a = Attr('myattr') + with self.assertRaises(DynamoDBNeedsConditionError): + self.builder.build_expression(a) + + def test_build_expression_eq(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.eq('foo'), '#n0 = :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_reset(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.eq('foo'), '#n0 = :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + self.assert_condition_expression_build( + a.eq('foo'), '#n1 = :v1', {'#n1': 'myattr'}, {':v1': 'foo'}) + + self.builder.reset() + self.assert_condition_expression_build( + a.eq('foo'), '#n0 = :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_expression_lt(self): + a = 
Attr('myattr') + self.assert_condition_expression_build( + a.lt('foo'), '#n0 < :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_expression_lte(self): + a1 = Attr('myattr') + self.assert_condition_expression_build( + a1.lte('foo'), '#n0 <= :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_expression_gt(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.gt('foo'), '#n0 > :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_expression_gte(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.gte('foo'), '#n0 >= :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_expression_begins_with(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.begins_with('foo'), 'begins_with(#n0, :v0)', + {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_expression_between(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.between('foo', 'foo2'), '#n0 BETWEEN :v0 AND :v1', + {'#n0': 'myattr'}, {':v0': 'foo', ':v1': 'foo2'}) + + def test_build_expression_ne(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.ne('foo'), '#n0 <> :v0', {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_expression_in(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.is_in([1, 2, 3]), '#n0 IN (:v0, :v1, :v2)', + {'#n0': 'myattr'}, {':v0': 1, ':v1': 2, ':v2': 3}) + + def test_build_expression_exists(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.exists(), 'attribute_exists(#n0)', {'#n0': 'myattr'}, {}) + + def test_build_expression_not_exists(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.not_exists(), 'attribute_not_exists(#n0)', {'#n0': 'myattr'}, {}) + + def test_build_contains(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.contains('foo'), 'contains(#n0, :v0)', + {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_size(self): + a = Attr('myattr') + 
self.assert_condition_expression_build( + a.size(), 'size(#n0)', {'#n0': 'myattr'}, {}) + + def test_build_size_with_other_conditons(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.size().eq(5), 'size(#n0) = :v0', {'#n0': 'myattr'}, {':v0': 5}) + + def test_build_attribute_type(self): + a = Attr('myattr') + self.assert_condition_expression_build( + a.attribute_type('foo'), 'attribute_type(#n0, :v0)', + {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_and(self): + a = Attr('myattr') + a2 = Attr('myattr2') + self.assert_condition_expression_build( + a.eq('foo') & a2.eq('bar'), '(#n0 = :v0 AND #n1 = :v1)', + {'#n0': 'myattr', '#n1': 'myattr2'}, {':v0': 'foo', ':v1': 'bar'}) + + def test_build_or(self): + a = Attr('myattr') + a2 = Attr('myattr2') + self.assert_condition_expression_build( + a.eq('foo') | a2.eq('bar'), '(#n0 = :v0 OR #n1 = :v1)', + {'#n0': 'myattr', '#n1': 'myattr2'}, {':v0': 'foo', ':v1': 'bar'}) + + def test_build_not(self): + a = Attr('myattr') + self.assert_condition_expression_build( + ~a.eq('foo'), '(NOT #n0 = :v0)', + {'#n0': 'myattr'}, {':v0': 'foo'}) + + def test_build_attribute_with_attr_value(self): + a = Attr('myattr') + value = Attr('myreference') + self.assert_condition_expression_build( + a.eq(value), '#n0 = #n1', + {'#n0': 'myattr', '#n1': 'myreference'}, {}) + + def test_build_with_is_key_condition(self): + k = Key('myattr') + self.assert_condition_expression_build( + k.eq('foo'), '#n0 = :v0', + {'#n0': 'myattr'}, {':v0': 'foo'}, is_key_condition=True) + + def test_build_with_is_key_condition_throws_error(self): + a = Attr('myattr') + with self.assertRaises(DynamoDBNeedsKeyConditionError): + self.builder.build_expression(a.eq('foo'), is_key_condition=True) + + def test_build_attr_map(self): + a = Attr('MyMap.MyKey') + self.assert_condition_expression_build( + a.eq('foo'), '#n0.#n1 = :v0', {'#n0': 'MyMap', '#n1': 'MyKey'}, + {':v0': 'foo'}) + + def test_build_attr_list(self): + a = Attr('MyList[0]') + 
self.assert_condition_expression_build( + a.eq('foo'), '#n0[0] = :v0', {'#n0': 'MyList'}, {':v0': 'foo'}) + + def test_build_nested_attr_map_list(self): + a = Attr('MyMap.MyList[2].MyElement') + self.assert_condition_expression_build( + a.eq('foo'), '#n0.#n1[2].#n2 = :v0', + {'#n0': 'MyMap', '#n1': 'MyList', '#n2': 'MyElement'}, + {':v0': 'foo'}) + + def test_build_double_nested_and_or(self): + a = Attr('myattr') + a2 = Attr('myattr2') + self.assert_condition_expression_build( + (a.eq('foo') & a2.eq('foo2')) | (a.eq('bar') & a2.eq('bar2')), + '((#n0 = :v0 AND #n1 = :v1) OR (#n2 = :v2 AND #n3 = :v3))', + {'#n0': 'myattr', '#n1': 'myattr2', '#n2': 'myattr', + '#n3': 'myattr2'}, + {':v0': 'foo', ':v1': 'foo2', ':v2': 'bar', ':v3': 'bar2'}) diff --git a/tests/unit/dynamodb/test_table.py b/tests/unit/dynamodb/test_table.py new file mode 100644 index 0000000..fe9ad8e --- /dev/null +++ b/tests/unit/dynamodb/test_table.py @@ -0,0 +1,226 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from tests import unittest, mock + +from boto3.dynamodb.table import BatchWriter + + +class BaseTransformationTest(unittest.TestCase): + + maxDiff = None + + def setUp(self): + self.client = mock.Mock() + self.client.batch_write_item.return_value = {'UnprocessedItems': {}} + self.table_name = 'tablename' + self.flush_amount = 2 + self.batch_writer = BatchWriter(self.table_name, self.client, + self.flush_amount) + + def assert_batch_write_calls_are(self, expected_batch_writes): + self.assertEqual(self.client.batch_write_item.call_count, + len(expected_batch_writes)) + batch_write_calls = [ + args[1] for args in + self.client.batch_write_item.call_args_list + ] + self.assertEqual(batch_write_calls, expected_batch_writes) + + def test_batch_write_does_not_immediately_write(self): + self.batch_writer.put_item(Item={'Hash': 'foo'}) + self.assertFalse(self.client.batch_write_item.called) + + def test_batch_write_flushes_at_flush_amount(self): + self.batch_writer.put_item(Item={'Hash': 'foo1'}) + self.batch_writer.put_item(Item={'Hash': 'foo2'}) + expected = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo1'}}}, + {'PutRequest': {'Item': {'Hash': 'foo2'}}}, + ] + } + } + self.assert_batch_write_calls_are([expected]) + + def test_multiple_flushes_reset_items_to_put(self): + self.batch_writer.put_item(Item={'Hash': 'foo1'}) + self.batch_writer.put_item(Item={'Hash': 'foo2'}) + self.batch_writer.put_item(Item={'Hash': 'foo3'}) + self.batch_writer.put_item(Item={'Hash': 'foo4'}) + # We should have two batch calls, one for foo1,foo2 and + # one for foo3,foo4. 
+ first_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo1'}}}, + {'PutRequest': {'Item': {'Hash': 'foo2'}}}, + ] + } + } + second_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo3'}}}, + {'PutRequest': {'Item': {'Hash': 'foo4'}}}, + ] + } + } + self.assert_batch_write_calls_are([first_batch, second_batch]) + + def test_can_handle_puts_and_deletes(self): + self.batch_writer.put_item(Item={'Hash': 'foo1'}) + self.batch_writer.delete_item(Key={'Hash': 'foo2'}) + expected = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo1'}}}, + {'DeleteRequest': {'Key': {'Hash': 'foo2'}}}, + ] + } + } + self.assert_batch_write_calls_are([expected]) + + def test_multiple_batch_calls_with_mixed_deletes(self): + self.batch_writer.put_item(Item={'Hash': 'foo1'}) + self.batch_writer.delete_item(Key={'Hash': 'foo2'}) + self.batch_writer.delete_item(Key={'Hash': 'foo3'}) + self.batch_writer.put_item(Item={'Hash': 'foo4'}) + first_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo1'}}}, + {'DeleteRequest': {'Key': {'Hash': 'foo2'}}}, + ] + } + } + second_batch = { + 'RequestItems': { + self.table_name: [ + {'DeleteRequest': {'Key': {'Hash': 'foo3'}}}, + {'PutRequest': {'Item': {'Hash': 'foo4'}}}, + ] + } + } + self.assert_batch_write_calls_are([first_batch, second_batch]) + + def test_unprocessed_items_added_to_next_batch(self): + self.client.batch_write_item.side_effect = [ + { + 'UnprocessedItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo2'}}} + ], + }, + }, + # Then the last response shows that everything went through + {'UnprocessedItems': {}} + ] + self.batch_writer.put_item(Item={'Hash': 'foo1'}) + self.batch_writer.put_item(Item={'Hash': 'foo2'}) + self.batch_writer.put_item(Item={'Hash': 'foo3'}) + + # We should have sent two batch requests consisting of 2 + # 2 requests. foo1,foo2 and foo2,foo3. 
+ # foo2 is sent twice because the first response has it listed + # as an unprocessed item which means it needs to be part + # of the next batch. + first_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo1'}}}, + {'PutRequest': {'Item': {'Hash': 'foo2'}}}, + ] + } + } + second_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo2'}}}, + {'PutRequest': {'Item': {'Hash': 'foo3'}}}, + ] + } + } + self.assert_batch_write_calls_are([first_batch, second_batch]) + + def test_all_items_flushed_on_exit(self): + with self.batch_writer as b: + b.put_item(Item={'Hash': 'foo1'}) + self.assert_batch_write_calls_are([ + { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo1'}}}, + ] + }, + }, + ]) + + def test_repeated_flushing_on_exit(self): + # We're going to simulate unprocessed_items + # returning multiple unprocessed items across calls. + self.client.batch_write_item.side_effect = [ + { + 'UnprocessedItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo2'}}}, + {'PutRequest': {'Item': {'Hash': 'foo3'}}}, + ], + }, + }, + { + 'UnprocessedItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo3'}}}, + ], + }, + }, + { + 'UnprocessedItems': {} + }, + ] + with BatchWriter(self.table_name, self.client, flush_amount=4) as b: + b.put_item(Item={'Hash': 'foo1'}) + b.put_item(Item={'Hash': 'foo2'}) + b.put_item(Item={'Hash': 'foo3'}) + # So when we exit, we expect three calls. + # First we try the normal batch write with 3 items: + first_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo1'}}}, + {'PutRequest': {'Item': {'Hash': 'foo2'}}}, + {'PutRequest': {'Item': {'Hash': 'foo3'}}}, + ] + } + } + # Then we see two unprocessed items so we send another batch. 
+ second_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo2'}}}, + {'PutRequest': {'Item': {'Hash': 'foo3'}}}, + ] + } + } + # And then we still see one more unprocessed item so + # we need to send another batch. + third_batch = { + 'RequestItems': { + self.table_name: [ + {'PutRequest': {'Item': {'Hash': 'foo3'}}}, + ] + } + } + self.assert_batch_write_calls_are([first_batch, second_batch, + third_batch]) diff --git a/tests/unit/dynamodb/test_transform.py b/tests/unit/dynamodb/test_transform.py new file mode 100644 index 0000000..0f689fa --- /dev/null +++ b/tests/unit/dynamodb/test_transform.py @@ -0,0 +1,620 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from tests import unittest, mock + +from botocore.model import ServiceModel, OperationModel + +from boto3.resources.base import ResourceMeta, ServiceResource +from boto3.dynamodb.transform import ParameterTransformer +from boto3.dynamodb.transform import TransformationInjector +from boto3.dynamodb.transform import DynamoDBHighLevelResource +from boto3.dynamodb.transform import register_high_level_interface +from boto3.dynamodb.transform import copy_dynamodb_params +from boto3.dynamodb.conditions import Attr, Key + + +class BaseTransformationTest(unittest.TestCase): + def setUp(self): + self.target_shape = 'MyShape' + self.original_value = 'orginal' + self.transformed_value = 'transformed' + self.transformer = ParameterTransformer() + self.json_model = {} + self.nested_json_model = {} + self.setup_models() + self.build_models() + + def setup_models(self): + self.json_model = { + 'operations': { + 'SampleOperation': { + 'name': 'SampleOperation', + 'input': {'shape': 'SampleOperationInputOutput'}, + 'output': {'shape': 'SampleOperationInputOutput'} + } + }, + 'shapes': { + 'SampleOperationInputOutput': { + 'type': 'structure', + 'members': {} + }, + 'String': { + 'type': 'string' + } + } + } + + def build_models(self): + self.service_model = ServiceModel(self.json_model) + self.operation_model = OperationModel( + self.json_model['operations']['SampleOperation'], + self.service_model + ) + + def add_input_shape(self, shape): + self.add_shape(shape) + params_shape = self.json_model['shapes']['SampleOperationInputOutput'] + shape_name = list(shape.keys())[0] + params_shape['members'][shape_name] = {'shape': shape_name} + + def add_shape(self, shape): + shape_name = list(shape.keys())[0] + self.json_model['shapes'][shape_name] = shape[shape_name] + + +class TestInputOutputTransformer(BaseTransformationTest): + def setUp(self): + super(TestInputOutputTransformer, self).setUp() + self.transformation = lambda params: self.transformed_value + 
self.add_shape({self.target_shape: {'type': 'string'}}) + + def test_transform_structure(self): + input_params = { + 'Structure': { + 'TransformMe': self.original_value, + 'LeaveAlone': self.original_value, + } + } + input_shape = { + 'Structure': { + 'type': 'structure', + 'members': { + 'TransformMe': {'shape': self.target_shape}, + 'LeaveAlone': {'shape': 'String'} + } + } + } + + self.add_input_shape(input_shape) + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, + target_shape=self.target_shape) + self.assertEqual( + input_params, + {'Structure': { + 'TransformMe': self.transformed_value, + 'LeaveAlone': self.original_value}} + ) + + def test_transform_map(self): + input_params = { + 'TransformMe': {'foo': self.original_value}, + 'LeaveAlone': {'foo': self.original_value} + } + + targeted_input_shape = { + 'TransformMe': { + 'type': 'map', + 'key': {'shape': 'String'}, + 'value': {'shape': self.target_shape} + } + } + + untargeted_input_shape = { + 'LeaveAlone': { + 'type': 'map', + 'key': {'shape': 'String'}, + 'value': {'shape': 'String'} + } + } + + self.add_input_shape(targeted_input_shape) + self.add_input_shape(untargeted_input_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, + target_shape=self.target_shape) + self.assertEqual( + input_params, + {'TransformMe': {'foo': self.transformed_value}, + 'LeaveAlone': {'foo': self.original_value}} + ) + + def test_transform_list(self): + input_params = { + 'TransformMe': [ + self.original_value, self.original_value + ], + 'LeaveAlone': [ + self.original_value, self.original_value + ] + } + + targeted_input_shape = { + 'TransformMe': { + 'type': 'list', + 'member': {'shape': self.target_shape} + } + } + + untargeted_input_shape = { + 'LeaveAlone': { + 'type': 'list', + 'member': {'shape': 'String'} + } + } + + 
self.add_input_shape(targeted_input_shape) + self.add_input_shape(untargeted_input_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, target_shape=self.target_shape) + self.assertEqual( + input_params, + {'TransformMe': [self.transformed_value, self.transformed_value], + 'LeaveAlone': [self.original_value, self.original_value]} + ) + + def test_transform_nested_structure(self): + input_params = { + 'WrapperStructure': { + 'Structure': { + 'TransformMe': self.original_value, + 'LeaveAlone': self.original_value + } + } + } + + structure_shape = { + 'Structure': { + 'type': 'structure', + 'members': { + 'TransformMe': {'shape': self.target_shape}, + 'LeaveAlone': {'shape': 'String'} + } + } + } + + input_shape = { + 'WrapperStructure': { + 'type': 'structure', + 'members': {'Structure': {'shape': 'Structure'}}} + } + self.add_shape(structure_shape) + self.add_input_shape(input_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, + target_shape=self.target_shape) + self.assertEqual( + input_params, + {'WrapperStructure': { + 'Structure': {'TransformMe': self.transformed_value, + 'LeaveAlone': self.original_value}}} + ) + + def test_transform_nested_map(self): + input_params = { + 'TargetedWrapperMap': { + 'foo': { + 'bar': self.original_value + } + }, + 'UntargetedWrapperMap': { + 'foo': { + 'bar': self.original_value + } + } + + } + + targeted_map_shape = { + 'TransformMeMap': { + 'type': 'map', + 'key': {'shape': 'String'}, + 'value': {'shape': self.target_shape} + } + } + + targeted_wrapper_shape = { + 'TargetedWrapperMap': { + 'type': 'map', + 'key': {'shape': 'Name'}, + 'value': {'shape': 'TransformMeMap'}} + } + + self.add_shape(targeted_map_shape) + self.add_input_shape(targeted_wrapper_shape) + + untargeted_map_shape = { + 'LeaveAloneMap': { + 'type': 'map', + 'key': {'shape': 'String'}, 
+ 'value': {'shape': 'String'} + } + } + + untargeted_wrapper_shape = { + 'UntargetedWrapperMap': { + 'type': 'map', + 'key': {'shape': 'Name'}, + 'value': {'shape': 'LeaveAloneMap'}} + } + + self.add_shape(untargeted_map_shape) + self.add_input_shape(untargeted_wrapper_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, target_shape=self.target_shape) + self.assertEqual( + input_params, + {'TargetedWrapperMap': {'foo': {'bar': self.transformed_value}}, + 'UntargetedWrapperMap': {'foo': {'bar': self.original_value}}} + ) + + def test_transform_nested_list(self): + input_params = { + 'TargetedWrapperList': [ + [self.original_value, self.original_value] + ], + 'UntargetedWrapperList': [ + [self.original_value, self.original_value] + ] + } + + targeted_list_shape = { + 'TransformMe': { + 'type': 'list', + 'member': {'shape': self.target_shape} + } + } + + targeted_wrapper_shape = { + 'TargetedWrapperList': { + 'type': 'list', + 'member': {'shape': 'TransformMe'}} + } + + self.add_shape(targeted_list_shape) + self.add_input_shape(targeted_wrapper_shape) + + untargeted_list_shape = { + 'LeaveAlone': { + 'type': 'list', + 'member': {'shape': 'String'} + } + } + + untargeted_wrapper_shape = { + 'UntargetedWrapperList': { + 'type': 'list', + 'member': {'shape': 'LeaveAlone'}} + } + + self.add_shape(untargeted_list_shape) + self.add_input_shape(untargeted_wrapper_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, + target_shape=self.target_shape) + self.assertEqual( + input_params, + {'TargetedWrapperList': [[ + self.transformed_value, self.transformed_value]], + 'UntargetedWrapperList': [[ + self.original_value, self.original_value]]} + ) + + def test_transform_incorrect_type_for_structure(self): + input_params = { + 'Structure': 'foo' + } + + input_shape = { + 'Structure': { + 'type': 'structure', 
+ 'members': { + 'TransformMe': {'shape': self.target_shape}, + } + } + } + + self.add_input_shape(input_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, + target_shape=self.target_shape) + self.assertEqual(input_params, {'Structure': 'foo'}) + + def test_transform_incorrect_type_for_map(self): + input_params = { + 'Map': 'foo' + } + + input_shape = { + 'Map': { + 'type': 'map', + 'key': {'shape': 'String'}, + 'value': {'shape': self.target_shape} + } + } + + self.add_input_shape(input_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, + target_shape=self.target_shape) + self.assertEqual(input_params, {'Map': 'foo'}) + + def test_transform_incorrect_type_for_list(self): + input_params = { + 'List': 'foo' + } + + input_shape = { + 'List': { + 'type': 'list', + 'member': {'shape': self.target_shape} + } + } + + self.add_input_shape(input_shape) + + self.transformer.transform( + params=input_params, model=self.operation_model.input_shape, + transformation=self.transformation, target_shape=self.target_shape) + self.assertEqual(input_params, {'List': 'foo'}) + + +class BaseTransformAttributeValueTest(BaseTransformationTest): + def setUp(self): + self.target_shape = 'AttributeValue' + self.setup_models() + self.build_models() + self.python_value = 'mystring' + self.dynamodb_value = {'S': self.python_value} + self.injector = TransformationInjector() + self.add_shape({self.target_shape: {'type': 'string'}}) + + +class TestTransformAttributeValueInput(BaseTransformAttributeValueTest): + def test_handler(self): + input_params = { + 'Structure': { + 'TransformMe': self.python_value, + 'LeaveAlone': 'unchanged' + } + } + input_shape = { + 'Structure': { + 'type': 'structure', + 'members': { + 'TransformMe': {'shape': self.target_shape}, + 'LeaveAlone': {'shape': 'String'} + } + } + } + + 
self.add_input_shape(input_shape) + + self.injector.inject_attribute_value_input( + params=input_params, model=self.operation_model) + self.assertEqual( + input_params, + {'Structure': { + 'TransformMe': self.dynamodb_value, + 'LeaveAlone': 'unchanged'}} + ) + + +class TestTransformAttributeValueOutput(BaseTransformAttributeValueTest): + def test_handler(self): + parsed = { + 'Structure': { + 'TransformMe': self.dynamodb_value, + 'LeaveAlone': 'unchanged' + } + } + input_shape = { + 'Structure': { + 'type': 'structure', + 'members': { + 'TransformMe': {'shape': self.target_shape}, + 'LeaveAlone': {'shape': 'String'} + } + } + } + + self.add_input_shape(input_shape) + self.injector.inject_attribute_value_output( + parsed=parsed, model=self.operation_model) + self.assertEqual( + parsed, + {'Structure': { + 'TransformMe': self.python_value, + 'LeaveAlone': 'unchanged'}} + ) + + +class TestTransformConditionExpression(BaseTransformationTest): + def setUp(self): + super(TestTransformConditionExpression, self).setUp() + self.add_shape({'ConditionExpression': {'type': 'string'}}) + self.add_shape({'KeyExpression': {'type': 'string'}}) + + shapes = self.json_model['shapes'] + input_members = shapes['SampleOperationInputOutput']['members'] + input_members['KeyCondition'] = {'shape': 'KeyExpression'} + input_members['AttrCondition'] = {'shape': 'ConditionExpression'} + self.injector = TransformationInjector() + self.build_models() + + def test_non_condition_input(self): + params = { + 'KeyCondition': 'foo', + 'AttrCondition': 'bar' + } + self.injector.inject_condition_expressions( + params, self.operation_model) + self.assertEqual( + params, {'KeyCondition': 'foo', 'AttrCondition': 'bar'}) + + def test_single_attr_condition_expression(self): + params = { + 'AttrCondition': Attr('foo').eq('bar') + } + self.injector.inject_condition_expressions( + params, self.operation_model) + self.assertEqual( + params, + {'AttrCondition': '#n0 = :v0', + 'ExpressionAttributeNames': {'#n0': 
'foo'}, + 'ExpressionAttributeValues': {':v0': 'bar'}} + ) + + def test_single_key_conditon_expression(self): + params = { + 'KeyCondition': Key('foo').eq('bar') + } + self.injector.inject_condition_expressions( + params, self.operation_model) + self.assertEqual( + params, + {'KeyCondition': '#n0 = :v0', + 'ExpressionAttributeNames': {'#n0': 'foo'}, + 'ExpressionAttributeValues': {':v0': 'bar'}} + ) + + def test_key_and_attr_conditon_expression(self): + params = { + 'KeyCondition': Key('foo').eq('bar'), + 'AttrCondition': Attr('biz').eq('baz') + } + self.injector.inject_condition_expressions( + params, self.operation_model) + self.assertEqual( + params, + {'KeyCondition': '#n1 = :v1', + 'AttrCondition': '#n0 = :v0', + 'ExpressionAttributeNames': {'#n0': 'biz', '#n1': 'foo'}, + 'ExpressionAttributeValues': {':v0': 'baz', ':v1': 'bar'}} + ) + + def test_key_and_attr_conditon_expression_with_placeholders(self): + params = { + 'KeyCondition': Key('foo').eq('bar'), + 'AttrCondition': Attr('biz').eq('baz'), + 'ExpressionAttributeNames': {'#a': 'b'}, + 'ExpressionAttributeValues': {':c': 'd'} + } + self.injector.inject_condition_expressions( + params, self.operation_model) + self.assertEqual( + params, + {'KeyCondition': '#n1 = :v1', + 'AttrCondition': '#n0 = :v0', + 'ExpressionAttributeNames': { + '#n0': 'biz', '#n1': 'foo', '#a': 'b'}, + 'ExpressionAttributeValues': { + ':v0': 'baz', ':v1': 'bar', ':c': 'd'}} + ) + + +class TestCopyDynamoDBParams(unittest.TestCase): + def test_copy_dynamodb_params(self): + params = {'foo': 'bar'} + new_params = copy_dynamodb_params(params) + self.assertEqual(params, new_params) + self.assertIsNot(new_params, params) + + +class TestDynamoDBHighLevelResource(unittest.TestCase): + def setUp(self): + self.events = mock.Mock() + self.client = mock.Mock() + self.client.meta.events = self.events + self.meta = ResourceMeta('dynamodb') + + def test_instantiation(self): + # Instantiate the class. 
+ dynamodb_class = type( + 'dynamodb', (DynamoDBHighLevelResource, ServiceResource), + {'meta': self.meta}) + with mock.patch('boto3.dynamodb.transform.TransformationInjector') \ + as mock_injector: + with mock.patch( + 'boto3.dynamodb.transform.DocumentModifiedShape.' + 'replace_documentation_for_matching_shape') \ + as mock_modify_documentation_method: + dynamodb_class(client=self.client) + + # It should have fired the following events upon instantiation. + event_call_args = self.events.register.call_args_list + self.assertEqual( + event_call_args, + [mock.call( + 'provide-client-params.dynamodb', + copy_dynamodb_params, + unique_id='dynamodb-create-params-copy'), + mock.call( + 'before-parameter-build.dynamodb', + mock_injector.return_value.inject_condition_expressions, + unique_id='dynamodb-condition-expression'), + mock.call( + 'before-parameter-build.dynamodb', + mock_injector.return_value.inject_attribute_value_input, + unique_id='dynamodb-attr-value-input'), + mock.call( + 'after-call.dynamodb', + mock_injector.return_value.inject_attribute_value_output, + unique_id='dynamodb-attr-value-output'), + mock.call( + 'docs.*.dynamodb.*.complete-section', + mock_modify_documentation_method, + unique_id='dynamodb-attr-value-docs'), + mock.call( + 'docs.*.dynamodb.*.complete-section', + mock_modify_documentation_method, + unique_id='dynamodb-key-expression-docs'), + mock.call( + 'docs.*.dynamodb.*.complete-section', + mock_modify_documentation_method, + unique_id='dynamodb-cond-expression-docs')] + ) + + +class TestRegisterHighLevelInterface(unittest.TestCase): + def test_register(self): + base_classes = [object] + register_high_level_interface(base_classes) + + # Check that the base classes are as expected + self.assertEqual(base_classes, [DynamoDBHighLevelResource, object]) diff --git a/tests/unit/dynamodb/test_types.py b/tests/unit/dynamodb/test_types.py new file mode 100644 index 0000000..f4089e8 --- /dev/null +++ b/tests/unit/dynamodb/test_types.py @@ -0,0 
+1,203 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from decimal import Decimal +from tests import unittest + +from botocore.compat import six + +from boto3.dynamodb.types import Binary, TypeSerializer, TypeDeserializer + + +class TestBinary(unittest.TestCase): + def test_bytes_input(self): + data = Binary(b'\x01') + self.assertEqual(b'\x01', data) + self.assertEqual(b'\x01', data.value) + + def test_non_ascii_bytes_input(self): + # Binary data that is out of ASCII range + data = Binary(b'\x88') + self.assertEqual(b'\x88', data) + self.assertEqual(b'\x88', data.value) + + def test_bytearray_input(self): + data = Binary(bytearray([1])) + self.assertEqual(b'\x01', data) + self.assertEqual(b'\x01', data.value) + + def test_unicode_throws_error(self): + with self.assertRaises(TypeError): + Binary(u'\u00e9') + + def test_integer_throws_error(self): + with self.assertRaises(TypeError): + Binary(1) + + def test_not_equal(self): + self.assertTrue(Binary(b'\x01') != b'\x02') + + def test_str(self): + self.assertEqual(Binary(b'\x01').__str__(), b'\x01') + + def test_repr(self): + self.assertIn('Binary', repr(Binary(b'1'))) + + +class TestSerializer(unittest.TestCase): + def setUp(self): + self.serializer = TypeSerializer() + + def test_serialize_unsupported_type(self): + with self.assertRaisesRegexp(TypeError, 'Unsupported type'): + self.serializer.serialize(object()) + + def test_serialize_null(self): + self.assertEqual(self.serializer.serialize(None), 
{'NULL': True}) + + def test_serialize_boolean(self): + self.assertEqual(self.serializer.serialize(False), {'BOOL': False}) + + def test_serialize_integer(self): + self.assertEqual(self.serializer.serialize(1), {'N': '1'}) + + def test_serialize_decimal(self): + self.assertEqual( + self.serializer.serialize(Decimal('1.25')), {'N': '1.25'}) + + def test_serialize_float_error(self): + with self.assertRaisesRegexp( + TypeError, + 'Float types are not supported. Use Decimal types instead'): + self.serializer.serialize(1.25) + + def test_serialize_NaN_error(self): + with self.assertRaisesRegexp( + TypeError, + 'Infinity and NaN not supported'): + self.serializer.serialize(Decimal('NaN')) + + def test_serialize_string(self): + self.assertEqual(self.serializer.serialize('foo'), {'S': 'foo'}) + + def test_serialize_binary(self): + self.assertEqual(self.serializer.serialize( + Binary(b'\x01')), {'B': b'\x01'}) + + def test_serialize_bytearray(self): + self.assertEqual(self.serializer.serialize(bytearray([1])), + {'B': b'\x01'}) + + @unittest.skipIf(six.PY2, + 'This is a test when using python3 version of bytes') + def test_serialize_bytes(self): + self.assertEqual(self.serializer.serialize(b'\x01'), {'B': b'\x01'}) + + def test_serialize_number_set(self): + serialized_value = self.serializer.serialize(set([1, 2, 3])) + self.assertEqual(len(serialized_value), 1) + self.assertIn('NS', serialized_value) + self.assertCountEqual(serialized_value['NS'], ['1', '2', '3']) + + def test_serialize_string_set(self): + serialized_value = self.serializer.serialize(set(['foo', 'bar'])) + self.assertEqual(len(serialized_value), 1) + self.assertIn('SS', serialized_value) + self.assertCountEqual(serialized_value['SS'], ['foo', 'bar']) + + def test_serialize_binary_set(self): + serialized_value = self.serializer.serialize( + set([Binary(b'\x01'), Binary(b'\x02')])) + self.assertEqual(len(serialized_value), 1) + self.assertIn('BS', serialized_value) + 
self.assertCountEqual(serialized_value['BS'], [b'\x01', b'\x02']) + + def test_serialize_list(self): + serialized_value = self.serializer.serialize(['foo', 1, [1]]) + self.assertEqual(len(serialized_value), 1) + self.assertIn('L', serialized_value) + self.assertCountEqual( + serialized_value['L'], + [{'S': 'foo'}, {'N': '1'}, {'L': [{'N': '1'}]}] + ) + + def test_serialize_map(self): + serialized_value = self.serializer.serialize( + {'foo': 'bar', 'baz': {'biz': 1}}) + self.assertEqual( + serialized_value, + {'M': {'foo': {'S': 'bar'}, 'baz': {'M': {'biz': {'N': '1'}}}}}) + + +class TestDeserializer(unittest.TestCase): + def setUp(self): + self.deserializer = TypeDeserializer() + + def test_deserialize_invalid_type(self): + with self.assertRaisesRegexp(TypeError, 'FOO is not supported'): + self.deserializer.deserialize({'FOO': 'bar'}) + + def test_deserialize_empty_structure(self): + with self.assertRaisesRegexp(TypeError, 'Value must be a nonempty'): + self.assertEqual(self.deserializer.deserialize({}), {}) + + def test_deserialize_null(self): + self.assertEqual(self.deserializer.deserialize({"NULL": True}), None) + + def test_deserialize_boolean(self): + self.assertEqual(self.deserializer.deserialize({"BOOL": False}), False) + + def test_deserialize_integer(self): + self.assertEqual( + self.deserializer.deserialize({'N': '1'}), Decimal('1')) + + def test_deserialize_decimal(self): + self.assertEqual( + self.deserializer.deserialize({'N': '1.25'}), Decimal('1.25')) + + def test_deserialize_string(self): + self.assertEqual( + self.deserializer.deserialize({'S': 'foo'}), 'foo') + + def test_deserialize_binary(self): + self.assertEqual( + self.deserializer.deserialize({'B': b'\x00'}), Binary(b'\x00')) + + def test_deserialize_number_set(self): + self.assertEqual( + self.deserializer.deserialize( + {'NS': ['1', '1.25']}), set([Decimal('1'), Decimal('1.25')])) + + def test_deserialize_string_set(self): + self.assertEqual( + self.deserializer.deserialize( + {'SS': 
['foo', 'bar']}), set(['foo', 'bar'])) + + def test_deserialize_binary_set(self): + self.assertEqual( + self.deserializer.deserialize( + {'BS': [b'\x00', b'\x01']}), + set([Binary(b'\x00'), Binary(b'\x01')])) + + def test_deserialize_list(self): + self.assertEqual( + self.deserializer.deserialize( + {'L': [{'N': '1'}, {'S': 'foo'}, {'L': [{'N': '1.25'}]}]}), + [Decimal('1'), 'foo', [Decimal('1.25')]]) + + def test_deserialize_map(self): + self.assertEqual( + self.deserializer.deserialize( + {'M': {'foo': {'S': 'mystring'}, + 'bar': {'M': {'baz': {'N': '1'}}}}}), + {'foo': 'mystring', 'bar': {'baz': Decimal('1')}} + ) diff --git a/tests/unit/ec2/__init__.py b/tests/unit/ec2/__init__.py new file mode 100644 index 0000000..2372440 --- /dev/null +++ b/tests/unit/ec2/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. diff --git a/tests/unit/ec2/test_createtags.py b/tests/unit/ec2/test_createtags.py new file mode 100644 index 0000000..0ba620c --- /dev/null +++ b/tests/unit/ec2/test_createtags.py @@ -0,0 +1,73 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. 
This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from tests import unittest +import mock + +import boto3.session +from boto3.ec2 import createtags + + +class TestCreateTags(unittest.TestCase): + def setUp(self): + self.client = mock.Mock() + self.resource = mock.Mock() + self.resource.meta.client = self.client + self.ref_tags = [ + 'tag1', + 'tag2', + 'tag3', + 'tag4', + 'tag5', + 'tag6' + ] + self.resource.Tag.side_effect = self.ref_tags + + def test_create_tags(self): + ref_kwargs = { + 'Resources': ['foo', 'bar'], + 'Tags': [ + {'Key': 'key1', 'Value': 'value1'}, + {'Key': 'key2', 'Value': 'value2'}, + {'Key': 'key3', 'Value': 'value3'} + ] + } + + result_tags = createtags.create_tags(self.resource, **ref_kwargs) + + # Ensure the client method was called properly. + self.client.create_tags.assert_called_with(**ref_kwargs) + + # Ensure the calls to the Tag reference were correct. + self.assertEqual( + self.resource.Tag.call_args_list, + [mock.call('foo', 'key1', 'value1'), + mock.call('foo', 'key2', 'value2'), + mock.call('foo', 'key3', 'value3'), + mock.call('bar', 'key1', 'value1'), + mock.call('bar', 'key2', 'value2'), + mock.call('bar', 'key3', 'value3')]) + + # Ensure the return values are as expected. 
+ self.assertEqual(result_tags, self.ref_tags) + + +class TestCreateTagsInjection(unittest.TestCase): + def test_create_tags_injected_to_resource(self): + session = boto3.session.Session(region_name='us-west-2') + with mock.patch('boto3.ec2.createtags.create_tags') as mock_method: + resource = session.resource('ec2') + self.assertTrue(hasattr(resource, 'create_tags'), + 'EC2 resource does not have create_tags method.') + self.assertIs(resource.create_tags, mock_method, + 'custom create_tags method was not injected onto ' + 'EC2 service resource') diff --git a/tests/unit/resources/__init__.py b/tests/unit/resources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/resources/test_action.py b/tests/unit/resources/test_action.py new file mode 100644 index 0000000..bf04afa --- /dev/null +++ b/tests/unit/resources/test_action.py @@ -0,0 +1,276 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +from boto3.utils import ServiceContext +from boto3.resources.action import BatchAction, ServiceAction, WaiterAction +from boto3.resources.base import ResourceMeta +from boto3.resources.model import Action, Waiter +from tests import BaseTestCase, mock + + +class TestServiceActionCall(BaseTestCase): + def setUp(self): + super(TestServiceActionCall, self).setUp() + + self.action_def = { + 'request': { + 'operation': 'GetFrobs', + 'params': [] + } + } + + @property + def action(self): + return Action('test', self.action_def, {}) + + @mock.patch('boto3.resources.action.create_request_parameters', + return_value={}) + def test_service_action_creates_params(self, params_mock): + resource = mock.Mock() + resource.meta = ResourceMeta('test', client=mock.Mock()) + + action = ServiceAction(self.action) + + action(resource, foo=1) + + self.assertTrue(params_mock.called, + 'Parameters for operation not created') + + @mock.patch('boto3.resources.action.create_request_parameters', + return_value={'bar': 'baz'}) + def test_service_action_calls_operation(self, params_mock): + resource = mock.Mock() + resource.meta = ResourceMeta('test', client=mock.Mock()) + operation = resource.meta.client.get_frobs + operation.return_value = 'response' + + action = ServiceAction(self.action) + + response = action(resource, foo=1) + + operation.assert_called_with(foo=1, bar='baz') + self.assertEqual(response, 'response', + 'Unexpected low-level response data returned') + + @mock.patch('boto3.resources.action.create_request_parameters', + return_value={}) + @mock.patch('boto3.resources.action.RawHandler') + def test_service_action_calls_raw_handler(self, handler_mock, params_mock): + resource = mock.Mock() + resource.meta = ResourceMeta('test', client=mock.Mock()) + operation = resource.meta.client.get_frobs + operation.return_value = 'response' + + action = ServiceAction(self.action) + + handler_mock.return_value.return_value = 'response' + + action(resource) + + 
handler_mock.assert_called_with(None) + handler_mock.return_value.assert_called_with(resource, {}, 'response') + + @mock.patch('boto3.resources.action.create_request_parameters', + return_value={}) + @mock.patch('boto3.resources.action.ResourceHandler') + def test_service_action_calls_resource_handler(self, handler_mock, params_mock): + self.action_def['resource'] = { + 'type': 'Frob', + 'path': 'Container' + } + + resource = mock.Mock() + resource.meta = ResourceMeta('test', client=mock.Mock()) + operation = resource.meta.client.get_frobs + operation.return_value = 'response' + + factory = mock.Mock() + resource_defs = {} + service_model = mock.Mock() + + action_model = self.action + + service_context = ServiceContext( + service_name='test', + service_model=service_model, + resource_json_definitions=resource_defs, + service_waiter_model=None + ) + + action = ServiceAction( + action_model=action_model, factory=factory, + service_context=service_context + ) + + handler_mock.return_value.return_value = 'response' + + action(resource) + + handler_mock.assert_called_with( + search_path='Container', factory=factory, + resource_model=action_model.resource, + service_context=service_context, + operation_name='GetFrobs' + ) + + +class TestWaiterActionCall(BaseTestCase): + def setUp(self): + super(TestWaiterActionCall, self).setUp() + self.waiter_resource_name = 'wait_until_exists' + self.waiter_def = { + "waiterName": "FrobExists", + "params": [ + {"target": "Frob", "sourceType": "identifier", + "source": "Name"}] + } + + @property + def waiter(self): + return Waiter('test', self.waiter_def) + + @mock.patch('boto3.resources.action.create_request_parameters', + return_value={}) + def test_service_waiter_creates_params(self, params_mock): + resource = mock.Mock() + resource.meta = ResourceMeta('test', client=mock.Mock()) + + action = WaiterAction(self.waiter, self.waiter_resource_name) + + action(resource, foo=1) + + self.assertTrue(params_mock.called, + 'Parameters for 
operation not created') + + @mock.patch('boto3.resources.action.create_request_parameters', + return_value={'bar': 'baz'}) + def test_service_action_calls_operation(self, params_mock): + resource = mock.Mock() + resource.meta = ResourceMeta('test', client=mock.Mock()) + get_waiter = resource.meta.client.get_waiter + mock_waiter = mock.Mock() + get_waiter.return_value = mock_waiter + + action = WaiterAction(self.waiter, self.waiter_resource_name) + + action(resource, foo=1) + + get_waiter.assert_called_with('frob_exists') + mock_waiter.wait.assert_called_with(foo=1, bar='baz') + + +class TestBatchActionCall(BaseTestCase): + def setUp(self): + super(TestBatchActionCall, self).setUp() + + self.action_def = { + 'request': { + 'operation': 'GetFrobs', + 'params': [] + } + } + + @property + def model(self): + return Action('test', self.action_def, {}) + + def test_batch_action_gets_pages_from_collection(self): + collection = mock.Mock() + collection.pages.return_value = [] + action = BatchAction(self.model) + + action(collection) + + collection.pages.assert_called_with() + + def test_batch_action_creates_parameters_from_items(self): + self.action_def['request']['params'] = [ + {'target': 'Bucket', 'source': 'data', 'path': 'BucketName'}, + {'target': 'Delete.Objects[].Key', 'source': 'data', + 'path': 'Key'} + ] + + client = mock.Mock() + + item1 = mock.Mock() + item1.meta = ResourceMeta('test', client=client, data={ + 'BucketName': 'bucket', + 'Key': 'item1' + }) + + item2 = mock.Mock() + item2.meta = ResourceMeta('test', client=client, data={ + 'BucketName': 'bucket', + 'Key': 'item2' + }) + + collection = mock.Mock() + collection.pages.return_value = [[item1, item2]] + + action = BatchAction(self.model) + action(collection) + + client.get_frobs.assert_called_with(Bucket='bucket', Delete={ + 'Objects': [ + {'Key': 'item1'}, + {'Key': 'item2'} + ] + }) + + @mock.patch('boto3.resources.action.create_request_parameters', + return_value={}) + def 
test_batch_action_skips_operation(self, crp_mock): + # In this test we have an item from the collection, but no + # parameters are set up. Because of this, we do NOT call + # the batch operation. + client = mock.Mock() + + item = mock.Mock() + item.meta = ResourceMeta('test', client=client) + + collection = mock.Mock() + collection.pages.return_value = [[item]] + + model = self.model + action = BatchAction(model) + action(collection) + + crp_mock.assert_called_with(item, model.request, params={}) + client.get_frobs.assert_not_called() + + @mock.patch('boto3.resources.action.create_request_parameters') + def test_batch_action_calls_operation(self, crp_mock): + # In this test we have an item and parameters, so the call + # to the batch operation should be made. + def side_effect(resource, model, params=None): + params['foo'] = 'bar' + + crp_mock.side_effect = side_effect + + client = mock.Mock() + + item = mock.Mock() + item.meta = ResourceMeta('test', client=client) + + collection = mock.Mock() + collection.pages.return_value = [[item]] + + model = self.model + action = BatchAction(model) + action(collection) + + # Here the call is made with params={}, but they are edited + # in-place so we need to compare to the final edited value. + crp_mock.assert_called_with(item, model.request, + params={'foo': 'bar'}) + client.get_frobs.assert_called_with(foo='bar') diff --git a/tests/unit/resources/test_collection.py b/tests/unit/resources/test_collection.py new file mode 100644 index 0000000..63e89aa --- /dev/null +++ b/tests/unit/resources/test_collection.py @@ -0,0 +1,671 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. 
This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from botocore.hooks import HierarchicalEmitter +from botocore.model import ServiceModel + +from boto3.utils import ServiceContext +from boto3.resources.collection import CollectionFactory, CollectionManager, \ + ResourceCollection +from boto3.resources.base import ResourceMeta +from boto3.resources.factory import ResourceFactory +from boto3.resources.model import Collection +from tests import BaseTestCase, mock + + +class TestCollectionFactory(BaseTestCase): + def setUp(self): + super(TestCollectionFactory, self).setUp() + + self.client = mock.Mock() + self.client.can_paginate.return_value = False + self.parent = mock.Mock() + self.parent.meta = ResourceMeta('test', client=self.client) + self.resource_factory = ResourceFactory(mock.Mock()) + self.service_model = ServiceModel({}) + self.event_emitter = HierarchicalEmitter() + + self.factory = CollectionFactory() + self.load = self.factory.load_from_definition + + def test_create_subclasses(self): + resource_defs = { + 'Frob': {}, + 'Chain': { + 'hasMany': { + 'Frobs': { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob' + } + } + } + } + } + collection_model = Collection( + 'Frobs', resource_defs['Chain']['hasMany']['Frobs'], + resource_defs) + + service_context = ServiceContext( + service_name='test', + resource_json_definitions=resource_defs, + service_model=self.service_model, + service_waiter_model=None + ) + collection_cls = self.load( + resource_name='Chain', + collection_model=collection_model, + service_context=service_context, + event_emitter=self.event_emitter + ) + collection = collection_cls( + collection_model=collection_model, + parent=self.parent, + factory=self.resource_factory, + service_context=service_context + ) + + 
self.assertEqual(collection_cls.__name__, + 'test.Chain.FrobsCollectionManager') + self.assertIsInstance(collection, CollectionManager) + + self.assertIsInstance(collection.all(), ResourceCollection) + + @mock.patch('boto3.resources.collection.BatchAction') + def test_create_batch_actions(self, action_mock): + resource_defs = { + 'Frob': { + 'batchActions': { + 'Delete': { + 'request': { + 'operation': 'DeleteFrobs' + } + } + } + }, + 'Chain': { + 'hasMany': { + 'Frobs': { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob' + } + } + } + } + } + + collection_model = Collection( + 'Frobs', resource_defs['Chain']['hasMany']['Frobs'], + resource_defs) + + service_context = ServiceContext( + service_name='test', + resource_json_definitions=resource_defs, + service_model=self.service_model, + service_waiter_model=None + ) + collection_cls = self.load( + resource_name='Chain', + collection_model=collection_model, + service_context=service_context, + event_emitter=self.event_emitter + ) + collection = collection_cls( + collection_model=collection_model, + parent=self.parent, + factory=self.resource_factory, + service_context=service_context + ) + + self.assertTrue(hasattr(collection, 'delete')) + + collection.delete() + + action_mock.return_value.assert_called_with(collection) + + +class TestResourceCollection(BaseTestCase): + def setUp(self): + super(TestResourceCollection, self).setUp() + + # Minimal definition so things like repr work + self.collection_def = { + 'request': { + 'operation': 'TestOperation' + }, + 'resource': { + 'type': 'Frob' + } + } + self.client = mock.Mock() + self.client.can_paginate.return_value = False + self.parent = mock.Mock() + self.parent.meta = ResourceMeta('test', client=self.client) + self.factory = ResourceFactory(mock.Mock()) + self.service_model = ServiceModel({}) + + def get_collection(self): + resource_defs = { + 'Frob': { + 'identifiers': [] + } + } + + # Build up a resource def identifier list based on 
what + # the collection is expecting to be required from its + # definition. This saves a bunch of repetitive typing + # and lets you just define a collection in the tests + # below. Any identifiers you expect to be availabe in + # the resource definition will automatically be there. + resource_def = self.collection_def.get('resource', {}) + for identifier in resource_def.get('identifiers', []): + resource_defs['Frob']['identifiers'].append( + {'name': identifier['target']}) + + collection_model = Collection( + 'test', self.collection_def, resource_defs) + + collection = CollectionManager( + collection_model=collection_model, + parent=self.parent, + factory=self.factory, + service_context=ServiceContext( + service_name='test', + service_model=self.service_model, + resource_json_definitions=resource_defs, + service_waiter_model=None + ) + ) + return collection + + def test_repr(self): + collection = self.get_collection() + self.assertIn('CollectionManager', repr(collection)) + + def test_iteration_manager(self): + # A collection manager is not iterable. You must first call + # .all or .filter or another method to get an iterable. 
+ collection = self.get_collection() + with self.assertRaises(TypeError): + list(collection) + + def test_iteration_non_paginated(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.get_frobs.return_value = { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'}, + {'Id': 'three'}, + {'Id': 'four'} + ] + } + collection = self.get_collection() + items = list(collection.all()) + self.assertEqual(len(items), 4) + self.assertEqual(items[0].id, 'one') + self.assertEqual(items[1].id, 'two') + self.assertEqual(items[2].id, 'three') + self.assertEqual(items[3].id, 'four') + + def test_limit_param_non_paginated(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.get_frobs.return_value = { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'}, + {'Id': 'three'}, + {'Id': 'four'} + ] + } + collection = self.get_collection() + items = list(collection.all().limit(2)) + self.assertEqual(len(items), 2) + + # Only the first two should be present + self.assertEqual(items[0].id, 'one') + self.assertEqual(items[1].id, 'two') + + def test_limit_method_non_paginated(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.get_frobs.return_value = { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'}, + {'Id': 'three'}, + {'Id': 'four'} + ] + } + collection = self.get_collection() + items = list(collection.limit(2)) + self.assertEqual(len(items), 2) + + # Only the first two should be present + self.assertEqual(items[0].id, 'one') + self.assertEqual(items[1].id, 'two') + + 
@mock.patch('boto3.resources.collection.ResourceHandler') + def test_filters_non_paginated(self, handler): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [] + } + } + self.client.get_frobs.return_value = {} + handler.return_value.return_value = [] + collection = self.get_collection() + + list(collection.filter(Param1='foo', Param2=3).limit(2)) + + # Note - limit is not passed through to the low-level call + self.client.get_frobs.assert_called_with(Param1='foo', Param2=3) + + def test_page_iterator_returns_pages_of_items(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [ + { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'} + ] + }, { + 'Frobs': [ + {'Id': 'three'}, + {'Id': 'four'} + ] + } + ] + collection = self.get_collection() + pages = list(collection.limit(3).pages()) + self.assertEqual(len(pages), 2) + self.assertEqual(len(pages[0]), 2) + self.assertEqual(len(pages[1]), 1) + + def test_page_iterator_page_size(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.can_paginate.return_value = True + paginator = self.client.get_paginator.return_value + paginator.paginate.return_value = [] + + collection = self.get_collection() + list(collection.page_size(5).pages()) + + paginator.paginate.assert_called_with( + PaginationConfig={'PageSize': 5, 'MaxItems': None}) + + def test_iteration_paginated(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', 
+ 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [ + { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'} + ] + }, { + 'Frobs': [ + {'Id': 'three'}, + {'Id': 'four'} + ] + } + ] + collection = self.get_collection() + items = list(collection.all()) + self.assertEqual(len(items), 4) + self.assertEqual(items[0].id, 'one') + self.assertEqual(items[1].id, 'two') + self.assertEqual(items[2].id, 'three') + self.assertEqual(items[3].id, 'four') + + # Low-level pagination should have been called + self.client.get_paginator.assert_called_with('get_frobs') + paginator = self.client.get_paginator.return_value + paginator.paginate.assert_called_with( + PaginationConfig={'PageSize': None, 'MaxItems': None}) + + def test_limit_param_paginated(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [ + { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'} + ] + }, { + 'Frobs': [ + {'Id': 'three'}, + {'Id': 'four'} + ] + } + ] + collection = self.get_collection() + items = list(collection.all().limit(2)) + self.assertEqual(len(items), 2) + + # Only the first two should be present + self.assertEqual(items[0].id, 'one') + self.assertEqual(items[1].id, 'two') + + def test_limit_method_paginated(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [ + { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'} + ] + }, { + 'Frobs': [ + {'Id': 
'three'}, + {'Id': 'four'} + ] + } + ] + collection = self.get_collection() + items = list(collection.all().limit(2)) + self.assertEqual(len(items), 2) + + # Only the first two should be present + self.assertEqual(items[0].id, 'one') + self.assertEqual(items[1].id, 'two') + + @mock.patch('boto3.resources.collection.ResourceHandler') + def test_filters_paginated(self, handler): + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [] + handler.return_value.return_value = [] + collection = self.get_collection() + + list(collection.filter(Param1='foo', Param2=3).limit(2)) + + paginator = self.client.get_paginator.return_value + paginator.paginate.assert_called_with( + PaginationConfig={'PageSize': None, 'MaxItems': 2}, + Param1='foo', Param2=3) + + @mock.patch('boto3.resources.collection.ResourceHandler') + def test_filter_does_not_clobber_existing_list_values(self, handler): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs', + "params": [ + {"target": "Filters[0].Name", "source": "string", + "value": "frob-id"}, + {"target": "Filters[0].Values[0]", "source": "identifier", + "name": "Id"} + ] + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + {'target': 'Id', 'source': 'response', + 'path': 'Frobs[].Id'} + ] + } + } + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [] + handler.return_value.return_value = [] + collection = self.get_collection() + + self.parent.id = 'my-id' + list(collection.filter( + Filters=[{'Name': 'another-filter', 'Values': ['foo']}])) + paginator = self.client.get_paginator.return_value + paginator.paginate.assert_called_with( + PaginationConfig={'PageSize': None, 'MaxItems': None}, + Filters=[ + {'Values': ['my-id'], 'Name': 'frob-id'}, + {'Values': ['foo'], 'Name': 'another-filter'} + ] + ) + + @mock.patch('boto3.resources.collection.ResourceHandler') + def test_page_size_param(self, handler): + 
self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [] + handler.return_value.return_value = [] + collection = self.get_collection() + + list(collection.all().page_size(1)) + + paginator = self.client.get_paginator.return_value + paginator.paginate.assert_called_with( + PaginationConfig={'PageSize': 1, 'MaxItems': None}) + + @mock.patch('boto3.resources.collection.ResourceHandler') + def test_page_size_method(self, handler): + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [] + handler.return_value.return_value = [] + collection = self.get_collection() + + list(collection.page_size(1)) + + paginator = self.client.get_paginator.return_value + paginator.paginate.assert_called_with( + PaginationConfig={'PageSize': 1, 'MaxItems': None}) + + def test_chaining(self): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs' + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + { + 'target': 'Id', + 'source': 'response', + 'path': 'Frobs[].Id' + } + ] + } + } + self.client.get_frobs.return_value = { + 'Frobs': [ + {'Id': 'one'}, + {'Id': 'two'}, + {'Id': 'three'}, + {'Id': 'four'} + ] + } + collection = self.get_collection() + + items = list(collection.filter().all().all()) + + self.assertEqual(len(items), 4) + self.assertEqual(items[0].id, 'one') + self.assertEqual(items[1].id, 'two') + self.assertEqual(items[2].id, 'three') + self.assertEqual(items[3].id, 'four') + + @mock.patch('boto3.resources.collection.ResourceHandler') + def test_chaining_copies_parameters(self, handler): + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [] + handler.return_value.return_value = [] + collection = self.get_collection() + + list(collection.all().filter(CustomArg=1).limit(3).page_size(3)) + + paginator = self.client.get_paginator.return_value + paginator.paginate.assert_called_with( + 
PaginationConfig={'PageSize': 3, 'MaxItems': 3}, CustomArg=1) + + @mock.patch('boto3.resources.collection.ResourceHandler') + def test_chaining_filters_does_not_clobber_list_values(self, handler): + self.collection_def = { + 'request': { + 'operation': 'GetFrobs', + "params": [ + {"target": "Filters[0].Name", "source": "string", + "value": "frob-id"}, + {"target": "Filters[0].Values[0]", "source": "identifier", + "name": "Id"} + ] + }, + 'resource': { + 'type': 'Frob', + 'identifiers': [ + {'target': 'Id', 'source': 'response', + 'path': 'Frobs[].Id'} + ] + } + } + self.client.can_paginate.return_value = True + self.client.get_paginator.return_value.paginate.return_value = [] + handler.return_value.return_value = [] + collection = self.get_collection() + + self.parent.id = 'my-id' + collection = collection.filter( + Filters=[{'Name': 'second-filter', 'Values': ['foo']}]) + list(collection.filter( + Filters=[{'Name': 'third-filter', 'Values': ['bar']}])) + paginator = self.client.get_paginator.return_value + paginator.paginate.assert_called_with( + PaginationConfig={'PageSize': None, 'MaxItems': None}, + Filters=[ + {'Values': ['my-id'], 'Name': 'frob-id'}, + {'Values': ['foo'], 'Name': 'second-filter'}, + {'Values': ['bar'], 'Name': 'third-filter'} + ] + ) + + def test_chained_repr(self): + collection = self.get_collection() + + self.assertIn('ResourceCollection', repr(collection.all())) diff --git a/tests/unit/resources/test_collection_smoke.py b/tests/unit/resources/test_collection_smoke.py new file mode 100644 index 0000000..cb9f228 --- /dev/null +++ b/tests/unit/resources/test_collection_smoke.py @@ -0,0 +1,117 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. 
This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import botocore.session +from botocore import xform_name +from nose.tools import assert_false + +from boto3.session import Session +from boto3.resources.model import ResourceModel + + +# A list of names that are common names of a pagination parameter. +# Note that this list is not comprehensive. It may have to be updated +# in the future, but this covers a lot of the pagination parameters. +COMMON_PAGINATION_PARAM_NAMES = [ + 'nextToken', + 'NextToken', + 'marker', + 'Marker', + 'NextMarker', + 'nextPageToken', + 'NextPageToken', +] + + +def operation_looks_paginated(operation_model): + """Checks whether an operation looks like it can be paginated + + :type operation_model: botocore.model.OperationModel + :param operation_model: The model for a particular operation + + :returns: True if determines it can be paginated. False otherwise. + """ + has_input_param = _shape_has_pagination_param(operation_model.input_shape) + has_output_param = _shape_has_pagination_param( + operation_model.output_shape) + # If there is a parameter in either the input or output that + # is used in pagination, mark the operation as paginateable. + return (has_input_param and has_output_param) + + +def _shape_has_pagination_param(shape): + if shape: + members = shape.members + # Go through the list of common names that may be a pagination + # parameter name + for param in COMMON_PAGINATION_PARAM_NAMES: + # Go through all of the shapes members. + for member in members: + # See if the name is the member name. If it is, mark + # it as a pagination parameter. 
+ if param == member: + return True + return False + + +def test_all_collections_have_paginators_if_needed(): + # If a collection relies on an operation that is paginated, it + # will require a paginator to iterate through all of the resources + # with the all() method. If there is no paginator, it will only + # make it through the first page of results. So we need to make sure + # if a collection looks like it uses a paginated operation then there + # should be a paginator applied to it. + botocore_session = botocore.session.get_session() + session = Session(botocore_session=botocore_session) + loader = botocore_session.get_component('data_loader') + for service_name in session.get_available_resources(): + client = session.client(service_name, region_name='us-east-1') + json_resource_model = loader.load_service_model( + service_name, 'resources-1') + resource_defs = json_resource_model['resources'] + resource_models = [] + # Get the service resource model + service_resource_model = ResourceModel( + service_name, json_resource_model['service'], resource_defs) + resource_models.append(service_resource_model) + # Generate all of the resource models for a service + for resource_name, resource_defintion in resource_defs.items(): + resource_models.append(ResourceModel( + resource_name, resource_defintion, resource_defs)) + for resource_model in resource_models: + # Iterate over all of the collections for each resource model + # and ensure that the collection has a paginator if it needs one. + for collection_model in resource_model.collections: + yield ( + _assert_collection_has_paginator_if_needed, client, + service_name, resource_name, collection_model) + + +def _assert_collection_has_paginator_if_needed( + client, service_name, resource_name, collection_model): + underlying_operation_name = collection_model.request.operation + # See if the operation can be paginated from the client. 
+ can_paginate_operation = client.can_paginate( + xform_name(underlying_operation_name)) + # See if the operation looks paginated. + looks_paginated = operation_looks_paginated( + client.meta.service_model.operation_model(underlying_operation_name)) + # Make sure that if the operation looks paginated then there is + # a paginator for the client to use for the collection. + if not can_paginate_operation: + assert_false( + looks_paginated, + 'Collection %s on resource %s of service %s uses the operation ' + '%s, but the operation has no paginator even though it looks ' + 'paginated.' % ( + collection_model.name, resource_name, service_name, + underlying_operation_name)) diff --git a/tests/unit/resources/test_factory.py b/tests/unit/resources/test_factory.py new file mode 100644 index 0000000..c9e4c20 --- /dev/null +++ b/tests/unit/resources/test_factory.py @@ -0,0 +1,853 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from botocore.model import DenormalizedStructureBuilder, ServiceModel +from tests import BaseTestCase, mock + +from boto3.exceptions import ResourceLoadException +from boto3.utils import ServiceContext +from boto3.resources.base import ServiceResource +from boto3.resources.collection import CollectionManager +from boto3.resources.factory import ResourceFactory +from boto3.resources.action import WaiterAction + + +class BaseTestResourceFactory(BaseTestCase): + def setUp(self): + super(BaseTestResourceFactory, self).setUp() + self.emitter = mock.Mock() + self.factory = ResourceFactory(self.emitter) + + def load(self, resource_name, resource_json_definition=None, + resource_json_definitions=None, service_model=None): + if resource_json_definition is None: + resource_json_definition = {} + if resource_json_definitions is None: + resource_json_definitions = {} + service_context=ServiceContext( + service_name='test', + resource_json_definitions=resource_json_definitions, + service_model=service_model, + service_waiter_model=None + ) + + return self.factory.load_from_definition( + resource_name=resource_name, + single_resource_json_definition=resource_json_definition, + service_context=service_context + ) + + +class TestResourceFactory(BaseTestResourceFactory): + def test_get_service_returns_resource_class(self): + TestResource = self.load('test') + + self.assertIn(ServiceResource, TestResource.__bases__, + 'Did not return a ServiceResource subclass for service') + + def test_get_resource_returns_resource_class(self): + QueueResource = self.load('Queue') + + self.assertIn(ServiceResource, QueueResource.__bases__, + 'Did not return a ServiceResource subclass for resource') + + def test_factory_sets_service_name(self): + QueueResource = self.load('Queue') + + self.assertEqual(QueueResource.meta.service_name, 'test', + 'Service name not set') + + def test_factory_sets_identifiers(self): + model = { + 'identifiers': [ + {'name': 'QueueUrl'}, + {'name': 'ReceiptHandle'}, + ], 
+ } + + MessageResource = self.load('Message', model) + + self.assertIn('queue_url', MessageResource.meta.identifiers, + 'Missing queue_url identifier from model') + self.assertIn('receipt_handle', MessageResource.meta.identifiers, + 'Missing receipt_handle identifier from model') + + def test_identifiers_in_repr(self): + model = { + 'identifiers': [ + {'name': 'QueueUrl'}, + {'name': 'ReceiptHandle'}, + ], + } + defs = { + 'Message': model + } + + resource = self.load('Message', model, defs)('url', 'handle') + + # Class name + self.assertIn('test.Message', repr(resource)) + + # Identifier names and values + self.assertIn('queue_url', repr(resource)) + self.assertIn("'url'", repr(resource)) + self.assertIn('receipt_handle', repr(resource)) + self.assertIn("'handle'", repr(resource)) + + def test_factory_creates_dangling_resources(self): + model = { + 'has': { + 'Queue': { + 'resource': { + 'type': 'Queue', + 'identifiers': [ + {'target': 'Url', 'source': 'input'} + ] + } + }, + 'Message': { + 'resource': { + 'type': 'Message', + 'identifiers': [ + {'target': 'QueueUrl', 'source': 'input'}, + {'target': 'Handle', 'source': 'input'} + ] + } + } + } + } + defs = { + 'Queue': {}, + 'Message': {} + } + + TestResource = self.load('test', model, defs) + + self.assertTrue(hasattr(TestResource, 'Queue'), + 'Missing Queue class from model') + self.assertTrue(hasattr(TestResource, 'Message'), + 'Missing Message class from model') + + def test_factory_creates_properties(self): + model = { + 'shape': 'TestShape', + 'load': { + 'request': { + 'operation': 'DescribeTest', + } + } + } + shape = DenormalizedStructureBuilder().with_members({ + 'ETag': { + 'type': 'string', + }, + 'LastModified': { + 'type': 'string' + } + }).build_model() + service_model = mock.Mock() + service_model.shape_for.return_value = shape + + TestResource = self.load('test', model, service_model=service_model) + + self.assertTrue(hasattr(TestResource, 'e_tag'), + 'ETag shape member not available on 
resource') + self.assertTrue(hasattr(TestResource, 'last_modified'), + 'LastModified shape member not available on resource') + + def test_factory_renames_on_clobber_identifier(self): + model = { + 'identifiers': [ + {'name': 'Meta'} + ] + } + + # Each resource has a ``meta`` defined, so this identifier + # must be renamed. + cls = self.load('test', model) + + self.assertTrue(hasattr(cls, 'meta_identifier')) + + def test_factory_fails_on_clobber_action(self): + model = { + 'identifiers': [ + {'name': 'Test'}, + {'name': 'TestAction'} + ], + 'actions': { + 'Test': { + 'request': { + 'operation': 'GetTest' + } + } + } + } + + # This fails because the resource has an identifier + # that would be clobbered by the action name. + with self.assertRaises(ValueError) as cm: + self.load('test', model) + + self.assertIn('test', str(cm.exception)) + self.assertIn('action', str(cm.exception)) + + def test_can_instantiate_service_resource(self): + TestResource = self.load('test') + resource = TestResource() + + self.assertIsInstance(resource, ServiceResource, + 'Object is not an instance of ServiceResource') + + def test_non_service_resource_missing_defs(self): + # Only services should get dangling defs + defs = { + 'Queue': { + 'identifiers': [ + {'name': 'Url'} + ] + }, + 'Message': { + 'identifiers': [ + {'name': 'QueueUrl'}, + {'name': 'ReceiptHandle'} + ] + } + } + + model = defs['Queue'] + + queue = self.load('Queue', model, defs)('url') + + self.assertTrue(not hasattr(queue, 'Queue')) + self.assertTrue(not hasattr(queue, 'Message')) + + def test_subresource_requires_only_identifier(self): + defs = { + 'Queue': { + 'identifiers': [ + {'name': 'Url'} + ], + 'has': { + 'Message': { + 'resource': { + 'type': 'Message', + 'identifiers': [ + {'target': 'QueueUrl', 'source': 'identifier', + 'name': 'Url'}, + {'target': 'ReceiptHandle', 'source': 'input'} + ] + } + } + } + }, + 'Message': { + 'identifiers': [ + {'name': 'QueueUrl'}, + {'name': 'ReceiptHandle'} + ] + } + } + + 
model = defs['Queue'] + + queue = self.load('Queue', model, defs)('url') + + # Let's create a message and only give it a receipt handle + # The required queue_url identifier should be set from the + # queue itself. + message = queue.Message('receipt') + + self.assertEqual(message.queue_url, 'url', + 'Wrong queue URL set on the message resource instance') + self.assertEqual(message.receipt_handle, 'receipt', + 'Wrong receipt handle set on the message resource instance') + + def test_resource_meta_unique(self): + queue_cls = self.load('Queue') + + queue1 = queue_cls() + queue2 = queue_cls() + + self.assertEqual(queue1.meta, queue2.meta, + 'Queue meta copies not equal after creation') + + queue1.meta.data = {'id': 'foo'} + queue2.meta.data = {'id': 'bar'} + + self.assertNotEqual(queue_cls.meta, queue1.meta, + 'Modified queue instance data should not modify the class data') + self.assertNotEqual(queue1.meta, queue2.meta, + 'Queue data should be unique to queue instance') + self.assertNotEqual(queue1.meta, 'bad-value') + + def test_resource_meta_repr(self): + queue_cls = self.load('Queue') + queue = queue_cls() + self.assertEqual(repr(queue.meta), + 'ResourceMeta(\'test\', identifiers=[])') + + @mock.patch('boto3.resources.factory.ServiceAction') + def test_resource_calls_action(self, action_cls): + model = { + 'actions': { + 'GetMessageStatus': { + 'request': { + 'operation': 'DescribeMessageStatus' + } + } + } + } + + action = action_cls.return_value + + queue = self.load('Queue', model)() + queue.get_message_status('arg1', arg2=2) + + action.assert_called_with(queue, 'arg1', arg2=2) + + @mock.patch('boto3.resources.factory.ServiceAction') + def test_resource_action_clears_data(self, action_cls): + model = { + 'load': { + 'request': { + 'operation': 'DescribeQueue' + } + }, + 'actions': { + 'GetMessageStatus': { + 'request': { + 'operation': 'DescribeMessageStatus' + } + } + } + } + + queue = self.load('Queue', model)() + + # Simulate loaded data + queue.meta.data = 
{'some': 'data'} + + # Perform a call + queue.get_message_status() + + # Cached data should be cleared + self.assertIsNone(queue.meta.data) + + @mock.patch('boto3.resources.factory.ServiceAction') + def test_resource_action_leaves_data(self, action_cls): + # This model has NO load method. Cached data should + # never be cleared since it cannot be reloaded! + model = { + 'actions': { + 'GetMessageStatus': { + 'request': { + 'operation': 'DescribeMessageStatus' + } + } + } + } + + queue = self.load('Queue', model)() + + # Simulate loaded data + queue.meta.data = {'some': 'data'} + + # Perform a call + queue.get_message_status() + + # Cached data should not be cleared + self.assertEqual(queue.meta.data, {'some': 'data'}) + + @mock.patch('boto3.resources.factory.ServiceAction') + def test_resource_lazy_loads_properties(self, action_cls): + model = { + 'shape': 'TestShape', + 'identifiers': [ + {'name': 'Url'} + ], + 'load': { + 'request': { + 'operation': 'DescribeTest', + } + } + } + shape = DenormalizedStructureBuilder().with_members({ + 'ETag': { + 'type': 'string', + 'shape_name': 'ETag' + }, + 'LastModified': { + 'type': 'string', + 'shape_name': 'LastModified' + }, + 'Url': { + 'type': 'string', + 'shape_name': 'Url' + } + }).build_model() + service_model = mock.Mock() + service_model.shape_for.return_value = shape + + action = action_cls.return_value + action.return_value = {'ETag': 'tag', 'LastModified': 'never'} + + resource = self.load( + 'test', model, service_model=service_model)('url') + + # Accessing an identifier should not call load, even if it's in + # the shape members. 
+ resource.url + action.assert_not_called() + + # Accessing a property should call load + self.assertEqual(resource.e_tag, 'tag', + 'ETag property returned wrong value') + action.assert_called_once() + + # Both params should have been loaded into the data bag + self.assertIn('ETag', resource.meta.data) + self.assertIn('LastModified', resource.meta.data) + + # Accessing another property should use cached value + # instead of making a second call. + self.assertEqual(resource.last_modified, 'never', + 'LastModified property returned wrong value') + action.assert_called_once() + + @mock.patch('boto3.resources.factory.ServiceAction') + def test_resource_lazy_properties_missing_load(self, action_cls): + model = { + 'shape': 'TestShape', + 'identifiers': [ + {'name': 'Url'} + ] + # Note the lack of a `load` method. These resources + # are usually loaded via a call on a parent resource. + } + shape = DenormalizedStructureBuilder().with_members({ + 'ETag': { + 'type': 'string', + }, + 'LastModified': { + 'type': 'string' + }, + 'Url': { + 'type': 'string' + } + }).build_model() + service_model = mock.Mock() + service_model.shape_for.return_value = shape + + action = action_cls.return_value + action.return_value = {'ETag': 'tag', 'LastModified': 'never'} + + resource = self.load( + 'test', model, service_model=service_model)('url') + + with self.assertRaises(ResourceLoadException): + resource.last_modified + + def test_resource_loads_references(self): + model = { + 'shape': 'InstanceShape', + 'identifiers': [{'name': 'GroupId'}], + 'has': { + 'Subnet': { + 'resource': { + 'type': 'Subnet', + 'identifiers': [ + {'target': 'Id', 'source': 'data', + 'path': 'SubnetId'} + ] + } + }, + 'Vpcs': { + 'resource': { + 'type': 'Vpc', + 'identifiers': [ + {'target': 'Id', 'source': 'data', + 'path': 'Vpcs[].Id'} + ] + } + } + } + } + defs = { + 'Subnet': { + 'identifiers': [{'name': 'Id'}] + }, + 'Vpc': { + 'identifiers': [{'name': 'Id'}] + } + } + service_model = ServiceModel({ + 
'shapes': { + 'InstanceShape': { + 'type': 'structure', + 'members': { + 'SubnetId': { + 'shape': 'String' + } + } + }, + 'String': { + 'type': 'string' + } + } + }) + + resource = self.load('Instance', model, defs, + service_model)('group-id') + + # Load the resource with no data + resource.meta.data = {} + + self.assertTrue( + hasattr(resource, 'subnet'), + 'Resource should have a subnet reference') + self.assertIsNone( + resource.subnet, + 'Missing identifier, should return None') + self.assertIsNone(resource.vpcs) + + # Load the resource with data to instantiate a reference + resource.meta.data = { + 'SubnetId': 'abc123', + 'Vpcs': [ + {'Id': 'vpc1'}, + {'Id': 'vpc2'} + ] + } + + self.assertIsInstance(resource.subnet, ServiceResource) + self.assertEqual(resource.subnet.id, 'abc123') + + vpcs = resource.vpcs + self.assertIsInstance(vpcs, list) + self.assertEqual(len(vpcs), 2) + self.assertEqual(vpcs[0].id, 'vpc1') + self.assertEqual(vpcs[1].id, 'vpc2') + + @mock.patch('boto3.resources.model.Collection') + def test_resource_loads_collections(self, mock_model): + model = { + 'hasMany': { + u'Queues': { + 'request': { + 'operation': 'ListQueues' + }, + 'resource': { + 'type': 'Queue' + } + } + } + } + defs = { + 'Queue': {} + } + service_model = ServiceModel({}) + mock_model.return_value.name = 'queues' + + resource = self.load('test', model, defs, service_model)() + + self.assertTrue(hasattr(resource, 'queues'), + 'Resource should expose queues collection') + self.assertIsInstance(resource.queues, CollectionManager, + 'Queues collection should be a collection manager') + + def test_resource_loads_waiters(self): + model = { + "waiters": { + "Exists": { + "waiterName": "BucketExists", + "params": [ + {"target": "Bucket", "source": "identifier", + "name": "Name"}] + } + } + } + + defs = { + 'Bucket': {} + } + service_model = ServiceModel({}) + + resource = self.load('test', model, defs, service_model)() + + self.assertTrue(hasattr(resource, 'wait_until_exists'), + 
'Resource should expose resource waiter: wait_until_exists') + + @mock.patch('boto3.resources.factory.WaiterAction') + def test_resource_waiter_calls_waiter_method(self, waiter_action_cls): + model = { + "waiters": { + "Exists": { + "waiterName": "BucketExists", + "params": [ + {"target": "Bucket", "source": "identifier", + "name": "Name"}] + } + } + } + + defs = { + 'Bucket': {} + } + service_model = ServiceModel({}) + + waiter_action = waiter_action_cls.return_value + resource = self.load('test', model, defs, service_model)() + + resource.wait_until_exists('arg1', arg2=2) + waiter_action.assert_called_with(resource, 'arg1', arg2=2) + + +class TestResourceFactoryDanglingResource(BaseTestResourceFactory): + def setUp(self): + super(TestResourceFactoryDanglingResource, self).setUp() + + self.model = { + 'has': { + 'Queue': { + 'resource': { + 'type': 'Queue', + 'identifiers': [ + {'target': 'Url', 'source': 'input'} + ] + } + } + } + } + + self.defs = { + 'Queue': { + 'identifiers': [ + {'name': 'Url'} + ] + } + } + + def test_dangling_resources_create_resource_instance(self): + resource = self.load('test', self.model, self.defs)() + q = resource.Queue('test') + + self.assertIsInstance(q, ServiceResource, + 'Dangling resource instance not a ServiceResource') + + def test_dangling_resource_create_with_kwarg(self): + resource = self.load('test', self.model, self.defs)() + q = resource.Queue(url='test') + + self.assertIsInstance(q, ServiceResource, + 'Dangling resource created with kwargs is not a ServiceResource') + + def test_dangling_resource_shares_client(self): + resource = self.load('test', self.model, self.defs)() + q = resource.Queue('test') + + self.assertEqual(resource.meta.client, q.meta.client, + 'Client was not shared to dangling resource instance') + + def test_dangling_resource_requires_identifier(self): + resource = self.load('test', self.model, self.defs)() + + with self.assertRaises(ValueError): + resource.Queue() + + def 
test_dangling_resource_raises_for_unknown_arg(self): + resource = self.load('test', self.model, self.defs)() + + with self.assertRaises(ValueError): + resource.Queue(url='foo', bar='baz') + + def test_dangling_resource_identifier_is_immutable(self): + resource = self.load('test', self.model, self.defs)() + queue = resource.Queue('url') + # We should not be able to change the identifier's value + with self.assertRaises(AttributeError): + queue.url = 'foo' + + def test_dangling_resource_equality(self): + resource = self.load('test', self.model, self.defs)() + + q1 = resource.Queue('url') + q2 = resource.Queue('url') + + self.assertEqual(q1, q2) + + def test_dangling_resource_inequality(self): + self.defs = { + 'Queue': { + 'identifiers': [{'name': 'Url'}], + 'has': { + 'Message': { + 'resource': { + 'type': 'Message', + 'identifiers': [ + {'target': 'QueueUrl', 'source': 'identifier', + 'name': 'Url'}, + {'target': 'Handle', 'source': 'input'} + ] + } + } + } + }, + 'Message': { + 'identifiers': [{'name': 'QueueUrl'}, {'name': 'Handle'}] + } + } + + resource = self.load('test', self.model, self.defs)() + + q1 = resource.Queue('url') + q2 = resource.Queue('different') + m = q1.Message('handle') + + self.assertNotEqual(q1, q2) + self.assertNotEqual(q1, m) + + def test_dangling_resource_loads_data(self): + # Given a loadable resource instance that contains a reference + # to another resource which has a resource data path, the + # referenced resource should be loaded with all of the data + # contained at that path. This allows loading references + # which would otherwise not be loadable (missing load method) + # and prevents extra load calls for others when we already + # have the data available. 
+ self.defs = { + 'Instance': { + 'identifiers': [{'name': 'Id'}], + 'has': { + 'NetworkInterface': { + 'resource': { + 'type': 'NetworkInterface', + 'identifiers': [ + {'target': 'Id', 'source': 'data', + 'path': 'NetworkInterface.Id'} + ], + 'path': 'NetworkInterface' + } + } + } + }, + 'NetworkInterface': { + 'identifiers': [{'name': 'Id'}], + 'shape': 'NetworkInterfaceShape' + } + } + self.model = self.defs['Instance'] + shape = DenormalizedStructureBuilder().with_members({ + 'Id': { + 'type': 'string', + }, + 'PublicIp': { + 'type': 'string' + } + }).build_model() + service_model = mock.Mock() + service_model.shape_for.return_value = shape + + cls = self.load('Instance', self.model, self.defs, service_model) + instance = cls('instance-id') + + # Set some data as if we had completed a load action. + def set_meta_data(): + instance.meta.data = { + 'NetworkInterface': { + 'Id': 'network-interface-id', + 'PublicIp': '127.0.0.1' + } + } + instance.load = mock.Mock(side_effect=set_meta_data) + + # Now, get the reference and make sure it has its data + # set as expected. 
+ interface = instance.network_interface + self.assertIsNotNone(interface.meta.data) + self.assertEqual(interface.public_ip, '127.0.0.1') + + +class TestServiceResourceSubresources(BaseTestResourceFactory): + def setUp(self): + super(TestServiceResourceSubresources, self).setUp() + + self.model = { + 'has': { + 'QueueObject': { + 'resource': { + 'type': 'Queue', + 'identifiers': [ + {'target': 'Url', 'source': 'input'} + ] + } + }, + 'PriorityQueue': { + 'resource': { + 'type': 'Queue', + 'identifiers': [ + {'target': 'Url', 'source': 'input'} + ] + } + } + } + } + + self.defs = { + 'Queue': { + 'identifiers': [ + {'name': 'Url'} + ] + }, + 'Message': { + 'identifiers': [ + {'name': 'QueueUrl'}, + {'name': 'ReceiptHandle'} + ] + } + } + + def test_subresource_custom_name(self): + resource = self.load('test', self.model, self.defs)() + + self.assertTrue(hasattr(resource, 'QueueObject')) + + def test_contains_all_subresources(self): + resource = self.load('test', self.model, self.defs)() + + self.assertIn('QueueObject', dir(resource)) + self.assertIn('PriorityQueue', dir(resource)) + self.assertIn('Message', dir(resource)) + + def test_subresource_missing_all_subresources(self): + resource = self.load('test', self.model, self.defs)() + message = resource.Message('url', 'handle') + + self.assertNotIn('QueueObject', dir(message)) + self.assertNotIn('PriorityQueue', dir(message)) + self.assertNotIn('Queue', dir(message)) + self.assertNotIn('Message', dir(message)) + + def test_event_emitted_when_class_created(self): + self.load('test', self.model, self.defs) + self.assertTrue(self.emitter.emit.called) + call_args = self.emitter.emit.call_args + # Verify the correct event name emitted. + self.assertEqual(call_args[0][0], + 'creating-resource-class.test.ServiceResource') + + # Verify we send out the class attributes dict. 
+ actual_class_attrs = sorted(call_args[1]['class_attributes']) + self.assertEqual(actual_class_attrs, + ['Message', 'PriorityQueue', 'QueueObject', 'meta']) + + base_classes = sorted(call_args[1]['base_classes']) + self.assertEqual(base_classes, [ServiceResource]) diff --git a/tests/unit/resources/test_model.py b/tests/unit/resources/test_model.py new file mode 100644 index 0000000..5936385 --- /dev/null +++ b/tests/unit/resources/test_model.py @@ -0,0 +1,425 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +from botocore.model import DenormalizedStructureBuilder + +from boto3.resources.model import ResourceModel, Action, Collection, Waiter +from tests import BaseTestCase + + +class TestModels(BaseTestCase): + def test_resource_name(self): + model = ResourceModel('test', {}, {}) + + self.assertEqual(model.name, 'test') + + def test_resource_shape(self): + model = ResourceModel('test', { + 'shape': 'Frob' + }, {}) + + self.assertEqual(model.shape, 'Frob') + + def test_resource_identifiers(self): + model = ResourceModel('test', { + 'identifiers': [ + {'name': 'one'}, + {'name': 'two'} + ] + }, {}) + + self.assertEqual(model.identifiers[0].name, 'one') + self.assertEqual(model.identifiers[1].name, 'two') + + def test_resource_action_raw(self): + model = ResourceModel('test', { + 'actions': { + 'GetFrobs': { + 'request': { + 'operation': 'GetFrobsOperation', + 'params': [ + {'target': 'FrobId', 'source': 'identifier', + 'name': 'Id'} + ] + }, + 'path': 'Container.Frobs[]' + } + } + }, {}) + + self.assertIsInstance(model.actions, list) + self.assertEqual(len(model.actions), 1) + + action = model.actions[0] + self.assertIsInstance(action, Action) + self.assertEqual(action.request.operation, 'GetFrobsOperation') + self.assertIsInstance(action.request.params, list) + self.assertEqual(len(action.request.params), 1) + self.assertEqual(action.request.params[0].target, 'FrobId') + self.assertEqual(action.request.params[0].source, 'identifier') + self.assertEqual(action.request.params[0].name, 'Id') + self.assertEqual(action.path, 'Container.Frobs[]') + + def test_resource_action_response_resource(self): + model = ResourceModel('test', { + 'actions': { + 'GetFrobs': { + 'resource': { + 'type': 'Frob', + 'path': 'Container.Frobs[]' + } + } + } + }, { + 'Frob': {} + }) + + action = model.actions[0] + self.assertEqual(action.resource.type, 'Frob') + self.assertEqual(action.resource.path, 'Container.Frobs[]') + self.assertIsInstance(action.resource.model, ResourceModel) + 
self.assertEqual(action.resource.model.name, 'Frob') + + def test_resource_load_action(self): + model = ResourceModel('test', { + 'load': { + 'request': { + 'operation': 'GetFrobInfo' + }, + 'path': '$' + } + }, {}) + + self.assertIsInstance(model.load, Action) + self.assertEqual(model.load.request.operation, 'GetFrobInfo') + self.assertEqual(model.load.path, '$') + + def test_resource_batch_action(self): + model = ResourceModel('test', { + 'batchActions': { + 'Delete': { + 'request': { + 'operation': 'DeleteObjects', + 'params': [ + {'target': 'Bucket', 'sourceType': 'identifier', + 'source': 'BucketName'} + ] + } + } + } + }, {}) + + self.assertIsInstance(model.batch_actions, list) + + action = model.batch_actions[0] + self.assertIsInstance(action, Action) + self.assertEqual(action.request.operation, 'DeleteObjects') + self.assertEqual(action.request.params[0].target, 'Bucket') + + def test_sub_resources(self): + model = ResourceModel('test', { + 'has': { + 'RedFrob': { + 'resource': { + 'type': 'Frob', + 'identifiers': [ + {'target': 'Id', 'source': 'input'} + ] + } + }, + 'GreenFrob': { + 'resource': { + 'type': 'Frob', + 'identifiers': [ + {'target': 'Id', 'source': 'input'} + ] + } + } + } + }, { + 'Frob': {} + }) + + self.assertIsInstance(model.subresources, list) + self.assertEqual(len(model.subresources), 2) + + action = model.subresources[0] + resource = action.resource + + self.assertIn(action.name, ['RedFrob', 'GreenFrob']) + self.assertEqual(resource.identifiers[0].target, 'Id') + self.assertEqual(resource.identifiers[0].source, 'input') + self.assertEqual(resource.type, 'Frob') + + def test_resource_references(self): + model_def = { + 'has': { + 'Frob': { + 'resource': { + 'type': 'Frob', + 'identifiers': [ + {'target':'Id', 'source':'data', + 'path':'FrobId'} + ] + } + } + } + } + resource_defs = { + 'Frob': {} + } + model = ResourceModel('test', model_def, resource_defs) + + self.assertIsInstance(model.references, list) + 
self.assertEqual(len(model.references), 1) + + ref = model.references[0] + self.assertEqual(ref.name, 'frob') + self.assertEqual(ref.resource.type, 'Frob') + self.assertEqual(ref.resource.identifiers[0].target, 'Id') + self.assertEqual(ref.resource.identifiers[0].source, 'data') + self.assertEqual(ref.resource.identifiers[0].path, 'FrobId') + + def test_resource_collections(self): + model = ResourceModel('test', { + 'hasMany': { + 'Frobs': { + 'request': { + 'operation': 'GetFrobList' + }, + 'resource': { + 'type': 'Frob', + 'path': 'FrobList[]' + } + } + } + }, { + 'Frob': {} + }) + + self.assertIsInstance(model.collections, list) + self.assertEqual(len(model.collections), 1) + self.assertIsInstance(model.collections[0], Collection) + self.assertEqual(model.collections[0].request.operation, 'GetFrobList') + self.assertEqual(model.collections[0].resource.type, 'Frob') + self.assertEqual(model.collections[0].resource.model.name, 'Frob') + self.assertEqual(model.collections[0].resource.path, 'FrobList[]') + + def test_waiter(self): + model = ResourceModel('test', { + 'waiters': { + 'Exists': { + 'waiterName': 'ObjectExists', + 'params': [ + {'target': 'Bucket', 'sourceType': 'identifier', + 'source': 'BucketName'} + ] + } + } + }, {}) + + self.assertIsInstance(model.waiters, list) + + waiter = model.waiters[0] + self.assertIsInstance(waiter, Waiter) + self.assertEqual(waiter.name, 'wait_until_exists') + self.assertEqual(waiter.waiter_name, 'ObjectExists') + self.assertEqual(waiter.params[0].target, 'Bucket') + +class TestRenaming(BaseTestCase): + def test_multiple(self): + # This tests a bunch of different renames working together + model = ResourceModel('test', { + 'identifiers': [{'name': 'Foo'}], + 'actions': { + 'Foo': {} + }, + 'has': { + 'Foo': { + 'resource': { + 'type': 'Frob', + 'identifiers': [ + {'target':'Id', 'source':'data', + 'path': 'FrobId'} + ] + } + } + }, + 'hasMany': { + 'Foo': {} + }, + 'waiters': { + 'Foo': {} + } + }, { + 'Frob': {} + }) + + 
shape = DenormalizedStructureBuilder().with_members({ + 'Foo': { + 'type': 'string', + }, + 'Bar': { + 'type': 'string' + } + }).build_model() + + model.load_rename_map(shape) + + self.assertEqual(model.identifiers[0].name, 'foo') + self.assertEqual(model.actions[0].name, 'foo_action') + self.assertEqual(model.references[0].name, 'foo_reference') + self.assertEqual(model.collections[0].name, 'foo_collection') + self.assertEqual(model.waiters[0].name, 'wait_until_foo') + + # If an identifier and an attribute share the same name, then + # the attribute is essentially hidden. + self.assertNotIn('foo_attribute', model.get_attributes(shape)) + + # Other attributes need to be there, though + self.assertIn('bar', model.get_attributes(shape)) + + # The rest of the tests below ensure the correct order of precedence + # for the various categories of attributes/properties/methods on the + # resource model. + def test_meta_beats_identifier(self): + model = ResourceModel('test', { + 'identifiers': [{'name': 'Meta'}] + }, {}) + + model.load_rename_map() + + self.assertEqual(model.identifiers[0].name, 'meta_identifier') + + def test_load_beats_identifier(self): + model = ResourceModel('test', { + 'identifiers': [{'name': 'Load'}], + 'load': { + 'request': { + 'operation': 'GetFrobs' + } + } + }, {}) + + model.load_rename_map() + + self.assertTrue(model.load) + self.assertEqual(model.identifiers[0].name, 'load_identifier') + + def test_identifier_beats_action(self): + model = ResourceModel('test', { + 'identifiers': [{'name': 'foo'}], + 'actions': { + 'Foo': { + 'request': { + 'operation': 'GetFoo' + } + } + } + }, {}) + + model.load_rename_map() + + self.assertEqual(model.identifiers[0].name, 'foo') + self.assertEqual(model.actions[0].name, 'foo_action') + + def test_action_beats_reference(self): + model = ResourceModel('test', { + 'actions': { + 'Foo': { + 'request': { + 'operation': 'GetFoo' + } + } + }, + 'has': { + 'Foo': { + 'resource': { + 'type': 'Frob', + 'identifiers': [ 
+ {'target':'Id', 'source':'data', + 'path': 'FrobId'} + ] + } + } + } + }, {'Frob': {}}) + + model.load_rename_map() + + self.assertEqual(model.actions[0].name, 'foo') + self.assertEqual(model.references[0].name, 'foo_reference') + + def test_reference_beats_collection(self): + model = ResourceModel('test', { + 'has': { + 'Foo': { + 'resource': { + 'type': 'Frob', + 'identifiers': [ + {'target':'Id', 'source':'data', + 'path': 'FrobId'} + ] + } + } + }, + 'hasMany': { + 'Foo': { + 'resource': { + 'type': 'Frob' + } + } + } + }, {'Frob': {}}) + + model.load_rename_map() + + self.assertEqual(model.references[0].name, 'foo') + self.assertEqual(model.collections[0].name, 'foo_collection') + + def test_collection_beats_waiter(self): + model = ResourceModel('test', { + 'hasMany': { + 'WaitUntilFoo': { + 'resource': { + 'type': 'Frob' + } + } + }, + 'waiters': { + 'Foo': {} + } + }, {'Frob': {}}) + + model.load_rename_map() + + self.assertEqual(model.collections[0].name, 'wait_until_foo') + self.assertEqual(model.waiters[0].name, 'wait_until_foo_waiter') + + def test_waiter_beats_attribute(self): + model = ResourceModel('test', { + 'waiters': { + 'Foo': {} + } + }, {'Frob': {}}) + + shape = DenormalizedStructureBuilder().with_members({ + 'WaitUntilFoo': { + 'type': 'string', + } + }).build_model() + + model.load_rename_map(shape) + + self.assertEqual(model.waiters[0].name, 'wait_until_foo') + self.assertIn('wait_until_foo_attribute', model.get_attributes(shape)) diff --git a/tests/unit/resources/test_params.py b/tests/unit/resources/test_params.py new file mode 100644 index 0000000..c0b58a0 --- /dev/null +++ b/tests/unit/resources/test_params.py @@ -0,0 +1,271 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +from boto3.exceptions import ResourceLoadException +from boto3.resources.base import ResourceMeta, ServiceResource +from boto3.resources.model import Request +from boto3.resources.params import create_request_parameters, \ + build_param_structure +from tests import BaseTestCase, mock + +class TestServiceActionParams(BaseTestCase): + def test_service_action_params_identifier(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'WarehouseUrl', + 'source': 'identifier', + 'name': 'Url' + } + ] + }) + + parent = mock.Mock() + parent.url = 'w-url' + + params = create_request_parameters(parent, request_model) + + self.assertEqual(params['WarehouseUrl'], 'w-url', + 'Parameter not set from resource identifier') + + def test_service_action_params_data_member(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'WarehouseUrl', + 'source': 'data', + 'path': 'SomeMember' + } + ] + }) + + parent = mock.Mock() + parent.meta = ResourceMeta('test', data={ + 'SomeMember': 'w-url' + }) + + params = create_request_parameters(parent, request_model) + + self.assertEqual(params['WarehouseUrl'], 'w-url', + 'Parameter not set from resource property') + + def test_service_action_params_data_member_missing(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'WarehouseUrl', + 'source': 'data', + 'path': 'SomeMember' + } + ] + }) + + parent = mock.Mock() + + def load_data(): + parent.meta.data = { + 'SomeMember': 'w-url' + } + + parent.load.side_effect = load_data + parent.meta = ResourceMeta('test') + + params = 
create_request_parameters(parent, request_model) + + parent.load.assert_called_with() + self.assertEqual(params['WarehouseUrl'], 'w-url', + 'Parameter not set from resource property') + + def test_service_action_params_data_member_missing_no_load(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'WarehouseUrl', + 'source': 'data', + 'path': 'SomeMember' + } + ] + }) + + # This mock has no ``load`` method. + parent = mock.Mock(spec=ServiceResource) + parent.meta = ResourceMeta('test', data=None) + + with self.assertRaises(ResourceLoadException): + params = create_request_parameters(parent, request_model) + + def test_service_action_params_constants(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'Param1', + 'source': 'string', + 'value': 'param1' + }, + { + 'target': 'Param2', + 'source': 'integer', + 'value': 123 + }, + { + 'target': 'Param3', + 'source': 'boolean', + 'value': True + } + ] + }) + + params = create_request_parameters(None, request_model) + + self.assertEqual(params['Param1'], 'param1', + 'Parameter not set from string constant') + self.assertEqual(params['Param2'], 123, + 'Parameter not set from integer constant') + self.assertEqual(params['Param3'], True, + 'Parameter not set from boolean constant') + + def test_service_action_params_input(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + {'target': 'Param1', 'source': 'input'} + ] + }) + + params = create_request_parameters(None, request_model) + self.assertEqual(params, {}) + + params['param1'] = 'myinput' + params = create_request_parameters(None, request_model, params=params) + self.assertEqual(params, {'param1': 'myinput'}) + + def test_service_action_params_invalid(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'Param1', + 'source': 'invalid' + } + ] + }) + + with self.assertRaises(NotImplementedError): + create_request_parameters(None, 
request_model) + + def test_service_action_params_list(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'WarehouseUrls[0]', + 'source': 'string', + 'value': 'w-url' + } + ] + }) + + params = create_request_parameters(None, request_model) + + self.assertIsInstance(params['WarehouseUrls'], list, + 'Parameter did not create a list') + self.assertEqual(len(params['WarehouseUrls']), 1, + 'Parameter list should only have a single item') + self.assertIn('w-url', params['WarehouseUrls'], + 'Parameter not in expected list') + + def test_service_action_params_reuse(self): + request_model = Request({ + 'operation': 'GetFrobs', + 'params': [ + { + 'target': 'Delete.Objects[].Key', + 'source': 'data', + 'path': 'Key' + } + ] + }) + + item1 = mock.Mock() + item1.meta = ResourceMeta('test', data={ + 'Key': 'item1' + }) + + item2 = mock.Mock() + item2.meta = ResourceMeta('test', data={ + 'Key': 'item2' + }) + + # Here we create params and then re-use it to build up a more + # complex structure over multiple calls. 
+ params = create_request_parameters(item1, request_model) + create_request_parameters(item2, request_model, params=params) + + self.assertEqual(params, { + 'Delete': { + 'Objects': [ + {'Key': 'item1'}, + {'Key': 'item2'} + ] + } + }) + + +class TestStructBuilder(BaseTestCase): + def test_simple_value(self): + params = {} + build_param_structure(params, 'foo', 'bar') + self.assertEqual(params['foo'], 'bar') + + def test_nested_dict(self): + params = {} + build_param_structure(params, 'foo.bar.baz', 123) + self.assertEqual(params['foo']['bar']['baz'], 123) + + def test_nested_list(self): + params = {} + build_param_structure(params, 'foo.bar[0]', 'test') + self.assertEqual(params['foo']['bar'][0], 'test') + + def test_strange_offset(self): + params = {} + build_param_structure(params, 'foo[2]', 'test') + self.assertEqual(params['foo'], [{}, {}, 'test']) + + def test_nested_list_dict(self): + params = {} + build_param_structure(params, 'foo.bar[0].baz', 123) + self.assertEqual(params['foo']['bar'][0]['baz'], 123) + + def test_modify_existing(self): + params = { + 'foo': [ + {'key': 'abc'} + ] + } + build_param_structure(params, 'foo[0].secret', 123) + self.assertEqual(params['foo'][0]['key'], 'abc') + self.assertEqual(params['foo'][0]['secret'], 123) + + def test_append_no_index(self): + params = {} + build_param_structure(params, 'foo[]', 123) + self.assertEqual(params['foo'], [123]) + + build_param_structure(params, 'foo[]', 456) + self.assertEqual(params['foo'], [123, 456]) diff --git a/tests/unit/resources/test_response.py b/tests/unit/resources/test_response.py new file mode 100644 index 0000000..61558fd --- /dev/null +++ b/tests/unit/resources/test_response.py @@ -0,0 +1,447 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +from tests import BaseTestCase, mock +from boto3.utils import ServiceContext +from boto3.resources.base import ResourceMeta, ServiceResource +from boto3.resources.model import ResponseResource, Parameter +from boto3.resources.factory import ResourceFactory +from boto3.resources.response import build_identifiers, build_empty_response,\ + RawHandler, ResourceHandler + + +class TestBuildIdentifiers(BaseTestCase): + def test_build_identifier_from_res_path_scalar(self): + identifiers = [Parameter(target='Id', source='response', + path='Container.Frob.Id')] + + parent = mock.Mock() + params = {} + response = { + 'Container': { + 'Frob': { + 'Id': 'response-path' + } + } + } + + values = build_identifiers(identifiers, parent, params, response) + + self.assertEqual(values[0][1], 'response-path', + 'Identifier loaded from responsePath scalar not set') + + def test_build_identifier_from_res_path_list(self): + identifiers = [Parameter(target='Id', source='response', + path='Container.Frobs[].Id')] + + parent = mock.Mock() + params = {} + response = { + 'Container': { + 'Frobs': [ + { + 'Id': 'response-path' + } + ] + } + } + + values = build_identifiers(identifiers, parent, params, response) + + self.assertEqual(values[0][1], ['response-path'], + 'Identifier loaded from responsePath list not set') + + def test_build_identifier_from_parent_identifier(self): + identifiers = [Parameter(target='Id', source='identifier', + name='Id')] + + parent = mock.Mock() + parent.id = 'identifier' + params = {} + response = { + 'Container': { + 'Frobs': [] + } + } + + values = build_identifiers(identifiers, parent, params, response) + + 
self.assertEqual(values[0][1], 'identifier', + 'Identifier loaded from parent identifier not set') + + def test_build_identifier_from_parent_data_member(self): + identifiers = [Parameter(target='Id', source='data', + path='Member')] + + parent = mock.Mock() + parent.meta = ResourceMeta('test', data={ + 'Member': 'data-member' + }) + params = {} + response = { + 'Container': { + 'Frobs': [] + } + } + + values = build_identifiers(identifiers, parent, params, response) + + self.assertEqual(values[0][1], 'data-member', + 'Identifier loaded from parent data member not set') + + def test_build_identifier_from_req_param(self): + identifiers = [Parameter(target='Id', source='requestParameter', + path='Param')] + + parent = mock.Mock() + params = { + 'Param': 'request-param' + } + response = { + 'Container': { + 'Frobs': [] + } + } + + values = build_identifiers(identifiers, parent, params, response) + + self.assertEqual(values[0][1], 'request-param', + 'Identifier loaded from request parameter not set') + + def test_build_identifier_from_invalid_source_type(self): + identifiers = [Parameter(target='Id', source='invalid')] + + parent = mock.Mock() + params = {} + response = { + 'Container': { + 'Frobs': [] + } + } + + with self.assertRaises(NotImplementedError): + build_identifiers(identifiers, parent, params, response) + + +class TestBuildEmptyResponse(BaseTestCase): + def setUp(self): + super(TestBuildEmptyResponse, self).setUp() + + self.search_path = '' + self.operation_name = 'GetFrobs' + + self.output_shape = mock.Mock() + + operation_model = mock.Mock() + operation_model.output_shape = self.output_shape + + self.service_model = mock.Mock() + self.service_model.operation_model.return_value = operation_model + + def get_response(self): + return build_empty_response(self.search_path, self.operation_name, + self.service_model) + + def test_empty_structure(self): + self.output_shape.type_name = 'structure' + + response = self.get_response() + + 
self.assertIsInstance(response, dict, + 'Structure should default to empty dictionary') + self.assertFalse(response.items(), + 'Dictionary should be empty') + + def test_empty_list(self): + self.output_shape.type_name = 'list' + + response = self.get_response() + + self.assertIsInstance(response, list, + 'List should default to empty list') + self.assertFalse(len(response), + 'List should be empty') + + def test_empty_map(self): + self.output_shape.type_name = 'map' + + response = self.get_response() + + self.assertIsInstance(response, dict, + 'Map should default to empty dictionary') + self.assertFalse(response.items(), + 'Dictionary should be empty') + + def test_empty_string(self): + self.output_shape.type_name = 'string' + + response = self.get_response() + + self.assertIsNone(response, + 'String should default to None') + + def test_empty_integer(self): + self.output_shape.type_name = 'integer' + + response = self.get_response() + + self.assertIsNone(response, + 'Integer should default to None') + + def test_empty_unkown_returns_none(self): + self.output_shape.type_name = 'invalid' + + response = self.get_response() + + self.assertIsNone(response, + 'Unknown types should default to None') + + def test_path_structure(self): + self.search_path = 'Container.Frob' + + frob = mock.Mock() + frob.type_name = 'integer' + + container = mock.Mock() + container.type_name = 'structure' + container.members = { + 'Frob': frob + } + + self.output_shape.type_name = 'structure' + self.output_shape.members = { + 'Container': container + } + + response = self.get_response() + + self.assertEqual(response, None) + + def test_path_list(self): + self.search_path = 'Container[1].Frob' + + frob = mock.Mock() + frob.type_name = 'integer' + + container = mock.Mock() + container.type_name = 'list' + container.member = frob + + self.output_shape.type_name = 'structure' + self.output_shape.members = { + 'Container': container + } + + response = self.get_response() + + 
self.assertEqual(response, None) + + def test_path_invalid(self): + self.search_path = 'Container.Invalid' + + container = mock.Mock() + container.type_name = 'invalid' + + self.output_shape.type_name = 'structure' + self.output_shape.members = { + 'Container': container + } + + with self.assertRaises(NotImplementedError): + self.get_response() + + +class TestRawHandler(BaseTestCase): + def test_raw_handler_response(self): + parent = mock.Mock() + params = {} + response = { + 'Id': 'foo' + } + + handler = RawHandler(search_path=None) + parsed_response = handler(parent, params, response) + + self.assertEqual(parsed_response, response, + 'Raw response not passed through unmodified') + + def test_raw_handler_response_path(self): + parent = mock.Mock() + params = {} + frob = { + 'Id': 'foo' + } + response = { + 'Container': { + 'Frob': frob + } + } + + handler = RawHandler(search_path='Container.Frob') + parsed_response = handler(parent, params, response) + + self.assertEqual(parsed_response, frob, + 'Search path not processed correctly') + + +class TestResourceHandler(BaseTestCase): + def setUp(self): + super(TestResourceHandler, self).setUp() + self.identifier_path = '' + self.factory = ResourceFactory(mock.Mock()) + self.resource_defs = { + 'Frob': { + 'shape': 'Frob', + 'identifiers': [ + {'name': 'Id'} + ] + } + } + self.service_model = mock.Mock() + shape = mock.Mock() + shape.members = {} + self.service_model.shape_for.return_value = shape + + frobs = mock.Mock() + frobs.type_name = 'list' + container = mock.Mock() + container.type_name = 'structure' + container.members = { + 'Frobs': frobs + } + self.output_shape = mock.Mock() + self.output_shape.type_name = 'structure' + self.output_shape.members = { + 'Container': container + } + operation_model = mock.Mock() + operation_model.output_shape = self.output_shape + self.service_model.operation_model.return_value = operation_model + + self.parent = mock.Mock() + self.parent.meta = ResourceMeta('test', 
client=mock.Mock()) + self.params = {} + + def get_resource(self, search_path, response): + request_resource_def = { + 'type': 'Frob', + 'identifiers': [ + {'target': 'Id', 'source': 'response', + 'path': self.identifier_path}, + ] + } + resource_model = ResponseResource( + request_resource_def, self.resource_defs) + + handler = ResourceHandler( + search_path=search_path, factory=self.factory, + resource_model=resource_model, + service_context=ServiceContext( + service_name='myservice', + resource_json_definitions=self.resource_defs, + service_model=self.service_model, + service_waiter_model=None + ), + operation_name='GetFrobs' + ) + return handler(self.parent, self.params, response) + + def test_create_resource_scalar(self): + self.identifier_path = 'Container.Id' + search_path = 'Container' + response = { + 'Container': { + 'Id': 'a-frob', + 'OtherValue': 'other', + } + } + resource = self.get_resource(search_path, response) + + self.assertIsInstance(resource, ServiceResource, + 'No resource instance returned from handler') + + @mock.patch('boto3.resources.response.build_empty_response') + def test_missing_data_scalar_builds_empty_response(self, build_mock): + self.identifier_path = 'Container.Id' + search_path = 'Container' + response = { + 'something': 'irrelevant' + } + + resources = self.get_resource(search_path, response) + + self.assertTrue(build_mock.called, + 'build_empty_response was never called') + self.assertEqual(resources, build_mock.return_value, + 'build_empty_response return value was not returned') + + def test_create_resource_list(self): + self.identifier_path = 'Container.Frobs[].Id' + search_path = 'Container.Frobs[]' + response = { + 'Container': { + 'Frobs': [ + { + 'Id': 'a-frob', + 'OtherValue': 'other', + }, + { + 'Id': 'another-frob', + 'OtherValue': 'foo', + } + ] + } + } + + resources = self.get_resource(search_path, response) + + self.assertIsInstance(resources, list, + 'No list returned from handler') + 
self.assertEqual(len(resources), 2, + 'Exactly two frobs should be returned') + self.assertIsInstance(resources[0], ServiceResource, + 'List items are not resource instances') + + def test_create_resource_list_no_search_path(self): + self.identifier_path = '[].Id' + search_path = '' + response = [ + { + 'Id': 'a-frob', + 'OtherValue': 'other' + } + ] + + resources = self.get_resource(search_path, response) + + self.assertIsInstance(resources, list, + 'No list returned from handler') + self.assertEqual(len(resources), 1, + 'Exactly one frob should be returned') + self.assertIsInstance(resources[0], ServiceResource, + 'List items are not resource instances') + + @mock.patch('boto3.resources.response.build_empty_response') + def test_missing_data_list_builds_empty_response(self, build_mock): + self.identifier_path = 'Container.Frobs[].Id' + search_path = 'Container.Frobs[]' + response = { + 'something': 'irrelevant' + } + + resources = self.get_resource(search_path, response) + + self.assertTrue(build_mock.called, + 'build_empty_response was never called') + self.assertEqual(resources, build_mock.return_value, + 'build_empty_response return value was not returned') diff --git a/tests/unit/s3/__init__.py b/tests/unit/s3/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/s3/test_inject.py b/tests/unit/s3/test_inject.py new file mode 100644 index 0000000..666a238 --- /dev/null +++ b/tests/unit/s3/test_inject.py @@ -0,0 +1,113 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. +from tests import unittest +import mock + +from botocore.exceptions import ClientError + +from boto3.s3 import inject + + +class TestInjectTransferMethods(unittest.TestCase): + def test_inject_upload_download_file_to_client(self): + class_attributes = {} + inject.inject_s3_transfer_methods(class_attributes=class_attributes) + self.assertIn('upload_file', class_attributes) + self.assertIn('download_file', class_attributes) + + def test_upload_file_proxies_to_transfer_object(self): + with mock.patch('boto3.s3.inject.S3Transfer') as transfer: + inject.upload_file(mock.sentinel.CLIENT, + Filename='filename', + Bucket='bucket', Key='key') + transfer.return_value.upload_file.assert_called_with( + filename='filename', bucket='bucket', key='key', + extra_args=None, callback=None) + + def test_download_file_proxies_to_transfer_object(self): + with mock.patch('boto3.s3.inject.S3Transfer') as transfer: + inject.download_file( + mock.sentinel.CLIENT, + Bucket='bucket', Key='key', + Filename='filename') + transfer.return_value.download_file.assert_called_with( + bucket='bucket', key='key', filename='filename', + extra_args=None, callback=None) + + +class TestBucketLoad(unittest.TestCase): + def setUp(self): + self.client = mock.Mock() + self.resource = mock.Mock() + self.resource.meta.client = self.client + + def test_bucket_load_finds_bucket(self): + self.resource.name = 'MyBucket' + self.client.list_buckets.return_value = { + 'Buckets': [ + {'Name': 'NotMyBucket', 'CreationDate': 1}, + {'Name': self.resource.name, 'CreationDate': 2}, + ], + } + + inject.bucket_load(self.resource) + self.assertEqual( + self.resource.meta.data, + {'Name': self.resource.name, 'CreationDate': 2}) + + def test_bucket_load_raise_error(self): + self.resource.name = 'MyBucket' + self.client.list_buckets.return_value = { + 'Buckets': [ + {'Name': 'NotMyBucket', 'CreationDate': 1}, + {'Name': 
'NotMine2', 'CreationDate': 2}, + ], + } + with self.assertRaises(ClientError): + inject.bucket_load(self.resource) + + +class TestBucketTransferMethods(unittest.TestCase): + + def setUp(self): + self.bucket = mock.Mock(name='my_bucket') + + def test_upload_file_proxies_to_meta_client(self): + inject.bucket_upload_file(self.bucket, Filename='foo', Key='key') + self.bucket.meta.client.upload_file.assert_called_with( + Filename='foo', Bucket=self.bucket.name, Key='key', + ExtraArgs=None, Callback=None, Config=None) + + def test_download_file_proxies_to_meta_client(self): + inject.bucket_download_file(self.bucket, Key='key', Filename='foo') + self.bucket.meta.client.download_file.assert_called_with( + Bucket=self.bucket.name, Key='key', Filename='foo', + ExtraArgs=None, Callback=None, Config=None) + + +class TestObjectTransferMethods(unittest.TestCase): + + def setUp(self): + self.obj = mock.Mock(bucket_name='my_bucket', key='my_key') + + def test_upload_file_proxies_to_meta_client(self): + inject.object_upload_file(self.obj, Filename='foo') + self.obj.meta.client.upload_file.assert_called_with( + Filename='foo', Bucket=self.obj.bucket_name, Key=self.obj.key, + ExtraArgs=None, Callback=None, Config=None) + + def test_download_file_proxies_to_meta_client(self): + inject.object_download_file(self.obj, Filename='foo') + self.obj.meta.client.download_file.assert_called_with( + Bucket=self.obj.bucket_name, Key=self.obj.key, Filename='foo', + ExtraArgs=None, Callback=None, Config=None) diff --git a/tests/unit/s3/test_transfer.py b/tests/unit/s3/test_transfer.py new file mode 100644 index 0000000..fb778fd --- /dev/null +++ b/tests/unit/s3/test_transfer.py @@ -0,0 +1,726 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the 'license' file accompanying this file. This file is +# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import os +import tempfile +import shutil +import socket +from tests import unittest +from contextlib import closing + +import mock +from botocore.vendored import six +from concurrent import futures + +from boto3.exceptions import RetriesExceededError +from boto3.exceptions import S3UploadFailedError +from boto3.s3.transfer import ReadFileChunk, StreamReaderProgress +from boto3.s3.transfer import S3Transfer +from boto3.s3.transfer import OSUtils, TransferConfig +from boto3.s3.transfer import MultipartDownloader, MultipartUploader +from boto3.s3.transfer import ShutdownQueue +from boto3.s3.transfer import QueueShutdownError +from boto3.s3.transfer import random_file_extension +from boto3.s3.transfer import disable_upload_callbacks, enable_upload_callbacks + + +class InMemoryOSLayer(OSUtils): + def __init__(self, filemap): + self.filemap = filemap + + def get_file_size(self, filename): + return len(self.filemap[filename]) + + def open_file_chunk_reader(self, filename, start_byte, size, callback): + return closing(six.BytesIO(self.filemap[filename])) + + def open(self, filename, mode): + if 'wb' in mode: + fileobj = six.BytesIO() + self.filemap[filename] = fileobj + return closing(fileobj) + else: + return closing(self.filemap[filename]) + + def remove_file(self, filename): + if filename in self.filemap: + del self.filemap[filename] + + def rename_file(self, current_filename, new_filename): + if current_filename in self.filemap: + self.filemap[new_filename] = self.filemap.pop( + current_filename) + + +class SequentialExecutor(object): + def __init__(self, max_workers): + pass + + def __enter__(self): + return self 
+ + def __exit__(self, *args, **kwargs): + pass + + # The real map() interface actually takes *args, but we specifically do + # _not_ use this interface. + def map(self, function, args): + results = [] + for arg in args: + results.append(function(arg)) + return results + + def submit(self, function): + future = futures.Future() + future.set_result(function()) + return future + + +class TestOSUtils(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.mkdtemp() + + def tearDown(self): + shutil.rmtree(self.tempdir) + + def test_get_file_size(self): + with mock.patch('os.path.getsize') as m: + OSUtils().get_file_size('myfile') + m.assert_called_with('myfile') + + def test_open_file_chunk_reader(self): + with mock.patch('boto3.s3.transfer.ReadFileChunk') as m: + OSUtils().open_file_chunk_reader('myfile', 0, 100, None) + m.from_filename.assert_called_with('myfile', 0, 100, + None, enable_callback=False) + + def test_open_file(self): + fileobj = OSUtils().open(os.path.join(self.tempdir, 'foo'), 'w') + self.assertTrue(hasattr(fileobj, 'write')) + + def test_remove_file_ignores_errors(self): + with mock.patch('os.remove') as remove: + remove.side_effect = OSError('fake error') + OSUtils().remove_file('foo') + remove.assert_called_with('foo') + + def test_remove_file_proxies_remove_file(self): + with mock.patch('os.remove') as remove: + OSUtils().remove_file('foo') + remove.assert_called_with('foo') + + def test_rename_file(self): + with mock.patch('boto3.compat.rename_file') as rename_file: + OSUtils().rename_file('foo', 'newfoo') + rename_file.assert_called_with('foo', 'newfoo') + + +class TestReadFileChunk(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.mkdtemp() + + def tearDown(self): + shutil.rmtree(self.tempdir) + + def test_read_entire_chunk(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'onetwothreefourfivesixseveneightnineten') + chunk = ReadFileChunk.from_filename( + filename, 
start_byte=0, chunk_size=3) + self.assertEqual(chunk.read(), b'one') + self.assertEqual(chunk.read(), b'') + + def test_read_with_amount_size(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'onetwothreefourfivesixseveneightnineten') + chunk = ReadFileChunk.from_filename( + filename, start_byte=11, chunk_size=4) + self.assertEqual(chunk.read(1), b'f') + self.assertEqual(chunk.read(1), b'o') + self.assertEqual(chunk.read(1), b'u') + self.assertEqual(chunk.read(1), b'r') + self.assertEqual(chunk.read(1), b'') + + def test_reset_stream_emulation(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'onetwothreefourfivesixseveneightnineten') + chunk = ReadFileChunk.from_filename( + filename, start_byte=11, chunk_size=4) + self.assertEqual(chunk.read(), b'four') + chunk.seek(0) + self.assertEqual(chunk.read(), b'four') + + def test_read_past_end_of_file(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'onetwothreefourfivesixseveneightnineten') + chunk = ReadFileChunk.from_filename( + filename, start_byte=36, chunk_size=100000) + self.assertEqual(chunk.read(), b'ten') + self.assertEqual(chunk.read(), b'') + self.assertEqual(len(chunk), 3) + + def test_tell_and_seek(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'onetwothreefourfivesixseveneightnineten') + chunk = ReadFileChunk.from_filename( + filename, start_byte=36, chunk_size=100000) + self.assertEqual(chunk.tell(), 0) + self.assertEqual(chunk.read(), b'ten') + self.assertEqual(chunk.tell(), 3) + chunk.seek(0) + self.assertEqual(chunk.tell(), 0) + + def test_callback_is_invoked_on_read(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'abc') + amounts_seen = [] + + def callback(amount): + amounts_seen.append(amount) + + chunk = ReadFileChunk.from_filename( + filename, 
start_byte=0, chunk_size=3, callback=callback) + chunk.read(1) + chunk.read(1) + chunk.read(1) + + self.assertEqual(amounts_seen, [1, 1, 1]) + + def test_callback_can_be_disabled(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'abc') + callback_calls = [] + + def callback(amount): + callback_calls.append(amount) + + chunk = ReadFileChunk.from_filename( + filename, start_byte=0, chunk_size=3, callback=callback) + chunk.disable_callback() + # Now reading from the ReadFileChunk should not invoke + # the callback. + chunk.read() + self.assertEqual(callback_calls, []) + + def test_file_chunk_supports_context_manager(self): + filename = os.path.join(self.tempdir, 'foo') + with open(filename, 'wb') as f: + f.write(b'abc') + with ReadFileChunk.from_filename(filename, + start_byte=0, + chunk_size=2) as chunk: + val = chunk.read() + self.assertEqual(val, b'ab') + + def test_iter_is_always_empty(self): + # This tests the workaround for the httplib bug (see + # the source for more info). 
+ filename = os.path.join(self.tempdir, 'foo') + open(filename, 'wb').close() + chunk = ReadFileChunk.from_filename( + filename, start_byte=0, chunk_size=10) + self.assertEqual(list(chunk), []) + + +class TestStreamReaderProgress(unittest.TestCase): + + def test_proxies_to_wrapped_stream(self): + original_stream = six.StringIO('foobarbaz') + wrapped = StreamReaderProgress(original_stream) + self.assertEqual(wrapped.read(), 'foobarbaz') + + def test_callback_invoked(self): + amounts_seen = [] + + def callback(amount): + amounts_seen.append(amount) + + original_stream = six.StringIO('foobarbaz') + wrapped = StreamReaderProgress(original_stream, callback) + self.assertEqual(wrapped.read(), 'foobarbaz') + self.assertEqual(amounts_seen, [9]) + + +class TestMultipartUploader(unittest.TestCase): + def test_multipart_upload_uses_correct_client_calls(self): + client = mock.Mock() + uploader = MultipartUploader( + client, TransferConfig(), + InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor) + client.create_multipart_upload.return_value = {'UploadId': 'upload_id'} + client.upload_part.return_value = {'ETag': 'first'} + + uploader.upload_file('filename', 'bucket', 'key', None, {}) + + # We need to check both the sequence of calls (create/upload/complete) + # as well as the params passed between the calls, including + # 1. The upload_id was plumbed through + # 2. The collected etags were added to the complete call. + client.create_multipart_upload.assert_called_with( + Bucket='bucket', Key='key') + # Should be two parts. 
+ client.upload_part.assert_called_with( + Body=mock.ANY, Bucket='bucket', + UploadId='upload_id', Key='key', PartNumber=1) + client.complete_multipart_upload.assert_called_with( + MultipartUpload={'Parts': [{'PartNumber': 1, 'ETag': 'first'}]}, + Bucket='bucket', + UploadId='upload_id', + Key='key') + + def test_multipart_upload_injects_proper_kwargs(self): + client = mock.Mock() + uploader = MultipartUploader( + client, TransferConfig(), + InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor) + client.create_multipart_upload.return_value = {'UploadId': 'upload_id'} + client.upload_part.return_value = {'ETag': 'first'} + + extra_args = { + 'SSECustomerKey': 'fakekey', + 'SSECustomerAlgorithm': 'AES256', + 'StorageClass': 'REDUCED_REDUNDANCY' + } + uploader.upload_file('filename', 'bucket', 'key', None, extra_args) + + client.create_multipart_upload.assert_called_with( + Bucket='bucket', Key='key', + # The initial call should inject all the storage class params. + SSECustomerKey='fakekey', + SSECustomerAlgorithm='AES256', + StorageClass='REDUCED_REDUNDANCY') + client.upload_part.assert_called_with( + Body=mock.ANY, Bucket='bucket', + UploadId='upload_id', Key='key', PartNumber=1, + # We only have to forward certain **extra_args in subsequent + # UploadPart calls. + SSECustomerKey='fakekey', + SSECustomerAlgorithm='AES256', + ) + client.complete_multipart_upload.assert_called_with( + MultipartUpload={'Parts': [{'PartNumber': 1, 'ETag': 'first'}]}, + Bucket='bucket', + UploadId='upload_id', + Key='key') + + def test_multipart_upload_is_aborted_on_error(self): + # If the create_multipart_upload succeeds and any upload_part + # fails, then abort_multipart_upload will be called. 
+ client = mock.Mock() + uploader = MultipartUploader( + client, TransferConfig(), + InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor) + client.create_multipart_upload.return_value = {'UploadId': 'upload_id'} + client.upload_part.side_effect = Exception( + "Some kind of error occurred.") + + with self.assertRaises(S3UploadFailedError): + uploader.upload_file('filename', 'bucket', 'key', None, {}) + + client.abort_multipart_upload.assert_called_with( + Bucket='bucket', Key='key', UploadId='upload_id') + + +class TestMultipartDownloader(unittest.TestCase): + + maxDiff = None + + def test_multipart_download_uses_correct_client_calls(self): + client = mock.Mock() + response_body = b'foobarbaz' + client.get_object.return_value = {'Body': six.BytesIO(response_body)} + + downloader = MultipartDownloader(client, TransferConfig(), + InMemoryOSLayer({}), + SequentialExecutor) + downloader.download_file('bucket', 'key', 'filename', + len(response_body), {}) + + client.get_object.assert_called_with( + Range='bytes=0-', + Bucket='bucket', + Key='key' + ) + + def test_multipart_download_with_multiple_parts(self): + client = mock.Mock() + response_body = b'foobarbaz' + client.get_object.return_value = {'Body': six.BytesIO(response_body)} + # For testing purposes, we're testing with a multipart threshold + # of 4 bytes and a chunksize of 4 bytes. Given b'foobarbaz', + # this should result in 3 calls. In python slices this would be: + # r[0:4], r[4:8], r[8:9]. But the Range param will be slightly + # different because they use inclusive ranges. + config = TransferConfig(multipart_threshold=4, + multipart_chunksize=4) + + downloader = MultipartDownloader(client, config, + InMemoryOSLayer({}), + SequentialExecutor) + downloader.download_file('bucket', 'key', 'filename', + len(response_body), {}) + + # We're storing these in **extra because the assertEqual + # below is really about verifying we have the correct value + # for the Range param. 
+ extra = {'Bucket': 'bucket', 'Key': 'key'} + self.assertEqual(client.get_object.call_args_list, + # Note these are inclusive ranges. + [mock.call(Range='bytes=0-3', **extra), + mock.call(Range='bytes=4-7', **extra), + mock.call(Range='bytes=8-', **extra)]) + + def test_retry_on_failures_from_stream_reads(self): + # If we get an exception during a call to the response body's .read() + # method, we should retry the request. + client = mock.Mock() + response_body = b'foobarbaz' + stream_with_errors = mock.Mock() + stream_with_errors.read.side_effect = [ + socket.error("fake error"), + response_body + ] + client.get_object.return_value = {'Body': stream_with_errors} + config = TransferConfig(multipart_threshold=4, + multipart_chunksize=4) + + downloader = MultipartDownloader(client, config, + InMemoryOSLayer({}), + SequentialExecutor) + downloader.download_file('bucket', 'key', 'filename', + len(response_body), {}) + + # We're storing these in **extra because the assertEqual + # below is really about verifying we have the correct value + # for the Range param. + extra = {'Bucket': 'bucket', 'Key': 'key'} + self.assertEqual(client.get_object.call_args_list, + # The first call to range=0-3 fails because of the + # side_effect above where we make the .read() raise a + # socket.error. + # The second call to range=0-3 then succeeds. 
+ [mock.call(Range='bytes=0-3', **extra), + mock.call(Range='bytes=0-3', **extra), + mock.call(Range='bytes=4-7', **extra), + mock.call(Range='bytes=8-', **extra)]) + + def test_exception_raised_on_exceeded_retries(self): + client = mock.Mock() + response_body = b'foobarbaz' + stream_with_errors = mock.Mock() + stream_with_errors.read.side_effect = socket.error("fake error") + client.get_object.return_value = {'Body': stream_with_errors} + config = TransferConfig(multipart_threshold=4, + multipart_chunksize=4) + + downloader = MultipartDownloader(client, config, + InMemoryOSLayer({}), + SequentialExecutor) + with self.assertRaises(RetriesExceededError): + downloader.download_file('bucket', 'key', 'filename', + len(response_body), {}) + + def test_io_thread_failure_triggers_shutdown(self): + client = mock.Mock() + response_body = b'foobarbaz' + client.get_object.return_value = {'Body': six.BytesIO(response_body)} + os_layer = mock.Mock() + mock_fileobj = mock.MagicMock() + mock_fileobj.__enter__.return_value = mock_fileobj + mock_fileobj.write.side_effect = Exception("fake IO error") + os_layer.open.return_value = mock_fileobj + + downloader = MultipartDownloader(client, TransferConfig(), + os_layer, SequentialExecutor) + # We're verifying that the exception raised from the IO future + # propogates back up via download_file(). + with self.assertRaisesRegexp(Exception, "fake IO error"): + downloader.download_file('bucket', 'key', 'filename', + len(response_body), {}) + + def test_download_futures_fail_triggers_shutdown(self): + class FailedDownloadParts(SequentialExecutor): + def __init__(self, max_workers): + self.is_first = True + + def submit(self, function): + future = super(FailedDownloadParts, self).submit(function) + if self.is_first: + # This is the download_parts_thread. 
+ future.set_exception( + Exception("fake download parts error")) + self.is_first = False + return future + + client = mock.Mock() + response_body = b'foobarbaz' + client.get_object.return_value = {'Body': six.BytesIO(response_body)} + + downloader = MultipartDownloader(client, TransferConfig(), + InMemoryOSLayer({}), + FailedDownloadParts) + with self.assertRaisesRegexp(Exception, "fake download parts error"): + downloader.download_file('bucket', 'key', 'filename', + len(response_body), {}) + + +class TestS3Transfer(unittest.TestCase): + def setUp(self): + self.client = mock.Mock() + self.random_file_patch = mock.patch( + 'boto3.s3.transfer.random_file_extension') + self.random_file = self.random_file_patch.start() + self.random_file.return_value = 'RANDOM' + + def tearDown(self): + self.random_file_patch.stop() + + def test_callback_handlers_register_on_put_item(self): + osutil = InMemoryOSLayer({'smallfile': b'foobar'}) + transfer = S3Transfer(self.client, osutil=osutil) + transfer.upload_file('smallfile', 'bucket', 'key') + events = self.client.meta.events + events.register_first.assert_called_with( + 'request-created.s3', + disable_upload_callbacks, + unique_id='s3upload-callback-disable', + ) + events.register_last.assert_called_with( + 'request-created.s3', + enable_upload_callbacks, + unique_id='s3upload-callback-enable', + ) + + def test_upload_below_multipart_threshold_uses_put_object(self): + fake_files = { + 'smallfile': b'foobar', + } + osutil = InMemoryOSLayer(fake_files) + transfer = S3Transfer(self.client, osutil=osutil) + transfer.upload_file('smallfile', 'bucket', 'key') + self.client.put_object.assert_called_with( + Bucket='bucket', Key='key', Body=mock.ANY + ) + + def test_extra_args_on_uploaded_passed_to_api_call(self): + extra_args = {'ACL': 'public-read'} + fake_files = { + 'smallfile': b'hello world' + } + osutil = InMemoryOSLayer(fake_files) + transfer = S3Transfer(self.client, osutil=osutil) + transfer.upload_file('smallfile', 'bucket', 
'key', + extra_args=extra_args) + self.client.put_object.assert_called_with( + Bucket='bucket', Key='key', Body=mock.ANY, + ACL='public-read' + ) + + def test_uses_multipart_upload_when_over_threshold(self): + with mock.patch('boto3.s3.transfer.MultipartUploader') as uploader: + fake_files = { + 'smallfile': b'foobar', + } + osutil = InMemoryOSLayer(fake_files) + config = TransferConfig(multipart_threshold=2, + multipart_chunksize=2) + transfer = S3Transfer(self.client, osutil=osutil, config=config) + transfer.upload_file('smallfile', 'bucket', 'key') + + uploader.return_value.upload_file.assert_called_with( + 'smallfile', 'bucket', 'key', None, {}) + + def test_uses_multipart_download_when_over_threshold(self): + with mock.patch('boto3.s3.transfer.MultipartDownloader') as downloader: + osutil = InMemoryOSLayer({}) + over_multipart_threshold = 100 * 1024 * 1024 + transfer = S3Transfer(self.client, osutil=osutil) + callback = mock.sentinel.CALLBACK + self.client.head_object.return_value = { + 'ContentLength': over_multipart_threshold, + } + transfer.download_file('bucket', 'key', 'filename', + callback=callback) + + downloader.return_value.download_file.assert_called_with( + # Note how we're downloading to a temorary random file. 
+ 'bucket', 'key', 'filename.RANDOM', over_multipart_threshold, + {}, callback) + + def test_download_file_with_invalid_extra_args(self): + below_threshold = 20 + osutil = InMemoryOSLayer({}) + transfer = S3Transfer(self.client, osutil=osutil) + self.client.head_object.return_value = { + 'ContentLength': below_threshold} + with self.assertRaises(ValueError): + transfer.download_file('bucket', 'key', '/tmp/smallfile', + extra_args={'BadValue': 'foo'}) + + def test_upload_file_with_invalid_extra_args(self): + osutil = InMemoryOSLayer({}) + transfer = S3Transfer(self.client, osutil=osutil) + bad_args = {"WebsiteRedirectLocation": "/foo"} + with self.assertRaises(ValueError): + transfer.upload_file('bucket', 'key', '/tmp/smallfile', + extra_args=bad_args) + + def test_download_file_fowards_extra_args(self): + extra_args = { + 'SSECustomerKey': 'foo', + 'SSECustomerAlgorithm': 'AES256', + } + below_threshold = 20 + osutil = InMemoryOSLayer({'smallfile': b'hello world'}) + transfer = S3Transfer(self.client, osutil=osutil) + self.client.head_object.return_value = { + 'ContentLength': below_threshold} + self.client.get_object.return_value = { + 'Body': six.BytesIO(b'foobar') + } + transfer.download_file('bucket', 'key', '/tmp/smallfile', + extra_args=extra_args) + + # Note that we need to invoke the HeadObject call + # and the PutObject call with the extra_args. + # This is necessary. Trying to HeadObject an SSE object + # will return a 400 if you don't provide the required + # params. + self.client.get_object.assert_called_with( + Bucket='bucket', Key='key', SSECustomerAlgorithm='AES256', + SSECustomerKey='foo') + + def test_get_object_stream_is_retried_and_succeeds(self): + below_threshold = 20 + osutil = InMemoryOSLayer({'smallfile': b'hello world'}) + transfer = S3Transfer(self.client, osutil=osutil) + self.client.head_object.return_value = { + 'ContentLength': below_threshold} + self.client.get_object.side_effect = [ + # First request fails. 
+ socket.error("fake error"), + # Second succeeds. + {'Body': six.BytesIO(b'foobar')} + ] + transfer.download_file('bucket', 'key', '/tmp/smallfile') + + self.assertEqual(self.client.get_object.call_count, 2) + + def test_get_object_stream_uses_all_retries_and_errors_out(self): + below_threshold = 20 + osutil = InMemoryOSLayer({}) + transfer = S3Transfer(self.client, osutil=osutil) + self.client.head_object.return_value = { + 'ContentLength': below_threshold} + # Here we're raising an exception every single time, which + # will exhaust our retry count and propogate a + # RetriesExceededError. + self.client.get_object.side_effect = socket.error("fake error") + with self.assertRaises(RetriesExceededError): + transfer.download_file('bucket', 'key', 'smallfile') + + self.assertEqual(self.client.get_object.call_count, 5) + # We should have also cleaned up the in progress file + # we were downloading to. + self.assertEqual(osutil.filemap, {}) + + def test_download_below_multipart_threshold(self): + below_threshold = 20 + osutil = InMemoryOSLayer({'smallfile': b'hello world'}) + transfer = S3Transfer(self.client, osutil=osutil) + self.client.head_object.return_value = { + 'ContentLength': below_threshold} + self.client.get_object.return_value = { + 'Body': six.BytesIO(b'foobar') + } + transfer.download_file('bucket', 'key', 'smallfile') + + self.client.get_object.assert_called_with(Bucket='bucket', Key='key') + + def test_can_create_with_just_client(self): + transfer = S3Transfer(client=mock.Mock()) + self.assertIsInstance(transfer, S3Transfer) + + +class TestShutdownQueue(unittest.TestCase): + def test_handles_normal_put_get_requests(self): + q = ShutdownQueue() + q.put('foo') + self.assertEqual(q.get(), 'foo') + + def test_put_raises_error_on_shutdown(self): + q = ShutdownQueue() + q.trigger_shutdown() + with self.assertRaises(QueueShutdownError): + q.put('foo') + + +class TestRandomFileExtension(unittest.TestCase): + def test_has_proper_length(self): + 
self.assertEqual( + len(random_file_extension(num_digits=4)), 4) + + +class TestCallbackHandlers(unittest.TestCase): + def setUp(self): + self.request = mock.Mock() + + def test_disable_request_on_put_object(self): + disable_upload_callbacks(self.request, + 'PutObject') + self.request.body.disable_callback.assert_called_with() + + def test_disable_request_on_upload_part(self): + disable_upload_callbacks(self.request, + 'UploadPart') + self.request.body.disable_callback.assert_called_with() + + def test_enable_object_on_put_object(self): + enable_upload_callbacks(self.request, + 'PutObject') + self.request.body.enable_callback.assert_called_with() + + def test_enable_object_on_upload_part(self): + enable_upload_callbacks(self.request, + 'UploadPart') + self.request.body.enable_callback.assert_called_with() + + def test_dont_disable_if_missing_interface(self): + del self.request.body.disable_callback + disable_upload_callbacks(self.request, + 'PutObject') + self.assertEqual(self.request.body.method_calls, []) + + def test_dont_enable_if_missing_interface(self): + del self.request.body.enable_callback + enable_upload_callbacks(self.request, + 'PutObject') + self.assertEqual(self.request.body.method_calls, []) + + def test_dont_disable_if_wrong_operation(self): + disable_upload_callbacks(self.request, + 'OtherOperation') + self.assertFalse( + self.request.body.disable_callback.called) + + def test_dont_enable_if_wrong_operation(self): + enable_upload_callbacks(self.request, + 'OtherOperation') + self.assertFalse( + self.request.body.enable_callback.called) diff --git a/tests/unit/test_boto3.py b/tests/unit/test_boto3.py new file mode 100644 index 0000000..840e864 --- /dev/null +++ b/tests/unit/test_boto3.py @@ -0,0 +1,107 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import boto3 + +from tests import mock, unittest + + +class TestBoto3(unittest.TestCase): + def setUp(self): + self.session_patch = mock.patch('boto3.Session', autospec=True) + self.Session = self.session_patch.start() + + def tearDown(self): + boto3.DEFAULT_SESSION = None + self.session_patch.stop() + + def test_create_default_session(self): + session = self.Session.return_value + + boto3.setup_default_session() + + self.assertEqual(boto3.DEFAULT_SESSION, session, + 'Default session not created properly') + + def test_create_default_session_with_args(self): + boto3.setup_default_session( + aws_access_key_id='key', + aws_secret_access_key='secret') + + self.Session.assert_called_with( + aws_access_key_id='key', + aws_secret_access_key='secret') + + @mock.patch('boto3.setup_default_session', + wraps=boto3.setup_default_session) + def test_client_creates_default_session(self, setup_session): + boto3.DEFAULT_SESSION = None + + boto3.client('sqs') + + self.assertTrue(setup_session.called, + 'setup_default_session not called') + self.assertTrue(boto3.DEFAULT_SESSION.client.called, + 'Default session client method not called') + + @mock.patch('boto3.setup_default_session', + wraps=boto3.setup_default_session) + def test_client_uses_existing_session(self, setup_session): + boto3.DEFAULT_SESSION = self.Session() + + boto3.client('sqs') + + self.assertFalse(setup_session.called, + 'setup_default_session should not have been called') + self.assertTrue(boto3.DEFAULT_SESSION.client.called, + 'Default session client method not called') + + def test_client_passes_through_arguments(self): + boto3.DEFAULT_SESSION = 
self.Session() + + boto3.client('sqs', region_name='us-west-2', verify=False) + + boto3.DEFAULT_SESSION.client.assert_called_with( + 'sqs', region_name='us-west-2', verify=False) + + @mock.patch('boto3.setup_default_session', + wraps=boto3.setup_default_session) + def test_resource_creates_default_session(self, setup_session): + boto3.DEFAULT_SESSION = None + + boto3.resource('sqs') + + self.assertTrue(setup_session.called, + 'setup_default_session not called') + self.assertTrue(boto3.DEFAULT_SESSION.resource.called, + 'Default session resource method not called') + + @mock.patch('boto3.setup_default_session', + wraps=boto3.setup_default_session) + def test_resource_uses_existing_session(self, setup_session): + boto3.DEFAULT_SESSION = self.Session() + + boto3.resource('sqs') + + self.assertFalse(setup_session.called, + 'setup_default_session should not have been called') + self.assertTrue(boto3.DEFAULT_SESSION.resource.called, + 'Default session resource method not called') + + def test_resource_passes_through_arguments(self): + boto3.DEFAULT_SESSION = self.Session() + + boto3.resource('sqs', region_name='us-west-2', verify=False) + + boto3.DEFAULT_SESSION.resource.assert_called_with( + 'sqs', region_name='us-west-2', verify=False) diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py new file mode 100644 index 0000000..604a6e5 --- /dev/null +++ b/tests/unit/test_session.py @@ -0,0 +1,254 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. + +from botocore import loaders +from botocore.exceptions import DataNotFoundError +from botocore.client import Config + +from boto3 import __version__ +from boto3.exceptions import NoVersionFound +from boto3.session import Session +from tests import mock, BaseTestCase + + +class TestSession(BaseTestCase): + def test_repr(self): + bc_session = self.bc_session_cls.return_value + bc_session.get_credentials.return_value.access_key = 'abc123' + bc_session.get_config_variable.return_value = 'us-west-2' + + session = Session('abc123', region_name='us-west-2') + + self.assertEqual(repr(session), 'Session(region=\'us-west-2\')') + + def test_arguments_not_required(self): + Session() + + self.assertTrue(self.bc_session_cls.called, + 'Botocore session was not created') + + def test_credentials_can_be_set(self): + bc_session = self.bc_session_cls.return_value + + # Set values in constructor + Session(aws_access_key_id='key', + aws_secret_access_key='secret', + aws_session_token='token') + + self.assertTrue(self.bc_session_cls.called, + 'Botocore session was not created') + self.assertTrue(bc_session.set_credentials.called, + 'Botocore session set_credentials not called from constructor') + bc_session.set_credentials.assert_called_with( + 'key', 'secret', 'token') + + def test_profile_can_be_set(self): + bc_session = self.bc_session_cls.return_value + + session = Session(profile_name='foo') + + bc_session.set_config_variable.assert_called_with( + 'profile', 'foo') + bc_session.profile = 'foo' + + # We should also be able to read the value + self.assertEqual(session.profile_name, 'foo') + + def test_profile_default(self): + self.bc_session_cls.return_value.profile = None + + session = Session() + + self.assertEqual(session.profile_name, 'default') + + def test_custom_session(self): + bc_session = self.bc_session_cls() + self.bc_session_cls.reset_mock() + + 
Session(botocore_session=bc_session) + + # No new session was created + self.assertFalse(self.bc_session_cls.called) + + def test_user_agent(self): + # Here we get the underlying Botocore session, create a Boto 3 + # session, and ensure that the user-agent is modified as expected + bc_session = self.bc_session_cls.return_value + bc_session.user_agent_name = 'Botocore' + bc_session.user_agent_version = '0.68.0' + bc_session.user_agent_extra = '' + + Session(botocore_session=bc_session) + + self.assertEqual(bc_session.user_agent_name, 'Boto3') + self.assertEqual(bc_session.user_agent_version, __version__) + self.assertEqual(bc_session.user_agent_extra, 'Botocore/0.68.0') + + def test_user_agent_extra(self): + # This test is the same as above, but includes custom extra content + # which must still be in the final modified user-agent. + bc_session = self.bc_session_cls.return_value + bc_session.user_agent_name = 'Botocore' + bc_session.user_agent_version = '0.68.0' + bc_session.user_agent_extra = 'foo' + + Session(botocore_session=bc_session) + + self.assertEqual(bc_session.user_agent_extra, 'foo Botocore/0.68.0') + + def test_custom_user_agent(self): + # This test ensures that a customized user-agent is left untouched. 
+ bc_session = self.bc_session_cls.return_value + bc_session.user_agent_name = 'Custom' + bc_session.user_agent_version = '1.0' + bc_session.user_agent_extra = '' + + Session(botocore_session=bc_session) + + self.assertEqual(bc_session.user_agent_name, 'Custom') + self.assertEqual(bc_session.user_agent_version, '1.0') + self.assertEqual(bc_session.user_agent_extra, '') + + def test_get_available_services(self): + bc_session = self.bc_session_cls.return_value + + session = Session() + session.get_available_services() + + self.assertTrue(bc_session.get_available_services.called, + 'Botocore session get_available_services not called') + + def test_get_available_resources(self): + mock_bc_session = mock.Mock() + loader = mock.Mock(spec=loaders.Loader) + loader.list_available_services.return_value = ['foo', 'bar'] + mock_bc_session.get_component.return_value = loader + session = Session(botocore_session=mock_bc_session) + + names = session.get_available_resources() + self.assertEqual(names, ['foo', 'bar']) + + def test_create_client(self): + session = Session(region_name='us-east-1') + client = session.client('sqs', region_name='us-west-2') + + self.assertTrue(client, + 'No low-level client was returned') + + def test_create_client_with_args(self): + bc_session = self.bc_session_cls.return_value + + session = Session(region_name='us-east-1') + session.client('sqs', region_name='us-west-2') + + bc_session.create_client.assert_called_with( + 'sqs', aws_secret_access_key=None, aws_access_key_id=None, + endpoint_url=None, use_ssl=True, aws_session_token=None, + verify=None, region_name='us-west-2', api_version=None, + config=None) + + def test_create_resource_with_args(self): + mock_bc_session = mock.Mock() + loader = mock.Mock(spec=loaders.Loader) + loader.determine_latest_version.return_value = '2014-11-02' + loader.load_service_model.return_value = {'resources': [], 'service': []} + mock_bc_session.get_component.return_value = loader + session = 
Session(botocore_session=mock_bc_session) + session.resource_factory.load_from_definition = mock.Mock() + session.client = mock.Mock() + + session.resource('sqs', verify=False) + + session.client.assert_called_with( + 'sqs', aws_secret_access_key=None, aws_access_key_id=None, + endpoint_url=None, use_ssl=True, aws_session_token=None, + verify=False, region_name=None, api_version='2014-11-02', + config=mock.ANY) + client_config = session.client.call_args[1]['config'] + self.assertEqual(client_config.user_agent_extra, 'Resource') + self.assertEqual(client_config.signature_version, None) + + def test_create_resource_with_config(self): + mock_bc_session = mock.Mock() + loader = mock.Mock(spec=loaders.Loader) + loader.determine_latest_version.return_value = '2014-11-02' + loader.load_service_model.return_value = {'resources': [], 'service': []} + mock_bc_session.get_component.return_value = loader + session = Session(botocore_session=mock_bc_session) + session.resource_factory.load_from_definition = mock.Mock() + session.client = mock.Mock() + config = Config(signature_version='v4') + + session.resource('sqs', config=config) + + session.client.assert_called_with( + 'sqs', aws_secret_access_key=None, aws_access_key_id=None, + endpoint_url=None, use_ssl=True, aws_session_token=None, + verify=None, region_name=None, api_version='2014-11-02', + config=mock.ANY) + client_config = session.client.call_args[1]['config'] + self.assertEqual(client_config.user_agent_extra, 'Resource') + self.assertEqual(client_config.signature_version, 'v4') + + def test_create_resource_with_config_override_user_agent_extra(self): + mock_bc_session = mock.Mock() + loader = mock.Mock(spec=loaders.Loader) + loader.determine_latest_version.return_value = '2014-11-02' + loader.load_service_model.return_value = {'resources': [], 'service': []} + mock_bc_session.get_component.return_value = loader + session = Session(botocore_session=mock_bc_session) + session.resource_factory.load_from_definition = 
mock.Mock() + session.client = mock.Mock() + config = Config(signature_version='v4', user_agent_extra='foo') + + session.resource('sqs', config=config) + + session.client.assert_called_with( + 'sqs', aws_secret_access_key=None, aws_access_key_id=None, + endpoint_url=None, use_ssl=True, aws_session_token=None, + verify=None, region_name=None, api_version='2014-11-02', + config=mock.ANY) + client_config = session.client.call_args[1]['config'] + self.assertEqual(client_config.user_agent_extra, 'foo') + self.assertEqual(client_config.signature_version, 'v4') + + def test_create_resource_latest_version(self): + mock_bc_session = mock.Mock() + loader = mock.Mock(spec=loaders.Loader) + loader.determine_latest_version.return_value = '2014-11-02' + loader.load_service_model.return_value = {'resources': [], 'service': []} + mock_bc_session.get_component.return_value = loader + session = Session(botocore_session=mock_bc_session) + session.resource_factory.load_from_definition = mock.Mock() + + session.resource('sqs') + + loader.load_service_model.assert_called_with( + 'sqs', 'resources-1', '2014-11-02') + + def test_bad_resource_name(self): + mock_bc_session = mock.Mock() + loader = mock.Mock(spec=loaders.Loader) + loader.determine_latest_version.side_effect = DataNotFoundError( + data_path='foo') + mock_bc_session.get_component.return_value = loader + + session = Session(botocore_session=mock_bc_session) + with self.assertRaises(DataNotFoundError): + session.resource('sqs') + + def test_can_reach_events(self): + mock_bc_session = self.bc_session_cls() + session = Session(botocore_session=mock_bc_session) + session.events + mock_bc_session.get_component.assert_called_with('event_emitter') diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py new file mode 100644 index 0000000..5072be5 --- /dev/null +++ b/tests/unit/test_utils.py @@ -0,0 +1,59 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import types +from tests import unittest +import mock + +from boto3 import utils + + +class FakeModule(object): + @staticmethod + def entry_point(**kwargs): + return kwargs + + +class TestUtils(unittest.TestCase): + def test_lazy_call(self): + with mock.patch('boto3.utils.import_module') as importer: + importer.return_value = FakeModule + lazy_function = utils.lazy_call( + 'fakemodule.FakeModule.entry_point') + self.assertEqual(lazy_function(a=1, b=2), {'a': 1, 'b': 2}) + + def test_import_module(self): + module = utils.import_module('boto3.s3.transfer') + self.assertEqual(module.__name__, 'boto3.s3.transfer') + self.assertIsInstance(module, types.ModuleType) + + def test_inject_attributes_with_no_shadowing(self): + class_attributes = {} + utils.inject_attribute(class_attributes, 'foo', 'bar') + self.assertEqual(class_attributes['foo'], 'bar') + + def test_shadowing_existing_var_raises_exception(self): + class_attributes = {'foo': 'preexisting'} + with self.assertRaises(RuntimeError): + utils.inject_attribute(class_attributes, 'foo', 'bar') + + +class TestLazyLoadedWaiterModel(unittest.TestCase): + def test_get_waiter_model_is_lazy(self): + session = mock.Mock() + waiter_model = utils.LazyLoadedWaiterModel( + session, 'myservice', '2014-01-01') + self.assertFalse(session.get_waiter_model.called) + waiter_model.get_waiter('Foo') + self.assertTrue(session.get_waiter_model.called) + session.get_waiter_model.return_value.get_waiter.assert_called_with( + 'Foo') diff --git 
a/tox.ini b/tox.ini new file mode 100644 index 0000000..046e563 --- /dev/null +++ b/tox.ini @@ -0,0 +1,11 @@ +[tox] +envlist = py26,py27,py33,py34 + +# Comment to build sdist and install into virtualenv +# This is helpful to test installation but takes extra time +skipsdist = True + +[testenv] +commands = + {toxinidir}/scripts/ci/install + {toxinidir}/scripts/ci/run-tests