From 6d72c75e725a9875a8920c6471cd8d0aa43333e7 Mon Sep 17 00:00:00 2001 From: Noah Meyerhans Date: Wed, 3 Nov 2021 10:27:47 -0700 Subject: [PATCH] New upstream version 1.19.9+dfsg --- .changes/1.18.54.json | 17 ++ .changes/1.18.55.json | 32 +++ .changes/1.18.56.json | 22 ++ .changes/1.18.57.json | 32 +++ .changes/1.18.58.json | 32 +++ .changes/1.18.59.json | 27 +++ .changes/1.18.60.json | 22 ++ .changes/1.18.61.json | 27 +++ .changes/1.18.62.json | 22 ++ .changes/1.18.63.json | 12 + .changes/1.18.64.json | 17 ++ .changes/1.18.65.json | 12 + .changes/1.19.0.json | 42 ++++ .changes/1.19.1.json | 7 + .changes/1.19.2.json | 17 ++ .changes/1.19.3.json | 22 ++ .changes/1.19.4.json | 17 ++ .changes/1.19.5.json | 42 ++++ .changes/1.19.6.json | 32 +++ .changes/1.19.7.json | 27 +++ .changes/1.19.8.json | 22 ++ .changes/1.19.9.json | 17 ++ .github/workflows/lint.yml | 19 ++ .github/workflows/run-tests.yml | 3 +- .github/workflows/stale_issue.yml | 2 +- .pre-commit-config.yaml | 12 + CHANGELOG.rst | 205 ++++++++++++++++++ CODE_OF_CONDUCT.md | 1 - CONTRIBUTING.rst | 29 ++- README.rst | 7 +- boto3/__init__.py | 2 +- boto3/compat.py | 2 +- boto3/docs/collection.py | 8 +- boto3/dynamodb/conditions.py | 5 +- boto3/s3/transfer.py | 24 +- boto3/session.py | 12 + docs/source/guide/events.rst | 4 +- scripts/ci/run-crt-tests | 2 +- scripts/new-change | 2 +- setup.cfg | 10 +- setup.py | 2 +- tests/__init__.py | 12 - tests/functional/docs/test_ec2.py | 22 +- tests/functional/docs/test_smoke.py | 15 +- .../dynamodb/test_stubber_conditions.py | 21 +- tests/functional/dynamodb/test_table.py | 2 +- tests/functional/test_s3.py | 2 +- tests/functional/test_smoke.py | 2 + tests/integration/test_dynamodb.py | 12 +- tests/integration/test_s3.py | 13 +- tests/integration/test_session.py | 2 +- tests/unit/docs/test_collection.py | 10 +- tests/unit/dynamodb/test_table.py | 79 ++++--- tests/unit/dynamodb/test_transform.py | 17 +- tests/unit/dynamodb/test_types.py | 30 +-- tests/unit/resources/test_collection.py | 5 +- tests/unit/resources/test_collection_smoke.py | 2 + tests/unit/resources/test_factory.py | 29 ++- tests/unit/resources/test_model.py | 29 ++- tests/unit/resources/test_params.py | 8 +- tests/unit/resources/test_response.py | 7 +- tests/unit/s3/test_inject.py | 1 + tests/unit/s3/test_transfer.py | 21 ++ tests/unit/test_session.py | 19 +- 64 files changed, 1066 insertions(+), 164 deletions(-) create mode 100644 .changes/1.18.54.json create mode 100644 .changes/1.18.55.json create mode 100644 .changes/1.18.56.json create mode 100644 .changes/1.18.57.json create mode 100644 .changes/1.18.58.json create mode 100644 .changes/1.18.59.json create mode 100644 .changes/1.18.60.json create mode 100644 .changes/1.18.61.json create mode 100644 .changes/1.18.62.json create mode 100644 .changes/1.18.63.json create mode 100644 .changes/1.18.64.json create mode 100644 .changes/1.18.65.json create mode 100644 .changes/1.19.0.json create mode 100644 .changes/1.19.1.json create mode 100644 .changes/1.19.2.json create mode 100644 .changes/1.19.3.json create mode 100644 .changes/1.19.4.json create mode 100644 .changes/1.19.5.json create mode 100644 .changes/1.19.6.json create mode 100644 .changes/1.19.7.json create mode 100644 .changes/1.19.8.json create mode 100644 .changes/1.19.9.json create mode 100644 .github/workflows/lint.yml create mode 100644 .pre-commit-config.yaml diff --git a/.changes/1.18.54.json b/.changes/1.18.54.json new file mode 100644 index 0000000..268d3ec --- /dev/null +++ b/.changes/1.18.54.json @@ -0,0 +1,17 @@ 
+[ + { + "category": "``codebuild``", + "description": "[``botocore``] CodeBuild now allows you to select how batch build statuses are sent to the source provider for a project.", + "type": "api-change" + }, + { + "category": "``efs``", + "description": "[``botocore``] Update efs client to latest version", + "type": "api-change" + }, + { + "category": "``kms``", + "description": "[``botocore``] Added SDK examples for ConnectCustomKeyStore, CreateCustomKeyStore, CreateKey, DeleteCustomKeyStore, DescribeCustomKeyStores, DisconnectCustomKeyStore, GenerateDataKeyPair, GenerateDataKeyPairWithoutPlaintext, GetPublicKey, ReplicateKey, Sign, UpdateCustomKeyStore and Verify APIs", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.55.json b/.changes/1.18.55.json new file mode 100644 index 0000000..e0b04dc --- /dev/null +++ b/.changes/1.18.55.json @@ -0,0 +1,32 @@ +[ + { + "category": "``workmail``", + "description": "[``botocore``] This release allows customers to change their inbound DMARC settings in Amazon WorkMail.", + "type": "api-change" + }, + { + "category": "``location``", + "description": "[``botocore``] Add support for PositionFiltering.", + "type": "api-change" + }, + { + "category": "``application-autoscaling``", + "description": "[``botocore``] With this release, Application Auto Scaling adds support for Amazon Neptune. Customers can now automatically add or remove Read Replicas of their Neptune clusters to keep the average CPU Utilization at the target value specified by the customers.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] Released Capacity Reservation Fleet, a feature of Amazon EC2 Capacity Reservations, which provides a way to manage reserved capacity across instance types. For more information: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/cr-fleets.html", + "type": "api-change" + }, + { + "category": "``glue``", + "description": "[``botocore``] This release adds tag as an input of CreateConnection", + "type": "api-change" + }, + { + "category": "``backup``", + "description": "[``botocore``] AWS Backup Audit Manager framework report.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.56.json b/.changes/1.18.56.json new file mode 100644 index 0000000..4d1ab44 --- /dev/null +++ b/.changes/1.18.56.json @@ -0,0 +1,22 @@ +[ + { + "category": "``sagemaker``", + "description": "[``botocore``] This release adds a new TrainingInputMode FastFile for SageMaker Training APIs.", + "type": "api-change" + }, + { + "category": "``amplifybackend``", + "description": "[``botocore``] Adding a new field 'AmplifyFeatureFlags' to the response of the GetBackend operation. 
It will return a stringified version of the cli.json file for the given Amplify project.", + "type": "api-change" + }, + { + "category": "``fsx``", + "description": "[``botocore``] This release adds support for Lustre 2.12 to FSx for Lustre.", + "type": "api-change" + }, + { + "category": "``kendra``", + "description": "[``botocore``] Amazon Kendra now supports integration with AWS SSO", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.57.json b/.changes/1.18.57.json new file mode 100644 index 0000000..ae84f12 --- /dev/null +++ b/.changes/1.18.57.json @@ -0,0 +1,32 @@ +[ + { + "category": "``kendra``", + "description": "[``botocore``] Amazon Kendra now supports indexing and querying documents in different languages.", + "type": "api-change" + }, + { + "category": "``grafana``", + "description": "[``botocore``] Initial release of the SDK for Amazon Managed Grafana API.", + "type": "api-change" + }, + { + "category": "``firehose``", + "description": "[``botocore``] Allow support for Amazon Opensearch Service(successor to Amazon Elasticsearch Service) as a Kinesis Data Firehose delivery destination.", + "type": "api-change" + }, + { + "category": "``backup``", + "description": "[``botocore``] Launch of AWS Backup Vault Lock, which protects your backups from malicious and accidental actions, works with existing backup policies, and helps you meet compliance requirements.", + "type": "api-change" + }, + { + "category": "``schemas``", + "description": "[``botocore``] Removing unused request/response objects.", + "type": "api-change" + }, + { + "category": "``chime``", + "description": "[``botocore``] This release enables customers to configure Chime MediaCapturePipeline via API.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.58.json b/.changes/1.18.58.json new file mode 100644 index 0000000..6ffdfda --- /dev/null +++ b/.changes/1.18.58.json @@ -0,0 +1,32 @@ +[ + { + "category": "``lexv2-runtime``", + "description": "[``botocore``] Update lexv2-runtime client to latest version", + "type": "api-change" + }, + { + "category": "``lexv2-models``", + "description": "[``botocore``] Update lexv2-models client to latest version", + "type": "api-change" + }, + { + "category": "``secretsmanager``", + "description": "[``botocore``] Documentation updates for Secrets Manager", + "type": "api-change" + }, + { + "category": "``securityhub``", + "description": "[``botocore``] Added new resource details objects to ASFF, including resources for WAF rate-based rules, EC2 VPC endpoints, ECR repositories, EKS clusters, X-Ray encryption, and OpenSearch domains. 
Added additional details for CloudFront distributions, CodeBuild projects, ELB V2 load balancers, and S3 buckets.", + "type": "api-change" + }, + { + "category": "``mediaconvert``", + "description": "[``botocore``] AWS Elemental MediaConvert has added the ability to set account policies which control access restrictions for HTTP, HTTPS, and S3 content sources.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] This release removes a requirement for filters on SearchLocalGatewayRoutes operations.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.59.json b/.changes/1.18.59.json new file mode 100644 index 0000000..be745aa --- /dev/null +++ b/.changes/1.18.59.json @@ -0,0 +1,27 @@ +[ + { + "category": "``elbv2``", + "description": "[``botocore``] Update elbv2 client to latest version", + "type": "api-change" + }, + { + "category": "Signing", + "description": "[``botocore``] SigV4QueryAuth and CrtSigV4QueryAuth now properly respect AWSRequest.params while signing boto/botocore`#2521 `__", + "type": "bugfix" + }, + { + "category": "``medialive``", + "description": "[``botocore``] This release adds support for Transport Stream files as an input type to MediaLive encoders.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] Documentation update for Amazon EC2.", + "type": "api-change" + }, + { + "category": "``frauddetector``", + "description": "[``botocore``] New model type: Transaction Fraud Insights, which is optimized for online transaction fraud. Stored Events, which allows customers to send and store data directly within Amazon Fraud Detector. Batch Import, which allows customers to upload a CSV file of historic event data for processing and storage", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.60.json b/.changes/1.18.60.json new file mode 100644 index 0000000..7d836d7 --- /dev/null +++ b/.changes/1.18.60.json @@ -0,0 +1,22 @@ +[ + { + "category": "``cloudsearch``", + "description": "[``botocore``] Adds an additional validation exception for Amazon CloudSearch configuration APIs for better error handling.", + "type": "api-change" + }, + { + "category": "``ecs``", + "description": "[``botocore``] Documentation only update to address tickets.", + "type": "api-change" + }, + { + "category": "``mediatailor``", + "description": "[``botocore``] MediaTailor now supports ad prefetching.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] EncryptionSupport for InstanceStorageInfo added to DescribeInstanceTypes API", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.61.json b/.changes/1.18.61.json new file mode 100644 index 0000000..d400b34 --- /dev/null +++ b/.changes/1.18.61.json @@ -0,0 +1,27 @@ +[ + { + "category": "``config``", + "description": "[``botocore``] Adding Config support for AWS::OpenSearch::Domain", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] This release adds support for additional VPC Flow Logs delivery options to S3, such as Apache Parquet formatted files, Hourly partitions and Hive-compatible S3 prefixes", + "type": "api-change" + }, + { + "category": "``storagegateway``", + "description": "[``botocore``] Adding support for Audit Logs on NFS shares and Force Closing Files on SMB shares.", + "type": "api-change" + }, + { + "category": "``workmail``", + "description": "[``botocore``] This release adds APIs 
for adding, removing and retrieving details of mail domains", + "type": "api-change" + }, + { + "category": "``kinesisanalyticsv2``", + "description": "[``botocore``] Support for Apache Flink 1.13 in Kinesis Data Analytics. Changed the required status of some Update properties to better fit the corresponding Create properties.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.62.json b/.changes/1.18.62.json new file mode 100644 index 0000000..195dbdb --- /dev/null +++ b/.changes/1.18.62.json @@ -0,0 +1,22 @@ +[ + { + "category": "``elbv2``", + "description": "[``botocore``] Update elbv2 client to latest version", + "type": "api-change" + }, + { + "category": "``autoscaling``", + "description": "[``botocore``] Amazon EC2 Auto Scaling now supports filtering describe Auto Scaling groups API using tags", + "type": "api-change" + }, + { + "category": "``sagemaker``", + "description": "[``botocore``] This release updates the provisioning artifact ID to an optional parameter in CreateProject API. The provisioning artifact ID defaults to the latest provisioning artifact ID of the product if you don't provide one.", + "type": "api-change" + }, + { + "category": "``robomaker``", + "description": "[``botocore``] Adding support to GPU simulation jobs as well as non-ROS simulation jobs.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.63.json b/.changes/1.18.63.json new file mode 100644 index 0000000..f0c0003 --- /dev/null +++ b/.changes/1.18.63.json @@ -0,0 +1,12 @@ +[ + { + "category": "``efs``", + "description": "[``botocore``] Update efs client to latest version", + "type": "api-change" + }, + { + "category": "``glue``", + "description": "[``botocore``] Enable S3 event base crawler API.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.64.json b/.changes/1.18.64.json new file mode 100644 index 0000000..d1dfd78 --- /dev/null +++ b/.changes/1.18.64.json @@ -0,0 +1,17 @@ +[ + { + "category": "``quicksight``", + "description": "[``botocore``] AWS QuickSight Service Features - Add IP Restriction UI and public APIs support.", + "type": "api-change" + }, + { + "category": "AWSCRT", + "description": "[``botocore``] Upgrade awscrt extra to 0.12.5", + "type": "enchancement" + }, + { + "category": "``ivs``", + "description": "[``botocore``] Bug fix: remove unsupported maxResults and nextToken pagination parameters from ListTagsForResource", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.18.65.json b/.changes/1.18.65.json new file mode 100644 index 0000000..e47d1aa --- /dev/null +++ b/.changes/1.18.65.json @@ -0,0 +1,12 @@ +[ + { + "category": "``dataexchange``", + "description": "[``botocore``] This release adds support for our public preview of AWS Data Exchange for Amazon Redshift. 
This enables data providers to list products including AWS Data Exchange datashares for Amazon Redshift, giving subscribers read-only access to provider data in Amazon Redshift.", + "type": "api-change" + }, + { + "category": "``chime-sdk-messaging``", + "description": "[``botocore``] The Amazon Chime SDK now allows developers to execute business logic on in-flight messages before they are delivered to members of a messaging channel with channel flows.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.0.json b/.changes/1.19.0.json new file mode 100644 index 0000000..bf7d017 --- /dev/null +++ b/.changes/1.19.0.json @@ -0,0 +1,42 @@ +[ + { + "category": "``appflow``", + "description": "[``botocore``] Feature to add support for JSON-L format for S3 as a source.", + "type": "api-change" + }, + { + "category": "``mediapackage-vod``", + "description": "[``botocore``] MediaPackage passes through digital video broadcasting (DVB) subtitles into the output.", + "type": "api-change" + }, + { + "category": "``mediaconvert``", + "description": "[``botocore``] AWS Elemental MediaConvert SDK has added support for specifying caption time delta in milliseconds and the ability to apply color range legalization to source content other than AVC video.", + "type": "api-change" + }, + { + "category": "``mediapackage``", + "description": "[``botocore``] When enabled, MediaPackage passes through digital video broadcasting (DVB) subtitles into the output.", + "type": "api-change" + }, + { + "category": "``panorama``", + "description": "[``botocore``] General availability for AWS Panorama. AWS SDK for Panorama includes APIs to manage your devices and nodes, and deploy computer vision applications to the edge. For more information, see the AWS Panorama documentation at http://docs.aws.amazon.com/panorama", + "type": "api-change" + }, + { + "category": "Serialization", + "description": "[``botocore``] rest-json serialization defaults aligned across AWS SDKs", + "type": "feature" + }, + { + "category": "``directconnect``", + "description": "[``botocore``] This release adds 4 new APIS, which needs to be public able", + "type": "api-change" + }, + { + "category": "``securityhub``", + "description": "[``botocore``] Added support for cross-Region finding aggregation, which replicates findings from linked Regions to a single aggregation Region. Added operations to view, enable, update, and delete the finding aggregation.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.1.json b/.changes/1.19.1.json new file mode 100644 index 0000000..796d6a0 --- /dev/null +++ b/.changes/1.19.1.json @@ -0,0 +1,7 @@ +[ + { + "category": "``connect``", + "description": "[``botocore``] Released Amazon Connect hours of operation API for general availability (GA). This API also supports AWS CloudFormation. 
For more information, see Amazon Connect Resource Type Reference in the AWS CloudFormation User Guide.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.2.json b/.changes/1.19.2.json new file mode 100644 index 0000000..6e6bb1f --- /dev/null +++ b/.changes/1.19.2.json @@ -0,0 +1,17 @@ +[ + { + "category": "``quicksight``", + "description": "[``botocore``] Added QSearchBar option for GenerateEmbedUrlForRegisteredUser ExperienceConfiguration to support Q search bar embedding", + "type": "api-change" + }, + { + "category": "``auditmanager``", + "description": "[``botocore``] This release introduces character restrictions for ControlSet names. We updated regex patterns for the following attributes: ControlSet, CreateAssessmentFrameworkControlSet, and UpdateAssessmentFrameworkControlSet.", + "type": "api-change" + }, + { + "category": "``chime``", + "description": "[``botocore``] Chime VoiceConnector and VoiceConnectorGroup APIs will now return an ARN.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.3.json b/.changes/1.19.3.json new file mode 100644 index 0000000..9b81351 --- /dev/null +++ b/.changes/1.19.3.json @@ -0,0 +1,22 @@ +[ + { + "category": "``rds``", + "description": "[``botocore``] This release adds support for Amazon RDS Custom, which is a new RDS management type that gives you full access to your database and operating system. For more information, see https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-custom.html", + "type": "api-change" + }, + { + "category": "``auditmanager``", + "description": "[``botocore``] This release introduces a new feature for Audit Manager: Custom framework sharing. You can now share your custom frameworks with another AWS account, or replicate them into another AWS Region under your own account.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] This release adds support to create a VPN Connection that is not attached to a Gateway at the time of creation. 
Use this to create VPNs associated with Core Networks, or modify your VPN and attach a gateway using the modify API after creation.", + "type": "api-change" + }, + { + "category": "``route53resolver``", + "description": "[``botocore``] New API for ResolverConfig, which allows autodefined rules for reverse DNS resolution to be disabled for a VPC", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.4.json b/.changes/1.19.4.json new file mode 100644 index 0000000..799ec97 --- /dev/null +++ b/.changes/1.19.4.json @@ -0,0 +1,17 @@ +[ + { + "category": "``emr-containers``", + "description": "[``botocore``] This feature enables auto-generation of certificate to secure the managed-endpoint and removes the need for customer provided certificate-arn during managed-endpoint setup.", + "type": "api-change" + }, + { + "category": "``chime-sdk-messaging``", + "description": "[``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint", + "type": "api-change" + }, + { + "category": "``chime-sdk-identity``", + "description": "[``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.5.json b/.changes/1.19.5.json new file mode 100644 index 0000000..104f282 --- /dev/null +++ b/.changes/1.19.5.json @@ -0,0 +1,42 @@ +[ + { + "category": "``autoscaling``", + "description": "[``botocore``] This release adds support for attribute-based instance type selection, a new EC2 Auto Scaling feature that lets customers express their instance requirements as a set of attributes, such as vCPU, memory, and storage.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] This release adds: attribute-based instance type selection for EC2 Fleet, Spot Fleet, a feature that lets customers express instance requirements as attributes like vCPU, memory, and storage; and Spot placement score, a feature that helps customers identify an optimal location to run Spot workloads.", + "type": "api-change" + }, + { + "category": "Session", + "description": "Added `get_partition_for_region` to lookup partition for a given region_name", + "type": "enhancement" + }, + { + "category": "``eks``", + "description": "[``botocore``] EKS managed node groups now support BOTTLEROCKET_x86_64 and BOTTLEROCKET_ARM_64 AMI types.", + "type": "api-change" + }, + { + "category": "``sagemaker``", + "description": "[``botocore``] This release allows customers to describe one or more versioned model packages through BatchDescribeModelPackage, update project via UpdateProject, modify and read customer metadata properties using Create, Update and Describe ModelPackage and enables cross account registration of model packages.", + "type": "api-change" + }, + { + "category": "Session", + "description": "[``botocore``] Added `get_partition_for_region` allowing partition lookup by region name.", + "type": "enhancement" + }, + { + "category": "``textract``", + "description": "[``botocore``] This release adds support for asynchronously analyzing invoice and receipt documents through two new APIs: StartExpenseAnalysis and GetExpenseAnalysis", + "type": "api-change" + }, + { + "category": "``s3``", + "description": "TransferConfig now supports the `max_bandwidth` argument.", + "type": "enchancement" + } +] \ No newline at end of file diff --git a/.changes/1.19.6.json b/.changes/1.19.6.json new file mode 100644 index 0000000..2d06de8 --- /dev/null +++ 
b/.changes/1.19.6.json @@ -0,0 +1,32 @@ +[ + { + "category": "``gamelift``", + "description": "[``botocore``] Added support for Arm-based AWS Graviton2 instances, such as M6g, C6g, and R6g.", + "type": "api-change" + }, + { + "category": "``ecs``", + "description": "[``botocore``] Amazon ECS now supports running Fargate tasks on Windows Operating Systems Families which includes Windows Server 2019 Core and Windows Server 2019 Full.", + "type": "api-change" + }, + { + "category": "``sagemaker``", + "description": "[``botocore``] This release adds support for RStudio on SageMaker.", + "type": "api-change" + }, + { + "category": "``connectparticipant``", + "description": "[``botocore``] This release adds a new boolean attribute - Connect Participant - to the CreateParticipantConnection API, which can be used to mark the participant as connected.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] Added new read-only DenyAllIGWTraffic network interface attribute. Added support for DL1 24xlarge instances powered by Habana Gaudi Accelerators for deep learning model training workloads", + "type": "api-change" + }, + { + "category": "``ssm-incidents``", + "description": "[``botocore``] Updating documentation, adding new field to ConflictException to indicate earliest retry timestamp for some operations, increase maximum length of nextToken fields", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.7.json b/.changes/1.19.7.json new file mode 100644 index 0000000..9a83c44 --- /dev/null +++ b/.changes/1.19.7.json @@ -0,0 +1,27 @@ +[ + { + "category": "``transcribe``", + "description": "[``botocore``] Transcribe and Transcribe Call Analytics now support automatic language identification along with custom vocabulary, vocabulary filter, custom language model and PII redaction.", + "type": "api-change" + }, + { + "category": "``application-insights``", + "description": "[``botocore``] Added Monitoring support for SQL Server Failover Cluster Instance. 
Additionally, added a new API to allow one-click monitoring of containers resources.", + "type": "api-change" + }, + { + "category": "``rekognition``", + "description": "[``botocore``] This release added new attributes to Rekognition Video GetCelebrityRecognition API operations.", + "type": "api-change" + }, + { + "category": "``connect``", + "description": "[``botocore``] Amazon Connect Chat now supports real-time message streaming.", + "type": "api-change" + }, + { + "category": "``ec2``", + "description": "[``botocore``] Support added for AMI sharing with organizations and organizational units in ModifyImageAttribute API", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.8.json b/.changes/1.19.8.json new file mode 100644 index 0000000..6d6b2bc --- /dev/null +++ b/.changes/1.19.8.json @@ -0,0 +1,22 @@ +[ + { + "category": "``rekognition``", + "description": "[``botocore``] This Amazon Rekognition Custom Labels release introduces the management of datasets with projects", + "type": "api-change" + }, + { + "category": "``networkmanager``", + "description": "[``botocore``] This release adds API support to aggregate resources, routes, and telemetry data across a Global Network.", + "type": "api-change" + }, + { + "category": "``lightsail``", + "description": "[``botocore``] This release adds support to enable access logging for buckets in the Lightsail object storage service.", + "type": "api-change" + }, + { + "category": "``neptune``", + "description": "[``botocore``] Adds support for major version upgrades to ModifyDbCluster API", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.changes/1.19.9.json b/.changes/1.19.9.json new file mode 100644 index 0000000..0f6029c --- /dev/null +++ b/.changes/1.19.9.json @@ -0,0 +1,17 @@ +[ + { + "category": "``cloudfront``", + "description": "[``botocore``] CloudFront now supports response headers policies to add HTTP headers to the responses that CloudFront sends to viewers. 
You can use these policies to add CORS headers, control browser caching, and more, without modifying your origin or writing any code.", + "type": "api-change" + }, + { + "category": "``connect``", + "description": "[``botocore``] Amazon Connect Chat now supports real-time message streaming.", + "type": "api-change" + }, + { + "category": "``nimble``", + "description": "[``botocore``] Amazon Nimble Studio adds support for users to stop and start streaming sessions.", + "type": "api-change" + } +] \ No newline at end of file diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..0107268 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,19 @@ +name: Lint code + +on: + push: + pull_request: + branches-ignore: [ master ] + +jobs: + lint: + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.9 + uses: actions/setup-python@v2 + with: + python-version: 3.9 + - name: Run pre-commit + uses: pre-commit/action@v2.0.0 diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 371eb3c..3aaeebd 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -1,4 +1,3 @@ - name: Run tests on: @@ -13,7 +12,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.6, 3.7, 3.8, 3.9, 3.10-dev] + python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] os: [ubuntu-latest, macOS-latest, windows-latest ] steps: diff --git a/.github/workflows/stale_issue.yml b/.github/workflows/stale_issue.yml index b8d3600..a2f5d78 100644 --- a/.github/workflows/stale_issue.yml +++ b/.github/workflows/stale_issue.yml @@ -43,7 +43,7 @@ jobs: # threshold of "upvotes", you can set this here. An "upvote" is # the total number of +1, heart, hooray, and rocket reactions # on an issue. - minimum-upvotes-to-exempt: 1 + minimum-upvotes-to-exempt: 2 repo-token: ${{ secrets.GITHUB_TOKEN }} loglevel: DEBUG diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..881c315 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,12 @@ +exclude: ^(.github|.changes|docs/|boto3/compat.py|boto3/data|CHANGELOG.rst) +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.3.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace +- repo: https://github.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 00141b4..4138d65 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,211 @@ CHANGELOG ========= +1.19.9 +====== + +* api-change:``cloudfront``: [``botocore``] CloudFront now supports response headers policies to add HTTP headers to the responses that CloudFront sends to viewers. You can use these policies to add CORS headers, control browser caching, and more, without modifying your origin or writing any code. +* api-change:``connect``: [``botocore``] Amazon Connect Chat now supports real-time message streaming. +* api-change:``nimble``: [``botocore``] Amazon Nimble Studio adds support for users to stop and start streaming sessions. + + +1.19.8 +====== + +* api-change:``rekognition``: [``botocore``] This Amazon Rekognition Custom Labels release introduces the management of datasets with projects +* api-change:``networkmanager``: [``botocore``] This release adds API support to aggregate resources, routes, and telemetry data across a Global Network. 
+* api-change:``lightsail``: [``botocore``] This release adds support to enable access logging for buckets in the Lightsail object storage service. +* api-change:``neptune``: [``botocore``] Adds support for major version upgrades to ModifyDbCluster API + + +1.19.7 +====== + +* api-change:``transcribe``: [``botocore``] Transcribe and Transcribe Call Analytics now support automatic language identification along with custom vocabulary, vocabulary filter, custom language model and PII redaction. +* api-change:``application-insights``: [``botocore``] Added Monitoring support for SQL Server Failover Cluster Instance. Additionally, added a new API to allow one-click monitoring of containers resources. +* api-change:``rekognition``: [``botocore``] This release added new attributes to Rekognition Video GetCelebrityRecognition API operations. +* api-change:``connect``: [``botocore``] Amazon Connect Chat now supports real-time message streaming. +* api-change:``ec2``: [``botocore``] Support added for AMI sharing with organizations and organizational units in ModifyImageAttribute API + + +1.19.6 +====== + +* api-change:``gamelift``: [``botocore``] Added support for Arm-based AWS Graviton2 instances, such as M6g, C6g, and R6g. +* api-change:``ecs``: [``botocore``] Amazon ECS now supports running Fargate tasks on Windows Operating Systems Families which includes Windows Server 2019 Core and Windows Server 2019 Full. +* api-change:``sagemaker``: [``botocore``] This release adds support for RStudio on SageMaker. +* api-change:``connectparticipant``: [``botocore``] This release adds a new boolean attribute - Connect Participant - to the CreateParticipantConnection API, which can be used to mark the participant as connected. +* api-change:``ec2``: [``botocore``] Added new read-only DenyAllIGWTraffic network interface attribute. Added support for DL1 24xlarge instances powered by Habana Gaudi Accelerators for deep learning model training workloads +* api-change:``ssm-incidents``: [``botocore``] Updating documentation, adding new field to ConflictException to indicate earliest retry timestamp for some operations, increase maximum length of nextToken fields + + +1.19.5 +====== + +* api-change:``autoscaling``: [``botocore``] This release adds support for attribute-based instance type selection, a new EC2 Auto Scaling feature that lets customers express their instance requirements as a set of attributes, such as vCPU, memory, and storage. +* api-change:``ec2``: [``botocore``] This release adds: attribute-based instance type selection for EC2 Fleet, Spot Fleet, a feature that lets customers express instance requirements as attributes like vCPU, memory, and storage; and Spot placement score, a feature that helps customers identify an optimal location to run Spot workloads. +* enhancement:Session: Added `get_partition_for_region` to lookup partition for a given region_name +* api-change:``eks``: [``botocore``] EKS managed node groups now support BOTTLEROCKET_x86_64 and BOTTLEROCKET_ARM_64 AMI types. +* api-change:``sagemaker``: [``botocore``] This release allows customers to describe one or more versioned model packages through BatchDescribeModelPackage, update project via UpdateProject, modify and read customer metadata properties using Create, Update and Describe ModelPackage and enables cross account registration of model packages. +* enhancement:Session: [``botocore``] Added `get_partition_for_region` allowing partition lookup by region name. 
+* api-change:``textract``: [``botocore``] This release adds support for asynchronously analyzing invoice and receipt documents through two new APIs: StartExpenseAnalysis and GetExpenseAnalysis +* enchancement:``s3``: TransferConfig now supports the `max_bandwidth` argument. + + +1.19.4 +====== + +* api-change:``emr-containers``: [``botocore``] This feature enables auto-generation of certificate to secure the managed-endpoint and removes the need for customer provided certificate-arn during managed-endpoint setup. +* api-change:``chime-sdk-messaging``: [``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint +* api-change:``chime-sdk-identity``: [``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint + + +1.19.3 +====== + +* api-change:``rds``: [``botocore``] This release adds support for Amazon RDS Custom, which is a new RDS management type that gives you full access to your database and operating system. For more information, see https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-custom.html +* api-change:``auditmanager``: [``botocore``] This release introduces a new feature for Audit Manager: Custom framework sharing. You can now share your custom frameworks with another AWS account, or replicate them into another AWS Region under your own account. +* api-change:``ec2``: [``botocore``] This release adds support to create a VPN Connection that is not attached to a Gateway at the time of creation. Use this to create VPNs associated with Core Networks, or modify your VPN and attach a gateway using the modify API after creation. +* api-change:``route53resolver``: [``botocore``] New API for ResolverConfig, which allows autodefined rules for reverse DNS resolution to be disabled for a VPC + + +1.19.2 +====== + +* api-change:``quicksight``: [``botocore``] Added QSearchBar option for GenerateEmbedUrlForRegisteredUser ExperienceConfiguration to support Q search bar embedding +* api-change:``auditmanager``: [``botocore``] This release introduces character restrictions for ControlSet names. We updated regex patterns for the following attributes: ControlSet, CreateAssessmentFrameworkControlSet, and UpdateAssessmentFrameworkControlSet. +* api-change:``chime``: [``botocore``] Chime VoiceConnector and VoiceConnectorGroup APIs will now return an ARN. + + +1.19.1 +====== + +* api-change:``connect``: [``botocore``] Released Amazon Connect hours of operation API for general availability (GA). This API also supports AWS CloudFormation. For more information, see Amazon Connect Resource Type Reference in the AWS CloudFormation User Guide. + + +1.19.0 +====== + +* api-change:``appflow``: [``botocore``] Feature to add support for JSON-L format for S3 as a source. +* api-change:``mediapackage-vod``: [``botocore``] MediaPackage passes through digital video broadcasting (DVB) subtitles into the output. +* api-change:``mediaconvert``: [``botocore``] AWS Elemental MediaConvert SDK has added support for specifying caption time delta in milliseconds and the ability to apply color range legalization to source content other than AVC video. +* api-change:``mediapackage``: [``botocore``] When enabled, MediaPackage passes through digital video broadcasting (DVB) subtitles into the output. +* api-change:``panorama``: [``botocore``] General availability for AWS Panorama. AWS SDK for Panorama includes APIs to manage your devices and nodes, and deploy computer vision applications to the edge. 
For more information, see the AWS Panorama documentation at http://docs.aws.amazon.com/panorama +* feature:Serialization: [``botocore``] rest-json serialization defaults aligned across AWS SDKs +* api-change:``directconnect``: [``botocore``] This release adds 4 new APIS, which needs to be public able +* api-change:``securityhub``: [``botocore``] Added support for cross-Region finding aggregation, which replicates findings from linked Regions to a single aggregation Region. Added operations to view, enable, update, and delete the finding aggregation. + + +1.18.65 +======= + +* api-change:``dataexchange``: [``botocore``] This release adds support for our public preview of AWS Data Exchange for Amazon Redshift. This enables data providers to list products including AWS Data Exchange datashares for Amazon Redshift, giving subscribers read-only access to provider data in Amazon Redshift. +* api-change:``chime-sdk-messaging``: [``botocore``] The Amazon Chime SDK now allows developers to execute business logic on in-flight messages before they are delivered to members of a messaging channel with channel flows. + + +1.18.64 +======= + +* api-change:``quicksight``: [``botocore``] AWS QuickSight Service Features - Add IP Restriction UI and public APIs support. +* enchancement:AWSCRT: [``botocore``] Upgrade awscrt extra to 0.12.5 +* api-change:``ivs``: [``botocore``] Bug fix: remove unsupported maxResults and nextToken pagination parameters from ListTagsForResource + + +1.18.63 +======= + +* api-change:``efs``: [``botocore``] Update efs client to latest version +* api-change:``glue``: [``botocore``] Enable S3 event base crawler API. + + +1.18.62 +======= + +* api-change:``elbv2``: [``botocore``] Update elbv2 client to latest version +* api-change:``autoscaling``: [``botocore``] Amazon EC2 Auto Scaling now supports filtering describe Auto Scaling groups API using tags +* api-change:``sagemaker``: [``botocore``] This release updates the provisioning artifact ID to an optional parameter in CreateProject API. The provisioning artifact ID defaults to the latest provisioning artifact ID of the product if you don't provide one. +* api-change:``robomaker``: [``botocore``] Adding support to GPU simulation jobs as well as non-ROS simulation jobs. + + +1.18.61 +======= + +* api-change:``config``: [``botocore``] Adding Config support for AWS::OpenSearch::Domain +* api-change:``ec2``: [``botocore``] This release adds support for additional VPC Flow Logs delivery options to S3, such as Apache Parquet formatted files, Hourly partitions and Hive-compatible S3 prefixes +* api-change:``storagegateway``: [``botocore``] Adding support for Audit Logs on NFS shares and Force Closing Files on SMB shares. +* api-change:``workmail``: [``botocore``] This release adds APIs for adding, removing and retrieving details of mail domains +* api-change:``kinesisanalyticsv2``: [``botocore``] Support for Apache Flink 1.13 in Kinesis Data Analytics. Changed the required status of some Update properties to better fit the corresponding Create properties. + + +1.18.60 +======= + +* api-change:``cloudsearch``: [``botocore``] Adds an additional validation exception for Amazon CloudSearch configuration APIs for better error handling. +* api-change:``ecs``: [``botocore``] Documentation only update to address tickets. +* api-change:``mediatailor``: [``botocore``] MediaTailor now supports ad prefetching. 
+* api-change:``ec2``: [``botocore``] EncryptionSupport for InstanceStorageInfo added to DescribeInstanceTypes API + + +1.18.59 +======= + +* api-change:``elbv2``: [``botocore``] Update elbv2 client to latest version +* bugfix:Signing: [``botocore``] SigV4QueryAuth and CrtSigV4QueryAuth now properly respect AWSRequest.params while signing boto/botocore`#2521 `__ +* api-change:``medialive``: [``botocore``] This release adds support for Transport Stream files as an input type to MediaLive encoders. +* api-change:``ec2``: [``botocore``] Documentation update for Amazon EC2. +* api-change:``frauddetector``: [``botocore``] New model type: Transaction Fraud Insights, which is optimized for online transaction fraud. Stored Events, which allows customers to send and store data directly within Amazon Fraud Detector. Batch Import, which allows customers to upload a CSV file of historic event data for processing and storage + + +1.18.58 +======= + +* api-change:``lexv2-runtime``: [``botocore``] Update lexv2-runtime client to latest version +* api-change:``lexv2-models``: [``botocore``] Update lexv2-models client to latest version +* api-change:``secretsmanager``: [``botocore``] Documentation updates for Secrets Manager +* api-change:``securityhub``: [``botocore``] Added new resource details objects to ASFF, including resources for WAF rate-based rules, EC2 VPC endpoints, ECR repositories, EKS clusters, X-Ray encryption, and OpenSearch domains. Added additional details for CloudFront distributions, CodeBuild projects, ELB V2 load balancers, and S3 buckets. +* api-change:``mediaconvert``: [``botocore``] AWS Elemental MediaConvert has added the ability to set account policies which control access restrictions for HTTP, HTTPS, and S3 content sources. +* api-change:``ec2``: [``botocore``] This release removes a requirement for filters on SearchLocalGatewayRoutes operations. + + +1.18.57 +======= + +* api-change:``kendra``: [``botocore``] Amazon Kendra now supports indexing and querying documents in different languages. +* api-change:``grafana``: [``botocore``] Initial release of the SDK for Amazon Managed Grafana API. +* api-change:``firehose``: [``botocore``] Allow support for Amazon Opensearch Service(successor to Amazon Elasticsearch Service) as a Kinesis Data Firehose delivery destination. +* api-change:``backup``: [``botocore``] Launch of AWS Backup Vault Lock, which protects your backups from malicious and accidental actions, works with existing backup policies, and helps you meet compliance requirements. +* api-change:``schemas``: [``botocore``] Removing unused request/response objects. +* api-change:``chime``: [``botocore``] This release enables customers to configure Chime MediaCapturePipeline via API. + + +1.18.56 +======= + +* api-change:``sagemaker``: [``botocore``] This release adds a new TrainingInputMode FastFile for SageMaker Training APIs. +* api-change:``amplifybackend``: [``botocore``] Adding a new field 'AmplifyFeatureFlags' to the response of the GetBackend operation. It will return a stringified version of the cli.json file for the given Amplify project. +* api-change:``fsx``: [``botocore``] This release adds support for Lustre 2.12 to FSx for Lustre. +* api-change:``kendra``: [``botocore``] Amazon Kendra now supports integration with AWS SSO + + +1.18.55 +======= + +* api-change:``workmail``: [``botocore``] This release allows customers to change their inbound DMARC settings in Amazon WorkMail. +* api-change:``location``: [``botocore``] Add support for PositionFiltering. 
+* api-change:``application-autoscaling``: [``botocore``] With this release, Application Auto Scaling adds support for Amazon Neptune. Customers can now automatically add or remove Read Replicas of their Neptune clusters to keep the average CPU Utilization at the target value specified by the customers. +* api-change:``ec2``: [``botocore``] Released Capacity Reservation Fleet, a feature of Amazon EC2 Capacity Reservations, which provides a way to manage reserved capacity across instance types. For more information: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/cr-fleets.html +* api-change:``glue``: [``botocore``] This release adds tag as an input of CreateConnection +* api-change:``backup``: [``botocore``] AWS Backup Audit Manager framework report. + + +1.18.54 +======= + +* api-change:``codebuild``: [``botocore``] CodeBuild now allows you to select how batch build statuses are sent to the source provider for a project. +* api-change:``efs``: [``botocore``] Update efs client to latest version +* api-change:``kms``: [``botocore``] Added SDK examples for ConnectCustomKeyStore, CreateCustomKeyStore, CreateKey, DeleteCustomKeyStore, DescribeCustomKeyStores, DisconnectCustomKeyStore, GenerateDataKeyPair, GenerateDataKeyPairWithoutPlaintext, GetPublicKey, ReplicateKey, Sign, UpdateCustomKeyStore and Verify APIs + + 1.18.53 ======= diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 8263783..5b627cf 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -2,4 +2,3 @@ This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact opensource-codeofconduct@amazon.com with any additional questions or comments. - diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a7011dc..0f82db8 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -7,7 +7,7 @@ A good pull request: - Is clear. - Works across all supported versions of Python. -- Follows the existing style of the code base (PEP-8). +- Follows the existing style of the code base (see Codestyle section). - Has comments included as needed. - A test case that demonstrates the previous flaw that now passes with @@ -37,3 +37,30 @@ ideal report includes: - If possible, create a pull request with a (failing) test case demonstrating what's wrong. This makes the process for fixing bugs quicker & gets issues resolved sooner. + +Codestyle +--------- +This project uses flake8 to enforce codstyle requirements. We've codified this +process using a tool called `pre-commit `__. pre-commit +allows us to specify a config file with all tools required for code linting, +and surfaces either a git commit hook, or single command, for enforcing these. + +To validate your PR prior to publishing, you can use the following +`installation guide `__ to setup pre-commit. + +If you don't want to use the git commit hook, you can run the below command +to automatically perform the codestyle validation: + +.. code-block:: bash + + $ pre-commit run + +This will automatically perform simple updates (such as white space clean up) +and provide a list of any failing flake8 checks. After these are addressed, +you can commit the changes prior to publishing the PR. +These checks are also included in our CI setup under the "Lint" workflow which +will provide output on Github for anything missed locally. + +See the `flake8` section of the +`setup.cfg `__ for the +currently enforced rules. 
diff --git a/README.rst b/README.rst index b7ba1ab..d7649b6 100644 --- a/README.rst +++ b/README.rst @@ -49,10 +49,10 @@ Assuming that you have Python and ``virtualenv`` installed, set up your environm $ python -m pip install boto3 - + Using Boto3 ~~~~~~~~~~~~~~ -After installing boto3 +After installing boto3 Next, set up credentials (in e.g. ``~/.aws/credentials``): @@ -68,7 +68,7 @@ Then, set up a default region (in e.g. ``~/.aws/config``): [default] region=us-east-1 - + Other credentials configuration method can be found `here `__ Then, from a Python interpreter: @@ -137,4 +137,3 @@ More Resources * `NOTICE `__ * `Changelog `__ * `License `__ - diff --git a/boto3/__init__.py b/boto3/__init__.py index 5725ff3..c67004d 100644 --- a/boto3/__init__.py +++ b/boto3/__init__.py @@ -18,7 +18,7 @@ from boto3.compat import _warn_deprecated_python __author__ = 'Amazon Web Services' -__version__ = '1.18.53' +__version__ = '1.19.9' # The default Boto3 session; autoloaded when needed. diff --git a/boto3/compat.py b/boto3/compat.py index 099fcde..7b617bb 100644 --- a/boto3/compat.py +++ b/boto3/compat.py @@ -75,7 +75,7 @@ def _warn_deprecated_python(): 'aws-cli-v1/' } deprecated_versions = { - (2,7): py_27_params, + (2, 7): py_27_params, } py_version = sys.version_info[:2] diff --git a/boto3/docs/collection.py b/boto3/docs/collection.py index 3f7a38f..db0fb79 100644 --- a/boto3/docs/collection.py +++ b/boto3/docs/collection.py @@ -185,11 +185,13 @@ def document_collection_method(section, resource_name, action_name, 'method_description': ( 'Creates an iterable of all %s resources ' 'in the collection filtered by kwargs passed to ' - 'method.' % collection_model.resource.type + - 'A %s collection will include all resources by ' + 'method. A %s collection will include all resources by ' 'default if no filters are provided, and extreme ' 'caution should be taken when performing actions ' - 'on all resources.'% collection_model.resource.type), + 'on all resources.' 
% ( + collection_model.resource.type, + collection_model.resource.type + )), 'example_prefix': '%s_iterator = %s.%s.filter' % ( xform_name(collection_model.resource.type), example_resource_name, collection_model.name), diff --git a/boto3/dynamodb/conditions.py b/boto3/dynamodb/conditions.py index c63bd2a..41850b1 100644 --- a/boto3/dynamodb/conditions.py +++ b/boto3/dynamodb/conditions.py @@ -144,8 +144,9 @@ class ConditionAttributeBase(ConditionBase, AttributeBase): AttributeBase.__init__(self, values[0].name) def __eq__(self, other): - return ConditionBase.__eq__(self, other) and \ - AttributeBase.__eq__(self, other) + return ( + ConditionBase.__eq__(self, other) and AttributeBase.__eq__(self, other) + ) def __ne__(self, other): return not self.__eq__(other) diff --git a/boto3/s3/transfer.py b/boto3/s3/transfer.py index bfebc13..79d4487 100644 --- a/boto3/s3/transfer.py +++ b/boto3/s3/transfer.py @@ -166,14 +166,17 @@ class TransferConfig(S3TransferConfig): 'max_io_queue': 'max_io_queue_size' } - def __init__(self, - multipart_threshold=8 * MB, - max_concurrency=10, - multipart_chunksize=8 * MB, - num_download_attempts=5, - max_io_queue=100, - io_chunksize=256 * KB, - use_threads=True): + def __init__( + self, + multipart_threshold=8 * MB, + max_concurrency=10, + multipart_chunksize=8 * MB, + num_download_attempts=5, + max_io_queue=100, + io_chunksize=256 * KB, + use_threads=True, + max_bandwidth=None, + ): """Configuration object for managed S3 transfers :param multipart_threshold: The transfer size threshold for which @@ -209,6 +212,10 @@ class TransferConfig(S3TransferConfig): :param use_threads: If True, threads will be used when performing S3 transfers. If False, no threads will be used in performing transfers: all logic will be ran in the main thread. + + :param max_bandwidth: The maximum bandwidth that will be consumed + in uploading and downloading file content. The value is an integer + in terms of bytes per second. """ super(TransferConfig, self).__init__( multipart_threshold=multipart_threshold, @@ -217,6 +224,7 @@ class TransferConfig(S3TransferConfig): num_download_attempts=num_download_attempts, max_io_queue_size=max_io_queue, io_chunksize=io_chunksize, + max_bandwidth=max_bandwidth, ) # Some of the argument names are not the same as the inherited # S3TransferConfig so we add aliases so you can still access the diff --git a/boto3/session.py b/boto3/session.py index 49c1703..3a3a654 100644 --- a/boto3/session.py +++ b/boto3/session.py @@ -182,6 +182,18 @@ class Session(object): """ return self._session.get_credentials() + def get_partition_for_region(self, region_name): + """Lists the partition name of a particular region. + + :type region_name: string + :param region_name: Name of the region to list partition for (e.g., + us-east-1). + + :rtype: string + :return: Returns the respective partition name (e.g., aws). + """ + return self._session.get_partition_for_region(region_name) + def client(self, service_name, region_name=None, api_version=None, use_ssl=True, verify=None, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, diff --git a/docs/source/guide/events.rst b/docs/source/guide/events.rst index d630275..d97261c 100644 --- a/docs/source/guide/events.rst +++ b/docs/source/guide/events.rst @@ -214,8 +214,8 @@ customize clients or resources and modify the behavior of method calls. 
Here is the list of events that users of Boto3 can register handlers to: -* ``'creating-client-class`` -* ``'creating-resource-class`` +* ``'creating-client-class'`` +* ``'creating-resource-class'`` * ``'provide-client-params'`` diff --git a/scripts/ci/run-crt-tests b/scripts/ci/run-crt-tests index 63fa6f8..9ff2f06 100755 --- a/scripts/ci/run-crt-tests +++ b/scripts/ci/run-crt-tests @@ -29,7 +29,7 @@ def run(command): try: - import awscrt + import awscrt # noqa except ImportError: print("MISSING DEPENDENCY: awscrt must be installed to run the crt tests.") sys.exit(1) diff --git a/scripts/new-change b/scripts/new-change index 73bd9ab..4d0cbcd 100755 --- a/scripts/new-change +++ b/scripts/new-change @@ -135,7 +135,7 @@ def replace_issue_references(parsed, repo_name): '`%s `__' % ( match.group(), repo_name, number)) - new_description = re.sub('#\d+', linkify, description) + new_description = re.sub(r'#\d+', linkify, description) parsed['description'] = new_description diff --git a/setup.cfg b/setup.cfg index 7fbd7f0..61c4764 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,9 +3,17 @@ universal = 0 [metadata] requires_dist = - botocore>=1.21.53,<1.22.0 + botocore>=1.22.9,<1.23.0 jmespath>=0.7.1,<1.0.0 s3transfer>=0.5.0,<0.6.0 [options.extras_require] crt = botocore[crt]>=1.21.0,<2.0a0 + +[flake8] +ignore = E203,E501,W503,W504 +exclude = + docs, + boto3/compat.py, + boto3/data, + .changes diff --git a/setup.py b/setup.py index e110169..fc2a7e7 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ VERSION_RE = re.compile(r'''__version__ = ['"]([0-9.]+)['"]''') requires = [ - 'botocore>=1.21.53,<1.22.0', + 'botocore>=1.22.9,<1.23.0', 'jmespath>=0.7.1,<1.0.0', 's3transfer>=0.5.0,<0.6.0' ] diff --git a/tests/__init__.py b/tests/__init__.py index 64b56c4..169862a 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -12,24 +12,12 @@ # language governing permissions and limitations under the License. import random -import sys import time -from botocore.compat import six - - import unittest from unittest import mock -# In python 3, order matters when calling assertEqual to -# compare lists and dictionaries with lists. Therefore, -# assertItemsEqual needs to be used but it is renamed to -# assertCountEqual in python 3. 
-if six.PY2: - unittest.TestCase.assertCountEqual = unittest.TestCase.assertItemsEqual - - def unique_id(name): """ Generate a unique ID that includes the given name, diff --git a/tests/functional/docs/test_ec2.py b/tests/functional/docs/test_ec2.py index 6f5cda8..979b038 100644 --- a/tests/functional/docs/test_ec2.py +++ b/tests/functional/docs/test_ec2.py @@ -19,17 +19,23 @@ from boto3.docs.service import ServiceDocumenter class TestInstanceDeleteTags(BaseDocsFunctionalTests): def setUp(self): self.documenter = ServiceDocumenter( - 'ec2', session=Session(region_name='us-east-1')) + 'ec2', session=Session(region_name='us-east-1') + ) self.generated_contents = self.documenter.document_service() self.generated_contents = self.generated_contents.decode('utf-8') def test_delete_tags_method_is_documented(self): contents = self.get_class_document_block( - 'EC2.Instance', self.generated_contents) + 'EC2.Instance', self.generated_contents + ) method_contents = self.get_method_document_block( - 'delete_tags', contents) - self.assert_contains_lines_in_order([ - 'response = instance.delete_tags(', - 'DryRun=True|False,', - 'Tags=[', - ], method_contents) + 'delete_tags', contents + ) + self.assert_contains_lines_in_order( + [ + 'response = instance.delete_tags(', + 'DryRun=True|False,', + 'Tags=[', + ], + method_contents + ) diff --git a/tests/functional/docs/test_smoke.py b/tests/functional/docs/test_smoke.py index 34483b9..c4ea19e 100644 --- a/tests/functional/docs/test_smoke.py +++ b/tests/functional/docs/test_smoke.py @@ -23,12 +23,13 @@ from boto3.docs.service import ServiceDocumenter def botocore_session(): return botocore.session.get_session() + @pytest.fixture def boto3_session(): return boto3.Session(region_name='us-east-1') + def all_services(): - botocore_session = botocore.session.get_session() session = boto3.Session(region_name='us-east-1') for service_name in session.get_available_services(): yield service_name @@ -52,15 +53,13 @@ def test_documentation( # Check that all of the services have the appropriate title _assert_has_title(generated_docs, client) - # Check that all services have the client documented. _assert_has_client_documentation(generated_docs, service_name, client) - - #If the service has resources, make sure the service resource - #is at least documented. + # If the service has resources, make sure the service resource + # is at least documented. if service_name in available_resources: - + resource = boto3.resource(service_name, 'us-east-1') _assert_has_resource_documentation( generated_docs, service_name, resource @@ -68,8 +67,7 @@ def test_documentation( # If the client can paginate, make sure the paginators are documented. try: - paginator_model = botocore_session.get_paginator_model( - service_name) + paginator_model = botocore_session.get_paginator_model(service_name) _assert_has_paginator_documentation( generated_docs, service_name, client, sorted(paginator_model._paginator_config) @@ -77,7 +75,6 @@ def test_documentation( except DataNotFoundError: pass - # If the client has waiters, make sure the waiters are documented. 
if client.waiter_names: waiter_model = botocore_session.get_waiter_model(service_name) diff --git a/tests/functional/dynamodb/test_stubber_conditions.py b/tests/functional/dynamodb/test_stubber_conditions.py index b326896..3139c61 100644 --- a/tests/functional/dynamodb/test_stubber_conditions.py +++ b/tests/functional/dynamodb/test_stubber_conditions.py @@ -31,11 +31,15 @@ class TestStubberSupportsFilterExpressions(unittest.TestCase): ) stubber = Stubber(table.meta.client) - stubber.add_response('query', dict(Items=list()), expected_params=dict( + stubber.add_response( + 'query', + dict(Items=list()), + expected_params=dict( TableName='mytable', KeyConditionExpression=key_expr, FilterExpression=filter_expr - )) + ) + ) with stubber: response = table.query(KeyConditionExpression=key_expr, @@ -46,15 +50,20 @@ class TestStubberSupportsFilterExpressions(unittest.TestCase): def test_table_scan_can_be_stubbed_with_expressions(self): table = self.resource.Table('mytable') - filter_expr = Attr('myattr').eq('foo') & ( - Attr('myattr2').lte('buzz') | Attr('myattr2').gte('fizz') + filter_expr = ( + Attr('myattr').eq('foo') & + (Attr('myattr2').lte('buzz') | Attr('myattr2').gte('fizz')) ) stubber = Stubber(table.meta.client) - stubber.add_response('scan', dict(Items=list()), expected_params=dict( + stubber.add_response( + 'scan', + dict(Items=list()), + expected_params=dict( TableName='mytable', FilterExpression=filter_expr - )) + ) + ) with stubber: response = table.scan(FilterExpression=filter_expr) diff --git a/tests/functional/dynamodb/test_table.py b/tests/functional/dynamodb/test_table.py index 5de0972..b96bfe8 100644 --- a/tests/functional/dynamodb/test_table.py +++ b/tests/functional/dynamodb/test_table.py @@ -10,7 +10,7 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. -from tests import unittest, mock +from tests import unittest import boto3 from botocore.stub import Stubber diff --git a/tests/functional/test_s3.py b/tests/functional/test_s3.py index 0b248cc..a7d8d79 100644 --- a/tests/functional/test_s3.py +++ b/tests/functional/test_s3.py @@ -377,7 +377,7 @@ class TestDownloadFileobj(BaseTransferTest): # If this is a ranged get, ContentRange needs to be returned, # contents needs to be pruned, and Range needs to be an expected param. 
if end_byte is not None: - contents = full_contents[start_byte:end_byte+1] + contents = full_contents[start_byte:end_byte + 1] part_range = 'bytes=%s-%s' % (start_byte, end_byte_range) content_range = 'bytes=%s-%s/%s' % ( start_byte, end_byte, len(full_contents)) diff --git a/tests/functional/test_smoke.py b/tests/functional/test_smoke.py index 19c13da..12a05ff 100644 --- a/tests/functional/test_smoke.py +++ b/tests/functional/test_smoke.py @@ -17,6 +17,7 @@ import botocore.session boto3_session = None + def create_session(): global boto3_session if boto3_session is None: @@ -28,6 +29,7 @@ def create_session(): return boto3_session + def _all_resources(): session = create_session() for service_name in session.get_available_resources(): diff --git a/tests/integration/test_dynamodb.py b/tests/integration/test_dynamodb.py index df46b55..2453322 100644 --- a/tests/integration/test_dynamodb.py +++ b/tests/integration/test_dynamodb.py @@ -171,16 +171,20 @@ class TestDynamoDBConditions(BaseDynamoDBTest): def test_condition_and(self): r = self.scan( - filter_expression=(Attr('MyHashKey').eq('mykey') & - Attr('MyString').eq('mystring'))) + filter_expression=( + Attr('MyHashKey').eq('mykey') & Attr('MyString').eq('mystring') + ) + ) item = r['Items'][0] self.assertTrue( item['MyHashKey'] == 'mykey' and item['MyString'] == 'mystring') def test_condition_or(self): r = self.scan( - filter_expression=(Attr('MyHashKey').eq('mykey2') | - Attr('MyString').eq('mystring'))) + filter_expression=( + Attr('MyHashKey').eq('mykey2') | Attr('MyString').eq('mystring') + ) + ) item = r['Items'][0] self.assertTrue( item['MyHashKey'] == 'mykey2' or item['MyString'] == 'mystring') diff --git a/tests/integration/test_s3.py b/tests/integration/test_s3.py index 30a5569..fa5191b 100644 --- a/tests/integration/test_s3.py +++ b/tests/integration/test_s3.py @@ -472,6 +472,7 @@ class TestS3Transfers(unittest.TestCase): # twice when using signature version 4. self.amount_seen = 0 lock = threading.Lock() + def progress_callback(amount): with lock: self.amount_seen += amount @@ -593,9 +594,11 @@ class TestS3Transfers(unittest.TestCase): def test_download_file_with_directory_not_exist(self): transfer = self.create_s3_transfer() - self.client.put_object(Bucket=self.bucket_name, - Key='foo.txt', - Body=b'foo') + self.client.put_object( + Bucket=self.bucket_name, + Key='foo.txt', + Body=b'foo' + ) self.addCleanup(self.delete_object, 'foo.txt') download_path = os.path.join(self.files.rootdir, 'a', 'b', 'c', 'downloaded.txt') @@ -667,7 +670,7 @@ class TestS3Transfers(unittest.TestCase): # This is just a sanity check to ensure that the bucket interface work. key = 'bucket.txt' bucket = self.session.resource('s3').Bucket(self.bucket_name) - filename = self.files.create_file_with_size(key, 1024*1024) + filename = self.files.create_file_with_size(key, 1024 * 1024) bucket.upload_file(Filename=filename, Key=key) self.addCleanup(self.delete_object, key) download_path = os.path.join(self.files.rootdir, unique_id('foo')) @@ -678,7 +681,7 @@ class TestS3Transfers(unittest.TestCase): # This is just a sanity check to ensure that the object interface work. 
key = 'object.txt' obj = self.session.resource('s3').Object(self.bucket_name, key) - filename = self.files.create_file_with_size(key, 1024*1024) + filename = self.files.create_file_with_size(key, 1024 * 1024) obj.upload_file(Filename=filename) self.addCleanup(self.delete_object, key) download_path = os.path.join(self.files.rootdir, unique_id('foo')) diff --git a/tests/integration/test_session.py b/tests/integration/test_session.py index fb43573..445c16f 100644 --- a/tests/integration/test_session.py +++ b/tests/integration/test_session.py @@ -10,7 +10,7 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. -from tests import unittest, unique_id +from tests import unittest import botocore.session import boto3.session diff --git a/tests/unit/docs/test_collection.py b/tests/unit/docs/test_collection.py index 5ebb82a..353b19e 100644 --- a/tests/unit/docs/test_collection.py +++ b/tests/unit/docs/test_collection.py @@ -22,9 +22,9 @@ class TestCollectionDocumenter(BaseDocsTest): self.assert_contains_lines_in_order([ '.. py:attribute:: samples', ' A collection of Sample resources.' - 'A Sample Collection will include all resources by default, ' - 'and extreme caution should be taken when performing actions ' - 'on all resources.', + 'A Sample Collection will include all resources by default, ' + 'and extreme caution should be taken when performing actions ' + 'on all resources.', ' .. py:method:: all()', (' Creates an iterable of all Sample resources in the ' 'collection.'), @@ -35,7 +35,7 @@ class TestCollectionDocumenter(BaseDocsTest): ' :returns: A list of Sample resources', ' .. py:method:: filter(**kwargs)', (' Creates an iterable of all Sample resources in ' - 'the collection filtered by kwargs passed to method.' + 'the collection filtered by kwargs passed to method. ' 'A Sample collection will include all resources by default ' 'if no filters are provided, and extreme caution should be ' 'taken when performing actions on all resources'), @@ -102,4 +102,4 @@ class TestCollectionDocumenter(BaseDocsTest): ' :rtype: list(:py:class:`myservice.Sample`)', ' :returns: A list of Sample resources', ' ' - ]) \ No newline at end of file + ]) diff --git a/tests/unit/dynamodb/test_table.py b/tests/unit/dynamodb/test_table.py index 6fdb728..d7386a8 100644 --- a/tests/unit/dynamodb/test_table.py +++ b/tests/unit/dynamodb/test_table.py @@ -225,7 +225,6 @@ class BaseTransformationTest(unittest.TestCase): self.assert_batch_write_calls_are([first_batch, second_batch, third_batch]) - def test_repeated_flushing_on_exit(self): # We're going to simulate unprocessed_items # returning multiple unprocessed items across calls. 
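For reference, the DynamoDB condition-expression and batch-write tests being reformatted in the hunks above exercise boto3's public helpers rather than any private API. A minimal sketch of the same calls outside a test, reusing the table and attribute names that appear in the tests ('mytable', 'pkey', 'skey', 'other') purely for illustration:

    import boto3
    from boto3.dynamodb.conditions import Key, Attr

    dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
    table = dynamodb.Table('mytable')

    # Key()/Attr() build the KeyConditionExpression and FilterExpression
    # values that the stubbed query/scan tests expect as parameters.
    response = table.query(
        KeyConditionExpression=Key('pkey').eq('foo1'),
        FilterExpression=Attr('other').eq('other2'),
    )

    # batch_writer() buffers put/delete requests and flushes them via
    # BatchWriteItem, which is the behavior the flushing tests simulate.
    with table.batch_writer() as batch:
        batch.put_item(Item={'pkey': 'foo1', 'skey': 'bar1', 'other': 'other2'})
        batch.delete_item(Key={'pkey': 'foo2', 'skey': 'bar2'})
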
@@ -348,39 +347,63 @@ class BaseTransformationTest(unittest.TestCase): first_batch = { 'RequestItems': { self.table_name: [ - {'PutRequest': { 'Item': { - 'pkey': 'foo1', - 'skey': 'bar1', - 'other': 'other2' - }}}, - {'PutRequest': { 'Item': { - 'pkey': 'foo1', - 'skey': 'bar2', - 'other': 'other3' - }}}, - {'DeleteRequest': {'Key': { - 'pkey': 'foo2', - 'skey': 'bar2', - }}}, - {'DeleteRequest': {'Key': { - 'pkey': 'foo2', - 'skey': 'bar3', - }}}, - {'DeleteRequest': {'Key': { - 'pkey': 'foo3', - 'skey': 'bar3', - }}}, + { + 'PutRequest': { + 'Item': { + 'pkey': 'foo1', + 'skey': 'bar1', + 'other': 'other2' + } + } + }, + { + 'PutRequest': { + 'Item': { + 'pkey': 'foo1', + 'skey': 'bar2', + 'other': 'other3' + } + } + }, + { + 'DeleteRequest': { + 'Key': { + 'pkey': 'foo2', + 'skey': 'bar2', + } + } + }, + { + 'DeleteRequest': { + 'Key': { + 'pkey': 'foo2', + 'skey': 'bar3', + } + } + }, + { + 'DeleteRequest': { + 'Key': { + 'pkey': 'foo3', + 'skey': 'bar3', + } + } + }, ] } } second_batch = { 'RequestItems': { self.table_name: [ - {'PutRequest': { 'Item': { - 'pkey': 'foo1', - 'skey': 'bar1', - 'other': 'other2' - }}}, + { + 'PutRequest': { + 'Item': { + 'pkey': 'foo1', + 'skey': 'bar1', + 'other': 'other2' + } + } + }, ] } } diff --git a/tests/unit/dynamodb/test_transform.py b/tests/unit/dynamodb/test_transform.py index aecbbef..8dec344 100644 --- a/tests/unit/dynamodb/test_transform.py +++ b/tests/unit/dynamodb/test_transform.py @@ -135,9 +135,9 @@ class TestInputOutputTransformer(BaseTransformationTest): transformation=self.transformation, target_shape=self.target_shape) assert input_params == { - 'TransformMe': - {'foo': self.transformed_value}, - 'LeaveAlone': {'foo': self.original_value}} + 'TransformMe': {'foo': self.transformed_value}, + 'LeaveAlone': {'foo': self.original_value} + } def test_transform_list(self): input_params = { @@ -315,10 +315,13 @@ class TestInputOutputTransformer(BaseTransformationTest): transformation=self.transformation, target_shape=self.target_shape) assert input_params == { - 'TargetedWrapperList': [[ - self.transformed_value, self.transformed_value]], - 'UntargetedWrapperList': [[ - self.original_value, self.original_value]]} + 'TargetedWrapperList': [ + [self.transformed_value, self.transformed_value] + ], + 'UntargetedWrapperList': [ + [self.original_value, self.original_value] + ] + } def test_transform_incorrect_type_for_structure(self): input_params = { diff --git a/tests/unit/dynamodb/test_types.py b/tests/unit/dynamodb/test_types.py index d754547..347f270 100644 --- a/tests/unit/dynamodb/test_types.py +++ b/tests/unit/dynamodb/test_types.py @@ -16,8 +16,6 @@ import pytest from tests import unittest -from botocore.compat import six - from boto3.dynamodb.types import Binary, TypeSerializer, TypeDeserializer @@ -97,8 +95,6 @@ class TestSerializer(unittest.TestCase): def test_serialize_bytearray(self): assert self.serializer.serialize(bytearray([1])) == {'B': b'\x01'} - @pytest.mark.skipif(six.PY2, - reason='This is a test when using python3 version of bytes') def test_serialize_bytes(self): assert self.serializer.serialize(b'\x01') == {'B': b'\x01'} @@ -141,9 +137,13 @@ class TestSerializer(unittest.TestCase): def test_serialize_map(self): serialized_value = self.serializer.serialize( - {'foo': 'bar', 'baz': {'biz': 1}}) - assert serialized_value == {'M': - {'foo': {'S': 'bar'}, 'baz': {'M': {'biz': {'N': '1'}}}} + {'foo': 'bar', 'baz': {'biz': 1}} + ) + assert serialized_value == { + 'M': { + 'foo': {'S': 'bar'}, + 'baz': {'M': {'biz': {'N': 
'1'}}} + } } @@ -179,22 +179,22 @@ class TestDeserializer(unittest.TestCase): def test_deserialize_number_set(self): assert self.deserializer.deserialize( - {'NS': ['1', '1.25']}), set([Decimal('1') == Decimal('1.25')]) + {'NS': ['1', '1.25']}) == set([Decimal('1'), Decimal('1.25')]) def test_deserialize_string_set(self): assert self.deserializer.deserialize( - {'SS': ['foo', 'bar']}) == set(['foo', 'bar']) + {'SS': ['foo', 'bar']}) == set(['foo', 'bar']) def test_deserialize_binary_set(self): - assert self.deserializer.deserialize({'BS': [b'\x00', b'\x01']}) == set( - [Binary(b'\x00'), Binary(b'\x01')]) + assert self.deserializer.deserialize( + {'BS': [b'\x00', b'\x01']}) == set([Binary(b'\x00'), Binary(b'\x01')]) def test_deserialize_list(self): - assert self.deserializer.deserialize({'L': - [{'N': '1'}, {'S': 'foo'}, {'L': [{'N': '1.25'}]}]} + assert self.deserializer.deserialize( + {'L': [{'N': '1'}, {'S': 'foo'}, {'L': [{'N': '1.25'}]}]} ) == [Decimal('1'), 'foo', [Decimal('1.25')]] def test_deserialize_map(self): - assert self.deserializer.deserialize({'M': {'foo': - {'S': 'mystring'}, 'bar': {'M': {'baz': {'N': '1'}}}}} + assert self.deserializer.deserialize( + {'M': {'foo': {'S': 'mystring'}, 'bar': {'M': {'baz': {'N': '1'}}}}} ) == {'foo': 'mystring', 'bar': {'baz': Decimal('1')}} diff --git a/tests/unit/resources/test_collection.py b/tests/unit/resources/test_collection.py index 83e48a5..384989d 100644 --- a/tests/unit/resources/test_collection.py +++ b/tests/unit/resources/test_collection.py @@ -16,8 +16,9 @@ from botocore.hooks import HierarchicalEmitter from botocore.model import ServiceModel from boto3.utils import ServiceContext -from boto3.resources.collection import CollectionFactory, CollectionManager, \ - ResourceCollection +from boto3.resources.collection import ( + CollectionFactory, CollectionManager, ResourceCollection +) from boto3.resources.base import ResourceMeta from boto3.resources.factory import ResourceFactory from boto3.resources.model import Collection diff --git a/tests/unit/resources/test_collection_smoke.py b/tests/unit/resources/test_collection_smoke.py index fc1a5d6..798a2bd 100644 --- a/tests/unit/resources/test_collection_smoke.py +++ b/tests/unit/resources/test_collection_smoke.py @@ -87,6 +87,7 @@ def _collection_test_args(): for collection_model in resource_model.collections: yield (client, service_name, resource_name, collection_model) + @pytest.mark.parametrize( 'collection_args', _collection_test_args() @@ -100,6 +101,7 @@ def test_all_collections_have_paginators_if_needed(collection_args): # should be a paginator applied to it. 
_assert_collection_has_paginator_if_needed(*collection_args) + def _assert_collection_has_paginator_if_needed( client, service_name, resource_name, collection_model ): diff --git a/tests/unit/resources/test_factory.py b/tests/unit/resources/test_factory.py index 5eb25c3..0a675be 100644 --- a/tests/unit/resources/test_factory.py +++ b/tests/unit/resources/test_factory.py @@ -20,7 +20,6 @@ from boto3.utils import ServiceContext from boto3.resources.base import ServiceResource from boto3.resources.collection import CollectionManager from boto3.resources.factory import ResourceFactory -from boto3.resources.action import WaiterAction class BaseTestResourceFactory(BaseTestCase): @@ -35,13 +34,13 @@ class BaseTestResourceFactory(BaseTestCase): resource_json_definition = {} if resource_json_definitions is None: resource_json_definitions = {} - service_context=ServiceContext( + service_context = ServiceContext( service_name='test', resource_json_definitions=resource_json_definitions, service_model=service_model, service_waiter_model=None ) - + return self.factory.load_from_definition( resource_name=resource_name, single_resource_json_definition=resource_json_definition, @@ -573,10 +572,14 @@ class TestResourceFactory(BaseTestResourceFactory): model = { "waiters": { "Exists": { - "waiterName": "BucketExists", - "params": [ - {"target": "Bucket", "source": "identifier", - "name": "Name"}] + "waiterName": "BucketExists", + "params": [ + { + "target": "Bucket", + "source": "identifier", + "name": "Name" + } + ] } } } @@ -595,10 +598,14 @@ class TestResourceFactory(BaseTestResourceFactory): model = { "waiters": { "Exists": { - "waiterName": "BucketExists", - "params": [ - {"target": "Bucket", "source": "identifier", - "name": "Name"}] + "waiterName": "BucketExists", + "params": [ + { + "target": "Bucket", + "source": "identifier", + "name": "Name" + } + ] } } } diff --git a/tests/unit/resources/test_model.py b/tests/unit/resources/test_model.py index de98860..c49f29f 100644 --- a/tests/unit/resources/test_model.py +++ b/tests/unit/resources/test_model.py @@ -169,8 +169,11 @@ class TestModels(BaseTestCase): 'resource': { 'type': 'Frob', 'identifiers': [ - {'target':'Id', 'source':'data', - 'path':'FrobId'} + { + 'target': 'Id', + 'source': 'data', + 'path': 'FrobId' + } ] } } @@ -237,6 +240,7 @@ class TestModels(BaseTestCase): assert waiter.waiter_name == 'ObjectExists' assert waiter.params[0].target == 'Bucket' + class TestRenaming(BaseTestCase): def test_multiple(self): # This tests a bunch of different renames working together @@ -250,8 +254,11 @@ class TestRenaming(BaseTestCase): 'resource': { 'type': 'Frob', 'identifiers': [ - {'target':'Id', 'source':'data', - 'path': 'FrobId'} + { + 'target': 'Id', + 'source': 'data', + 'path': 'FrobId' + } ] } } @@ -348,8 +355,11 @@ class TestRenaming(BaseTestCase): 'resource': { 'type': 'Frob', 'identifiers': [ - {'target':'Id', 'source':'data', - 'path': 'FrobId'} + { + 'target': 'Id', + 'source': 'data', + 'path': 'FrobId' + } ] } } @@ -368,8 +378,11 @@ class TestRenaming(BaseTestCase): 'resource': { 'type': 'Frob', 'identifiers': [ - {'target':'Id', 'source':'data', - 'path': 'FrobId'} + { + 'target': 'Id', + 'source': 'data', + 'path': 'FrobId' + } ] } } diff --git a/tests/unit/resources/test_params.py b/tests/unit/resources/test_params.py index a78136b..84b43fa 100644 --- a/tests/unit/resources/test_params.py +++ b/tests/unit/resources/test_params.py @@ -15,10 +15,12 @@ import pytest from boto3.exceptions import ResourceLoadException from 
boto3.resources.base import ResourceMeta, ServiceResource from boto3.resources.model import Request -from boto3.resources.params import create_request_parameters, \ - build_param_structure +from boto3.resources.params import ( + create_request_parameters, build_param_structure +) from tests import BaseTestCase, mock + class TestServiceActionParams(BaseTestCase): def test_service_action_params_identifier(self): request_model = Request({ @@ -104,7 +106,7 @@ class TestServiceActionParams(BaseTestCase): parent.meta = ResourceMeta('test', data=None) with pytest.raises(ResourceLoadException): - params = create_request_parameters(parent, request_model) + create_request_parameters(parent, request_model) def test_service_action_params_constants(self): request_model = Request({ diff --git a/tests/unit/resources/test_response.py b/tests/unit/resources/test_response.py index 728a572..740f241 100644 --- a/tests/unit/resources/test_response.py +++ b/tests/unit/resources/test_response.py @@ -17,8 +17,9 @@ from boto3.utils import ServiceContext from boto3.resources.base import ResourceMeta, ServiceResource from boto3.resources.model import ResponseResource, Parameter from boto3.resources.factory import ResourceFactory -from boto3.resources.response import build_identifiers, build_empty_response,\ - RawHandler, ResourceHandler +from boto3.resources.response import ( + build_identifiers, build_empty_response, RawHandler, ResourceHandler +) class TestBuildIdentifiers(BaseTestCase): @@ -216,7 +217,6 @@ class TestBuildEmptyResponse(BaseTestCase): response = self.get_response() assert response is None - def test_path_list(self): self.search_path = 'Container[1].Frob' @@ -235,7 +235,6 @@ class TestBuildEmptyResponse(BaseTestCase): response = self.get_response() assert response is None - def test_path_invalid(self): self.search_path = 'Container.Invalid' diff --git a/tests/unit/s3/test_inject.py b/tests/unit/s3/test_inject.py index 6d44517..f167832 100644 --- a/tests/unit/s3/test_inject.py +++ b/tests/unit/s3/test_inject.py @@ -100,6 +100,7 @@ class TestBucketLoad(unittest.TestCase): with pytest.raises(ClientError): inject.bucket_load(self.resource) + class TestBucketTransferMethods(unittest.TestCase): def setUp(self): diff --git a/tests/unit/s3/test_transfer.py b/tests/unit/s3/test_transfer.py index 71ea4a3..9848cf2 100644 --- a/tests/unit/s3/test_transfer.py +++ b/tests/unit/s3/test_transfer.py @@ -23,6 +23,7 @@ from boto3.s3.transfer import create_transfer_manager from boto3.s3.transfer import S3Transfer from boto3.s3.transfer import OSUtils, TransferConfig, ProgressCallbackInvoker from boto3.s3.transfer import ClientError, S3TransferRetriesExceededError +from boto3.s3.transfer import KB, MB class TestCreateTransferManager(unittest.TestCase): @@ -83,6 +84,26 @@ class TestTransferConfig(unittest.TestCase): self.assert_value_of_actual_and_alias( config, 'max_io_queue_size', 'max_io_queue', new_value) + def test_transferconfig_parameters(self): + config = TransferConfig( + multipart_threshold=8 * MB, + max_concurrency=10, + multipart_chunksize=8 * MB, + num_download_attempts=5, + max_io_queue=100, + io_chunksize=256 * KB, + use_threads=True, + max_bandwidth=1024 * KB, + ) + assert config.multipart_threshold == 8 * MB + assert config.multipart_chunksize == 8 * MB + assert config.max_request_concurrency == 10 + assert config.num_download_attempts == 5 + assert config.max_io_queue_size == 100 + assert config.io_chunksize == 256 * KB + assert config.use_threads is True + assert config.max_bandwidth == 1024 * KB + 
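The new test_transferconfig_parameters case above checks that TransferConfig keyword arguments land on the expected attributes (for example, max_io_queue onto max_io_queue_size). A minimal sketch of how such a config is applied to an actual transfer, with a hypothetical bucket and file name used only for illustration:

    import boto3
    from boto3.s3.transfer import TransferConfig, KB, MB

    config = TransferConfig(
        multipart_threshold=8 * MB,   # use multipart uploads above 8 MB
        max_concurrency=10,           # worker threads used by s3transfer
        multipart_chunksize=8 * MB,
        io_chunksize=256 * KB,
        max_bandwidth=1024 * KB,      # cap throughput at roughly 1 MB/s
    )

    s3 = boto3.client('s3')
    # upload_file and download_file accept the config via their Config argument.
    s3.upload_file('large-file.bin', 'example-bucket', 'large-file.bin', Config=config)
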
class TestProgressCallbackInvoker(unittest.TestCase): def test_on_progress(self): diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index c471f78..9430115 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -13,11 +13,11 @@ import pytest from botocore import loaders -from botocore.exceptions import DataNotFoundError, UnknownServiceError +from botocore.exceptions import UnknownServiceError from botocore.client import Config from boto3 import __version__ -from boto3.exceptions import NoVersionFound, ResourceNotExistsError +from boto3.exceptions import ResourceNotExistsError from boto3.session import Session from tests import mock, BaseTestCase @@ -116,7 +116,7 @@ class TestSession(BaseTestCase): def test_available_profiles(self): bc_session = mock.Mock() - bc_session.available_profiles.return_value = ['foo','bar'] + bc_session.available_profiles.return_value = ['foo', 'bar'] session = Session(botocore_session=bc_session) profiles = session.available_profiles assert len(profiles.return_value) == 2 @@ -207,6 +207,17 @@ class TestSession(BaseTestCase): ) assert partitions == ['foo'] + def test_get_partition_for_region(self): + bc_session = mock.Mock() + bc_session.get_partition_for_region.return_value = 'baz' + session = Session(botocore_session=bc_session) + + partition = session.get_partition_for_region('foo-bar-1') + bc_session.get_partition_for_region.assert_called_with( + 'foo-bar-1' + ) + assert partition == 'baz' + def test_create_client(self): session = Session(region_name='us-east-1') client = session.client('sqs', region_name='us-west-2') @@ -245,7 +256,7 @@ class TestSession(BaseTestCase): config=mock.ANY) client_config = session.client.call_args[1]['config'] assert client_config.user_agent_extra == 'Resource' - assert client_config.signature_version == None + assert client_config.signature_version is None def test_create_resource_with_config(self): mock_bc_session = mock.Mock()
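The test_get_partition_for_region case added above covers Session.get_partition_for_region, which this release exposes by delegating to the underlying botocore session. A minimal sketch of the call, with illustrative region names:

    import boto3

    session = boto3.session.Session()
    # Maps a region name to its partition: commercial regions resolve to
    # 'aws', China regions to 'aws-cn', GovCloud regions to 'aws-us-gov'.
    print(session.get_partition_for_region('us-west-2'))   # 'aws'
    print(session.get_partition_for_region('cn-north-1'))  # 'aws-cn'
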