New upstream version 1.19.9+dfsg

This commit is contained in:
Noah Meyerhans 2021-11-03 10:27:47 -07:00
parent 3056914cc0
commit 6d72c75e72
64 changed files with 1066 additions and 164 deletions

17
.changes/1.18.54.json Normal file
View file

@ -0,0 +1,17 @@
[
{
"category": "``codebuild``",
"description": "[``botocore``] CodeBuild now allows you to select how batch build statuses are sent to the source provider for a project.",
"type": "api-change"
},
{
"category": "``efs``",
"description": "[``botocore``] Update efs client to latest version",
"type": "api-change"
},
{
"category": "``kms``",
"description": "[``botocore``] Added SDK examples for ConnectCustomKeyStore, CreateCustomKeyStore, CreateKey, DeleteCustomKeyStore, DescribeCustomKeyStores, DisconnectCustomKeyStore, GenerateDataKeyPair, GenerateDataKeyPairWithoutPlaintext, GetPublicKey, ReplicateKey, Sign, UpdateCustomKeyStore and Verify APIs",
"type": "api-change"
}
]

32
.changes/1.18.55.json Normal file
View file

@ -0,0 +1,32 @@
[
{
"category": "``workmail``",
"description": "[``botocore``] This release allows customers to change their inbound DMARC settings in Amazon WorkMail.",
"type": "api-change"
},
{
"category": "``location``",
"description": "[``botocore``] Add support for PositionFiltering.",
"type": "api-change"
},
{
"category": "``application-autoscaling``",
"description": "[``botocore``] With this release, Application Auto Scaling adds support for Amazon Neptune. Customers can now automatically add or remove Read Replicas of their Neptune clusters to keep the average CPU Utilization at the target value specified by the customers.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] Released Capacity Reservation Fleet, a feature of Amazon EC2 Capacity Reservations, which provides a way to manage reserved capacity across instance types. For more information: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/cr-fleets.html",
"type": "api-change"
},
{
"category": "``glue``",
"description": "[``botocore``] This release adds tag as an input of CreateConnection",
"type": "api-change"
},
{
"category": "``backup``",
"description": "[``botocore``] AWS Backup Audit Manager framework report.",
"type": "api-change"
}
]

22
.changes/1.18.56.json Normal file
View file

@ -0,0 +1,22 @@
[
{
"category": "``sagemaker``",
"description": "[``botocore``] This release adds a new TrainingInputMode FastFile for SageMaker Training APIs.",
"type": "api-change"
},
{
"category": "``amplifybackend``",
"description": "[``botocore``] Adding a new field 'AmplifyFeatureFlags' to the response of the GetBackend operation. It will return a stringified version of the cli.json file for the given Amplify project.",
"type": "api-change"
},
{
"category": "``fsx``",
"description": "[``botocore``] This release adds support for Lustre 2.12 to FSx for Lustre.",
"type": "api-change"
},
{
"category": "``kendra``",
"description": "[``botocore``] Amazon Kendra now supports integration with AWS SSO",
"type": "api-change"
}
]

32
.changes/1.18.57.json Normal file
View file

@ -0,0 +1,32 @@
[
{
"category": "``kendra``",
"description": "[``botocore``] Amazon Kendra now supports indexing and querying documents in different languages.",
"type": "api-change"
},
{
"category": "``grafana``",
"description": "[``botocore``] Initial release of the SDK for Amazon Managed Grafana API.",
"type": "api-change"
},
{
"category": "``firehose``",
"description": "[``botocore``] Allow support for Amazon Opensearch Service(successor to Amazon Elasticsearch Service) as a Kinesis Data Firehose delivery destination.",
"type": "api-change"
},
{
"category": "``backup``",
"description": "[``botocore``] Launch of AWS Backup Vault Lock, which protects your backups from malicious and accidental actions, works with existing backup policies, and helps you meet compliance requirements.",
"type": "api-change"
},
{
"category": "``schemas``",
"description": "[``botocore``] Removing unused request/response objects.",
"type": "api-change"
},
{
"category": "``chime``",
"description": "[``botocore``] This release enables customers to configure Chime MediaCapturePipeline via API.",
"type": "api-change"
}
]

32
.changes/1.18.58.json Normal file
View file

@ -0,0 +1,32 @@
[
{
"category": "``lexv2-runtime``",
"description": "[``botocore``] Update lexv2-runtime client to latest version",
"type": "api-change"
},
{
"category": "``lexv2-models``",
"description": "[``botocore``] Update lexv2-models client to latest version",
"type": "api-change"
},
{
"category": "``secretsmanager``",
"description": "[``botocore``] Documentation updates for Secrets Manager",
"type": "api-change"
},
{
"category": "``securityhub``",
"description": "[``botocore``] Added new resource details objects to ASFF, including resources for WAF rate-based rules, EC2 VPC endpoints, ECR repositories, EKS clusters, X-Ray encryption, and OpenSearch domains. Added additional details for CloudFront distributions, CodeBuild projects, ELB V2 load balancers, and S3 buckets.",
"type": "api-change"
},
{
"category": "``mediaconvert``",
"description": "[``botocore``] AWS Elemental MediaConvert has added the ability to set account policies which control access restrictions for HTTP, HTTPS, and S3 content sources.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] This release removes a requirement for filters on SearchLocalGatewayRoutes operations.",
"type": "api-change"
}
]

27
.changes/1.18.59.json Normal file
View file

@ -0,0 +1,27 @@
[
{
"category": "``elbv2``",
"description": "[``botocore``] Update elbv2 client to latest version",
"type": "api-change"
},
{
"category": "Signing",
"description": "[``botocore``] SigV4QueryAuth and CrtSigV4QueryAuth now properly respect AWSRequest.params while signing boto/botocore`#2521 <https://github.com/boto/botocore/issues/2521>`__",
"type": "bugfix"
},
{
"category": "``medialive``",
"description": "[``botocore``] This release adds support for Transport Stream files as an input type to MediaLive encoders.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] Documentation update for Amazon EC2.",
"type": "api-change"
},
{
"category": "``frauddetector``",
"description": "[``botocore``] New model type: Transaction Fraud Insights, which is optimized for online transaction fraud. Stored Events, which allows customers to send and store data directly within Amazon Fraud Detector. Batch Import, which allows customers to upload a CSV file of historic event data for processing and storage",
"type": "api-change"
}
]

22
.changes/1.18.60.json Normal file
View file

@ -0,0 +1,22 @@
[
{
"category": "``cloudsearch``",
"description": "[``botocore``] Adds an additional validation exception for Amazon CloudSearch configuration APIs for better error handling.",
"type": "api-change"
},
{
"category": "``ecs``",
"description": "[``botocore``] Documentation only update to address tickets.",
"type": "api-change"
},
{
"category": "``mediatailor``",
"description": "[``botocore``] MediaTailor now supports ad prefetching.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] EncryptionSupport for InstanceStorageInfo added to DescribeInstanceTypes API",
"type": "api-change"
}
]

27
.changes/1.18.61.json Normal file
View file

@ -0,0 +1,27 @@
[
{
"category": "``config``",
"description": "[``botocore``] Adding Config support for AWS::OpenSearch::Domain",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] This release adds support for additional VPC Flow Logs delivery options to S3, such as Apache Parquet formatted files, Hourly partitions and Hive-compatible S3 prefixes",
"type": "api-change"
},
{
"category": "``storagegateway``",
"description": "[``botocore``] Adding support for Audit Logs on NFS shares and Force Closing Files on SMB shares.",
"type": "api-change"
},
{
"category": "``workmail``",
"description": "[``botocore``] This release adds APIs for adding, removing and retrieving details of mail domains",
"type": "api-change"
},
{
"category": "``kinesisanalyticsv2``",
"description": "[``botocore``] Support for Apache Flink 1.13 in Kinesis Data Analytics. Changed the required status of some Update properties to better fit the corresponding Create properties.",
"type": "api-change"
}
]

22
.changes/1.18.62.json Normal file
View file

@ -0,0 +1,22 @@
[
{
"category": "``elbv2``",
"description": "[``botocore``] Update elbv2 client to latest version",
"type": "api-change"
},
{
"category": "``autoscaling``",
"description": "[``botocore``] Amazon EC2 Auto Scaling now supports filtering describe Auto Scaling groups API using tags",
"type": "api-change"
},
{
"category": "``sagemaker``",
"description": "[``botocore``] This release updates the provisioning artifact ID to an optional parameter in CreateProject API. The provisioning artifact ID defaults to the latest provisioning artifact ID of the product if you don't provide one.",
"type": "api-change"
},
{
"category": "``robomaker``",
"description": "[``botocore``] Adding support to GPU simulation jobs as well as non-ROS simulation jobs.",
"type": "api-change"
}
]

12
.changes/1.18.63.json Normal file
View file

@ -0,0 +1,12 @@
[
{
"category": "``efs``",
"description": "[``botocore``] Update efs client to latest version",
"type": "api-change"
},
{
"category": "``glue``",
"description": "[``botocore``] Enable S3 event base crawler API.",
"type": "api-change"
}
]

17
.changes/1.18.64.json Normal file
View file

@ -0,0 +1,17 @@
[
{
"category": "``quicksight``",
"description": "[``botocore``] AWS QuickSight Service Features - Add IP Restriction UI and public APIs support.",
"type": "api-change"
},
{
"category": "AWSCRT",
"description": "[``botocore``] Upgrade awscrt extra to 0.12.5",
    "type": "enhancement"
},
{
"category": "``ivs``",
"description": "[``botocore``] Bug fix: remove unsupported maxResults and nextToken pagination parameters from ListTagsForResource",
"type": "api-change"
}
]

12
.changes/1.18.65.json Normal file
View file

@ -0,0 +1,12 @@
[
{
"category": "``dataexchange``",
"description": "[``botocore``] This release adds support for our public preview of AWS Data Exchange for Amazon Redshift. This enables data providers to list products including AWS Data Exchange datashares for Amazon Redshift, giving subscribers read-only access to provider data in Amazon Redshift.",
"type": "api-change"
},
{
"category": "``chime-sdk-messaging``",
"description": "[``botocore``] The Amazon Chime SDK now allows developers to execute business logic on in-flight messages before they are delivered to members of a messaging channel with channel flows.",
"type": "api-change"
}
]

42
.changes/1.19.0.json Normal file
View file

@ -0,0 +1,42 @@
[
{
"category": "``appflow``",
"description": "[``botocore``] Feature to add support for JSON-L format for S3 as a source.",
"type": "api-change"
},
{
"category": "``mediapackage-vod``",
"description": "[``botocore``] MediaPackage passes through digital video broadcasting (DVB) subtitles into the output.",
"type": "api-change"
},
{
"category": "``mediaconvert``",
"description": "[``botocore``] AWS Elemental MediaConvert SDK has added support for specifying caption time delta in milliseconds and the ability to apply color range legalization to source content other than AVC video.",
"type": "api-change"
},
{
"category": "``mediapackage``",
"description": "[``botocore``] When enabled, MediaPackage passes through digital video broadcasting (DVB) subtitles into the output.",
"type": "api-change"
},
{
"category": "``panorama``",
"description": "[``botocore``] General availability for AWS Panorama. AWS SDK for Panorama includes APIs to manage your devices and nodes, and deploy computer vision applications to the edge. For more information, see the AWS Panorama documentation at http://docs.aws.amazon.com/panorama",
"type": "api-change"
},
{
"category": "Serialization",
"description": "[``botocore``] rest-json serialization defaults aligned across AWS SDKs",
"type": "feature"
},
{
"category": "``directconnect``",
"description": "[``botocore``] This release adds 4 new APIs, which need to be publicly available",
"type": "api-change"
},
{
"category": "``securityhub``",
"description": "[``botocore``] Added support for cross-Region finding aggregation, which replicates findings from linked Regions to a single aggregation Region. Added operations to view, enable, update, and delete the finding aggregation.",
"type": "api-change"
}
]

7
.changes/1.19.1.json Normal file
View file

@ -0,0 +1,7 @@
[
{
"category": "``connect``",
"description": "[``botocore``] Released Amazon Connect hours of operation API for general availability (GA). This API also supports AWS CloudFormation. For more information, see Amazon Connect Resource Type Reference in the AWS CloudFormation User Guide.",
"type": "api-change"
}
]

17
.changes/1.19.2.json Normal file
View file

@ -0,0 +1,17 @@
[
{
"category": "``quicksight``",
"description": "[``botocore``] Added QSearchBar option for GenerateEmbedUrlForRegisteredUser ExperienceConfiguration to support Q search bar embedding",
"type": "api-change"
},
{
"category": "``auditmanager``",
"description": "[``botocore``] This release introduces character restrictions for ControlSet names. We updated regex patterns for the following attributes: ControlSet, CreateAssessmentFrameworkControlSet, and UpdateAssessmentFrameworkControlSet.",
"type": "api-change"
},
{
"category": "``chime``",
"description": "[``botocore``] Chime VoiceConnector and VoiceConnectorGroup APIs will now return an ARN.",
"type": "api-change"
}
]

22
.changes/1.19.3.json Normal file
View file

@ -0,0 +1,22 @@
[
{
"category": "``rds``",
"description": "[``botocore``] This release adds support for Amazon RDS Custom, which is a new RDS management type that gives you full access to your database and operating system. For more information, see https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-custom.html",
"type": "api-change"
},
{
"category": "``auditmanager``",
"description": "[``botocore``] This release introduces a new feature for Audit Manager: Custom framework sharing. You can now share your custom frameworks with another AWS account, or replicate them into another AWS Region under your own account.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] This release adds support to create a VPN Connection that is not attached to a Gateway at the time of creation. Use this to create VPNs associated with Core Networks, or modify your VPN and attach a gateway using the modify API after creation.",
"type": "api-change"
},
{
"category": "``route53resolver``",
"description": "[``botocore``] New API for ResolverConfig, which allows autodefined rules for reverse DNS resolution to be disabled for a VPC",
"type": "api-change"
}
]

17
.changes/1.19.4.json Normal file
View file

@ -0,0 +1,17 @@
[
{
"category": "``emr-containers``",
"description": "[``botocore``] This feature enables auto-generation of certificate to secure the managed-endpoint and removes the need for customer provided certificate-arn during managed-endpoint setup.",
"type": "api-change"
},
{
"category": "``chime-sdk-messaging``",
"description": "[``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint",
"type": "api-change"
},
{
"category": "``chime-sdk-identity``",
"description": "[``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint",
"type": "api-change"
}
]

42
.changes/1.19.5.json Normal file
View file

@ -0,0 +1,42 @@
[
{
"category": "``autoscaling``",
"description": "[``botocore``] This release adds support for attribute-based instance type selection, a new EC2 Auto Scaling feature that lets customers express their instance requirements as a set of attributes, such as vCPU, memory, and storage.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] This release adds: attribute-based instance type selection for EC2 Fleet, Spot Fleet, a feature that lets customers express instance requirements as attributes like vCPU, memory, and storage; and Spot placement score, a feature that helps customers identify an optimal location to run Spot workloads.",
"type": "api-change"
},
{
"category": "Session",
"description": "Added `get_partition_for_region` to lookup partition for a given region_name",
"type": "enhancement"
},
{
"category": "``eks``",
"description": "[``botocore``] EKS managed node groups now support BOTTLEROCKET_x86_64 and BOTTLEROCKET_ARM_64 AMI types.",
"type": "api-change"
},
{
"category": "``sagemaker``",
"description": "[``botocore``] This release allows customers to describe one or more versioned model packages through BatchDescribeModelPackage, update project via UpdateProject, modify and read customer metadata properties using Create, Update and Describe ModelPackage and enables cross account registration of model packages.",
"type": "api-change"
},
{
"category": "Session",
"description": "[``botocore``] Added `get_partition_for_region` allowing partition lookup by region name.",
"type": "enhancement"
},
{
"category": "``textract``",
"description": "[``botocore``] This release adds support for asynchronously analyzing invoice and receipt documents through two new APIs: StartExpenseAnalysis and GetExpenseAnalysis",
"type": "api-change"
},
{
"category": "``s3``",
"description": "TransferConfig now supports the `max_bandwidth` argument.",
    "type": "enhancement"
}
]

32
.changes/1.19.6.json Normal file
View file

@ -0,0 +1,32 @@
[
{
"category": "``gamelift``",
"description": "[``botocore``] Added support for Arm-based AWS Graviton2 instances, such as M6g, C6g, and R6g.",
"type": "api-change"
},
{
"category": "``ecs``",
"description": "[``botocore``] Amazon ECS now supports running Fargate tasks on Windows Operating Systems Families which includes Windows Server 2019 Core and Windows Server 2019 Full.",
"type": "api-change"
},
{
"category": "``sagemaker``",
"description": "[``botocore``] This release adds support for RStudio on SageMaker.",
"type": "api-change"
},
{
"category": "``connectparticipant``",
"description": "[``botocore``] This release adds a new boolean attribute - Connect Participant - to the CreateParticipantConnection API, which can be used to mark the participant as connected.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] Added new read-only DenyAllIGWTraffic network interface attribute. Added support for DL1 24xlarge instances powered by Habana Gaudi Accelerators for deep learning model training workloads",
"type": "api-change"
},
{
"category": "``ssm-incidents``",
"description": "[``botocore``] Updating documentation, adding new field to ConflictException to indicate earliest retry timestamp for some operations, increase maximum length of nextToken fields",
"type": "api-change"
}
]

27
.changes/1.19.7.json Normal file
View file

@ -0,0 +1,27 @@
[
{
"category": "``transcribe``",
"description": "[``botocore``] Transcribe and Transcribe Call Analytics now support automatic language identification along with custom vocabulary, vocabulary filter, custom language model and PII redaction.",
"type": "api-change"
},
{
"category": "``application-insights``",
"description": "[``botocore``] Added Monitoring support for SQL Server Failover Cluster Instance. Additionally, added a new API to allow one-click monitoring of containers resources.",
"type": "api-change"
},
{
"category": "``rekognition``",
"description": "[``botocore``] This release added new attributes to Rekognition Video GetCelebrityRecognition API operations.",
"type": "api-change"
},
{
"category": "``connect``",
"description": "[``botocore``] Amazon Connect Chat now supports real-time message streaming.",
"type": "api-change"
},
{
"category": "``ec2``",
"description": "[``botocore``] Support added for AMI sharing with organizations and organizational units in ModifyImageAttribute API",
"type": "api-change"
}
]

22
.changes/1.19.8.json Normal file
View file

@ -0,0 +1,22 @@
[
{
"category": "``rekognition``",
"description": "[``botocore``] This Amazon Rekognition Custom Labels release introduces the management of datasets with projects",
"type": "api-change"
},
{
"category": "``networkmanager``",
"description": "[``botocore``] This release adds API support to aggregate resources, routes, and telemetry data across a Global Network.",
"type": "api-change"
},
{
"category": "``lightsail``",
"description": "[``botocore``] This release adds support to enable access logging for buckets in the Lightsail object storage service.",
"type": "api-change"
},
{
"category": "``neptune``",
"description": "[``botocore``] Adds support for major version upgrades to ModifyDbCluster API",
"type": "api-change"
}
]

17
.changes/1.19.9.json Normal file
View file

@ -0,0 +1,17 @@
[
{
"category": "``cloudfront``",
"description": "[``botocore``] CloudFront now supports response headers policies to add HTTP headers to the responses that CloudFront sends to viewers. You can use these policies to add CORS headers, control browser caching, and more, without modifying your origin or writing any code.",
"type": "api-change"
},
{
"category": "``connect``",
"description": "[``botocore``] Amazon Connect Chat now supports real-time message streaming.",
"type": "api-change"
},
{
"category": "``nimble``",
"description": "[``botocore``] Amazon Nimble Studio adds support for users to stop and start streaming sessions.",
"type": "api-change"
}
]

19
.github/workflows/lint.yml vendored Normal file
View file

@ -0,0 +1,19 @@
name: Lint code
on:
push:
pull_request:
branches-ignore: [ master ]
jobs:
lint:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Run pre-commit
uses: pre-commit/action@v2.0.0

View file

@ -1,4 +1,3 @@
name: Run tests
on:
@ -13,7 +12,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: [3.6, 3.7, 3.8, 3.9, 3.10-dev]
python-version: [3.6, 3.7, 3.8, 3.9, "3.10"]
os: [ubuntu-latest, macOS-latest, windows-latest ]
steps:

View file

@ -43,7 +43,7 @@ jobs:
# threshold of "upvotes", you can set this here. An "upvote" is
# the total number of +1, heart, hooray, and rocket reactions
# on an issue.
minimum-upvotes-to-exempt: 1
minimum-upvotes-to-exempt: 2
repo-token: ${{ secrets.GITHUB_TOKEN }}
loglevel: DEBUG

12
.pre-commit-config.yaml Normal file
View file

@ -0,0 +1,12 @@
exclude: ^(.github|.changes|docs/|boto3/compat.py|boto3/data|CHANGELOG.rst)
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.3.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8

View file

@ -2,6 +2,211 @@
CHANGELOG
=========
1.19.9
======
* api-change:``cloudfront``: [``botocore``] CloudFront now supports response headers policies to add HTTP headers to the responses that CloudFront sends to viewers. You can use these policies to add CORS headers, control browser caching, and more, without modifying your origin or writing any code.
* api-change:``connect``: [``botocore``] Amazon Connect Chat now supports real-time message streaming.
* api-change:``nimble``: [``botocore``] Amazon Nimble Studio adds support for users to stop and start streaming sessions.
1.19.8
======
* api-change:``rekognition``: [``botocore``] This Amazon Rekognition Custom Labels release introduces the management of datasets with projects
* api-change:``networkmanager``: [``botocore``] This release adds API support to aggregate resources, routes, and telemetry data across a Global Network.
* api-change:``lightsail``: [``botocore``] This release adds support to enable access logging for buckets in the Lightsail object storage service.
* api-change:``neptune``: [``botocore``] Adds support for major version upgrades to ModifyDbCluster API
1.19.7
======
* api-change:``transcribe``: [``botocore``] Transcribe and Transcribe Call Analytics now support automatic language identification along with custom vocabulary, vocabulary filter, custom language model and PII redaction.
* api-change:``application-insights``: [``botocore``] Added Monitoring support for SQL Server Failover Cluster Instance. Additionally, added a new API to allow one-click monitoring of containers resources.
* api-change:``rekognition``: [``botocore``] This release added new attributes to Rekognition Video GetCelebrityRecognition API operations.
* api-change:``connect``: [``botocore``] Amazon Connect Chat now supports real-time message streaming.
* api-change:``ec2``: [``botocore``] Support added for AMI sharing with organizations and organizational units in ModifyImageAttribute API
1.19.6
======
* api-change:``gamelift``: [``botocore``] Added support for Arm-based AWS Graviton2 instances, such as M6g, C6g, and R6g.
* api-change:``ecs``: [``botocore``] Amazon ECS now supports running Fargate tasks on Windows Operating Systems Families which includes Windows Server 2019 Core and Windows Server 2019 Full.
* api-change:``sagemaker``: [``botocore``] This release adds support for RStudio on SageMaker.
* api-change:``connectparticipant``: [``botocore``] This release adds a new boolean attribute - Connect Participant - to the CreateParticipantConnection API, which can be used to mark the participant as connected.
* api-change:``ec2``: [``botocore``] Added new read-only DenyAllIGWTraffic network interface attribute. Added support for DL1 24xlarge instances powered by Habana Gaudi Accelerators for deep learning model training workloads
* api-change:``ssm-incidents``: [``botocore``] Updating documentation, adding new field to ConflictException to indicate earliest retry timestamp for some operations, increase maximum length of nextToken fields
1.19.5
======
* api-change:``autoscaling``: [``botocore``] This release adds support for attribute-based instance type selection, a new EC2 Auto Scaling feature that lets customers express their instance requirements as a set of attributes, such as vCPU, memory, and storage.
* api-change:``ec2``: [``botocore``] This release adds: attribute-based instance type selection for EC2 Fleet, Spot Fleet, a feature that lets customers express instance requirements as attributes like vCPU, memory, and storage; and Spot placement score, a feature that helps customers identify an optimal location to run Spot workloads.
* enhancement:Session: Added `get_partition_for_region` to lookup partition for a given region_name
* api-change:``eks``: [``botocore``] EKS managed node groups now support BOTTLEROCKET_x86_64 and BOTTLEROCKET_ARM_64 AMI types.
* api-change:``sagemaker``: [``botocore``] This release allows customers to describe one or more versioned model packages through BatchDescribeModelPackage, update project via UpdateProject, modify and read customer metadata properties using Create, Update and Describe ModelPackage and enables cross account registration of model packages.
* enhancement:Session: [``botocore``] Added `get_partition_for_region` allowing partition lookup by region name.
* api-change:``textract``: [``botocore``] This release adds support for asynchronously analyzing invoice and receipt documents through two new APIs: StartExpenseAnalysis and GetExpenseAnalysis
* enhancement:``s3``: TransferConfig now supports the `max_bandwidth` argument.
1.19.4
======
* api-change:``emr-containers``: [``botocore``] This feature enables auto-generation of certificate to secure the managed-endpoint and removes the need for customer provided certificate-arn during managed-endpoint setup.
* api-change:``chime-sdk-messaging``: [``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint
* api-change:``chime-sdk-identity``: [``botocore``] The Amazon Chime SDK now supports push notifications through Amazon Pinpoint
1.19.3
======
* api-change:``rds``: [``botocore``] This release adds support for Amazon RDS Custom, which is a new RDS management type that gives you full access to your database and operating system. For more information, see https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-custom.html
* api-change:``auditmanager``: [``botocore``] This release introduces a new feature for Audit Manager: Custom framework sharing. You can now share your custom frameworks with another AWS account, or replicate them into another AWS Region under your own account.
* api-change:``ec2``: [``botocore``] This release adds support to create a VPN Connection that is not attached to a Gateway at the time of creation. Use this to create VPNs associated with Core Networks, or modify your VPN and attach a gateway using the modify API after creation.
* api-change:``route53resolver``: [``botocore``] New API for ResolverConfig, which allows autodefined rules for reverse DNS resolution to be disabled for a VPC
1.19.2
======
* api-change:``quicksight``: [``botocore``] Added QSearchBar option for GenerateEmbedUrlForRegisteredUser ExperienceConfiguration to support Q search bar embedding
* api-change:``auditmanager``: [``botocore``] This release introduces character restrictions for ControlSet names. We updated regex patterns for the following attributes: ControlSet, CreateAssessmentFrameworkControlSet, and UpdateAssessmentFrameworkControlSet.
* api-change:``chime``: [``botocore``] Chime VoiceConnector and VoiceConnectorGroup APIs will now return an ARN.
1.19.1
======
* api-change:``connect``: [``botocore``] Released Amazon Connect hours of operation API for general availability (GA). This API also supports AWS CloudFormation. For more information, see Amazon Connect Resource Type Reference in the AWS CloudFormation User Guide.
1.19.0
======
* api-change:``appflow``: [``botocore``] Feature to add support for JSON-L format for S3 as a source.
* api-change:``mediapackage-vod``: [``botocore``] MediaPackage passes through digital video broadcasting (DVB) subtitles into the output.
* api-change:``mediaconvert``: [``botocore``] AWS Elemental MediaConvert SDK has added support for specifying caption time delta in milliseconds and the ability to apply color range legalization to source content other than AVC video.
* api-change:``mediapackage``: [``botocore``] When enabled, MediaPackage passes through digital video broadcasting (DVB) subtitles into the output.
* api-change:``panorama``: [``botocore``] General availability for AWS Panorama. AWS SDK for Panorama includes APIs to manage your devices and nodes, and deploy computer vision applications to the edge. For more information, see the AWS Panorama documentation at http://docs.aws.amazon.com/panorama
* feature:Serialization: [``botocore``] rest-json serialization defaults aligned across AWS SDKs
* api-change:``directconnect``: [``botocore``] This release adds 4 new APIs, which need to be publicly available
* api-change:``securityhub``: [``botocore``] Added support for cross-Region finding aggregation, which replicates findings from linked Regions to a single aggregation Region. Added operations to view, enable, update, and delete the finding aggregation.
1.18.65
=======
* api-change:``dataexchange``: [``botocore``] This release adds support for our public preview of AWS Data Exchange for Amazon Redshift. This enables data providers to list products including AWS Data Exchange datashares for Amazon Redshift, giving subscribers read-only access to provider data in Amazon Redshift.
* api-change:``chime-sdk-messaging``: [``botocore``] The Amazon Chime SDK now allows developers to execute business logic on in-flight messages before they are delivered to members of a messaging channel with channel flows.
1.18.64
=======
* api-change:``quicksight``: [``botocore``] AWS QuickSight Service Features - Add IP Restriction UI and public APIs support.
* enhancement:AWSCRT: [``botocore``] Upgrade awscrt extra to 0.12.5
* api-change:``ivs``: [``botocore``] Bug fix: remove unsupported maxResults and nextToken pagination parameters from ListTagsForResource
1.18.63
=======
* api-change:``efs``: [``botocore``] Update efs client to latest version
* api-change:``glue``: [``botocore``] Enable S3 event base crawler API.
1.18.62
=======
* api-change:``elbv2``: [``botocore``] Update elbv2 client to latest version
* api-change:``autoscaling``: [``botocore``] Amazon EC2 Auto Scaling now supports filtering describe Auto Scaling groups API using tags
* api-change:``sagemaker``: [``botocore``] This release updates the provisioning artifact ID to an optional parameter in CreateProject API. The provisioning artifact ID defaults to the latest provisioning artifact ID of the product if you don't provide one.
* api-change:``robomaker``: [``botocore``] Adding support to GPU simulation jobs as well as non-ROS simulation jobs.
1.18.61
=======
* api-change:``config``: [``botocore``] Adding Config support for AWS::OpenSearch::Domain
* api-change:``ec2``: [``botocore``] This release adds support for additional VPC Flow Logs delivery options to S3, such as Apache Parquet formatted files, Hourly partitions and Hive-compatible S3 prefixes
* api-change:``storagegateway``: [``botocore``] Adding support for Audit Logs on NFS shares and Force Closing Files on SMB shares.
* api-change:``workmail``: [``botocore``] This release adds APIs for adding, removing and retrieving details of mail domains
* api-change:``kinesisanalyticsv2``: [``botocore``] Support for Apache Flink 1.13 in Kinesis Data Analytics. Changed the required status of some Update properties to better fit the corresponding Create properties.
1.18.60
=======
* api-change:``cloudsearch``: [``botocore``] Adds an additional validation exception for Amazon CloudSearch configuration APIs for better error handling.
* api-change:``ecs``: [``botocore``] Documentation only update to address tickets.
* api-change:``mediatailor``: [``botocore``] MediaTailor now supports ad prefetching.
* api-change:``ec2``: [``botocore``] EncryptionSupport for InstanceStorageInfo added to DescribeInstanceTypes API
1.18.59
=======
* api-change:``elbv2``: [``botocore``] Update elbv2 client to latest version
* bugfix:Signing: [``botocore``] SigV4QueryAuth and CrtSigV4QueryAuth now properly respect AWSRequest.params while signing boto/botocore`#2521 <https://github.com/boto/botocore/issues/2521>`__
* api-change:``medialive``: [``botocore``] This release adds support for Transport Stream files as an input type to MediaLive encoders.
* api-change:``ec2``: [``botocore``] Documentation update for Amazon EC2.
* api-change:``frauddetector``: [``botocore``] New model type: Transaction Fraud Insights, which is optimized for online transaction fraud. Stored Events, which allows customers to send and store data directly within Amazon Fraud Detector. Batch Import, which allows customers to upload a CSV file of historic event data for processing and storage
1.18.58
=======
* api-change:``lexv2-runtime``: [``botocore``] Update lexv2-runtime client to latest version
* api-change:``lexv2-models``: [``botocore``] Update lexv2-models client to latest version
* api-change:``secretsmanager``: [``botocore``] Documentation updates for Secrets Manager
* api-change:``securityhub``: [``botocore``] Added new resource details objects to ASFF, including resources for WAF rate-based rules, EC2 VPC endpoints, ECR repositories, EKS clusters, X-Ray encryption, and OpenSearch domains. Added additional details for CloudFront distributions, CodeBuild projects, ELB V2 load balancers, and S3 buckets.
* api-change:``mediaconvert``: [``botocore``] AWS Elemental MediaConvert has added the ability to set account policies which control access restrictions for HTTP, HTTPS, and S3 content sources.
* api-change:``ec2``: [``botocore``] This release removes a requirement for filters on SearchLocalGatewayRoutes operations.
1.18.57
=======
* api-change:``kendra``: [``botocore``] Amazon Kendra now supports indexing and querying documents in different languages.
* api-change:``grafana``: [``botocore``] Initial release of the SDK for Amazon Managed Grafana API.
* api-change:``firehose``: [``botocore``] Allow support for Amazon OpenSearch Service (successor to Amazon Elasticsearch Service) as a Kinesis Data Firehose delivery destination.
* api-change:``backup``: [``botocore``] Launch of AWS Backup Vault Lock, which protects your backups from malicious and accidental actions, works with existing backup policies, and helps you meet compliance requirements.
* api-change:``schemas``: [``botocore``] Removing unused request/response objects.
* api-change:``chime``: [``botocore``] This release enables customers to configure Chime MediaCapturePipeline via API.
1.18.56
=======
* api-change:``sagemaker``: [``botocore``] This release adds a new TrainingInputMode FastFile for SageMaker Training APIs.
* api-change:``amplifybackend``: [``botocore``] Adding a new field 'AmplifyFeatureFlags' to the response of the GetBackend operation. It will return a stringified version of the cli.json file for the given Amplify project.
* api-change:``fsx``: [``botocore``] This release adds support for Lustre 2.12 to FSx for Lustre.
* api-change:``kendra``: [``botocore``] Amazon Kendra now supports integration with AWS SSO
1.18.55
=======
* api-change:``workmail``: [``botocore``] This release allows customers to change their inbound DMARC settings in Amazon WorkMail.
* api-change:``location``: [``botocore``] Add support for PositionFiltering.
* api-change:``application-autoscaling``: [``botocore``] With this release, Application Auto Scaling adds support for Amazon Neptune. Customers can now automatically add or remove Read Replicas of their Neptune clusters to keep the average CPU Utilization at the target value specified by the customers.
* api-change:``ec2``: [``botocore``] Released Capacity Reservation Fleet, a feature of Amazon EC2 Capacity Reservations, which provides a way to manage reserved capacity across instance types. For more information: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/cr-fleets.html
* api-change:``glue``: [``botocore``] This release adds tag as an input of CreateConnection
* api-change:``backup``: [``botocore``] AWS Backup Audit Manager framework report.
1.18.54
=======
* api-change:``codebuild``: [``botocore``] CodeBuild now allows you to select how batch build statuses are sent to the source provider for a project.
* api-change:``efs``: [``botocore``] Update efs client to latest version
* api-change:``kms``: [``botocore``] Added SDK examples for ConnectCustomKeyStore, CreateCustomKeyStore, CreateKey, DeleteCustomKeyStore, DescribeCustomKeyStores, DisconnectCustomKeyStore, GenerateDataKeyPair, GenerateDataKeyPairWithoutPlaintext, GetPublicKey, ReplicateKey, Sign, UpdateCustomKeyStore and Verify APIs
1.18.53
=======

View file

@ -2,4 +2,3 @@
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.

View file

@ -7,7 +7,7 @@ A good pull request:
- Is clear.
- Works across all supported versions of Python.
- Follows the existing style of the code base (PEP-8).
- Follows the existing style of the code base (see Codestyle section).
- Has comments included as needed.
- A test case that demonstrates the previous flaw that now passes with
@ -37,3 +37,30 @@ ideal report includes:
- If possible, create a pull request with a (failing) test case
demonstrating what's wrong. This makes the process for fixing bugs
quicker & gets issues resolved sooner.
Codestyle
---------
This project uses flake8 to enforce codestyle requirements. We've codified this
process using a tool called `pre-commit <https://pre-commit.com/>`__. pre-commit
allows us to specify a config file with all tools required for code linting,
and surfaces either a git commit hook, or single command, for enforcing these.
To validate your PR prior to publishing, you can use the following
`installation guide <https://pre-commit.com/#install>`__ to setup pre-commit.
If you don't want to use the git commit hook, you can run the below command
to automatically perform the codestyle validation:
.. code-block:: bash
$ pre-commit run
This will automatically perform simple updates (such as white space clean up)
and provide a list of any failing flake8 checks. After these are addressed,
you can commit the changes prior to publishing the PR.
These checks are also included in our CI setup under the "Lint" workflow which
will provide output on Github for anything missed locally.
See the `flake8` section of the
`setup.cfg <https://github.com/boto/boto3/blob/develop/setup.cfg>`__ for the
currently enforced rules.

View file

@ -49,10 +49,10 @@ Assuming that you have Python and ``virtualenv`` installed, set up your environm
$ python -m pip install boto3
Using Boto3
~~~~~~~~~~~~~~
After installing boto3
After installing boto3
Next, set up credentials (in e.g. ``~/.aws/credentials``):
@ -68,7 +68,7 @@ Then, set up a default region (in e.g. ``~/.aws/config``):
[default]
region=us-east-1
Other credentials configuration method can be found `here <https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html>`__
Then, from a Python interpreter:
@ -137,4 +137,3 @@ More Resources
* `NOTICE <https://github.com/boto/boto3/blob/develop/NOTICE>`__
* `Changelog <https://github.com/boto/boto3/blob/develop/CHANGELOG.rst>`__
* `License <https://github.com/boto/boto3/blob/develop/LICENSE>`__

View file

@ -18,7 +18,7 @@ from boto3.compat import _warn_deprecated_python
__author__ = 'Amazon Web Services'
__version__ = '1.18.53'
__version__ = '1.19.9'
# The default Boto3 session; autoloaded when needed.

View file

@ -75,7 +75,7 @@ def _warn_deprecated_python():
'aws-cli-v1/'
}
deprecated_versions = {
(2,7): py_27_params,
(2, 7): py_27_params,
}
py_version = sys.version_info[:2]

View file

@ -185,11 +185,13 @@ def document_collection_method(section, resource_name, action_name,
'method_description': (
'Creates an iterable of all %s resources '
'in the collection filtered by kwargs passed to '
'method.' % collection_model.resource.type +
'A %s collection will include all resources by '
'method. A %s collection will include all resources by '
'default if no filters are provided, and extreme '
'caution should be taken when performing actions '
'on all resources.'% collection_model.resource.type),
'on all resources.' % (
collection_model.resource.type,
collection_model.resource.type
)),
'example_prefix': '%s_iterator = %s.%s.filter' % (
xform_name(collection_model.resource.type),
example_resource_name, collection_model.name),

View file

@ -144,8 +144,9 @@ class ConditionAttributeBase(ConditionBase, AttributeBase):
AttributeBase.__init__(self, values[0].name)
def __eq__(self, other):
return ConditionBase.__eq__(self, other) and \
AttributeBase.__eq__(self, other)
return (
ConditionBase.__eq__(self, other) and AttributeBase.__eq__(self, other)
)
def __ne__(self, other):
return not self.__eq__(other)

View file

@ -166,14 +166,17 @@ class TransferConfig(S3TransferConfig):
'max_io_queue': 'max_io_queue_size'
}
def __init__(self,
multipart_threshold=8 * MB,
max_concurrency=10,
multipart_chunksize=8 * MB,
num_download_attempts=5,
max_io_queue=100,
io_chunksize=256 * KB,
use_threads=True):
def __init__(
self,
multipart_threshold=8 * MB,
max_concurrency=10,
multipart_chunksize=8 * MB,
num_download_attempts=5,
max_io_queue=100,
io_chunksize=256 * KB,
use_threads=True,
max_bandwidth=None,
):
"""Configuration object for managed S3 transfers
:param multipart_threshold: The transfer size threshold for which
@ -209,6 +212,10 @@ class TransferConfig(S3TransferConfig):
:param use_threads: If True, threads will be used when performing
S3 transfers. If False, no threads will be used in
performing transfers: all logic will be ran in the main thread.
:param max_bandwidth: The maximum bandwidth that will be consumed
in uploading and downloading file content. The value is an integer
in terms of bytes per second.
"""
super(TransferConfig, self).__init__(
multipart_threshold=multipart_threshold,
@ -217,6 +224,7 @@ class TransferConfig(S3TransferConfig):
num_download_attempts=num_download_attempts,
max_io_queue_size=max_io_queue,
io_chunksize=io_chunksize,
max_bandwidth=max_bandwidth,
)
# Some of the argument names are not the same as the inherited
# S3TransferConfig so we add aliases so you can still access the

View file

@ -182,6 +182,18 @@ class Session(object):
"""
return self._session.get_credentials()
def get_partition_for_region(self, region_name):
"""Lists the partition name of a particular region.
:type region_name: string
:param region_name: Name of the region to list partition for (e.g.,
us-east-1).
:rtype: string
:return: Returns the respective partition name (e.g., aws).
"""
return self._session.get_partition_for_region(region_name)
def client(self, service_name, region_name=None, api_version=None,
use_ssl=True, verify=None, endpoint_url=None,
aws_access_key_id=None, aws_secret_access_key=None,

View file

@ -214,8 +214,8 @@ customize clients or resources and modify the behavior of method calls.
Here is the list of events that users of Boto3 can register handlers to:
* ``'creating-client-class``
* ``'creating-resource-class``
* ``'creating-client-class'``
* ``'creating-resource-class'``
* ``'provide-client-params'``

View file

@ -29,7 +29,7 @@ def run(command):
try:
import awscrt
import awscrt # noqa
except ImportError:
print("MISSING DEPENDENCY: awscrt must be installed to run the crt tests.")
sys.exit(1)

View file

@ -135,7 +135,7 @@ def replace_issue_references(parsed, repo_name):
'`%s <https://github.com/%s/issues/%s>`__' % (
match.group(), repo_name, number))
new_description = re.sub('#\d+', linkify, description)
new_description = re.sub(r'#\d+', linkify, description)
parsed['description'] = new_description

View file

@ -3,9 +3,17 @@ universal = 0
[metadata]
requires_dist =
botocore>=1.21.53,<1.22.0
botocore>=1.22.9,<1.23.0
jmespath>=0.7.1,<1.0.0
s3transfer>=0.5.0,<0.6.0
[options.extras_require]
crt = botocore[crt]>=1.21.0,<2.0a0
[flake8]
ignore = E203,E501,W503,W504
exclude =
docs,
boto3/compat.py,
boto3/data,
.changes

View file

@ -13,7 +13,7 @@ VERSION_RE = re.compile(r'''__version__ = ['"]([0-9.]+)['"]''')
requires = [
'botocore>=1.21.53,<1.22.0',
'botocore>=1.22.9,<1.23.0',
'jmespath>=0.7.1,<1.0.0',
's3transfer>=0.5.0,<0.6.0'
]

View file

@ -12,24 +12,12 @@
# language governing permissions and limitations under the License.
import random
import sys
import time
from botocore.compat import six
import unittest
from unittest import mock
# In python 3, order matters when calling assertEqual to
# compare lists and dictionaries with lists. Therefore,
# assertItemsEqual needs to be used but it is renamed to
# assertCountEqual in python 3.
if six.PY2:
unittest.TestCase.assertCountEqual = unittest.TestCase.assertItemsEqual
def unique_id(name):
"""
Generate a unique ID that includes the given name,

View file

@ -19,17 +19,23 @@ from boto3.docs.service import ServiceDocumenter
class TestInstanceDeleteTags(BaseDocsFunctionalTests):
def setUp(self):
self.documenter = ServiceDocumenter(
'ec2', session=Session(region_name='us-east-1'))
'ec2', session=Session(region_name='us-east-1')
)
self.generated_contents = self.documenter.document_service()
self.generated_contents = self.generated_contents.decode('utf-8')
def test_delete_tags_method_is_documented(self):
contents = self.get_class_document_block(
'EC2.Instance', self.generated_contents)
'EC2.Instance', self.generated_contents
)
method_contents = self.get_method_document_block(
'delete_tags', contents)
self.assert_contains_lines_in_order([
'response = instance.delete_tags(',
'DryRun=True|False,',
'Tags=[',
], method_contents)
'delete_tags', contents
)
self.assert_contains_lines_in_order(
[
'response = instance.delete_tags(',
'DryRun=True|False,',
'Tags=[',
],
method_contents
)

View file

@ -23,12 +23,13 @@ from boto3.docs.service import ServiceDocumenter
def botocore_session():
return botocore.session.get_session()
@pytest.fixture
def boto3_session():
return boto3.Session(region_name='us-east-1')
def all_services():
botocore_session = botocore.session.get_session()
session = boto3.Session(region_name='us-east-1')
for service_name in session.get_available_services():
yield service_name
@ -52,15 +53,13 @@ def test_documentation(
# Check that all of the services have the appropriate title
_assert_has_title(generated_docs, client)
# Check that all services have the client documented.
_assert_has_client_documentation(generated_docs, service_name, client)
#If the service has resources, make sure the service resource
#is at least documented.
# If the service has resources, make sure the service resource
# is at least documented.
if service_name in available_resources:
resource = boto3.resource(service_name, 'us-east-1')
_assert_has_resource_documentation(
generated_docs, service_name, resource
@ -68,8 +67,7 @@ def test_documentation(
# If the client can paginate, make sure the paginators are documented.
try:
paginator_model = botocore_session.get_paginator_model(
service_name)
paginator_model = botocore_session.get_paginator_model(service_name)
_assert_has_paginator_documentation(
generated_docs, service_name, client,
sorted(paginator_model._paginator_config)
@ -77,7 +75,6 @@ def test_documentation(
except DataNotFoundError:
pass
# If the client has waiters, make sure the waiters are documented.
if client.waiter_names:
waiter_model = botocore_session.get_waiter_model(service_name)

View file

@ -31,11 +31,15 @@ class TestStubberSupportsFilterExpressions(unittest.TestCase):
)
stubber = Stubber(table.meta.client)
stubber.add_response('query', dict(Items=list()), expected_params=dict(
stubber.add_response(
'query',
dict(Items=list()),
expected_params=dict(
TableName='mytable',
KeyConditionExpression=key_expr,
FilterExpression=filter_expr
))
)
)
with stubber:
response = table.query(KeyConditionExpression=key_expr,
@ -46,15 +50,20 @@ class TestStubberSupportsFilterExpressions(unittest.TestCase):
def test_table_scan_can_be_stubbed_with_expressions(self):
table = self.resource.Table('mytable')
filter_expr = Attr('myattr').eq('foo') & (
Attr('myattr2').lte('buzz') | Attr('myattr2').gte('fizz')
filter_expr = (
Attr('myattr').eq('foo') &
(Attr('myattr2').lte('buzz') | Attr('myattr2').gte('fizz'))
)
stubber = Stubber(table.meta.client)
stubber.add_response('scan', dict(Items=list()), expected_params=dict(
stubber.add_response(
'scan',
dict(Items=list()),
expected_params=dict(
TableName='mytable',
FilterExpression=filter_expr
))
)
)
with stubber:
response = table.scan(FilterExpression=filter_expr)

View file

@ -10,7 +10,7 @@
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest, mock
from tests import unittest
import boto3
from botocore.stub import Stubber

View file

@ -377,7 +377,7 @@ class TestDownloadFileobj(BaseTransferTest):
# If this is a ranged get, ContentRange needs to be returned,
# contents needs to be pruned, and Range needs to be an expected param.
if end_byte is not None:
contents = full_contents[start_byte:end_byte+1]
contents = full_contents[start_byte:end_byte + 1]
part_range = 'bytes=%s-%s' % (start_byte, end_byte_range)
content_range = 'bytes=%s-%s/%s' % (
start_byte, end_byte, len(full_contents))

View file

@ -17,6 +17,7 @@ import botocore.session
boto3_session = None
def create_session():
global boto3_session
if boto3_session is None:
@ -28,6 +29,7 @@ def create_session():
return boto3_session
def _all_resources():
session = create_session()
for service_name in session.get_available_resources():

View file

@ -171,16 +171,20 @@ class TestDynamoDBConditions(BaseDynamoDBTest):
def test_condition_and(self):
r = self.scan(
filter_expression=(Attr('MyHashKey').eq('mykey') &
Attr('MyString').eq('mystring')))
filter_expression=(
Attr('MyHashKey').eq('mykey') & Attr('MyString').eq('mystring')
)
)
item = r['Items'][0]
self.assertTrue(
item['MyHashKey'] == 'mykey' and item['MyString'] == 'mystring')
def test_condition_or(self):
r = self.scan(
filter_expression=(Attr('MyHashKey').eq('mykey2') |
Attr('MyString').eq('mystring')))
filter_expression=(
Attr('MyHashKey').eq('mykey2') | Attr('MyString').eq('mystring')
)
)
item = r['Items'][0]
self.assertTrue(
item['MyHashKey'] == 'mykey2' or item['MyString'] == 'mystring')

View file

@ -472,6 +472,7 @@ class TestS3Transfers(unittest.TestCase):
# twice when using signature version 4.
self.amount_seen = 0
lock = threading.Lock()
def progress_callback(amount):
with lock:
self.amount_seen += amount
@ -593,9 +594,11 @@ class TestS3Transfers(unittest.TestCase):
def test_download_file_with_directory_not_exist(self):
transfer = self.create_s3_transfer()
self.client.put_object(Bucket=self.bucket_name,
Key='foo.txt',
Body=b'foo')
self.client.put_object(
Bucket=self.bucket_name,
Key='foo.txt',
Body=b'foo'
)
self.addCleanup(self.delete_object, 'foo.txt')
download_path = os.path.join(self.files.rootdir, 'a', 'b', 'c',
'downloaded.txt')
@ -667,7 +670,7 @@ class TestS3Transfers(unittest.TestCase):
# This is just a sanity check to ensure that the bucket interface work.
key = 'bucket.txt'
bucket = self.session.resource('s3').Bucket(self.bucket_name)
filename = self.files.create_file_with_size(key, 1024*1024)
filename = self.files.create_file_with_size(key, 1024 * 1024)
bucket.upload_file(Filename=filename, Key=key)
self.addCleanup(self.delete_object, key)
download_path = os.path.join(self.files.rootdir, unique_id('foo'))
@ -678,7 +681,7 @@ class TestS3Transfers(unittest.TestCase):
# This is just a sanity check to ensure that the object interface work.
key = 'object.txt'
obj = self.session.resource('s3').Object(self.bucket_name, key)
filename = self.files.create_file_with_size(key, 1024*1024)
filename = self.files.create_file_with_size(key, 1024 * 1024)
obj.upload_file(Filename=filename)
self.addCleanup(self.delete_object, key)
download_path = os.path.join(self.files.rootdir, unique_id('foo'))

View file

@ -10,7 +10,7 @@
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest, unique_id
from tests import unittest
import botocore.session
import boto3.session

View file

@ -22,9 +22,9 @@ class TestCollectionDocumenter(BaseDocsTest):
self.assert_contains_lines_in_order([
'.. py:attribute:: samples',
' A collection of Sample resources.'
'A Sample Collection will include all resources by default, '
'and extreme caution should be taken when performing actions '
'on all resources.',
'A Sample Collection will include all resources by default, '
'and extreme caution should be taken when performing actions '
'on all resources.',
' .. py:method:: all()',
(' Creates an iterable of all Sample resources in the '
'collection.'),
@ -35,7 +35,7 @@ class TestCollectionDocumenter(BaseDocsTest):
' :returns: A list of Sample resources',
' .. py:method:: filter(**kwargs)',
(' Creates an iterable of all Sample resources in '
'the collection filtered by kwargs passed to method.'
'the collection filtered by kwargs passed to method. '
'A Sample collection will include all resources by default '
'if no filters are provided, and extreme caution should be '
'taken when performing actions on all resources'),
@ -102,4 +102,4 @@ class TestCollectionDocumenter(BaseDocsTest):
' :rtype: list(:py:class:`myservice.Sample`)',
' :returns: A list of Sample resources',
' '
])
])

View file

@ -225,7 +225,6 @@ class BaseTransformationTest(unittest.TestCase):
self.assert_batch_write_calls_are([first_batch, second_batch,
third_batch])
def test_repeated_flushing_on_exit(self):
# We're going to simulate unprocessed_items
# returning multiple unprocessed items across calls.
@ -348,39 +347,63 @@ class BaseTransformationTest(unittest.TestCase):
first_batch = {
'RequestItems': {
self.table_name: [
{'PutRequest': { 'Item': {
'pkey': 'foo1',
'skey': 'bar1',
'other': 'other2'
}}},
{'PutRequest': { 'Item': {
'pkey': 'foo1',
'skey': 'bar2',
'other': 'other3'
}}},
{'DeleteRequest': {'Key': {
'pkey': 'foo2',
'skey': 'bar2',
}}},
{'DeleteRequest': {'Key': {
'pkey': 'foo2',
'skey': 'bar3',
}}},
{'DeleteRequest': {'Key': {
'pkey': 'foo3',
'skey': 'bar3',
}}},
{
'PutRequest': {
'Item': {
'pkey': 'foo1',
'skey': 'bar1',
'other': 'other2'
}
}
},
{
'PutRequest': {
'Item': {
'pkey': 'foo1',
'skey': 'bar2',
'other': 'other3'
}
}
},
{
'DeleteRequest': {
'Key': {
'pkey': 'foo2',
'skey': 'bar2',
}
}
},
{
'DeleteRequest': {
'Key': {
'pkey': 'foo2',
'skey': 'bar3',
}
}
},
{
'DeleteRequest': {
'Key': {
'pkey': 'foo3',
'skey': 'bar3',
}
}
},
]
}
}
second_batch = {
'RequestItems': {
self.table_name: [
{'PutRequest': { 'Item': {
'pkey': 'foo1',
'skey': 'bar1',
'other': 'other2'
}}},
{
'PutRequest': {
'Item': {
'pkey': 'foo1',
'skey': 'bar1',
'other': 'other2'
}
}
},
]
}
}

View file

@ -135,9 +135,9 @@ class TestInputOutputTransformer(BaseTransformationTest):
transformation=self.transformation,
target_shape=self.target_shape)
assert input_params == {
'TransformMe':
{'foo': self.transformed_value},
'LeaveAlone': {'foo': self.original_value}}
'TransformMe': {'foo': self.transformed_value},
'LeaveAlone': {'foo': self.original_value}
}
def test_transform_list(self):
input_params = {
@ -315,10 +315,13 @@ class TestInputOutputTransformer(BaseTransformationTest):
transformation=self.transformation,
target_shape=self.target_shape)
assert input_params == {
'TargetedWrapperList': [[
self.transformed_value, self.transformed_value]],
'UntargetedWrapperList': [[
self.original_value, self.original_value]]}
'TargetedWrapperList': [
[self.transformed_value, self.transformed_value]
],
'UntargetedWrapperList': [
[self.original_value, self.original_value]
]
}
def test_transform_incorrect_type_for_structure(self):
input_params = {

View file

@ -16,8 +16,6 @@ import pytest
from tests import unittest
from botocore.compat import six
from boto3.dynamodb.types import Binary, TypeSerializer, TypeDeserializer
@ -97,8 +95,6 @@ class TestSerializer(unittest.TestCase):
def test_serialize_bytearray(self):
assert self.serializer.serialize(bytearray([1])) == {'B': b'\x01'}
@pytest.mark.skipif(six.PY2,
reason='This is a test when using python3 version of bytes')
def test_serialize_bytes(self):
assert self.serializer.serialize(b'\x01') == {'B': b'\x01'}
@ -141,9 +137,13 @@ class TestSerializer(unittest.TestCase):
def test_serialize_map(self):
serialized_value = self.serializer.serialize(
{'foo': 'bar', 'baz': {'biz': 1}})
assert serialized_value == {'M':
{'foo': {'S': 'bar'}, 'baz': {'M': {'biz': {'N': '1'}}}}
{'foo': 'bar', 'baz': {'biz': 1}}
)
assert serialized_value == {
'M': {
'foo': {'S': 'bar'},
'baz': {'M': {'biz': {'N': '1'}}}
}
}
@ -179,22 +179,22 @@ class TestDeserializer(unittest.TestCase):
def test_deserialize_number_set(self):
assert self.deserializer.deserialize(
{'NS': ['1', '1.25']}), set([Decimal('1') == Decimal('1.25')])
{'NS': ['1', '1.25']}) == set([Decimal('1'), Decimal('1.25')])
def test_deserialize_string_set(self):
assert self.deserializer.deserialize(
{'SS': ['foo', 'bar']}) == set(['foo', 'bar'])
{'SS': ['foo', 'bar']}) == set(['foo', 'bar'])
def test_deserialize_binary_set(self):
assert self.deserializer.deserialize({'BS': [b'\x00', b'\x01']}) == set(
[Binary(b'\x00'), Binary(b'\x01')])
assert self.deserializer.deserialize(
{'BS': [b'\x00', b'\x01']}) == set([Binary(b'\x00'), Binary(b'\x01')])
def test_deserialize_list(self):
assert self.deserializer.deserialize({'L':
[{'N': '1'}, {'S': 'foo'}, {'L': [{'N': '1.25'}]}]}
assert self.deserializer.deserialize(
{'L': [{'N': '1'}, {'S': 'foo'}, {'L': [{'N': '1.25'}]}]}
) == [Decimal('1'), 'foo', [Decimal('1.25')]]
def test_deserialize_map(self):
assert self.deserializer.deserialize({'M': {'foo':
{'S': 'mystring'}, 'bar': {'M': {'baz': {'N': '1'}}}}}
assert self.deserializer.deserialize(
{'M': {'foo': {'S': 'mystring'}, 'bar': {'M': {'baz': {'N': '1'}}}}}
) == {'foo': 'mystring', 'bar': {'baz': Decimal('1')}}

View file

@ -16,8 +16,9 @@ from botocore.hooks import HierarchicalEmitter
from botocore.model import ServiceModel
from boto3.utils import ServiceContext
from boto3.resources.collection import CollectionFactory, CollectionManager, \
ResourceCollection
from boto3.resources.collection import (
CollectionFactory, CollectionManager, ResourceCollection
)
from boto3.resources.base import ResourceMeta
from boto3.resources.factory import ResourceFactory
from boto3.resources.model import Collection

View file

@ -87,6 +87,7 @@ def _collection_test_args():
for collection_model in resource_model.collections:
yield (client, service_name, resource_name, collection_model)
@pytest.mark.parametrize(
'collection_args',
_collection_test_args()
@ -100,6 +101,7 @@ def test_all_collections_have_paginators_if_needed(collection_args):
# should be a paginator applied to it.
_assert_collection_has_paginator_if_needed(*collection_args)
def _assert_collection_has_paginator_if_needed(
client, service_name, resource_name, collection_model
):

View file

@ -20,7 +20,6 @@ from boto3.utils import ServiceContext
from boto3.resources.base import ServiceResource
from boto3.resources.collection import CollectionManager
from boto3.resources.factory import ResourceFactory
from boto3.resources.action import WaiterAction
class BaseTestResourceFactory(BaseTestCase):
@ -35,13 +34,13 @@ class BaseTestResourceFactory(BaseTestCase):
resource_json_definition = {}
if resource_json_definitions is None:
resource_json_definitions = {}
service_context=ServiceContext(
service_context = ServiceContext(
service_name='test',
resource_json_definitions=resource_json_definitions,
service_model=service_model,
service_waiter_model=None
)
return self.factory.load_from_definition(
resource_name=resource_name,
single_resource_json_definition=resource_json_definition,
@ -573,10 +572,14 @@ class TestResourceFactory(BaseTestResourceFactory):
model = {
"waiters": {
"Exists": {
"waiterName": "BucketExists",
"params": [
{"target": "Bucket", "source": "identifier",
"name": "Name"}]
"waiterName": "BucketExists",
"params": [
{
"target": "Bucket",
"source": "identifier",
"name": "Name"
}
]
}
}
}
@ -595,10 +598,14 @@ class TestResourceFactory(BaseTestResourceFactory):
model = {
"waiters": {
"Exists": {
"waiterName": "BucketExists",
"params": [
{"target": "Bucket", "source": "identifier",
"name": "Name"}]
"waiterName": "BucketExists",
"params": [
{
"target": "Bucket",
"source": "identifier",
"name": "Name"
}
]
}
}
}

View file

@ -169,8 +169,11 @@ class TestModels(BaseTestCase):
'resource': {
'type': 'Frob',
'identifiers': [
{'target':'Id', 'source':'data',
'path':'FrobId'}
{
'target': 'Id',
'source': 'data',
'path': 'FrobId'
}
]
}
}
@ -237,6 +240,7 @@ class TestModels(BaseTestCase):
assert waiter.waiter_name == 'ObjectExists'
assert waiter.params[0].target == 'Bucket'
class TestRenaming(BaseTestCase):
def test_multiple(self):
# This tests a bunch of different renames working together
@ -250,8 +254,11 @@ class TestRenaming(BaseTestCase):
'resource': {
'type': 'Frob',
'identifiers': [
{'target':'Id', 'source':'data',
'path': 'FrobId'}
{
'target': 'Id',
'source': 'data',
'path': 'FrobId'
}
]
}
}
@ -348,8 +355,11 @@ class TestRenaming(BaseTestCase):
'resource': {
'type': 'Frob',
'identifiers': [
{'target':'Id', 'source':'data',
'path': 'FrobId'}
{
'target': 'Id',
'source': 'data',
'path': 'FrobId'
}
]
}
}
@ -368,8 +378,11 @@ class TestRenaming(BaseTestCase):
'resource': {
'type': 'Frob',
'identifiers': [
{'target':'Id', 'source':'data',
'path': 'FrobId'}
{
'target': 'Id',
'source': 'data',
'path': 'FrobId'
}
]
}
}

View file

@ -15,10 +15,12 @@ import pytest
from boto3.exceptions import ResourceLoadException
from boto3.resources.base import ResourceMeta, ServiceResource
from boto3.resources.model import Request
from boto3.resources.params import create_request_parameters, \
build_param_structure
from boto3.resources.params import (
create_request_parameters, build_param_structure
)
from tests import BaseTestCase, mock
class TestServiceActionParams(BaseTestCase):
def test_service_action_params_identifier(self):
request_model = Request({
@ -104,7 +106,7 @@ class TestServiceActionParams(BaseTestCase):
parent.meta = ResourceMeta('test', data=None)
with pytest.raises(ResourceLoadException):
params = create_request_parameters(parent, request_model)
create_request_parameters(parent, request_model)
def test_service_action_params_constants(self):
request_model = Request({

View file

@ -17,8 +17,9 @@ from boto3.utils import ServiceContext
from boto3.resources.base import ResourceMeta, ServiceResource
from boto3.resources.model import ResponseResource, Parameter
from boto3.resources.factory import ResourceFactory
from boto3.resources.response import build_identifiers, build_empty_response,\
RawHandler, ResourceHandler
from boto3.resources.response import (
build_identifiers, build_empty_response, RawHandler, ResourceHandler
)
class TestBuildIdentifiers(BaseTestCase):
@ -216,7 +217,6 @@ class TestBuildEmptyResponse(BaseTestCase):
response = self.get_response()
assert response is None
def test_path_list(self):
self.search_path = 'Container[1].Frob'
@ -235,7 +235,6 @@ class TestBuildEmptyResponse(BaseTestCase):
response = self.get_response()
assert response is None
def test_path_invalid(self):
self.search_path = 'Container.Invalid'

View file

@ -100,6 +100,7 @@ class TestBucketLoad(unittest.TestCase):
with pytest.raises(ClientError):
inject.bucket_load(self.resource)
class TestBucketTransferMethods(unittest.TestCase):
def setUp(self):

View file

@ -23,6 +23,7 @@ from boto3.s3.transfer import create_transfer_manager
from boto3.s3.transfer import S3Transfer
from boto3.s3.transfer import OSUtils, TransferConfig, ProgressCallbackInvoker
from boto3.s3.transfer import ClientError, S3TransferRetriesExceededError
from boto3.s3.transfer import KB, MB
class TestCreateTransferManager(unittest.TestCase):
@ -83,6 +84,26 @@ class TestTransferConfig(unittest.TestCase):
self.assert_value_of_actual_and_alias(
config, 'max_io_queue_size', 'max_io_queue', new_value)
def test_transferconfig_parameters(self):
config = TransferConfig(
multipart_threshold=8 * MB,
max_concurrency=10,
multipart_chunksize=8 * MB,
num_download_attempts=5,
max_io_queue=100,
io_chunksize=256 * KB,
use_threads=True,
max_bandwidth=1024 * KB,
)
assert config.multipart_threshold == 8 * MB
assert config.multipart_chunksize == 8 * MB
assert config.max_request_concurrency == 10
assert config.num_download_attempts == 5
assert config.max_io_queue_size == 100
assert config.io_chunksize == 256 * KB
assert config.use_threads is True
assert config.max_bandwidth == 1024 * KB
class TestProgressCallbackInvoker(unittest.TestCase):
def test_on_progress(self):

View file

@ -13,11 +13,11 @@
import pytest
from botocore import loaders
from botocore.exceptions import DataNotFoundError, UnknownServiceError
from botocore.exceptions import UnknownServiceError
from botocore.client import Config
from boto3 import __version__
from boto3.exceptions import NoVersionFound, ResourceNotExistsError
from boto3.exceptions import ResourceNotExistsError
from boto3.session import Session
from tests import mock, BaseTestCase
@ -116,7 +116,7 @@ class TestSession(BaseTestCase):
def test_available_profiles(self):
bc_session = mock.Mock()
bc_session.available_profiles.return_value = ['foo','bar']
bc_session.available_profiles.return_value = ['foo', 'bar']
session = Session(botocore_session=bc_session)
profiles = session.available_profiles
assert len(profiles.return_value) == 2
@ -207,6 +207,17 @@ class TestSession(BaseTestCase):
)
assert partitions == ['foo']
def test_get_partition_for_region(self):
bc_session = mock.Mock()
bc_session.get_partition_for_region.return_value = 'baz'
session = Session(botocore_session=bc_session)
partition = session.get_partition_for_region('foo-bar-1')
bc_session.get_partition_for_region.assert_called_with(
'foo-bar-1'
)
assert partition == 'baz'
def test_create_client(self):
session = Session(region_name='us-east-1')
client = session.client('sqs', region_name='us-west-2')
@ -245,7 +256,7 @@ class TestSession(BaseTestCase):
config=mock.ANY)
client_config = session.client.call_args[1]['config']
assert client_config.user_agent_extra == 'Resource'
assert client_config.signature_version == None
assert client_config.signature_version is None
def test_create_resource_with_config(self):
mock_bc_session = mock.Mock()