2017-02-02 09:27:08 +01:00
|
|
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
|
|
|
# may not use this file except in compliance with the License. A copy of
|
|
|
|
# the License is located at
|
|
|
|
#
|
|
|
|
# http://aws.amazon.com/apache2.0/
|
|
|
|
#
|
|
|
|
# or in the "license" file accompanying this file. This file is
|
|
|
|
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
|
|
|
# ANY KIND, either express or implied. See the License for the specific
|
|
|
|
# language governing permissions and limitations under the License.
|
2021-10-04 18:33:37 +02:00
|
|
|
import pytest
|
|
|
|
|
2017-02-02 09:27:08 +01:00
|
|
|
from botocore.session import get_session
|
|
|
|
|
|
|
|
# Known, intentional mismatches between the service name used to create a
# client (the directory name under botocore/data) and the name computed
# from the service id in the model's metadata.
SERVICE_RENAMES = {
    # Actual service name we use -> Allowed computed service name.
    'alexaforbusiness': 'alexa-for-business',
    'apigateway': 'api-gateway',
    'application-autoscaling': 'application-auto-scaling',
    'appmesh': 'app-mesh',
    'autoscaling': 'auto-scaling',
    'autoscaling-plans': 'auto-scaling-plans',
    'ce': 'cost-explorer',
    'cloudhsmv2': 'cloudhsm-v2',
    'cloudsearchdomain': 'cloudsearch-domain',
    'cognito-idp': 'cognito-identity-provider',
    'config': 'config-service',
    'cur': 'cost-and-usage-report-service',
    'datapipeline': 'data-pipeline',
    'directconnect': 'direct-connect',
    'devicefarm': 'device-farm',
    'discovery': 'application-discovery-service',
    'dms': 'database-migration-service',
    'ds': 'directory-service',
    'dynamodbstreams': 'dynamodb-streams',
    'elasticbeanstalk': 'elastic-beanstalk',
    'elastictranscoder': 'elastic-transcoder',
    'elb': 'elastic-load-balancing',
    'elbv2': 'elastic-load-balancing-v2',
    'es': 'elasticsearch-service',
    'events': 'eventbridge',
    'globalaccelerator': 'global-accelerator',
    'iot-data': 'iot-data-plane',
    'iot-jobs-data': 'iot-jobs-data-plane',
    'iot1click-devices': 'iot-1click-devices-service',
    'iot1click-projects': 'iot-1click-projects',
    'iotevents-data': 'iot-events-data',
    'iotevents': 'iot-events',
    'iotwireless': 'iot-wireless',
    'kinesisanalytics': 'kinesis-analytics',
    'kinesisanalyticsv2': 'kinesis-analytics-v2',
    'kinesisvideo': 'kinesis-video',
    'lex-models': 'lex-model-building-service',
    'lexv2-models': 'lex-models-v2',
    'lex-runtime': 'lex-runtime-service',
    'lexv2-runtime': 'lex-runtime-v2',
    'logs': 'cloudwatch-logs',
    'machinelearning': 'machine-learning',
    'marketplacecommerceanalytics': 'marketplace-commerce-analytics',
    'marketplace-entitlement': 'marketplace-entitlement-service',
    'meteringmarketplace': 'marketplace-metering',
    'mgh': 'migration-hub',
    'sms-voice': 'pinpoint-sms-voice',
    'resourcegroupstaggingapi': 'resource-groups-tagging-api',
    'route53': 'route-53',
    'route53domains': 'route-53-domains',
    's3control': 's3-control',
    'sdb': 'simpledb',
    'secretsmanager': 'secrets-manager',
    'serverlessrepo': 'serverlessapplicationrepository',
    'servicecatalog': 'service-catalog',
    'servicecatalog-appregistry': 'service-catalog-appregistry',
    'stepfunctions': 'sfn',
    'storagegateway': 'storage-gateway',
}
|
|
|
|
|
2018-07-11 08:25:50 +02:00
|
|
|
|
|
|
|
# Known exceptions where an endpoints.json key maps to a different
# endpoint prefix in the service model.
ENDPOINT_PREFIX_OVERRIDE = {
    # entry in endpoints.json -> actual endpoint prefix.
    # The autoscaling-* services actually send requests to the
    # autoscaling service, but they're exposed as separate clients
    # in botocore.
    'autoscaling-plans': 'autoscaling',
    'application-autoscaling': 'autoscaling',
    # For neptune, we send requests to the RDS endpoint.
    'neptune': 'rds',
    'docdb': 'rds',
    # iotevents data endpoints.json and service-2.json don't line up.
    'ioteventsdata': 'data.iotevents',
    'iotsecuredtunneling': 'api.tunneling.iot',
    'iotwireless': 'api.iotwireless',
    'data.iot': 'data-ats.iot',
}
|
2021-01-26 16:12:20 +01:00
|
|
|
|
2018-07-11 08:25:50 +02:00
|
|
|
# Services present in endpoints.json that botocore deliberately does not
# support; they are excluded from the cross-check entirely.
NOT_SUPPORTED_IN_SDK = [
    'mobileanalytics',
    'transcribestreaming',
]
|
|
|
|
|
|
|
|
|
2021-11-03 18:14:15 +01:00
|
|
|
# Shared session/loader so every test reuses the same (cached) model data.
SESSION = get_session()
LOADER = SESSION.get_component('data_loader')
# All service names that have a service-2 model on disk; these are the
# names usable with session.create_client(...).
AVAILABLE_SERVICES = LOADER.list_available_services('service-2')
|
|
|
|
|
|
|
|
|
|
|
|
def _known_endpoint_prefixes():
    """Return the set of endpoint prefixes of every loadable service model.

    The entries in endpoints.json are keyed off of the endpoint
    prefix.  We don't directly have that data, so we have to load
    every service model and look up its endpoint prefix in its
    ``metadata`` section.
    """
    prefixes = set()
    for name in AVAILABLE_SERVICES:
        model = SESSION.get_service_model(name)
        prefixes.add(model.endpoint_prefix)
    return prefixes
|
2021-10-04 18:33:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _computed_endpoint_prefixes():
    """Return the sorted endpoint prefixes implied by endpoints.json.

    This verifies client names match up with data from the endpoints.json
    file.  We want to verify that every entry in the endpoints.json file
    corresponds to a client we can construct via
    ``session.create_client(...)``.
    """
    endpoint_data = LOADER.load_data('endpoints')
    # A service can be in multiple partitions, so a set removes dupes.
    # Services the SDK does not support are skipped up front.
    names_in_endpoints_file = {
        service
        for partition in endpoint_data['partitions']
        for service in partition['services']
        if service not in NOT_SUPPORTED_IN_SDK
    }
    # Map each endpoints.json entry through the known overrides, where an
    # entry actually corresponds to a different endpoint prefix.
    return sorted(
        ENDPOINT_PREFIX_OVERRIDE.get(name, name)
        for name in names_in_endpoints_file
    )
|
|
|
|
|
2017-06-27 11:52:19 +02:00
|
|
|
|
2021-11-03 18:14:15 +01:00
|
|
|
# Computed once at import time and shared by the parametrized tests below.
KNOWN_ENDPOINT_PREFIXES = _known_endpoint_prefixes()
COMPUTED_ENDPOINT_PREFIXES = _computed_endpoint_prefixes()
|
2017-06-27 11:52:19 +02:00
|
|
|
|
2021-11-03 18:14:15 +01:00
|
|
|
|
|
|
|
@pytest.mark.parametrize("endpoint_prefix", COMPUTED_ENDPOINT_PREFIXES)
def test_endpoint_matches_service(endpoint_prefix):
    """Every endpoint prefix derived from endpoints.json must be known."""
    # We need to cross check all computed endpoints against our
    # known values in endpoints.json, to ensure everything lines
    # up correctly.
    assert endpoint_prefix in KNOWN_ENDPOINT_PREFIXES
|
2017-02-02 09:27:08 +01:00
|
|
|
|
|
|
|
|
2021-11-03 18:14:15 +01:00
|
|
|
@pytest.mark.parametrize("service_name", AVAILABLE_SERVICES)
def test_service_name_matches_endpoint_prefix(service_name):
    """Verify the computed service name matches the client name.

    The name computed from the service id must match the service name
    used to create a client (i.e. the directory name in botocore/data)
    unless there is an explicit exception in SERVICE_RENAMES.
    """
    model = SESSION.get_service_model(service_name)
    computed_name = model.service_id.replace(' ', '-').lower()

    # Handle known exceptions where we have renamed the service directory
    # for one reason or another.
    actual_service_name = SERVICE_RENAMES.get(service_name, service_name)

    mismatch_msg = (
        f"Actual service name `{actual_service_name}` does not match "
        f"expected service name we computed: `{computed_name}`"
    )
    assert computed_name == actual_service_name, mismatch_msg
|
2021-08-18 17:45:16 +02:00
|
|
|
|
|
|
|
|
|
|
|
# FIPS pseudo-regions that already existed for S3 and are grandfathered in;
# test_no_s3_fips_regions fails only on NEW additions beyond this list.
_S3_ALLOWED_PSEUDO_FIPS_REGIONS = [
    'fips-accesspoint-ca-central-1',
    'fips-accesspoint-us-east-1',
    'fips-accesspoint-us-east-2',
    'fips-accesspoint-us-west-1',
    'fips-accesspoint-us-west-2',
    'fips-accesspoint-us-gov-east-1',
    'fips-accesspoint-us-gov-west-1',
    'fips-us-gov-west-1',
    'fips-us-gov-east-1',
    'fips-ca-central-1',
    'fips-us-east-1',
    'fips-us-east-2',
    'fips-us-west-1',
    'fips-us-west-2',
]
|
|
|
|
|
|
|
|
|
2021-10-04 18:33:37 +02:00
|
|
|
def _s3_region_names():
    """Yield every S3 region name from endpoints.json, lowercased.

    Iterates all partitions; partitions that do not define an S3 service
    contribute no regions.
    """
    endpoints = LOADER.load_data('endpoints')

    for partition in endpoints['partitions']:
        s3_service = partition['services'].get('s3', {})
        # BUG FIX: previously indexed s3_service['endpoints'] directly,
        # which raises KeyError for any partition without an 's3' entry
        # (the {} default above has no 'endpoints' key).
        for region_name in s3_service.get('endpoints', ()):
            yield region_name.lower()
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("region_name", _s3_region_names())
def test_no_s3_fips_regions(region_name):
    """Fail if additional FIPS pseudo-regions are added to S3.

    This may be removed once proper support is implemented for FIPS in S3.
    """
    if region_name in _S3_ALLOWED_PSEUDO_FIPS_REGIONS:
        return

    # BUG FIX: the first fragment below was a plain string literal, so
    # "{region_name}" was emitted verbatim instead of being interpolated;
    # it must be an f-string.
    err_msg = (
        f'New S3 FIPS pseudo-region added: "{region_name}". '
        'FIPS has compliancy requirements that may not be met in all cases '
        'for S3 clients due to the custom endpoint resolution and '
        'construction logic.'
    )

    assert 'fips' not in region_name, err_msg
|