def test_service_names_are_valid():
    session = get_session()
    loader = session.get_component('data_loader')
    service_names = loader.list_available_services('service-2')
    for service_name in service_names:
        yield _assert_name_length, service_name
        yield _assert_name_pattern, service_name
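# A minimal sketch of the two name assertions yielded above, assuming the
# checks are a simple length bound and a lowercase-with-hyphens pattern;
# the exact bounds and regex here are placeholders, not the suite's real
# values.
import re

_VALID_NAME_PATTERN = re.compile('^[a-z][a-z0-9-]*$')  # assumed pattern
_MIN_NAME_LENGTH = 2   # assumed lower bound
_MAX_NAME_LENGTH = 50  # assumed upper bound


def _assert_name_length(service_name):
    assert _MIN_NAME_LENGTH <= len(service_name) <= _MAX_NAME_LENGTH, (
        'Service name %s has an invalid length' % service_name)


def _assert_name_pattern(service_name):
    assert _VALID_NAME_PATTERN.match(service_name) is not None, (
        'Service name %s does not match the naming pattern' % service_name)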
def setUp(self):
    super(TestServiceDocumenter, self).setUp()
    self.add_shape_to_params('Biz', 'String')
    self.setup_client()
    with mock.patch('ibm_botocore.session.create_loader',
                    return_value=self.loader):
        session = get_session()
        self.service_documenter = ServiceDocumenter('myservice', session)
def test_default_configurations_resolve_correctly():
    session = get_session()
    config = Config(defaults_mode='standard')
    client = session.create_client(
        'sts', config=config, region_name='us-west-2')
    assert client.meta.config.s3['us_east_1_regional_endpoint'] == 'regional'
    assert client.meta.config.connect_timeout == 3.1
    assert client.meta.endpoint_url == 'https://sts.us-west-2.amazonaws.com'
    assert client.meta.config.retries['mode'] == 'standard'
def test_service_name_matches_endpoint_prefix():
    # Generates tests for each service to verify that the endpoint prefix
    # matches the service name unless there is an explicit exception.
    session = get_session()
    loader = session.get_component('data_loader')
    # Load the list of available services. The names here represent what
    # will become the client names.
    services = loader.list_available_services('service-2')
    for service in services:
        yield _assert_service_name_matches_endpoint_prefix, loader, service
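# A minimal sketch of the loader-based assertion yielded above: load the raw
# service-2 model and compare its endpointPrefix to the service name. The
# SERVICE_NAMES table is a hypothetical stand-in for the explicit exceptions
# the comment mentions, not the suite's actual data.
SERVICE_NAMES = {}  # hypothetical explicit-exception table


def _assert_service_name_matches_endpoint_prefix(loader, service_name):
    service_model = loader.load_service_model(service_name, 'service-2')
    endpoint_prefix = service_model['metadata']['endpointPrefix']
    expected = SERVICE_NAMES.get(service_name, service_name)
    assert endpoint_prefix == expected, (
        'Service name %s does not match endpoint prefix %s'
        % (service_name, endpoint_prefix))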
def test_service_name_matches_endpoint_prefix():
    # Generates tests for each service to verify that the computed service
    # name based on the service id matches the service name used to
    # create a client (i.e. the directory name in ibm_botocore/data)
    # unless there is an explicit exception.
    session = get_session()
    loader = session.get_component('data_loader')
    # Load the list of available services. The names here represent what
    # will become the client names.
    services = loader.list_available_services('service-2')
    for service in services:
        yield _assert_service_name_matches_endpoint_prefix, session, service
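# A minimal sketch of the session-based variant yielded just above: compute
# a client-style name from the model's service id and compare it to the data
# directory name. The hyphenation rule and the SERVICE_NAME_OVERRIDES table
# are assumptions based on the comment, not the suite's exact implementation.
SERVICE_NAME_OVERRIDES = {}  # hypothetical explicit-exception table


def _assert_service_name_matches_endpoint_prefix(session, service_name):
    service_model = session.get_service_model(service_name)
    # e.g. a service id of "Auto Scaling" becomes "auto-scaling".
    computed_name = service_model.service_id.replace(' ', '-').lower()
    expected = SERVICE_NAME_OVERRIDES.get(service_name, service_name)
    assert computed_name == expected, (
        'Computed service name %s does not match %s'
        % (computed_name, service_name))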
def test_endpoint_matches_service():
    backwards_renames = dict((v, k) for k, v in SERVICE_RENAMES.items())
    session = get_session()
    loader = session.get_component('data_loader')
    expected_services = set(['s3'])
    pdir = os.path.dirname
    endpoints_path = os.path.join(pdir(pdir(pdir(__file__))),
                                  'ibm_botocore', 'data', 'endpoints.json')
    with open(endpoints_path, 'r') as f:
        data = json.loads(f.read())
    for partition in data['partitions']:
        for service in partition['services'].keys():
            service = backwards_renames.get(service, service)
            if service not in BLACKLIST:
                yield _assert_endpoint_is_service, service, expected_services
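# A minimal sketch of the assertion yielded above, assuming it is a plain
# membership check of the (renamed) endpoint entry against the expected
# service set.
def _assert_endpoint_is_service(service, expected_services):
    assert service in expected_services, (
        'Endpoint entry %s is not a supported service' % service)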
def test_all_uses_of_h2_are_known():
    session = get_session()
    loader = session.get_component('data_loader')
    services = loader.list_available_services('service-2')
    for service in services:
        service_model = session.get_service_model(service)
        h2_config = service_model.metadata.get(
            'protocolSettings', {}).get('h2')
        if h2_config == 'required':
            yield _assert_h2_service_is_known, service
        elif h2_config == 'eventstream':
            for operation in service_model.operation_names:
                operation_model = service_model.operation_model(operation)
                if operation_model.has_event_stream_output:
                    yield _assert_h2_operation_is_known, service, operation
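# Minimal sketches of the two h2 assertions yielded above. The allowlists
# (_KNOWN_H2_SERVICES, _KNOWN_H2_OPERATIONS) and their contents are
# placeholders for whatever known-h2 lists the suite actually maintains.
_KNOWN_H2_SERVICES = {'kinesis'}                          # assumed
_KNOWN_H2_OPERATIONS = {('kinesis', 'SubscribeToShard')}  # assumed


def _assert_h2_service_is_known(service):
    assert service in _KNOWN_H2_SERVICES, (
        'Found unknown h2-required service: %s' % service)


def _assert_h2_operation_is_known(service, operation):
    assert (service, operation) in _KNOWN_H2_OPERATIONS, (
        'Found unknown h2 event stream operation: %s.%s'
        % (service, operation))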
def test_generate_docs(self):
    session = get_session()
    # Have the rst files get written to the temporary directory
    generate_docs(self.docs_root, session)
    reference_services_path = os.path.join(
        self.docs_root, 'reference', 'services')
    reference_service_path = os.path.join(
        reference_services_path, 'myservice.rst')
    self.assertTrue(os.path.exists(reference_service_path))
    # Make sure the rst file has some of the expected contents.
    with open(reference_service_path, 'r') as f:
        contents = f.read()
    self.assertIn('AWS MyService', contents)
    self.assertIn('Client', contents)
    self.assertIn('Paginators', contents)
    self.assertIn('Waiters', contents)
def test_endpoint_matches_service():
    # This verifies client names match up with data from the endpoints.json
    # file. We want to verify that every entry in the endpoints.json
    # file corresponds to a client we can construct via
    # session.create_client(...).
    # So first we get a list of all the service names in the endpoints
    # file.
    session = get_session()
    loader = session.get_component('data_loader')
    endpoints = loader.load_data('endpoints')
    # A service can be in multiple partitions so we're using
    # a set here to remove dupes.
    services_in_endpoints_file = set([])
    for partition in endpoints['partitions']:
        for service in partition['services']:
            # There are some services we don't support in the SDK
            # so we don't need to add them to the list of services
            # we need to check.
            if service not in NOT_SUPPORTED_IN_SDK:
                services_in_endpoints_file.add(service)
    # Now we need to cross check them against services we know about.
    # The entries in endpoints.json are keyed off of the endpoint
    # prefix. We don't directly have that data, so we have to load
    # every service model and look up its endpoint prefix in its
    # ``metadata`` section.
    known_services = loader.list_available_services('service-2')
    known_endpoint_prefixes = [
        session.get_service_model(service_name).endpoint_prefix
        for service_name in known_services
    ]
    # Now we go through every known endpoint prefix in the endpoints.json
    # file and ensure it maps to an endpoint prefix we've seen
    # in a service model.
    for endpoint_prefix in services_in_endpoints_file:
        # Check for an override where we know that an entry
        # in the endpoints.json actually maps to a different endpoint
        # prefix.
        endpoint_prefix = ENDPOINT_PREFIX_OVERRIDE.get(
            endpoint_prefix, endpoint_prefix)
        yield (_assert_known_endpoint_prefix,
               endpoint_prefix,
               known_endpoint_prefixes)
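# A minimal sketch of the assertion yielded above, assuming it is a simple
# membership check over the endpoint prefixes collected from the service
# models.
def _assert_known_endpoint_prefix(endpoint_prefix, known_endpoint_prefixes):
    assert endpoint_prefix in known_endpoint_prefixes, (
        'endpoints.json entry %s does not map to a known endpoint prefix'
        % endpoint_prefix)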
def _all_test_cases():
    session = get_session()
    loader = session.get_component('data_loader')
    services = loader.list_available_services('service-2')
    h2_services = []
    h2_operations = []
    for service in services:
        service_model = session.get_service_model(service)
        h2_config = service_model.metadata.get(
            'protocolSettings', {}).get('h2')
        if h2_config == 'required':
            h2_services.append(service)
        elif h2_config == 'eventstream':
            for operation in service_model.operation_names:
                operation_model = service_model.operation_model(operation)
                if operation_model.has_event_stream_output:
                    h2_operations.append([service, operation])
    return h2_services, h2_operations
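# A sketch of how _all_test_cases() might drive parametrized pytest tests in
# place of the earlier yield-based generator; the allowlists here are assumed
# placeholders, and the test names are chosen for illustration only.
import pytest

_KNOWN_H2_SERVICES = {'kinesis'}                          # assumed
_KNOWN_H2_OPERATIONS = {('kinesis', 'SubscribeToShard')}  # assumed

H2_SERVICES, H2_OPERATIONS = _all_test_cases()


@pytest.mark.parametrize('h2_service', H2_SERVICES)
def test_all_h2_services_are_known(h2_service):
    assert h2_service in _KNOWN_H2_SERVICES


@pytest.mark.parametrize('h2_service, h2_operation', H2_OPERATIONS)
def test_all_h2_operations_are_known(h2_service, h2_operation):
    assert (h2_service, h2_operation) in _KNOWN_H2_OPERATIONS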
def _service_names():
    session = get_session()
    loader = session.get_component('data_loader')
    return loader.list_available_services('service-2')
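# A sketch of how _service_names() might feed a parametrized pytest test,
# mirroring the yield-based name checks earlier; the length bounds and regex
# are the same assumed placeholders as before.
import re

import pytest

_NAME_PATTERN = re.compile('^[a-z][a-z0-9-]*$')  # assumed pattern


@pytest.mark.parametrize('service_name', _service_names())
def test_service_name_is_valid(service_name):
    assert 2 <= len(service_name) <= 50  # assumed bounds
    assert _NAME_PATTERN.match(service_name) is not None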
ENDPOINT_PREFIX_OVERRIDE = {
    # entry in endpoints.json -> endpoint prefix from the service model
    'application-autoscaling': 'autoscaling',
    # For neptune, we send requests to the RDS endpoint.
    'neptune': 'rds',
    'docdb': 'rds',
    # iotevents data endpoints.json and service-2.json don't line up.
    'ioteventsdata': 'data.iotevents',
    'iotsecuredtunneling': 'api.tunneling.iot',
    'iotwireless': 'api.iotwireless',
}

NOT_SUPPORTED_IN_SDK = [
    'mobileanalytics',
    'transcribestreaming',
]

SESSION = get_session()
LOADER = SESSION.get_component('data_loader')
AVAILABLE_SERVICES = LOADER.list_available_services('service-2')


def _known_endpoint_prefixes():
    # The entries in endpoints.json are keyed off of the endpoint
    # prefix. We don't directly have that data, so we have to load
    # every service model and look up its endpoint prefix in its
    # ``metadata`` section.
    return set([
        SESSION.get_service_model(service_name).endpoint_prefix
        for service_name in AVAILABLE_SERVICES
    ])
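# A sketch of a parametrized counterpart to the yield-based endpoint test
# above, built on these module-level helpers; collecting the override-adjusted
# endpoints.json keys inline here is an assumption about how the suite wires
# the pieces together.
import pytest


def _endpoint_prefixes_in_file():
    endpoints = LOADER.load_data('endpoints')
    prefixes = set()
    for partition in endpoints['partitions']:
        for service in partition['services']:
            if service not in NOT_SUPPORTED_IN_SDK:
                prefixes.add(ENDPOINT_PREFIX_OVERRIDE.get(service, service))
    return sorted(prefixes)


@pytest.mark.parametrize('endpoint_prefix', _endpoint_prefixes_in_file())
def test_endpoint_prefix_is_known(endpoint_prefix):
    assert endpoint_prefix in _known_endpoint_prefixes()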
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import pytest

from ibm_botocore.session import get_session
from ibm_botocore.config import Config

_SDK_DEFAULT_CONFIGURATION_VALUES_ALLOWLIST = (
    'retryMode',
    'stsRegionalEndpoints',
    's3UsEast1RegionalEndpoints',
    'connectTimeoutInMillis',
    'tlsNegotiationTimeoutInMillis'
)

session = get_session()
loader = session.get_component('data_loader')
sdk_default_configuration = loader.load_data('sdk-default-configuration')


@pytest.mark.parametrize("mode", sdk_default_configuration['base'])
def test_no_new_sdk_default_configuration_values(mode):
    err_msg = (
        f'New default configuration value {mode} introduced to '
        f'sdk-default-configuration.json. Support for setting {mode} must be '
        'considered and added to the DefaultConfigResolver. In addition, the '
        'value must be added to _SDK_DEFAULT_CONFIGURATION_VALUES_ALLOWLIST.'
    )
    assert mode in _SDK_DEFAULT_CONFIGURATION_VALUES_ALLOWLIST, err_msg
def setUp(self):
    self._session = get_session()
# sphinx-quickstart on Sun Dec 2 07:26:23 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os

import ibm_botocore
from ibm_botocore.session import get_session
from ibm_botocore.docs import generate_docs

generate_docs(os.path.dirname(os.path.abspath(__file__)), get_session())

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']