def assert_monitoring_host_and_port(self, session, host, port):
    """Assert that creating a client constructs exactly one SocketPublisher
    bound to the expected ``host`` and ``port``."""
    patcher = mock.patch('ibm_botocore.monitoring.SocketPublisher', spec=True)
    with patcher as publisher_cls:
        session.create_client('s3', 'us-west-2')
        self.assertEqual(publisher_cls.call_count, 1)
        # mock_calls entries are (name, args, kwargs) triples.
        _, call_args, call_kwargs = publisher_cls.mock_calls[0]
        self.assertEqual(call_kwargs.get('host'), host)
        self.assertEqual(call_kwargs.get('port'), port)
def test_invalid_bucket_name_raises_error(self):
    """A bucket name containing slashes must fail client-side validation."""
    session = ibm_botocore.session.get_session()
    client = session.create_client('s3')
    with self.assertRaises(ParamValidationError):
        client.put_object(
            Bucket='adfgasdfadfs/bucket/name', Key='foo', Body=b'asdf')
def test_lint_waiter_configs():
    """Yield one lint check per waiter of every available service."""
    session = ibm_botocore.session.get_session()
    for service_name in session.get_available_services():
        client = session.create_client(service_name, 'us-east-1')
        model = client.meta.service_model
        for waiter_name in client.waiter_names:
            yield _lint_single_waiter, client, waiter_name, model
def _verify_expected_endpoint_url(region, bucket, key, s3_config,
                                  is_secure=True,
                                  customer_provided_endpoint=None,
                                  expected_url=None, signature_version=None):
    """Build an S3 client under a scrubbed environment, send a PutObject
    through a stubbed HTTP layer, and assert the request hit
    ``expected_url``."""
    fake_environ = {}
    with mock.patch('os.environ', fake_environ):
        # Pin credentials and point every config file at a nonexistent
        # path so nothing from the host machine leaks into the test.
        fake_environ.update({
            'AWS_ACCESS_KEY_ID': 'access_key',
            'AWS_SECRET_ACCESS_KEY': 'secret_key',
            'AWS_CONFIG_FILE': 'no-exist-foo',
            'AWS_SHARED_CREDENTIALS_FILE': 'no-exist-foo',
        })
        session = create_session()
        session.config_filename = 'no-exist-foo'
        config = Config(signature_version=signature_version, s3=s3_config)
        s3 = session.create_client(
            's3', region_name=region, use_ssl=is_secure, config=config,
            endpoint_url=customer_provided_endpoint)
        with ClientHTTPStubber(s3) as http_stubber:
            http_stubber.add_response()
            s3.put_object(Bucket=bucket, Key=key, Body=b'bar')
            assert_equal(http_stubber.requests[0].url, expected_url)
def _verify_expected_endpoint_url(region, bucket, key, s3_config,
                                  is_secure=True,
                                  customer_provided_endpoint=None,
                                  expected_url=None):
    """Build an S3 client under a scrubbed environment, send a PutObject
    with the HTTP send mocked out, and assert the computed request URL
    equals ``expected_url``."""
    # Canned successful response so put_object completes without I/O.
    http_response = mock.Mock()
    http_response.status_code = 200
    http_response.headers = {}
    http_response.content = b''
    environ = {}
    with mock.patch('os.environ', environ):
        environ['AWS_ACCESS_KEY_ID'] = 'access_key'
        environ['AWS_SECRET_ACCESS_KEY'] = 'secret_key'
        environ['AWS_CONFIG_FILE'] = 'no-exist-foo'
        # Shield the shared credentials file too, consistent with the
        # other endpoint helpers in this suite; otherwise a real
        # ~/.aws/credentials on the host could influence the client.
        environ['AWS_SHARED_CREDENTIALS_FILE'] = 'no-exist-foo'
        session = create_session()
        session.config_filename = 'no-exist-foo'
        config = None
        if s3_config is not None:
            config = Config(s3=s3_config)
        s3 = session.create_client(
            's3', region_name=region, use_ssl=is_secure, config=config,
            endpoint_url=customer_provided_endpoint)
        with mock.patch('ibm_botocore.endpoint.Session.send') as mock_send:
            mock_send.return_value = http_response
            s3.put_object(Bucket=bucket, Key=key, Body=b'bar')
            # First positional arg of the patched send() is the
            # fully-prepared AWSRequest.
            request_sent = mock_send.call_args[0][0]
            assert_equal(request_sent.url, expected_url)
def _get_client(session, service):
    """Create a client for ``service`` and install a first-chance retry
    handler on every needs-retry event."""
    # An explicit smoke-test region wins; otherwise fall back to the
    # per-service override table, then the global default.
    region_name = (os.environ.get('AWS_SMOKE_TEST_REGION', '')
                   or REGION_OVERRIDES.get(service, REGION))
    client = session.create_client(service, region_name=region_name)
    client.meta.events.register_first('needs-retry.*.*', retry_handler)
    return client
def test_get_ses_waiter(self):
    """Waiters for 'ses' must load even though its endpoint prefix differs.

    We're checking this because ses is not the endpoint prefix for the
    service, it's email.  We want to make sure this does not affect the
    lookup process.
    """
    session = ibm_botocore.session.get_session()
    client = session.create_client('ses', 'us-east-1')
    # A non-empty waiter list proves the waiter config actually loaded.
    # assertGreater reports the actual length on failure, unlike
    # assertTrue(len(...) > 0).
    self.assertGreater(len(client.waiter_names), 0)
def setUp(self):
    """Create an unsigned, path-addressed S3 client plus its stubber."""
    session = ibm_botocore.session.get_session()
    client_config = ibm_botocore.config.Config(
        signature_version=ibm_botocore.UNSIGNED,
        s3={'addressing_style': 'path'},
    )
    self.client = session.create_client(
        's3', region_name='us-east-1', config=client_config)
    self.stubber = Stubber(self.client)
def _make_api_call(session, api_call):
    """Replay one recorded API call against a client whose HTTP layer is
    stubbed with the recorded attempt responses."""
    # serviceId values contain spaces ("Service Name") which must be
    # stripped to form the client name.
    service_name = api_call['serviceId'].lower().replace(' ', '')
    client = session.create_client(service_name)
    method = getattr(client, xform_name(api_call['operationName']))
    with _stubbed_http_layer(client, api_call['attemptResponses']):
        try:
            method(**api_call['params'])
        except EXPECTED_EXCEPTIONS_THROWN:
            # These exceptions are part of the recorded scenario.
            pass
def test_debug_log_contains_headers_and_body(self):
    """Integration check: response headers/body show up in the debug log.

    This only verifies the feature survives refactoring; it deliberately
    does not pin any particular log format.
    """
    session = ibm_botocore.session.get_session()
    client = session.create_client('s3', region_name='us-west-2')
    log_stream = six.StringIO()
    session.set_stream_logger('', logging.DEBUG, log_stream)
    client.list_buckets()
    contents = log_stream.getvalue()
    self.assertIn('Response headers', contents)
    self.assertIn('Response body', contents)
def _waiter_configs():
    """Yield ``(validator, waiter_model, client)`` for every service that
    ships a waiter configuration."""
    session = ibm_botocore.session.get_session()
    validator = Draft4Validator(WAITER_SCHEMA)
    # We use the loader directly here because we need the entire json
    # document, not just the portions exposed (either internally or
    # externally) by the WaiterModel class.  Fetch it once, outside the
    # loop, since it is the same component every iteration.
    loader = session.get_component('data_loader')
    for service_name in session.get_available_services():
        try:
            waiter_model = loader.load_service_model(
                service_name, 'waiters-2')
        except UnknownServiceError:
            # The service doesn't have waiters; skip it before paying
            # the cost of constructing a client for it.
            continue
        client = session.create_client(service_name, 'us-east-1')
        yield validator, waiter_model, client
def test_dynamic_client_error(self):
    """Dynamically-generated client exceptions survive a pickle round trip."""
    session = ibm_botocore.session.Session()
    client = session.create_client('s3', 'us-west-2')
    error_response = {
        'Error': {'Code': 'NoSuchKey', 'Message': 'Not Found'},
    }
    original = client.exceptions.NoSuchKey(
        error_response=error_response, operation_name='myoperation')
    restored = pickle.loads(pickle.dumps(original))
    # The unpickled exception must still be a ClientError and carry the
    # same message, operation name, and raw response.
    self.assertIsInstance(restored, ibm_botocore.exceptions.ClientError)
    self.assertEqual(str(restored), str(original))
    self.assertEqual(restored.operation_name, original.operation_name)
    self.assertEqual(restored.response, original.response)
def _create_s3_client(region, is_secure, endpoint_url, s3_config,
                      signature_version):
    """Return an S3 client built under a scrubbed environment so no host
    credentials or config files affect the result."""
    scrubbed = {}
    with mock.patch('os.environ', scrubbed):
        scrubbed['AWS_ACCESS_KEY_ID'] = 'access_key'
        scrubbed['AWS_SECRET_ACCESS_KEY'] = 'secret_key'
        scrubbed['AWS_CONFIG_FILE'] = 'no-exist-foo'
        scrubbed['AWS_SHARED_CREDENTIALS_FILE'] = 'no-exist-foo'
        session = create_session()
        session.config_filename = 'no-exist-foo'
        client_config = Config(
            signature_version=signature_version, s3=s3_config)
        return session.create_client(
            's3', region_name=region, use_ssl=is_secure,
            config=client_config, endpoint_url=endpoint_url)
def test_client_has_correct_class_name():
    """Yield one class-name check per service in SERVICE_TO_CLASS_NAME."""
    session = ibm_botocore.session.get_session()
    for service_name, expected_class_name in SERVICE_TO_CLASS_NAME.items():
        client = session.create_client(service_name, REGION)
        yield (_assert_class_name_matches_ref_class_name, client,
               expected_class_name)
def setUp(self):
    """Create an unsigned S3 client (no explicit region) plus its stubber."""
    session = ibm_botocore.session.get_session()
    unsigned_config = ibm_botocore.config.Config(
        signature_version=ibm_botocore.UNSIGNED)
    self.client = session.create_client('s3', config=unsigned_config)
    self.stubber = Stubber(self.client)
def _get_client(session, service):
    """Create a client for ``service`` in the smoke-test region."""
    # An explicit smoke-test region wins; otherwise fall back to the
    # per-service override table, then the global default.
    region_name = (os.environ.get('AWS_SMOKE_TEST_REGION', '')
                   or REGION_OVERRIDES.get(service, REGION))
    return session.create_client(service, region_name=region_name)
def assert_created_client_is_not_monitored(self, session):
    """Assert that creating a client registers nothing with the Monitor."""
    patcher = mock.patch(
        'ibm_botocore.session.monitoring.Monitor', spec=True)
    with patcher as monitor_cls:
        session.create_client('s3', 'us-west-2')
        monitor_cls.return_value.register.assert_not_called()
def _test_can_list_clusters_in_region(session, region):
    """Smoke test: EMR ListClusters succeeds in the given region."""
    emr = session.create_client('emr', region_name=region)
    result = emr.list_clusters()
    assert_true('Clusters' in result)
def test_client_has_correct_class_name(service_name):
    """The generated client class is named after its service."""
    session = ibm_botocore.session.get_session()
    client = session.create_client(service_name, REGION)
    expected = SERVICE_TO_CLASS_NAME[service_name]
    assert client.__class__.__name__ == expected