def test_can_clobber_max_attempts_on_session(self):
    """A per-client retry config overrides the session-wide default."""
    session_config = Config(retries={'max_attempts': 1})
    self.session.set_default_client_config(session_config)
    # Max attempts should override the session's configured max attempts.
    client_config = Config(retries={'max_attempts': 0})
    client = self.session.create_client(
        's3', self.region, config=client_config)
    self.assert_will_retry_n_times(client.list_buckets, 0)
def setUp(self):
    """Create two SigV4 s3 clients: one in the bucket's region, one not."""
    super(TestRegionRedirect, self).setUp()
    self.bucket_region = self.region
    self.client_region = 'eu-central-1'
    self.client = self.session.create_client(
        's3',
        region_name=self.client_region,
        config=Config(signature_version='s3v4'))
    self.bucket_client = self.session.create_client(
        's3',
        region_name=self.bucket_region,
        config=Config(signature_version='s3v4'))
def setUp(self):
    """Build a legacy-SigV2 ('s3') client and the bucket used by presign tests."""
    super(TestS3PresignNonUsStandard, self).setUp()
    self.client_config = Config(
        region_name=self.region, signature_version='s3')
    self.client = self.session.create_client(
        's3', config=self.client_config)
    self.setup_bucket()
def test_thread_safe_auth(self):
    """Ten concurrent uploads must each sign with their own auth path."""
    self.auth_paths = []
    self.session.register('before-sign', self.increment_auth)
    # This test depends on auth_path, which is only added in virtual host
    # style requests.
    config = Config(s3={'addressing_style': 'virtual'})
    self.client = self.session.create_client('s3', self.region,
                                             config=config)
    self.create_object(key_name='foo1')
    workers = []
    for index in range(10):
        worker = threading.Thread(
            target=self.create_object_catch_exceptions,
            args=('foo%s' % index, ))
        worker.daemon = True
        workers.append(worker)
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    self.assertEqual(
        self.caught_exceptions, [],
        "Unexpectedly caught exceptions: %s" % self.caught_exceptions)
    # Each request should have produced a distinct auth path.
    self.assertEqual(
        len(set(self.auth_paths)), 10,
        "Expected 10 unique auth paths, instead received: %s" %
        (self.auth_paths))
def create_client(self):
    """Return an s3 client pinned to the legacy 's3' signature version.

    The default signature_version is already 's3', but we pass it
    explicitly in case that default ever changes.
    """
    explicit_config = Config(signature_version='s3')
    return self.session.create_client(
        's3', self.region, config=explicit_config)
def setUp(self):
    """Create a mis-regioned SigV4 client plus canned redirect/success replies.

    The client is deliberately created in us-west-2 while the canned
    responses claim the bucket lives in eu-central-1, so tests can exercise
    the region-redirect handling without a real network.
    """
    super(TestRegionRedirect, self).setUp()
    self.client = self.session.create_client(
        's3', 'us-west-2', config=Config(signature_version='s3v4'))
    # 301 PermanentRedirect carrying the bucket's true region in the
    # x-amz-bucket-region header.
    self.redirect_response = mock.Mock()
    self.redirect_response.headers = {
        'x-amz-bucket-region': 'eu-central-1'
    }
    self.redirect_response.status_code = 301
    self.redirect_response.content = (
        b'<?xml version="1.0" encoding="UTF-8"?>\n'
        b'<Error>'
        b'  <Code>PermanentRedirect</Code>'
        b'  <Message>The bucket you are attempting to access must be '
        b'  addressed using the specified endpoint. Please send all '
        b'  future requests to this endpoint.'
        b'  </Message>'
        b'  <Bucket>foo</Bucket>'
        b'  <Endpoint>foo.s3.eu-central-1.amazonaws.com</Endpoint>'
        b'</Error>')
    # 200 success reply for a ListBucket call after the redirect resolves.
    self.success_response = mock.Mock()
    self.success_response.headers = {}
    self.success_response.status_code = 200
    self.success_response.content = (
        b'<?xml version="1.0" encoding="UTF-8"?>\n'
        b'<ListBucketResult>'
        b'  <Name>foo</Name>'
        b'  <Prefix></Prefix>'
        b'  <Marker></Marker>'
        b'  <MaxKeys>1000</MaxKeys>'
        b'  <EncodingType>url</EncodingType>'
        b'  <IsTruncated>false</IsTruncated>'
        b'</ListBucketResult>')
def _verify_expected_endpoint_url(region, bucket, key, s3_config,
                                  is_secure=True,
                                  customer_provided_endpoint=None,
                                  expected_url=None, signature_version=None):
    """Send a stubbed PutObject and assert the request hit expected_url."""
    # Scrub the environment so no real credentials or config files leak in.
    fake_environ = {}
    with mock.patch('os.environ', fake_environ):
        fake_environ['AWS_ACCESS_KEY_ID'] = 'access_key'
        fake_environ['AWS_SECRET_ACCESS_KEY'] = 'secret_key'
        fake_environ['AWS_CONFIG_FILE'] = 'no-exist-foo'
        fake_environ['AWS_SHARED_CREDENTIALS_FILE'] = 'no-exist-foo'
        session = create_session()
        session.config_filename = 'no-exist-foo'
        config = Config(signature_version=signature_version, s3=s3_config)
        client = session.create_client(
            's3', region_name=region, use_ssl=is_secure, config=config,
            endpoint_url=customer_provided_endpoint)
        with ClientHTTPStubber(client) as stubber:
            stubber.add_response()
            client.put_object(Bucket=bucket, Key=key, Body=b'bar')
            assert_equal(stubber.requests[0].url, expected_url)
def test_set_max_attempts_on_session(self):
    """A session-level retry config is inherited by new clients."""
    default_config = Config(retries={'max_attempts': 1})
    self.session.set_default_client_config(default_config)
    # Max attempts should be inherited from the session.
    client = self.session.create_client('s3', self.region)
    with self.assert_will_retry_n_times(client, 1):
        client.list_buckets()
def test_client_s3_accelerate_from_client_config(self):
    """The accelerate flag in client config surfaces in the computed s3 config."""
    accelerate_config = Config(s3={'use_accelerate_endpoint': True})
    computed = self.args_create.compute_s3_config(
        client_config=accelerate_config)
    self.assertEqual(computed, {'use_accelerate_endpoint': True})
def test_do_not_attempt_retries(self):
    """max_attempts=0 disables retries entirely."""
    no_retry_config = Config(retries={'max_attempts': 0})
    client = self.session.create_client(
        'dynamodb', self.region, config=no_retry_config)
    with self.assert_will_retry_n_times(client, 0):
        client.list_tables()
def __init__(self, recipe):
    """Open a connection to IBM Cloud Object Storage described by *recipe*.

    :type recipe: Recipe
    :param recipe: supplies the COS credentials (``cos_creds_content()``)
        and the ``COS`` section with ``endpoint`` and ``bucket``.
    :raises SystemExit: if the COS resource cannot be created.
    """
    self.log.info("Connection to COS")
    cos_creds = recipe.cos_creds_content()
    api_key = cos_creds['apikey']
    auth_endpoint = 'https://iam.bluemix.net/oidc/token'
    service_instance_id = cos_creds['resource_instance_id']
    service_endpoint = recipe["COS"]["endpoint"]
    self.bucket = recipe["COS"]["bucket"]
    self.log.info("service endpoint '%s'", service_endpoint)
    self.log.info("service bucket '%s'", self.bucket)
    try:
        # OAuth signing is required for IAM-authenticated COS access.
        self.resource = ibm_boto3.resource(
            's3',
            ibm_api_key_id=api_key,
            ibm_service_instance_id=service_instance_id,
            ibm_auth_endpoint=auth_endpoint,
            config=Config(signature_version='oauth'),
            endpoint_url=service_endpoint)
    except ClientError as e:
        # `critical` is the non-deprecated spelling of `fatal`.
        self.log.critical('Exception: %s', e)
        raise SystemExit(-1)
def _verify_expected_endpoint_url(region, bucket, key, s3_config,
                                  is_secure=True,
                                  customer_provided_endpoint=None,
                                  expected_url=None):
    """Send a mocked PutObject and assert the request went to expected_url."""
    # Canned empty 200 reply so the client call completes without a network.
    canned_response = mock.Mock()
    canned_response.status_code = 200
    canned_response.headers = {}
    canned_response.content = b''
    # Scrub the environment so no real credentials or config files leak in.
    fake_environ = {}
    with mock.patch('os.environ', fake_environ):
        fake_environ['AWS_ACCESS_KEY_ID'] = 'access_key'
        fake_environ['AWS_SECRET_ACCESS_KEY'] = 'secret_key'
        fake_environ['AWS_CONFIG_FILE'] = 'no-exist-foo'
        session = create_session()
        session.config_filename = 'no-exist-foo'
        config = Config(s3=s3_config) if s3_config is not None else None
        client = session.create_client(
            's3', region_name=region, use_ssl=is_secure, config=config,
            endpoint_url=customer_provided_endpoint)
        with mock.patch('ibm_botocore.endpoint.Session.send') as mock_send:
            mock_send.return_value = canned_response
            client.put_object(Bucket=bucket, Key=key, Body=b'bar')
            request_sent = mock_send.call_args[0][0]
            assert_equal(request_sent.url, expected_url)
def test_can_override_max_attempts(self):
    """An explicit per-client retry config is honored."""
    retry_config = Config(retries={'max_attempts': 1})
    client = self.session.create_client(
        'dynamodb', self.region, config=retry_config)
    with self.assert_will_retry_n_times(client, 1):
        client.list_tables()
def test_connect_timeout_exception(self):
    """A server that never accepts must trigger ConnectTimeoutError."""
    config = Config(
        connect_timeout=0.2,
        retries={'max_attempts': 0},
        region_name='us-weast-2',
    )
    client = self.session.create_client('ec2',
                                        endpoint_url=self.localhost,
                                        config=config)
    bound = threading.Event()
    call_done = threading.Event()

    def no_accept_server():
        # Bind the port but never accept(), so the client's connect
        # attempt hangs until its connect_timeout fires.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind(('', self.port))
        bound.set()
        call_done.wait(timeout=60)
        sock.close()

    with background(no_accept_server):
        bound.wait(timeout=60)
        with self.assertRaises(ConnectTimeoutError):
            client.describe_regions()
        call_done.set()
def _get_client(self):
    """Return an oauth-signed s3 client pointed at a fixed local endpoint."""
    oauth_config = Config(signature_version='oauth')
    return client(
        's3',
        ibm_api_key_id='MYAPIKEYID',
        ibm_service_instance_id='MYAPIKEYID',
        ibm_auth_endpoint='IBMAUTHENDPOINT',
        endpoint_url='https://192.168.0.1:443',
        config=oauth_config)
def _read_timeout_server(self):
    """Drive a request against a fake EC2 server that stalls its body.

    The fake server answers the request but blocks while producing the
    response body until the client call has ended, so the client's
    read_timeout (0.1s) fires.  Retries are disabled so the timeout
    surfaces directly to the caller.
    """
    config = Config(
        read_timeout=0.1,
        retries={'max_attempts': 0},
        region_name='us-weast-2',
    )
    client = self.session.create_client('ec2',
                                        endpoint_url=self.localhost,
                                        config=config)
    client_call_ended_event = threading.Event()

    class FakeEC2(SimpleHandler):
        event = threading.Event()
        msg = b'<response/>'

        def get_length(self):
            return len(self.msg)

        def get_body(self):
            # Stall until the client call has finished (or timed out) so
            # the read on the client side exceeds read_timeout.
            client_call_ended_event.wait(timeout=60)
            return self.msg

    try:
        with background(run_server, args=(FakeEC2, self.port)):
            try:
                FakeEC2.event.wait(timeout=60)
                client.describe_regions()
            finally:
                # Unblock the server thread no matter how the call ended.
                client_call_ended_event.set()
    except BackgroundTaskFailed:
        self.fail('Fake EC2 service was not called.')
def setUp(self):
    """Create a mis-regioned SigV4 path-style client and canned stub replies.

    Three response dicts are prepared for the HTTP stubber: a 301
    PermanentRedirect, a 400 AuthorizationHeaderMalformed (wrong signing
    region), and a 200 ListBucket success — all claiming the bucket lives
    in eu-central-1 while the client is configured for us-west-2.
    """
    super(TestRegionRedirect, self).setUp()
    self.client = self.session.create_client(
        's3', 'us-west-2', config=Config(
            signature_version='s3v4',
            s3={'addressing_style': 'path'},
        ))
    self.http_stubber = ClientHTTPStubber(self.client)
    # 301 redirect carrying the bucket's true region in the header.
    self.redirect_response = {
        'status': 301,
        'headers': {
            'x-amz-bucket-region': 'eu-central-1'
        },
        'body': (b'<?xml version="1.0" encoding="UTF-8"?>\n'
                 b'<Error>'
                 b'  <Code>PermanentRedirect</Code>'
                 b'  <Message>The bucket you are attempting to access must be'
                 b'  addressed using the specified endpoint. Please send '
                 b'  all future requests to this endpoint.'
                 b'  </Message>'
                 b'  <Bucket>foo</Bucket>'
                 b'  <Endpoint>foo.s3.eu-central-1.amazonaws.com</Endpoint>'
                 b'</Error>')
    }
    # 400 reply for a request signed with the wrong region.
    self.bad_signing_region_response = {
        'status': 400,
        'headers': {
            'x-amz-bucket-region': 'eu-central-1'
        },
        'body': (b'<?xml version="1.0" encoding="UTF-8"?>'
                 b'<Error>'
                 b'  <Code>AuthorizationHeaderMalformed</Code>'
                 b'  <Message>the region us-west-2 is wrong; '
                 b'expecting eu-central-1</Message>'
                 b'  <Region>eu-central-1</Region>'
                 b'  <RequestId>BD9AA1730D454E39</RequestId>'
                 b'  <HostId></HostId>'
                 b'</Error>')
    }
    # 200 success reply for a ListBucket call after the redirect resolves.
    self.success_response = {
        'status': 200,
        'headers': {},
        'body': (b'<?xml version="1.0" encoding="UTF-8"?>\n'
                 b'<ListBucketResult>'
                 b'  <Name>foo</Name>'
                 b'  <Prefix></Prefix>'
                 b'  <Marker></Marker>'
                 b'  <MaxKeys>1000</MaxKeys>'
                 b'  <EncodingType>url</EncodingType>'
                 b'  <IsTruncated>false</IsTruncated>'
                 b'</ListBucketResult>')
    }
def test_amz_sdk_request_header(self):
    """The amz-sdk-request header is correct across every retry mode."""
    test_cases = self._retry_headers_test_cases()
    for retry_mode in RETRY_MODES:
        config = Config(
            read_timeout=10,
            retries={'mode': retry_mode, 'total_max_attempts': 3})
        for case in test_cases:
            self._test_amz_sdk_request_header_with_test_case(
                *case, client_config=config)
def setUp(self):
    """Create a v4-signed s3 client with static test credentials."""
    self.session = ibm_botocore.session.get_session()
    v4_config = Config(signature_version='v4')
    self.client = self.session.create_client(
        's3',
        region_name='us-east-1',
        aws_access_key_id='akid',
        aws_secret_access_key='skid',
        config=v4_config)
def test_client_s3_accelerate_client_config_overrides_config_store(self):
    """Client config wins over the config store for the accelerate flag."""
    self.config_store.set_config_variable(
        's3', {'use_accelerate_endpoint': False})
    client_config = Config(s3={'use_accelerate_endpoint': True})
    computed = self.args_create.compute_s3_config(
        client_config=client_config)
    # client_config beats scoped_config
    self.assertEqual(computed, {'use_accelerate_endpoint': True})
def test_invalid_host_gaierror(self):
    """A DNS resolution failure surfaces as EndpointConnectionError."""
    config = Config(retries={'max_attempts': 0}, region_name='us-weast-1')
    bogus_endpoint = 'https://ec2.us-weast-1.amazonaws.com/'
    client = self.session.create_client(
        'ec2', endpoint_url=bogus_endpoint, config=config)
    with self.assertRaises(EndpointConnectionError):
        client.describe_regions()
def test_client_s3_accelerate_client_config_overrides_scoped(self):
    """Client config wins over the scoped config for the accelerate flag."""
    scoped = {'s3': {'use_accelerate_endpoint': False}}
    client_config = Config(s3={'use_accelerate_endpoint': True})
    computed = self.args_create.compute_s3_config(
        scoped_config=scoped, client_config=client_config)
    # client_config beats scoped_config
    self.assertEqual(computed, {'use_accelerate_endpoint': True})
def test_default_configurations_resolve_correctly():
    """defaults_mode='standard' applies the expected resolved defaults."""
    session = get_session()
    standard_config = Config(defaults_mode='standard')
    client = session.create_client(
        'sts', config=standard_config, region_name='us-west-2')
    meta = client.meta
    assert meta.config.s3['us_east_1_regional_endpoint'] == 'regional'
    assert meta.config.connect_timeout == 3.1
    assert meta.endpoint_url == 'https://sts.us-west-2.amazonaws.com'
    assert meta.config.retries['mode'] == 'standard'
def test_generate_unauthed_url(self):
    """Presigning with UNSIGNED yields a plain, query-free URL."""
    unsigned_config = Config(signature_version=ibm_botocore.UNSIGNED)
    client = self.session.create_client(
        's3', self.region, config=unsigned_config)
    url = client.generate_presigned_url(
        ClientMethod='get_object',
        Params={'Bucket': 'foo', 'Key': 'bar'})
    self.assertEqual(url, 'https://foo.s3.amazonaws.com/bar')
def get_client_args(self, service_model, region_name, is_secure,
                    endpoint_url, verify, credentials, scoped_config,
                    client_config, endpoint_bridge):
    """Assemble the keyword arguments used to construct a service client.

    Resolves the final endpoint/signing configuration, builds the request
    signer, endpoint, serializer and response parser, and returns them as
    a dict of constructor arguments.

    :param service_model: the service's model object.
    :param region_name: requested region for the client.
    :param is_secure: whether to use HTTPS.
    :param endpoint_url: optional caller-supplied endpoint override.
    :param verify: TLS certificate verification setting.
    :param credentials: credentials used to sign requests.
    :param scoped_config: the profile-scoped configuration mapping.
    :param client_config: optional per-client ``Config`` object.
    :param endpoint_bridge: resolves region/endpoint/signature details.
    :returns: dict of arguments for the client constructor.
    """
    # Merge client_config, scoped_config and endpoint resolution into one
    # normalized dict of final values.
    final_args = self.compute_client_args(
        service_model, client_config, endpoint_bridge, region_name,
        endpoint_url, is_secure, scoped_config)
    service_name = final_args['service_name']  # noqa
    parameter_validation = final_args['parameter_validation']
    endpoint_config = final_args['endpoint_config']
    protocol = final_args['protocol']
    config_kwargs = final_args['config_kwargs']
    s3_config = final_args['s3_config']
    partition = endpoint_config['metadata'].get('partition', None)
    socket_options = final_args['socket_options']
    signing_region = endpoint_config['signing_region']
    endpoint_region_name = endpoint_config['region_name']
    # Copy the emitter so per-client handler registrations don't leak
    # back into the shared session emitter.
    event_emitter = copy.copy(self._event_emitter)
    signer = RequestSigner(
        service_model.service_id, signing_region,
        endpoint_config['signing_name'],
        endpoint_config['signature_version'],
        credentials, event_emitter
    )
    config_kwargs['s3'] = s3_config
    new_config = Config(**config_kwargs)
    endpoint_creator = EndpointCreator(event_emitter)
    endpoint = endpoint_creator.create_endpoint(
        service_model, region_name=endpoint_region_name,
        endpoint_url=endpoint_config['endpoint_url'], verify=verify,
        response_parser_factory=self._response_parser_factory,
        max_pool_connections=new_config.max_pool_connections,
        proxies=new_config.proxies,
        timeout=(new_config.connect_timeout, new_config.read_timeout),
        socket_options=socket_options,
        client_cert=new_config.client_cert,
        proxies_config=new_config.proxies_config)
    serializer = ibm_botocore.serialize.create_serializer(
        protocol, parameter_validation)
    response_parser = ibm_botocore.parsers.create_parser(protocol)
    return {
        'serializer': serializer,
        'endpoint': endpoint,
        'response_parser': response_parser,
        'event_emitter': event_emitter,
        'request_signer': signer,
        'service_model': service_model,
        'loader': self._loader,
        'client_config': new_config,
        'partition': partition,
        'exceptions_factory': self._exceptions_factory
    }
def setUp(self):
    """Create a SigV4 s3 client whose HTTP layer returns an empty 200."""
    super(TestS3SigV4, self).setUp()
    self.client = self.session.create_client(
        's3', self.region, config=Config(signature_version='s3v4'))
    canned_response = mock.Mock()
    canned_response.content = b''
    canned_response.headers = {}
    canned_response.status_code = 200
    self.response_mock = canned_response
    self.http_session_send_mock.return_value = canned_response
def create_client(self, signature_version='s3'):
    """Build an s3 client using this test's addressing style.

    :param signature_version: signature version to place in the s3
        config block (defaults to the legacy 's3' signer).
    """
    s3_options = {
        'addressing_style': self.addressing_style,
        'signature_version': signature_version
    }
    return self.session.create_client(
        's3', region_name=self.region, config=Config(s3=s3_options))
def test_generate_unauthed_post(self):
    """A presigned POST with UNSIGNED contains only the key field."""
    unsigned_config = Config(signature_version=ibm_botocore.UNSIGNED)
    client = self.session.create_client(
        's3', self.region, config=unsigned_config)
    parts = client.generate_presigned_post(Bucket='foo', Key='bar')
    self.assertEqual(parts, {
        'fields': {'key': 'bar'},
        'url': 'https://foo.s3.amazonaws.com/'
    })
def test_sso_source_profile(self):
    """Resolve credentials for a profile whose source profile uses SSO.

    Profile A assumes a role via STS using profile B as its source, and
    profile B authenticates through SSO.  The SSO token cache is
    pre-seeded and both the SSO and STS calls are stubbed, then the test
    asserts the final credentials and that the STS client was created
    with the SSO role credentials.
    """
    # Pre-seed the SSO token cache so no browser-based login is needed.
    token_cache_key = 'f395038c92f1828cbb3991d2d6152d326b895606'
    cached_token = {
        'accessToken': 'a.token',
        'expiresAt': self.some_future_time(),
    }
    temp_cache = JSONFileCache(self.tempdir)
    temp_cache[token_cache_key] = cached_token
    config = ('[profile A]\n'
              'role_arn = arn:aws:iam::123456789:role/RoleA\n'
              'source_profile = B\n'
              '[profile B]\n'
              'sso_region = us-east-1\n'
              'sso_start_url = https://test.url/start\n'
              'sso_role_name = SSORole\n'
              'sso_account_id = 1234567890\n')
    self.write_config(config)
    session, sts_stubber = self.create_session(profile='A')
    client_config = Config(
        region_name='us-east-1',
        signature_version=UNSIGNED,
    )
    sso_stubber = session.stub('sso', config=client_config)
    sso_stubber.activate()
    # The expiration needs to be in milliseconds
    expiration = datetime2timestamp(self.some_future_time()) * 1000
    sso_role_creds = self.create_random_credentials()
    sso_role_response = {
        'roleCredentials': {
            'accessKeyId': sso_role_creds.access_key,
            'secretAccessKey': sso_role_creds.secret_key,
            'sessionToken': sso_role_creds.token,
            'expiration': int(expiration),
        }
    }
    sso_stubber.add_response('get_role_credentials', sso_role_response)
    expected_creds = self.create_random_credentials()
    assume_role_response = self.create_assume_role_response(expected_creds)
    sts_stubber.add_response('assume_role', assume_role_response)
    actual_creds = session.get_credentials()
    self.assert_creds_equal(actual_creds, expected_creds)
    sts_stubber.assert_no_pending_responses()
    # Assert that the client was created with the credentials from the
    # SSO get role credentials response
    self.assertEqual(self.mock_client_creator.call_count, 1)
    _, kwargs = self.mock_client_creator.call_args_list[0]
    expected_kwargs = {
        'aws_access_key_id': sso_role_creds.access_key,
        'aws_secret_access_key': sso_role_creds.secret_key,
        'aws_session_token': sso_role_creds.token,
    }
    self.assertEqual(kwargs, expected_kwargs)
def test_content_sha256_not_set_if_config_value_is_false(self):
    """Disabling payload signing sends UNSIGNED-PAYLOAD as the sha256."""
    config = Config(
        signature_version='s3v4',
        s3={'payload_signing_enabled': False})
    self.client = self.session.create_client(
        's3', self.region, config=config)
    self.client.put_object(Bucket='foo', Key='bar', Body='baz')
    headers = self.get_sent_headers()
    self.assertEqual(
        headers.get('x-amz-content-sha256'), b'UNSIGNED-PAYLOAD')