def test_can_specify_multiple_versions_from_config(self, client_creator):
    """Each service listed under foo_api_versions gets its own pinned version."""
    first_api_version = '2012-01-01'
    second_api_version = '2013-01-01'
    with temporary_file('w') as config_file:
        del self.environ['FOO_PROFILE']
        self.environ['FOO_CONFIG_FILE'] = config_file.name
        self.session = create_session(session_vars=self.env_vars)
        config_file.write('[default]\n')
        config_file.write(
            'foo_api_versions =\n'
            ' myservice = %s\n'
            ' myservice2 = %s\n' % (first_api_version, second_api_version))
        config_file.flush()
        # First service picks up its own pinned version.
        self.session.create_client('myservice', 'us-west-2')
        call_kwargs = (
            client_creator.return_value.create_client.call_args[1])
        self.assertEqual(call_kwargs['api_version'], first_api_version)
        # Second service is pinned independently of the first.
        self.session.create_client('myservice2', 'us-west-2')
        call_kwargs = (
            client_creator.return_value.create_client.call_args[1])
        self.assertEqual(call_kwargs['api_version'], second_api_version)
def test_env_var_overrides_config_file(self):
    """AWS_STS_REGIONAL_ENDPOINTS env var takes precedence over the config file."""
    self.environ['AWS_STS_REGIONAL_ENDPOINTS'] = 'legacy'
    with temporary_file('w') as config_file:
        # Config says regional, but the env var pins legacy, so the
        # request must go to the global endpoint.
        self.set_sts_regional_for_config_file(config_file, 'regional')
        client = self.create_sts_client('us-west-2')
        self.assert_request_sent(
            client, expected_url='https://sts.amazonaws.com/')
def test_credential_process_returns_error(self):
    """A failing credential_process surfaces decoded (not bytes-repr) stderr."""
    profile_config = (
        '[profile processcreds]\n'
        'credential_process = %s --raise-error\n') % self.credential_process
    with temporary_file('w') as config_file:
        config_file.write(profile_config)
        config_file.flush()
        self.environ['AWS_CONFIG_FILE'] = config_file.name
        session = Session(profile='processcreds')
        # The regex asserts the error message contains no substring b',
        # i.e. stderr was decoded to text instead of being embedded as a
        # bytes repr.  `(?!b').` is a negative lookahead that matches any
        # character not starting the pattern b'; repeated and anchored
        # with ^...$ it must consume the whole string, and (?s) makes `.`
        # match newlines so multi-line messages are handled.
        no_bytes_repr = r"(?s)^((?!b').)*$"
        with self.assertRaisesRegex(CredentialRetrievalError,
                                    no_bytes_repr):
            session.get_credentials()
def test_put_pipeline_definition_with_json(self):
    """--pipeline-definition accepts a file:// reference to a JSON document."""
    with temporary_file('r+') as json_file:
        json_file.write(TEST_JSON)
        json_file.flush()
        cmdline = (
            self.prefix
            + ' --pipeline-id name'
            + ' --pipeline-definition file://%s' % json_file.name)
        expected = {
            'pipelineId': 'name',
            'pipelineObjects': [{
                "id": "S3ToS3Copy",
                "name": "S3ToS3Copy",
                "fields": [
                    {"key": "input", "refValue": "InputData"},
                    {"key": "output", "refValue": "OutputData"},
                    {"key": "schedule", "refValue": "CopyPeriod"},
                    {"key": "type", "stringValue": "CopyActivity"},
                ],
            }],
        }
        self.assert_params_for_cmd(cmdline, expected)
def test_credential_process_returns_error(self):
    """A failing credential_process surfaces decoded (not bytes-repr) stderr.

    Uses ``assertRaisesRegex``: the ``assertRaisesRegexp`` alias was
    deprecated since Python 3.2 and removed in Python 3.12.
    """
    config = (
        '[profile processcreds]\n'
        'credential_process = %s --raise-error\n'
    )
    config = config % self.credential_process
    with temporary_file('w') as f:
        f.write(config)
        f.flush()
        self.environ['AWS_CONFIG_FILE'] = f.name
        session = Session(profile='processcreds')
        # This regex validates that there is no substring: b'
        # The reason why we want to validate that is that we want to
        # make sure that stderr is actually decoded so that in
        # exceptional cases the error is properly formatted.
        # As for how the regex works:
        # `(?!b').` is a negative lookahead, meaning that it will only
        # match if it is not followed by the pattern `b'`. Since it is
        # followed by a `.` it will match any character not followed by
        # that pattern. `((?!hede).)*` does that zero or more times. The
        # final pattern adds `^` and `$` to anchor the beginning and end
        # of the string so we can know the whole string is consumed.
        # Finally `(?s)` at the beginning makes dots match newlines so
        # we can handle a multi-line string.
        reg = r"(?s)^((?!b').)*$"
        with self.assertRaisesRegex(CredentialRetrievalError, reg):
            session.get_credentials()
def test_with_csm_disabled_from_config(self):
    """csm_enabled=false in the config file turns off client monitoring."""
    with temporary_file('w') as config_file:
        del self.environ['FOO_PROFILE']
        self.environ['FOO_CONFIG_FILE'] = config_file.name
        config_file.write('[default]\n'
                          'csm_enabled=false\n')
        config_file.flush()
        self.assert_created_client_is_not_monitored(self.session)
def test_uri_param(self):
    """uri_param resolves a file:// value to the file's contents."""
    param = self.get_param_object("ec2.DescribeInstances.Filters")
    with temporary_file("r+") as tmp:
        filters_json = json.dumps(
            [{"Name": "instance-id", "Values": ["i-1234"]}])
        tmp.write(filters_json)
        tmp.flush()
        resolved = uri_param(param, "file://%s" % tmp.name)
        self.assertEqual(resolved, filters_json)
def test_bucket_in_other_region_using_http(self):
    """PutObject over plain HTTP to an endpoint outside the bucket region works."""
    client = self.session.create_client(
        "s3", "us-east-1", endpoint_url="http://s3.amazonaws.com/")
    with temporary_file("w") as tmp:
        # ~9 MB payload, large enough to exercise the request body path.
        tmp.write("foobarbaz" * 1024 * 1024)
        tmp.flush()
        with open(tmp.name, "rb") as body:
            response = client.put_object(
                Bucket=self.bucket_name, Key="foo.txt", Body=body)
        self.assert_status_code(response, 200)
def test_configure_sts_regional_from_config_file(self):
    """sts_regional_endpoints=regional in the config routes STS regionally."""
    with temporary_file('w') as config_file:
        self.set_sts_regional_for_config_file(config_file, 'regional')
        client = self.create_sts_client('us-west-2')
        self.assert_request_sent(
            client,
            expected_url='https://sts.us-west-2.amazonaws.com/',
        )
def test_uri_param(self):
    """uri_param resolves a file:// value to the file's contents."""
    param = self.get_param_object('ec2.DescribeInstances.Filters')
    with temporary_file('r+') as tmp:
        filters_json = json.dumps(
            [{"Name": "instance-id", "Values": ["i-1234"]}])
        tmp.write(filters_json)
        tmp.flush()
        self.assertEqual(
            uri_param(param, 'file://%s' % tmp.name), filters_json)
def test_bucket_in_other_region_using_http(self):
    """PutObject over plain HTTP to an endpoint outside the bucket region works."""
    http_endpoint = self.service.get_endpoint(
        endpoint_url="http://s3.amazonaws.com/")
    with temporary_file("w") as f:
        f.write("foobarbaz" * 1024 * 1024)
        f.flush()
        op = self.service.get_operation("PutObject")
        # Open the body in a context manager so the handle is closed
        # even if the request fails (the original leaked it).
        with open(f.name, "rb") as body:
            response = op.call(http_endpoint, bucket=self.bucket_name,
                               key="foo.txt", body=body)
        self.assertEqual(response[0].status_code, 200)
        self.keys.append("foo.txt")
def test_config_loader_delegation(self):
    """Scoped config lookup must not fail for a creds-file-only profile."""
    with temporary_file('w') as creds_file:
        creds_file.write('[credfile-profile]\naws_access_key_id=a\n')
        creds_file.write('aws_secret_access_key=b\n')
        creds_file.flush()
        self.session.set_config_variable('credentials_file',
                                         creds_file.name)
        self.session.profile = 'credfile-profile'
        # The profile lives only in the credentials file, so the scoped
        # config is empty -- but retrieving it should not raise.
        self.assertEqual(self.session.get_scoped_config(), {})
def test_generate_command(self):
    """`generate 50 <file>` exits cleanly and writes more than 50 chars."""
    runner = CliRunner()
    with temporary_file() as output_filename:
        with open(output_filename) as output:
            result = runner.invoke(
                clkhash.cli.cli, ['generate', '50', output.name])
        self.assertEqual(result.exit_code, 0, msg=result.output)
        with open(output_filename, 'rt') as output:
            generated = output.read()
        assert len(generated) > 50
def test_endpoint_discovery_with_config_file_disabled(self):
    """With discovery disabled in the config, a required operation raises."""
    with temporary_file('w') as config_file:
        self.set_endpoint_discovery_config_file(config_file, "false")
        endpoint = 'https://discovered.domain'
        client, http_stubber = self.create_client(
            service_name="test-discovery-endpoint")
        # Queue a discovery response; it must never be consumed because
        # discovery is disabled and the call should fail up front.
        self.add_describe_endpoints_response(http_stubber, endpoint)
        with self.assertRaises(EndpointDiscoveryRequired):
            client.test_discovery_required(Foo="bar")
def test_credential_process(self):
    """Credentials can be sourced from an external credential_process."""
    profile_config = ('[profile processcreds]\n'
                      'credential_process = %s\n') % self.credential_process
    with temporary_file('w') as config_file:
        config_file.write(profile_config)
        config_file.flush()
        self.environ['AWS_CONFIG_FILE'] = config_file.name
        creds = Session(profile='processcreds').get_credentials()
        self.assertEqual(creds.access_key, 'spam')
        self.assertEqual(creds.secret_key, 'eggs')
def test_uri_param(self):
    """uri_param resolves a file:// value to the file's contents."""
    param = self.get_param_model('iam.getUser.userId')
    with temporary_file('r+') as tmp:
        user_json = json.dumps(
            [{"Name": "user-id", "Values": ["u-1234"]}])
        tmp.write(user_json)
        tmp.flush()
        self.assertEqual(
            uri_param(param, 'file://%s' % tmp.name), user_json)
def test_path_not_in_available_profiles(self):
    """Profiles defined only in the credentials file are still listed."""
    with temporary_file('w') as creds_file:
        self.session.set_config_variable('credentials_file',
                                         creds_file.name)
        creds_file.write('[newprofile]\n'
                         'aws_access_key_id=FROM_CREDS_FILE_1\n'
                         'aws_secret_access_key=FROM_CREDS_FILE_2\n')
        creds_file.flush()
        self.assertEqual(set(self.session.available_profiles),
                         {'foo', 'default', 'newprofile'})
def test_bucket_in_other_region_using_http(self):
    """PutObject over plain HTTP to an endpoint outside the bucket region works."""
    client = self.session.create_client(
        's3', 'us-east-1', endpoint_url='http://s3.amazonaws.com/')
    with temporary_file('w') as tmp:
        # ~9 MB payload to exercise the request body path.
        tmp.write('foobarbaz' * 1024 * 1024)
        tmp.flush()
        with open(tmp.name, 'rb') as body:
            response = client.put_object(
                Bucket=self.bucket_name, Key='foo.txt', Body=body)
        self.assert_status_code(response, 200)
def test_bucket_in_other_region_using_http(self):
    """PutObject succeeds over plain HTTP against the us-east-1 endpoint."""
    client = self.session.create_client(
        's3', 'us-east-1', endpoint_url='http://s3.amazonaws.com/')
    with temporary_file('w') as payload:
        payload.write('foobarbaz' * 1024 * 1024)
        payload.flush()
        with open(payload.name, 'rb') as body_file:
            response = client.put_object(Bucket=self.bucket_name,
                                         Key='foo.txt',
                                         Body=body_file)
        self.assert_status_code(response, 200)
def test_can_override_session(self):
    """A custom data path shadows the built-in _retry.json."""
    with temporary_file('w') as f:
        # Drop a replacement _retry.json (normally shipped in
        # botocore/data) next to the temp file and point a loader at
        # that directory.
        override_name = self.create_file(
            f, contents='{"foo": "bar"}', name='_retry.json')
        loader = loaders.create_loader(
            search_path_string=os.path.dirname(override_name))
        # The loader must serve our override, not the packaged copy.
        self.assertEqual(loader.load_data('_retry'), {"foo": "bar"})
def test_bucket_in_other_region(self):
    """PutObject with a file body right after bucket creation must succeed.

    This verifies expect 100-continue behavior.  We previously had a bug
    where we did not support this behavior, and creating a bucket then
    immediately calling PutObject with a file-like object raised errors.
    """
    with temporary_file("w") as f:
        f.write("foobarbaz" * 1024 * 1024)
        f.flush()
        op = self.service.get_operation("PutObject")
        # Close the body handle deterministically; the original leaked
        # the open file object.
        with open(f.name, "rb") as body:
            response = op.call(self.endpoint, bucket=self.bucket_name,
                               key="foo.txt", body=body)
        self.assertEqual(response[0].status_code, 200)
        self.keys.append("foo.txt")
def test_create_client_with_ca_bundle_from_config(self, client_creator):
    """foo_ca_bundle from the config file becomes the client's verify arg."""
    with temporary_file('w') as config_file:
        del self.environ['FOO_PROFILE']
        self.environ['FOO_CONFIG_FILE'] = config_file.name
        config_file.write('[default]\n'
                          'foo_ca_bundle=config-certs.pem\n')
        config_file.flush()
        self.session.create_client('ec2', 'us-west-2')
        call_kwargs = (
            client_creator.return_value.create_client.call_args[1])
        self.assertEqual(call_kwargs['verify'], 'config-certs.pem')
def test_config_loader_delegation(self):
    """Scoped config merges values from the shared credentials file."""
    session = create_session(profile='credfile-profile')
    with temporary_file('w') as creds_file:
        creds_file.write('[credfile-profile]\naws_access_key_id=a\n')
        creds_file.write('aws_secret_access_key=b\n')
        creds_file.flush()
        session.set_config_variable('credentials_file', creds_file.name)
        # The scoped config should now surface the credentials-file
        # values for the active profile.
        self.assertEqual(session.get_scoped_config(),
                         {'aws_access_key_id': 'a',
                          'aws_secret_access_key': 'b'})
def test_basic_hashing(self):
    """`hash` CLI exits cleanly and writes a JSON document with 'clks'."""
    runner = CliRunner()
    with temporary_file() as output_filename:
        with open(output_filename, 'wt') as output:
            result = runner.invoke(clkhash.cli.cli, [
                'hash', self.pii_file.name, 'secret', 'key',
                RANDOMNAMES_SCHEMA_PATH, output.name
            ])
        self.assertEqual(result.exit_code, 0, msg=result.output)
        with open(output_filename, 'rt') as output:
            self.assertIn('clks', json.load(output))
def test_full_config_merges_creds_file_data(self):
    """full_config includes profiles defined only in the creds file."""
    with temporary_file('w') as creds_file:
        self.session.set_config_variable('credentials_file',
                                         creds_file.name)
        creds_file.write('[newprofile]\n'
                         'aws_access_key_id=FROM_CREDS_FILE_1\n'
                         'aws_secret_access_key=FROM_CREDS_FILE_2\n')
        creds_file.flush()
        self.assertEqual(
            self.session.full_config['profiles']['newprofile'],
            {'aws_access_key_id': 'FROM_CREDS_FILE_1',
             'aws_secret_access_key': 'FROM_CREDS_FILE_2'})
def test_endpoint_discovery_with_config_file_auto(self):
    """endpoint_discovery_enabled=AUTO discovers and uses the endpoint."""
    with temporary_file('w') as config_file:
        self.set_endpoint_discovery_config_file(config_file, "AUTO")
        endpoint = 'https://discovered.domain'
        client, http_stubber = self.create_client(
            service_name="test-discovery-endpoint")
        with http_stubber as stubber:
            self.add_describe_endpoints_response(stubber, endpoint)
            client.test_discovery_required(Foo="bar")
            self.assert_endpoint_discovery_used(stubber, endpoint)
def test_can_override_session(self):
    """A custom data path shadows the built-in _retry.json."""
    with temporary_file('w') as f:
        # Drop a replacement _retry.json (normally shipped in
        # ibm_botocore/data) next to the temp file and point a loader
        # at that directory.
        override_name = self.create_file(
            f, contents='{"foo": "bar"}', name='_retry.json')
        data_dir = os.path.dirname(override_name)
        loader = loaders.create_loader(search_path_string=data_dir)
        # The loader must serve our override, not the packaged copy.
        self.assertEqual(loader.load_data('_retry'), {"foo": "bar"})
def test_bucket_in_other_region(self):
    """PutObject with a file body right after bucket creation must succeed.

    Regression test for expect 100-continue support: this sequence used
    to raise errors before that behavior was implemented.
    """
    client = self.session.create_client("s3", "us-east-1")
    with temporary_file("w") as tmp:
        tmp.write("foobarbaz" * 1024 * 1024)
        tmp.flush()
        with open(tmp.name, "rb") as body:
            response = client.put_object(
                Bucket=self.bucket_name, Key="foo.txt", Body=body)
        self.assert_status_code(response, 200)
def test_honors_aws_shared_credentials_file_env_var(self):
    """AWS_SHARED_CREDENTIALS_FILE points credential lookup at our file."""
    with temporary_file('w') as creds_file:
        creds_file.write('[default]\n'
                         'aws_access_key_id=custom1\n'
                         'aws_secret_access_key=custom2\n')
        creds_file.flush()
        os.environ['AWS_SHARED_CREDENTIALS_FILE'] = creds_file.name
        creds = Session().get_credentials()
        self.assertEqual(creds.access_key, 'custom1')
        self.assertEqual(creds.secret_key, 'custom2')
def test_path_not_in_available_profiles(self):
    """available_profiles includes creds-file-only profiles."""
    with temporary_file('w') as creds_file:
        self.session.set_config_variable('credentials_file',
                                         creds_file.name)
        creds_file.write('[newprofile]\n'
                         'aws_access_key_id=FROM_CREDS_FILE_1\n'
                         'aws_secret_access_key=FROM_CREDS_FILE_2\n')
        creds_file.flush()
        profiles = set(self.session.available_profiles)
        self.assertEqual(profiles, {'foo', 'default', 'newprofile'})
def test_bucket_in_other_region_using_http(self):
    """PutObject over plain HTTP to an endpoint outside the bucket region works."""
    http_endpoint = self.service.get_endpoint(
        endpoint_url='http://s3.amazonaws.com/')
    with temporary_file('w') as f:
        f.write('foobarbaz' * 1024 * 1024)
        f.flush()
        op = self.service.get_operation('PutObject')
        # Open the body in a context manager so the handle is closed
        # even if the request fails (the original leaked it).
        with open(f.name, 'rb') as body:
            response = op.call(http_endpoint, bucket=self.bucket_name,
                               key='foo.txt', body=body)
        self.assertEqual(response[0].status_code, 200)
        self.keys.append('foo.txt')
def test_bucket_in_other_region_using_http(self):
    """PutObject over plain HTTP to an endpoint outside the bucket region works."""
    http_endpoint = self.service.get_endpoint(
        endpoint_url='http://s3.amazonaws.com/')
    with temporary_file('w') as tmp:
        tmp.write('foobarbaz' * 1024 * 1024)
        tmp.flush()
        op = self.service.get_operation('PutObject')
        with open(tmp.name, 'rb') as body:
            response = op.call(http_endpoint, bucket=self.bucket_name,
                               key='foo.txt', body=body)
        self.assertEqual(response[0].status_code, 200)
        self.keys.append('foo.txt')
def test_create_client_uses_api_version_from_config(self, client_creator):
    """foo_api_versions in the config pins the client's api_version."""
    pinned_version = '2012-01-01'
    with temporary_file('w') as config_file:
        del self.environ['FOO_PROFILE']
        self.environ['FOO_CONFIG_FILE'] = config_file.name
        config_file.write('[default]\n')
        config_file.write('foo_api_versions =\n'
                          ' myservice = %s\n' % pinned_version)
        config_file.flush()
        self.session.create_client('myservice', 'us-west-2')
        call_kwargs = (
            client_creator.return_value.create_client.call_args[1])
        self.assertEqual(call_kwargs['api_version'], pinned_version)
def test_bucket_in_other_region(self):
    """PutObject with a file body right after bucket creation must succeed.

    Regression test for expect 100-continue support: this sequence used
    to raise errors before that behavior was implemented.
    """
    client = self.session.create_client('s3', 'us-east-1')
    with temporary_file('w') as tmp:
        tmp.write('foobarbaz' * 1024 * 1024)
        tmp.flush()
        with open(tmp.name, 'rb') as body:
            response = client.put_object(
                Bucket=self.bucket_name, Key='foo.txt', Body=body)
        self.assert_status_code(response, 200)
def test_credential_process(self):
    """Credentials can be sourced from an external credential_process."""
    profile_config = (
        '[profile processcreds]\n'
        'credential_process = %s\n') % self.credential_process
    with temporary_file('w') as config_file:
        config_file.write(profile_config)
        config_file.flush()
        self.environ['AWS_CONFIG_FILE'] = config_file.name
        session = Session(profile='processcreds')
        creds = session.get_credentials()
        self.assertEqual(creds.access_key, 'spam')
        self.assertEqual(creds.secret_key, 'eggs')
def test_user_data(self):
    """--user-data file:// content is read and base64-encoded."""
    data = u'\u0039'
    with temporary_file('r+') as tmp:
        with compat_open(tmp.name, 'w') as f:
            f.write(data)
            f.flush()
            args = (
                self.prefix
                + ' --image-id foo --user-data file://%s' % f.name)
            # 'OQ==' is the base64 of the UTF-8 encoding of data ('9').
            expected = {'ImageId': 'foo',
                        'MaxCount': '1',
                        'MinCount': '1',
                        'UserData': 'OQ=='}
            self.assert_params_for_cmd(args, expected)
def test_shutdown_does_not_hang(self):
    """initiate_shutdown() drains pending IO writes instead of deadlocking.

    Floods the bounded write queue from a submitted task, then asserts
    the executor still shuts down and the file received a write.
    """
    executor = Executor(2, queue.Queue(), False, 10,
                        queue.Queue(maxsize=1))
    with temporary_file('rb+') as f:
        executor.start()

        class FloodIOQueueTask(object):
            PRIORITY = 10

            def __call__(self):
                # Push far more IO requests than the queue can hold so
                # shutdown has pending work to drain.
                for i in range(50):
                    executor.write_queue.put(
                        IORequest(f.name, 0, b'foobar'))

        executor.submit(FloodIOQueueTask())
        executor.initiate_shutdown()
        executor.wait_until_shutdown()
        # Close the verification handle deterministically; the original
        # left it dangling via open(...).read().
        with open(f.name, 'rb') as written:
            self.assertEqual(written.read(), b'foobar')
def test_bucket_in_other_region(self):
    """PutObject with a file body right after bucket creation must succeed.

    Regression test for expect 100-continue support: this sequence used
    to raise errors before that behavior was implemented.
    """
    with temporary_file('w') as tmp:
        tmp.write('foobarbaz' * 1024 * 1024)
        tmp.flush()
        op = self.service.get_operation('PutObject')
        with open(tmp.name, 'rb') as body:
            response = op.call(self.endpoint, bucket=self.bucket_name,
                               key='foo.txt', body=body)
        self.assertEqual(response[0].status_code, 200)
        self.keys.append('foo.txt')
def test_param_api_version_overrides_config_value(self, client_creator):
    """An explicit api_version argument beats the config-file pin."""
    pinned_version = '2012-01-01'
    explicit_version = '2014-01-01'
    with temporary_file('w') as config_file:
        del self.environ['FOO_PROFILE']
        self.environ['FOO_CONFIG_FILE'] = config_file.name
        self.session = create_session(session_vars=self.env_vars)
        config_file.write('[default]\n')
        config_file.write('foo_api_versions =\n'
                          ' myservice = %s\n' % pinned_version)
        config_file.flush()
        self.session.create_client(
            'myservice', 'us-west-2', api_version=explicit_version)
        call_kwargs = (
            client_creator.return_value.create_client.call_args[1])
        self.assertEqual(call_kwargs['api_version'], explicit_version)
def _configured_session(case_configuration, listener_port):
    """Yield a botocore session configured for one CSM test case.

    Builds a synthetic environment (credentials, region, data path, CSM
    port), layers on the case's shared config file and env-var
    overrides, then yields a session created under that environment.
    """
    environ = {
        'AWS_ACCESS_KEY_ID': case_configuration['accessKey'],
        'AWS_SECRET_ACCESS_KEY': 'secret-key',
        'AWS_DEFAULT_REGION': case_configuration['region'],
        'AWS_DATA_PATH': DATA_DIR,
        'AWS_CSM_PORT': listener_port,
    }
    if 'sessionToken' in case_configuration:
        environ['AWS_SESSION_TOKEN'] = case_configuration['sessionToken']
    # Case-specific env vars override the defaults assembled above.
    environ.update(case_configuration['environmentVariables'])
    with temporary_file('w') as f:
        _setup_shared_config(
            f, case_configuration['sharedConfigFile'], environ)
        with mock.patch('os.environ', environ):
            session = botocore.session.Session()
            if 'maxRetries' in case_configuration:
                _setup_max_retry_attempts(session, case_configuration)
            yield session
def run_command_capture_output(self, command):
    """Run a clkhash CLI command and capture its ``-o`` output.

    Appends ``-o <tempfile>`` to *command*, invokes the CLI via a
    CliRunner, and returns the output file's contents.

    :param command: e.g ["status"]
    :returns: The output as a string.
    :raises: AssertionError if the command's exit code isn't 0
    """
    runner = CliRunner()
    with temporary_file() as output_filename:
        command.extend(['-o', output_filename])
        result = runner.invoke(clkhash.cli.cli, command)
        assert result.exit_code == 0, result.output
        with open(output_filename, 'rt') as output:
            return output.read()
def test_create_client_verify_param_overrides_all(self, client_creator):
    """verify= passed to create_client beats both the env var and config."""
    with temporary_file('w') as config_file:
        # Lowest precedence: CA bundle from the config file.
        del self.environ['FOO_PROFILE']
        self.environ['FOO_CONFIG_FILE'] = config_file.name
        config_file.write('[default]\n')
        config_file.write('foo_ca_bundle=config-certs.pem\n')
        config_file.flush()
        # Middle precedence: CA bundle from the environment.
        self.environ['FOO_AWS_CA_BUNDLE'] = 'env-certs.pem'
        # Highest precedence: the explicit verify parameter.
        self.session.create_client(
            'ec2', 'us-west-2', verify='verify-certs.pem')
        call_kwargs = (
            client_creator.return_value.create_client.call_args[1])
        self.assertEqual(call_kwargs['verify'], 'verify-certs.pem')
def test_hashing_json_schema(self):
    """Hashing a generated CSV against the JSON schema yields 'clks'."""
    runner = CliRunner()
    pii_data = randomnames.NameList(self.SAMPLES)
    pii_file = create_temp_file()
    randomnames.save_csv(
        pii_data.names,
        [field.identifier for field in pii_data.SCHEMA.fields],
        pii_file)
    pii_file.close()
    with temporary_file() as output_filename:
        with open(output_filename) as output:
            result = runner.invoke(clkhash.cli.cli, [
                'hash', pii_file.name, 'secretkey1', 'secretkey2',
                RANDOMNAMES_SCHEMA_PATH, output.name
            ])
        self.assertEqual(result.exit_code, 0, msg=result.output)
        with open(output_filename) as output:
            self.assertIn('clks', json.load(output))
def test_session_profile_overrides_env_vars(self):
    """A profile set on the session wins over env-var credentials."""
    with temporary_file('w') as creds_file:
        # Seed credentials in two places -- env vars and a fake shared
        # creds file -- then verify that an explicitly-set profile
        # pulls from the shared creds file.
        self.environ['AWS_ACCESS_KEY_ID'] = 'env_var_akid'
        self.environ['AWS_SECRET_ACCESS_KEY'] = 'env_var_sak'
        self.environ['AWS_SHARED_CREDENTIALS_FILE'] = creds_file.name
        creds_file.write('[from_session_instance]\n'
                         'aws_access_key_id=shared_creds_akid\n'
                         'aws_secret_access_key=shared_creds_sak\n')
        creds_file.flush()
        self.session.set_config_variable(
            'profile', 'from_session_instance')
        creds = self.session.get_credentials()
        self.assertEqual(creds.access_key, 'shared_creds_akid')
        self.assertEqual(creds.secret_key, 'shared_creds_sak')
def test_session_profile_overrides_env_vars(self):
    """Setting a profile on the session makes shared-creds-file values
    take precedence over credentials from environment variables."""
    with temporary_file('w') as shared_creds:
        # Seed both sources; once the profile is set, the file wins.
        self.environ['AWS_ACCESS_KEY_ID'] = 'env_var_akid'
        self.environ['AWS_SECRET_ACCESS_KEY'] = 'env_var_sak'
        self.environ['AWS_SHARED_CREDENTIALS_FILE'] = shared_creds.name
        profile_body = (
            '[from_session_instance]\n'
            'aws_access_key_id=shared_creds_akid\n'
            'aws_secret_access_key=shared_creds_sak\n'
        )
        shared_creds.write(profile_body)
        shared_creds.flush()
        self.session.set_config_variable(
            'profile', 'from_session_instance')
        creds = self.session.get_credentials()
        self.assertEqual(creds.access_key, 'shared_creds_akid')
        self.assertEqual(creds.secret_key, 'shared_creds_sak')
def test_profile_does_not_win_if_all_from_env_vars(self):
    """Explicit env-var creds beat a profile configured at the same level."""
    with temporary_file('w') as creds_file:
        self.environ['AWS_SHARED_CREDENTIALS_FILE'] = creds_file.name
        self.environ['AWS_PROFILE'] = 'myprofile'
        # The profile isn't used for credentials here, but any profile
        # configured in any way must actually exist, so write an empty
        # section matching AWS_PROFILE.
        creds_file.write(
            '[myprofile]\n'
        )
        creds_file.flush()
        self.environ['AWS_ACCESS_KEY_ID'] = 'env_var_akid'
        self.environ['AWS_SECRET_ACCESS_KEY'] = 'env_var_sak'
        creds = self.session.get_credentials()
        self.assertEqual(creds.access_key, 'env_var_akid')
        self.assertEqual(creds.secret_key, 'env_var_sak')