def test_profile_arg_wins_over_profile_env_var(self):
    """--profile must override the AWS_PROFILE environment variable."""
    env_vars = os.environ.copy()
    with temporary_file('w') as f:
        # Start from a clean slate: no profile-related env vars.
        for var in ('AWS_PROFILE', 'AWS_DEFAULT_PROFILE'):
            env_vars.pop(var, None)
        env_vars['AWS_SHARED_CREDENTIALS_FILE'] = f.name
        env_vars['AWS_CONFIG_FILE'] = 'does-not-exist-foo'
        credentials = (
            '[from_env_var]\n'
            'aws_access_key_id=enva\n'
            'aws_secret_access_key=envb\n'
            '\n'
            '[from_argument]\n'
            'aws_access_key_id=proa\n'
            'aws_secret_access_key=prob\n'
        )
        f.write(credentials)
        f.flush()
        # Select one profile via the environment...
        env_vars['AWS_PROFILE'] = 'from_env_var'
        # ...then verify the explicit --profile argument wins over it.
        p = aws('configure list --profile from_argument',
                env_vars=env_vars)
        # 1. The profile name shown should be the argument's profile.
        self.assertIn('from_argument', p.stdout)
        # 2. The creds should be proa/prob, which come from the
        #    "from_argument" profile, not enva/envb from "from_env_var".
        self.assertIn('proa', p.stdout)
        self.assertIn('prob', p.stdout)
def test_successfully_sets_utime(self):
    """set_file_utime should set a file's modification time to the given epoch."""
    local_now = datetime.datetime.now(tzlocal())
    epoch_now = time.mktime(local_now.timetuple())
    with temporary_file("w") as f:
        set_file_utime(f.name, epoch_now)
        _, update_time = get_file_stat(f.name)
        # Compare at whole-second resolution via timetuple().
        self.assertEqual(time.mktime(update_time.timetuple()), epoch_now)
def test_put_pipeline_definition_with_json(self):
    """--pipeline-definition should accept a file:// JSON payload."""
    with temporary_file('r+') as f:
        f.write(TEST_JSON)
        f.flush()
        cmdline = (self.prefix +
                   ' --pipeline-id name' +
                   ' --pipeline-definition file://%s' % f.name)
        # The JSON file contents should be parsed into pipelineObjects.
        expected = {
            'pipelineId': 'name',
            'pipelineObjects': [{
                "id": "S3ToS3Copy",
                "name": "S3ToS3Copy",
                "fields": [
                    {"key": "input", "refValue": "InputData"},
                    {"key": "output", "refValue": "OutputData"},
                    {"key": "schedule", "refValue": "CopyPeriod"},
                    {"key": "type", "stringValue": "CopyActivity"},
                ],
            }],
        }
        self.assert_params_for_cmd2(cmdline, expected)
def test_can_support_addressing_mode_config(self):
    """Path-style addressing from the config file should shape the URL."""
    with temporary_file('w') as f:
        self.enable_addressing_mode_in_config(f, 'path')
        stdout = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        # Path style: bucket appears in the path, not the hostname.
        expected = {
            'hostname': 's3.us-east-1.amazonaws.com',
            'path': '/bucket/key',
            'query_params': {
                'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
                'X-Amz-Credential': ('access_key%2F20160818%2Fus-east-1'
                                     '%2Fs3%2Faws4_request'),
                'X-Amz-Date': '20160818T143303Z',
                'X-Amz-Expires': '3600',
                'X-Amz-Signature': (
                    'c6dab3560db76aded03e6268338ddb0a6dec00ebc82d6e'
                    '7abdc305529fcaba74'),
                'X-Amz-SignedHeaders': 'host',
            },
        }
        self.assert_presigned_url_matches(stdout, expected)
def test_profile_arg_has_precedence_over_env_vars(self):
    # Set access_key/secret_key via env vars, but ensure that a
    # --profile <foo> results in creds being retrieved from the
    # shared creds file and not from the environment.
    env_vars = os.environ.copy()
    with temporary_file('w') as f:
        env_vars.pop('AWS_PROFILE', None)
        env_vars.pop('AWS_DEFAULT_PROFILE', None)
        # 'aws configure list' only shows 4 characters of each
        # credential, so 4-char values are enough to tell them apart.
        env_vars['AWS_ACCESS_KEY_ID'] = 'enva'
        env_vars['AWS_SECRET_ACCESS_KEY'] = 'envb'
        env_vars['AWS_SHARED_CREDENTIALS_FILE'] = f.name
        env_vars['AWS_CONFIG_FILE'] = 'does-not-exist-foo'
        f.write('[from_argument]\n'
                'aws_access_key_id=proa\n'
                'aws_secret_access_key=prob\n')
        f.flush()
        p = aws('configure list --profile from_argument',
                env_vars=env_vars)
        # 1. The selected profile name is reported...
        self.assertIn('from_argument', p.stdout)
        # 2. ...with creds proa/prob from the "from_argument" profile,
        #    sourced from the shared credentials file.
        self.assertIn('proa', p.stdout)
        self.assertIn('prob', p.stdout)
        self.assertIn('shared-credentials-file', p.stdout)
def test_profile_arg_wins_over_profile_env_var(self):
    """An explicit --profile beats the AWS_PROFILE environment variable."""
    env_vars = os.environ.copy()
    with temporary_file('w') as f:
        # Clear any pre-existing profile selection from the environment.
        env_vars.pop('AWS_PROFILE', None)
        env_vars.pop('AWS_DEFAULT_PROFILE', None)
        env_vars['AWS_SHARED_CREDENTIALS_FILE'] = f.name
        env_vars['AWS_CONFIG_FILE'] = 'does-not-exist-foo'
        f.write('[from_env_var]\n'
                'aws_access_key_id=enva\n'
                'aws_secret_access_key=envb\n'
                '\n'
                '[from_argument]\n'
                'aws_access_key_id=proa\n'
                'aws_secret_access_key=prob\n')
        f.flush()
        # Point the env var at one profile, the CLI argument at another.
        env_vars['AWS_PROFILE'] = 'from_env_var'
        p = aws('configure list --profile from_argument',
                env_vars=env_vars)
        # The argument-selected profile name should be reported,
        self.assertIn('from_argument', p.stdout)
        # along with that profile's creds (proa/prob), proving the
        # --profile argument won over AWS_PROFILE.
        self.assertIn('proa', p.stdout)
        self.assertIn('prob', p.stdout)
def test_handles_sigv4(self):
    """Presigned URLs use SigV4 query params when enabled in the config."""
    with temporary_file('w') as f:
        self.enable_sigv4_from_config_file(f)
        stdout = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        # Virtual-hosted style: bucket is part of the hostname.
        expected = {
            'hostname': 'bucket.s3.us-east-1.amazonaws.com',
            'path': '/key',
            'query_params': {
                'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
                'X-Amz-Credential': ('access_key%2F20160818%2Fus-east-1'
                                     '%2Fs3%2Faws4_request'),
                'X-Amz-Date': '20160818T143303Z',
                'X-Amz-Expires': '3600',
                'X-Amz-Signature': (
                    '1297528058f2c8b89cfa52c6a47d6c548907'
                    '00a1da24702b06d53e774c0acc95'),
                'X-Amz-SignedHeaders': 'host'
            }
        }
        self.assert_presigned_url_matches(stdout, expected)
def test_policy_from_paramfile(self, create_client_mock):
    """The contents of a file:// policy are forwarded to S3 verbatim."""
    client = Mock()
    # S3 mock calls: minimal user ARN, bucket does not exist (404).
    client.get_user.return_value = {'User': {'Arn': ':::::'}}
    client.head_bucket.side_effect = ClientError(
        {'Error': {'Code': 404, 'Message': ''}}, 'HeadBucket')
    # CloudTrail mock call.
    client.describe_trails.return_value = {}
    create_client_mock.return_value = client
    policy = '{"Statement": []}'
    with temporary_file('w') as f:
        f.write(policy)
        f.flush()
        command = ('cloudtrail create-subscription --s3-new-bucket foo '
                   '--name bar --s3-custom-policy file://{0}'.format(
                       f.name))
        self.run_cmd(command, expected_rc=0)
        # The *contents* of the file, not the URI, must be sent as the
        # Policy parameter to S3.
        client.put_bucket_policy.assert_called_with(Bucket='foo',
                                                    Policy=policy)
def test_handles_sigv4(self):
    """SigV4 from the config file yields X-Amz-* query parameters."""
    with temporary_file('w') as f:
        self.enable_sigv4_from_config_file(f)
        stdout = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        # Path-style endpoint: bucket appears in the path.
        expected = {
            'hostname': 's3.amazonaws.com',
            'path': '/bucket/key',
            'query_params': {
                'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
                'X-Amz-Credential': ('access_key%2F20160818%2Fus-east-1'
                                     '%2Fs3%2Faws4_request'),
                'X-Amz-Date': '20160818T143303Z',
                'X-Amz-Expires': '3600',
                'X-Amz-Signature': (
                    'd20178280d7521b384730c678549f6344401ae040bec559ad0602'
                    '0854c6c718f'),
                'X-Amz-SignedHeaders': 'host'
            }
        }
        self.assert_presigned_url_matches(stdout, expected)
def test_successfully_sets_utime(self):
    """set_file_utime should update the file's stat-reported mtime."""
    current = datetime.datetime.now(tzlocal())
    epoch_now = time.mktime(current.timetuple())
    with temporary_file('w') as tmp:
        set_file_utime(tmp.name, epoch_now)
        _, update_time = get_file_stat(tmp.name)
        # Round-trip through timetuple() for second-level comparison.
        self.assertEqual(time.mktime(update_time.timetuple()), epoch_now)
def test_profile_arg_has_precedence_over_env_vars(self):
    """--profile creds from the shared file beat env-var credentials."""
    env_vars = os.environ.copy()
    with temporary_file('w') as f:
        for var in ('AWS_PROFILE', 'AWS_DEFAULT_PROFILE'):
            env_vars.pop(var, None)
        # 'aws configure list' only shows 4 characters of each
        # credential, so we use 4-char values.
        env_vars.update({
            'AWS_ACCESS_KEY_ID': 'enva',
            'AWS_SECRET_ACCESS_KEY': 'envb',
            'AWS_SHARED_CREDENTIALS_FILE': f.name,
            'AWS_CONFIG_FILE': 'does-not-exist-foo',
        })
        f.write(
            '[from_argument]\n'
            'aws_access_key_id=proa\n'
            'aws_secret_access_key=prob\n'
        )
        f.flush()
        p = aws('configure list --profile from_argument',
                env_vars=env_vars)
        # 1. The profile name should be the one from the argument.
        self.assertIn('from_argument', p.stdout)
        # 2. The creds should be proa/prob, taken from the
        #    "from_argument" profile in the shared credentials file.
        self.assertIn('proa', p.stdout)
        self.assertIn('prob', p.stdout)
        self.assertIn('shared-credentials-file', p.stdout)
def test_handles_sigv4(self):
    """SigV4 presigning produces the expected virtual-hosted URL."""
    with temporary_file('w') as f:
        self.enable_sigv4_from_config_file(f)
        stdout = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        expected = {
            'hostname': 'bucket.s3.amazonaws.com',
            'path': '/key',
            'query_params': {
                'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
                'X-Amz-Credential': ('access_key%2F20160818%2Fus-east-1'
                                     '%2Fs3%2Faws4_request'),
                'X-Amz-Date': '20160818T143303Z',
                'X-Amz-Expires': '3600',
                'X-Amz-Signature': (
                    'd28b6c4a54f31196a6d49335556736a3fc29f036018c8e'
                    '50775887299092d1a0'),
                'X-Amz-SignedHeaders': 'host'
            }
        }
        self.assert_presigned_url_matches(stdout, expected)
def test_uri_param(self):
    """uri_param should resolve a file:// value to the file's contents."""
    param = self.get_param_model('ec2.DescribeInstances.Filters')
    json_argument = json.dumps(
        [{"Name": "instance-id", "Values": ["i-1234"]}])
    with temporary_file('r+') as f:
        f.write(json_argument)
        f.flush()
        result = uri_param('event-name', param, 'file://%s' % f.name)
        self.assertEqual(result, json_argument)
def test_uri_param(self):
    """uri_param resolves a file:// value to that file's contents."""
    param = self.get_param_object('ec2.DescribeInstances.Filters')
    json_argument = json.dumps(
        [{"Name": "instance-id", "Values": ["i-1234"]}])
    with temporary_file('r+') as f:
        f.write(json_argument)
        f.flush()
        result = uri_param(param, 'file://%s' % f.name)
        self.assertEqual(result, json_argument)
def test_get_file_stat_returns_epoch_on_invalid_timestamp(self):
    """If datetime.fromtimestamp raises ValueError, update_time is None."""
    patch_attribute = 'awscli.customizations.s3.utils.datetime'
    with mock.patch(patch_attribute) as datetime_mock, \
            temporary_file('w') as temp_file:
        temp_file.write('foo')
        temp_file.flush()
        # Simulate a timestamp that datetime cannot represent.
        datetime_mock.fromtimestamp.side_effect = ValueError()
        size, update_time = get_file_stat(temp_file.name)
        self.assertIsNone(update_time)
def test_uri_param_no_paramfile_true(self):
    """With no_paramfile set, file:// values must not be expanded."""
    param = self.get_param_model('ec2.DescribeInstances.Filters')
    param.no_paramfile = True
    json_argument = json.dumps(
        [{"Name": "instance-id", "Values": ["i-1234"]}])
    with temporary_file('r+') as f:
        f.write(json_argument)
        f.flush()
        result = uri_param('event-name', param, 'file://%s' % f.name)
        # None signals "leave the original value alone".
        self.assertEqual(result, None)
def assert_handles_fromtimestamp_error(self, error):
    """Assert get_file_stat yields update_time=None when fromtimestamp raises *error*."""
    target = 'awscli.customizations.s3.utils.datetime'
    with mock.patch(target) as datetime_mock, \
            temporary_file('w') as temp_file:
        temp_file.write('foo')
        temp_file.flush()
        datetime_mock.fromtimestamp.side_effect = error
        size, update_time = get_file_stat(temp_file.name)
        self.assertIsNone(update_time)
def test_get_file_stat_returns_epoch_on_invalid_timestamp_os_error(self):
    """If datetime.fromtimestamp raises OSError, update_time is None."""
    patch_attribute = "awscli.customizations.s3.utils.datetime"
    with mock.patch(patch_attribute) as datetime_mock, \
            temporary_file("w") as temp_file:
        temp_file.write("foo")
        temp_file.flush()
        # Simulate an mtime that the OS-level conversion rejects.
        datetime_mock.fromtimestamp.side_effect = OSError()
        size, update_time = get_file_stat(temp_file.name)
        self.assertIsNone(update_time)
def init_clidriver(self):
    """Build a clidriver whose config caps s3 at one concurrent request."""
    config_contents = (
        '[default]\n'
        's3 =\n'
        ' max_concurrent_requests = 1\n'
    )
    with temporary_file('w') as f:
        f.write(config_contents)
        f.flush()
        self.environ['AWS_CONFIG_FILE'] = f.name
        self.driver = create_clidriver()
def test_get_file_stat(self):
    """get_file_stat reports a file's byte size and modification time."""
    current = datetime.datetime.now(tzlocal())
    epoch_now = time.mktime(current.timetuple())
    with temporary_file("w") as f:
        f.write("foo")
        f.flush()
        # Pin atime and mtime so the expected mtime is exact.
        os.utime(f.name, (epoch_now, epoch_now))
        size, update_time = get_file_stat(f.name)
        self.assertEqual(size, 3)
        self.assertEqual(time.mktime(update_time.timetuple()), epoch_now)
def test_get_file_stat(self):
    """get_file_stat returns the size and mtime of the given file."""
    now = datetime.datetime.now(tzlocal())
    epoch_now = time.mktime(now.timetuple())
    with temporary_file('w') as tmp:
        tmp.write('foo')
        tmp.flush()
        # Fix both timestamps so the comparison below is deterministic.
        os.utime(tmp.name, (epoch_now, epoch_now))
        size, update_time = get_file_stat(tmp.name)
        # 'foo' is three bytes.
        self.assertEqual(size, 3)
        self.assertEqual(time.mktime(update_time.timetuple()), epoch_now)
def test_uri_param_no_paramfile_true(self):
    """no_paramfile disables file:// expansion for this parameter."""
    param = self.get_param_object('ec2.DescribeInstances.Filters')
    param.no_paramfile = True
    filters = [{"Name": "instance-id", "Values": ["i-1234"]}]
    json_argument = json.dumps(filters)
    with temporary_file('r+') as f:
        f.write(json_argument)
        f.flush()
        result = uri_param(param, 'file://%s' % f.name)
        # None means the value was intentionally left unexpanded.
        self.assertEqual(result, None)
def test_can_support_addressing_mode_config(self):
    """Path-style addressing config should produce a path-style URL."""
    with temporary_file('w') as f:
        self.enable_addressing_mode_in_config(f, 'path')
        stdout = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        expected = {
            'hostname': 's3.amazonaws.com',
            'path': '/bucket/key',
            'query_params': {
                'AWSAccessKeyId': 'access_key',
                'Expires': str(FROZEN_TIMESTAMP + DEFAULT_EXPIRES),
                'Signature': '2m9M0eLB%2BqI0nUpkyTskKmHd0Ig%3D',
            },
        }
        self.assert_presigned_url_matches(stdout, expected)
def test_shutdown_does_not_hang(self):
    """Flooding the bounded IO write queue must not deadlock shutdown.

    A task keeps pushing IORequests into a write queue with maxsize=1
    while the executor is shutting down; shutdown should still complete
    and the IO thread should have written the data to the file.
    """
    executor = Executor(2, queue.Queue(), False, 10,
                        queue.Queue(maxsize=1))
    with temporary_file('rb+') as f:
        executor.start()

        class FloodIOQueueTask(object):
            PRIORITY = 10

            def __call__(self):
                # 50 puts against a maxsize=1 queue guarantees the task
                # blocks on the queue during shutdown.
                for i in range(50):
                    executor.write_queue.put(
                        IORequest(f.name, 0, b'foobar'))

        executor.submit(FloodIOQueueTask())
        executor.initiate_shutdown()
        executor.wait_until_shutdown()
        # Use a context manager so the handle is closed; the original
        # leaked the file object from open(f.name, 'rb').read().
        with open(f.name, 'rb') as written:
            self.assertEqual(written.read(), b'foobar')
def test_user_data(self):
    """--user-data file:// content should be base64-encoded into UserData."""
    data = u'\u0039'
    with temporary_file('r+') as tmp:
        with compat_open(tmp.name, 'w') as f:
            f.write(data)
            f.flush()
            args = (self.prefix +
                    ' --image-id foo --user-data file://%s' % f.name)
            expected = {
                'ImageId': 'foo',
                'MaxCount': '1',
                'MinCount': '1',
                # 'OQ==' is the base64 of the utf-8 encoding of data.
                'UserData': 'OQ==',
            }
            self.assert_params_for_cmd(args, expected)
def test_user_data(self):
    """Verify --user-data file:// content is base64-encoded into UserData.

    NOTE(review): the original silently disabled this test with a bare
    ``return`` at the top, leaving everything below unreachable and the
    skip invisible in test reports. Skip explicitly instead so the
    disabled state shows up and can be revisited.
    """
    self.skipTest('disabled by an unconditional return in the original; '
                  'investigate before re-enabling')
    data = u"\u0039"
    with temporary_file("r+") as tmp:
        with compat_open(tmp.name, "w") as f:
            f.write(data)
            f.flush()
            args = self.prefix + " --image-id foo --user-data file://%s" % f.name
            result = {
                "ImageId": "foo",
                "MaxCount": 1,
                "MinCount": 1,
                # base64 encoded content of utf-8 encoding of data.
                "UserData": "OQ==",
            }
            self.assert_params_for_cmd(args, result)
def test_can_support_addressing_mode_config(self):
    """Config-driven path-style addressing shapes the presigned URL."""
    with temporary_file('w') as f:
        self.enable_addressing_mode_in_config(f, 'path')
        url_output = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        expected = {
            'hostname': 's3.amazonaws.com',
            'path': '/bucket/key',
            'query_params': {
                'AWSAccessKeyId': 'access_key',
                'Expires': str(FROZEN_TIMESTAMP + DEFAULT_EXPIRES),
                'Signature': '2m9M0eLB%2BqI0nUpkyTskKmHd0Ig%3D',
            },
        }
        self.assert_presigned_url_matches(url_output, expected)
def test_can_run_wizard(self):
    """--run-wizard should load and execute a wizard from file://."""
    wizard_doc = ('version: "0.9"\n'
                  'plan:\n'
                  ' start:\n'
                  ' values:\n'
                  ' foo:\n'
                  ' type: static\n'
                  ' value: myvalue\n'
                  'execute:\n'
                  ' default:\n'
                  ' - type: apicall\n'
                  ' operation: iam.ListRoles\n'
                  ' params: {}\n')
    with temporary_file('r+') as f:
        f.write(wizard_doc)
        f.flush()
        # A successful run should generate no API params of its own.
        stdout, _, _ = self.assert_params_for_cmd(
            'cli-dev wizard-dev --run-wizard file://%s' % f.name,
            params={})
def test_smoke_test_completer():
    # End-to-end smoke test of the completer:
    #   1. Generate part of the completion index.
    #   2. Create a completer with the factory function.
    #   3. Generate completions using this index.
    #
    # We deliberately do not build the full completion index here;
    # test_generator.py already verifies full-index generation (which
    # takes a while), so there is no need to do it twice.
    with testutils.temporary_file('w') as f:
        _generate_index(f.name)
        completions = _autocomplete(f.name, 'aws ec2 desc')
        completion_strings = [c.result for c in completions]
        # The API changes over time, so rather than assert a specific
        # list, spot-check operations that will always exist.
        assert_in('describe-instances', completion_strings)
        assert_in('describe-regions', completion_strings)
def test_can_open_file_with_builtin_function(self):
    """The __wizard__:File builtin should be replaced by file contents."""
    invoker = core.APIInvoker(self.mock_session)
    with temporary_file('r+') as f:
        f.write('admin')
        f.flush()
        # The filename lives in a plan variable; the builtin File type
        # references it, and the whole value should be swapped for the
        # temp file's contents ('admin').
        invoker.invoke(
            'iam', 'CreateUser',
            plan_variables={'myfile': f.name},
            api_params={
                'UserName': {'__wizard__:File': {'path': '{myfile}'}},
            },
        )
        call_method_args = self.get_call_args(self.mock_session)
        self.assertEqual(call_method_args, mock.call(UserName=b'admin'))
def test_handles_sigv4(self):
    """SigV4-enabled config yields X-Amz-* query params (path style)."""
    with temporary_file('w') as f:
        self.enable_sigv4_from_config_file(f)
        url_output = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        expected = {
            'hostname': 's3.amazonaws.com',
            'path': '/bucket/key',
            'query_params': {
                'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
                'X-Amz-Credential': (
                    'access_key%2F20160818%2Fus-east-1'
                    '%2Fs3%2Faws4_request'),
                'X-Amz-Date': '20160818T143303Z',
                'X-Amz-Expires': '3600',
                'X-Amz-Signature': (
                    'd20178280d7521b384730c678549f6344401ae040bec559ad0602'
                    '0854c6c718f'),
                'X-Amz-SignedHeaders': 'host'
            }
        }
        self.assert_presigned_url_matches(url_output, expected)
def test_policy_from_paramfile(self, create_client_mock):
    """A file:// custom policy is sent to S3 as its raw contents."""
    client = Mock()
    # S3 mock calls: stub user ARN; HeadBucket reports the bucket
    # missing so create-subscription will create it.
    client.get_user.return_value = {"User": {"Arn": ":::::"}}
    client.head_bucket.side_effect = ClientError(
        {"Error": {"Code": 404, "Message": ""}}, "HeadBucket")
    # CloudTrail mock call.
    client.describe_trails.return_value = {}
    create_client_mock.return_value = client
    policy = '{"Statement": []}'
    with temporary_file("w") as f:
        f.write(policy)
        f.flush()
        command = (
            "cloudtrail create-subscription --s3-new-bucket foo "
            "--name bar --s3-custom-policy file://{0}".format(f.name)
        )
        self.run_cmd(command, expected_rc=0)
        # Ensure the *contents* of the file are sent as the policy
        # parameter to S3.
        client.put_bucket_policy.assert_called_with(Bucket="foo",
                                                    Policy=policy)
def test_handles_sigv4(self):
    """SigV4-enabled config yields X-Amz-* params (virtual-hosted style)."""
    with temporary_file('w') as f:
        self.enable_sigv4_from_config_file(f)
        url_output = self.get_presigned_url_for_cmd(
            self.prefix + 's3://bucket/key')
        expected = {
            'hostname': 'bucket.s3.amazonaws.com',
            'path': '/key',
            'query_params': {
                'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
                'X-Amz-Credential': (
                    'access_key%2F20160818%2Fus-east-1'
                    '%2Fs3%2Faws4_request'),
                'X-Amz-Date': '20160818T143303Z',
                'X-Amz-Expires': '3600',
                'X-Amz-Signature': (
                    'd28b6c4a54f31196a6d49335556736a3fc29f036018c8e'
                    '50775887299092d1a0'),
                'X-Amz-SignedHeaders': 'host'
            }
        }
        self.assert_presigned_url_matches(url_output, expected)
def test_policy_from_paramfile(self, create_client_mock):
    """file:// policies reach S3 as the file's contents, not the URI."""
    client = Mock()
    # S3 mock calls: minimal ARN; the bucket lookup 404s so the
    # command path that creates a new bucket is exercised.
    client.get_user.return_value = {'User': {'Arn': ':::::'}}
    client.head_bucket.side_effect = ClientError(
        {'Error': {'Code': 404, 'Message': ''}}, 'HeadBucket')
    # CloudTrail mock call.
    client.describe_trails.return_value = {}
    create_client_mock.return_value = client
    policy = '{"Statement": []}'
    with temporary_file('w') as f:
        f.write(policy)
        f.flush()
        command = (
            'cloudtrail create-subscription --s3-new-bucket foo '
            '--name bar --s3-custom-policy file://{0}'.format(f.name))
        self.run_cmd(command, expected_rc=0)
        # The policy parameter sent to S3 must be the file's contents.
        client.put_bucket_policy.assert_called_with(
            Bucket='foo', Policy=policy)