def test_cp_to_and_from_s3(self):
    """Round-trip a single file: upload it to S3, then download it back."""
    bucket_name = self.create_bucket()
    # Upload the local file into the bucket.
    foo_txt = self.files.create_file('foo.txt', 'this is foo.txt')
    p = aws('s3 cp %s s3://%s/foo.txt' % (foo_txt, bucket_name))
    self.assert_no_errors(p)
    # The object must now exist with the expected body and content type.
    self.assertTrue(self.key_exists(bucket_name, key_name='foo.txt'))
    self.assertEqual(
        self.get_key_contents(bucket_name, key_name='foo.txt'),
        'this is foo.txt')
    self.assertEqual(
        self.content_type_for_key(bucket_name, key_name='foo.txt'),
        'text/plain')
    # Download the object under a new local name and verify the contents.
    full_path = self.files.full_path('bar.txt')
    p = aws('s3 cp s3://%s/foo.txt %s' % (bucket_name, full_path))
    self.assert_no_errors(p)
    with open(full_path, 'r') as f:
        self.assertEqual(f.read(), 'this is foo.txt')
def test_exclude_filter_with_delete(self):
    """Regression test for https://github.com/aws/aws-cli/issues/778.

    An ``--exclude`` filter must also apply to the ``--delete`` pass of
    ``s3 sync``, so an excluded remote object is never deleted.
    """
    bucket_name = self.create_bucket()
    # The return path of foo.txt is not needed; only bar.py's local
    # path is used below (the original bound it to an unused local).
    self.files.create_file('foo.txt', 'contents')
    second = self.files.create_file('bar.py', 'contents')
    p = aws("s3 sync %s s3://%s/" % (self.files.rootdir, bucket_name))
    self.assert_no_errors(p)
    self.assertTrue(self.key_exists(bucket_name, key_name='bar.py'))
    os.remove(second)
    # We now have the same state as specified in the bug:
    #   local      remote
    #   -----      ------
    #   foo.txt    foo.txt
    #              bar.py
    #
    # If we now run --exclude '*.py' --delete, then we should *not*
    # delete bar.py on the remote side.
    p = aws("s3 sync %s s3://%s/ --exclude '*.py' --delete" % (
        self.files.rootdir, bucket_name))
    self.assert_no_errors(p)
    self.assertTrue(
        self.key_exists(bucket_name, key_name='bar.py'),
        ("The --delete flag was not applied to the receiving "
         "end, the 'bar.py' file was deleted even though it was excluded."))
def test_set_with_triple_nesting(self):
    """A triple-dotted key writes a nested value under its section."""
    aws('configure set default.s3.signature_version s3v4',
        env_vars=self.env_vars)
    expected = (
        '[default]\n'
        's3 =\n'
        ' signature_version = s3v4\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_no_sign_request(self):
    """--no-sign-request allows anonymous access to a public object."""
    d = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, d)
    env_vars = os.environ.copy()
    env_vars['AWS_ACCESS_KEY_ID'] = 'foo'
    env_vars['AWS_SECRET_ACCESS_KEY'] = 'bar'
    bucket_name = 'nosign' + str(
        int(time.time())) + str(random.randint(1, 100))
    self.put_object(bucket_name, 'foo', content='bar',
                    extra_args={'ACL': 'public-read-write'})
    download_path = os.path.join(d, 'foo')
    # With bogus credentials the signed request must fail.
    p = aws('s3api get-object --bucket %s --key foo %s' % (
        bucket_name, download_path), env_vars=env_vars)
    self.assertEqual(p.rc, 255)
    # An unsigned (anonymous) request succeeds against the public object.
    p = aws('s3api get-object --bucket %s --key foo '
            '%s --no-sign-request' % (bucket_name, download_path),
            env_vars=env_vars)
    self.assertEqual(p.rc, 0)
    with open(download_path) as f:
        contents = f.read()
    self.assertEqual(contents, 'bar')
def test_set_with_empty_config_file(self):
    """`configure set` works on an existing-but-empty config file."""
    # Truncate/create the config file so it exists but has no content.
    with open(self.config_filename, 'w'):
        pass
    aws('configure set region us-west-1', env_vars=self.env_vars)
    expected = (
        '[default]\n'
        'region = us-west-1\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_set_with_updating_value(self):
    """`configure set` replaces an existing value in place."""
    self.set_config_file_contents(
        '[default]\n'
        'region = us-west-2\n')
    aws('configure set region us-west-1', env_vars=self.env_vars)
    expected = (
        '[default]\n'
        'region = us-west-1\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_mb_rb(self):
    """`s3 mb` creates a bucket and `s3 rb` removes it again."""
    p = aws('s3 mb s3://%s' % self.bucket_name)
    self.assert_no_errors(p)
    bucket_names = [b['Name'] for b in self.list_buckets()]
    self.assertIn(self.bucket_name, bucket_names)
    p = aws('s3 rb s3://%s' % self.bucket_name)
    self.assert_no_errors(p)
def test_set_with_commented_out_field(self):
    """Commented-out lines are preserved; a real section is appended."""
    self.set_config_file_contents(
        '#[preview]\n'
        ';cloudsearch = true\n')
    aws('configure set preview.cloudsearch true', env_vars=self.env_vars)
    expected = (
        '#[preview]\n'
        ';cloudsearch = true\n'
        '[preview]\n'
        'cloudsearch = true\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_basic_exclude_filter_for_single_file(self):
    """An --exclude '*' filter prevents any upload of a single file."""
    full_path = self.files.create_file('foo.txt', 'this is foo.txt')
    # Without an exclude filter the dryrun reports an upload.
    p = aws('s3 cp %s s3://random-bucket-name/ --dryrun' % full_path)
    self.assert_no_errors(p)
    self.assertIn('(dryrun) upload:', p.stdout)
    # Excluding everything means nothing would be uploaded.
    p2 = aws("s3 cp %s s3://random-bucket-name/ --dryrun --exclude '*'"
             % full_path)
    self.assert_no_files_would_be_uploaded(p2)
def test_json_param_parsing(self):
    """Sanity check that JSON services return parsed JSON responses.

    This is covered (original comment had the typo 'convered') by unit
    tests in botocore; here we just verify end-to-end that a JSON
    service gives us a json response.
    """
    p = aws('swf list-domains --registration-status REGISTERED')
    self.assertEqual(p.rc, 0)
    self.assertIsInstance(p.json, dict)
    p = aws('dynamodb list-tables')
    self.assertEqual(p.rc, 0)
    self.assertIsInstance(p.json, dict)
def assert_s3_read_only_profile(self, profile_name):
    """Assert the profile can call S3 but is denied on other services."""
    # Calls to S3 should succeed.
    result = aws('s3api list-buckets --profile %s' % profile_name,
                 env_vars=self.environ)
    self.assertEqual(result.rc, 0, result.stderr)
    # Calls to other services should be denied.
    result = aws('iam list-groups --profile %s' % profile_name,
                 env_vars=self.environ)
    self.assertNotEqual(result.rc, 0, result.stdout)
    self.assertIn('AccessDenied', result.stderr)
def test_mb_rb(self):
    """Create a bucket with `s3 mb`, confirm it exists, then remove it."""
    p = aws('s3 mb s3://%s' % self.bucket_name)
    self.assert_no_errors(p)
    # Give the bucket time to form.
    time.sleep(1)
    names = [b['Name'] for b in self.list_buckets()]
    self.assertIn(self.bucket_name, names)
    p = aws('s3 rb s3://%s' % self.bucket_name)
    self.assert_no_errors(p)
def test_upload_download_file_with_spaces(self):
    """Recursive cp round-trips a filename containing a space."""
    bucket_name = self.create_bucket()
    filename = self.files.create_file('with space.txt', 'contents')
    p = aws('s3 cp %s s3://%s/ --recursive' % (
        self.files.rootdir, bucket_name))
    self.assert_no_errors(p)
    os.remove(filename)
    # Now download the file back down locally.
    p = aws('s3 cp s3://%s/ %s --recursive' % (
        bucket_name, self.files.rootdir))
    self.assert_no_errors(p)
    self.assertEqual(os.listdir(self.files.rootdir)[0], 'with space.txt')
def test_override_existing_value(self):
    """`configure set` overwrites a value nested under a subsection."""
    self.set_config_file_contents(
        '[default]\n'
        's3 =\n'
        ' signature_version = v4\n'
    )
    aws('configure set default.s3.signature_version NEWVALUE',
        env_vars=self.env_vars)
    expected = (
        '[default]\n'
        's3 =\n'
        ' signature_version = NEWVALUE\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_cli_input_json_exta_args(self):
    """Explicit arguments override values loaded via --cli-input-json."""
    # NOTE: 'exta' in the method name is a long-standing typo ('extra');
    # kept so selecting this test by name keeps working.
    # Check that the object can be found.
    p = aws('s3api head-object --cli-input-json file://%s --region %s'
            % (self.temp_file, self.region))
    self.assertEqual(p.rc, 0)
    # Override the ``key`` argument. Should produce a failure because
    # the key ``bar`` does not exist.
    p = aws('s3api head-object --key bar --cli-input-json file://%s '
            '--region %s' % (self.temp_file, self.region))
    self.assertEqual(p.rc, 255)
    self.assertIn('Not Found', p.stderr)
def test_get_nested_attribute(self):
    """`configure get` reads nested values; a bad path exits non-zero."""
    self.set_config_file_contents(
        '[default]\n'
        's3 =\n'
        ' signature_version = v4\n'
    )
    p = aws('configure get default.s3.signature_version',
            env_vars=self.env_vars)
    self.assertEqual(p.stdout.strip(), 'v4')
    # A nonexistent nested key yields rc 1 and no output.
    p = aws('configure get default.bad.doesnotexist',
            env_vars=self.env_vars)
    self.assertEqual(p.rc, 1)
    self.assertEqual(p.stdout, '')
def _aws(command_string, max_attempts=1, delay=5, target_rc=0):
    """Run an aws command, retrying until its rc matches ``target_rc``.

    Up to ``max_attempts`` attempts are made, sleeping ``delay`` seconds
    between retries; the final attempt's result is returned regardless
    of its return code.
    """
    service = command_string.split()[0]
    env = None
    if service in REGION_OVERRIDES:
        env = os.environ.copy()
        env['AWS_DEFAULT_REGION'] = REGION_OVERRIDES[service]
    # All attempts except the last may sleep and retry on a mismatch.
    attempts_remaining = max_attempts
    while attempts_remaining > 1:
        result = aws(command_string, env_vars=env)
        if result.rc == target_rc:
            return result
        time.sleep(delay)
        attempts_remaining -= 1
    return aws(command_string, env_vars=env)
def test_no_paginate_arg(self):
    """list-objects succeeds both with and without --no-paginate."""
    d = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, d)
    bucket_name = 'nopaginate' + str(
        int(time.time())) + str(random.randint(1, 100))
    self.put_object(bucket=bucket_name, key='foobar',
                    content='foobar contents')
    p = aws('s3api list-objects --bucket %s --no-paginate' % bucket_name)
    self.assertEqual(p.rc, 0, p.stdout + p.stderr)
    p = aws('s3api list-objects --bucket %s' % bucket_name)
    self.assertEqual(p.rc, 0, p.stdout + p.stderr)
def test_sync_file_with_spaces(self):
    """Sync a file whose name contains a space; a re-sync is a no-op.

    Fix: the original called ``create_bucket()`` twice back-to-back,
    leaking an unused bucket; one bucket is sufficient.  The unused
    ``filename`` binding was dropped as well.
    """
    bucket_name = self.create_bucket()
    self.files.create_file('with space.txt', 'contents')
    p = aws('s3 sync %s s3://%s/' % (self.files.rootdir, bucket_name))
    self.assert_no_errors(p)
    # Now syncing again should *not* trigger any uploads (i.e we should
    # get nothing on stdout).
    p2 = aws('s3 sync %s s3://%s/' % (self.files.rootdir, bucket_name))
    self.assertEqual(p2.stdout, '')
    self.assertEqual(p2.stderr, '')
    self.assertEqual(p2.rc, 0)
def test_sync_no_resync(self):
    """A second sync of unchanged files uploads nothing."""
    self.files.create_file('xyz123456789', contents='test1')
    self.files.create_file(os.path.join('xyz1', 'test'), contents='test2')
    self.files.create_file(os.path.join('xyz', 'test'), contents='test3')
    bucket_name = self.create_bucket()
    p = aws('s3 sync %s s3://%s' % (self.files.rootdir, bucket_name))
    self.assert_no_errors(p)
    for key in ('xyz123456789', 'xyz1/test', 'xyz/test'):
        self.assertTrue(self.key_exists(bucket_name, key))
    # The second sync must produce no output at all.
    p2 = aws('s3 sync %s s3://%s/' % (self.files.rootdir, bucket_name))
    self.assertNotIn('upload:', p2.stdout)
    self.assertEqual('', p2.stdout)
def test_cp(self):
    """Upload then download a file with a non-ASCII (unicode) name."""
    bucket_name = self.create_bucket()
    local_example1_txt = self.files.create_file(
        u'\u00e9xample.txt', 'example1 contents')
    s3_example1_txt = 's3://%s/%s' % (
        bucket_name, os.path.basename(local_example1_txt))
    local_example2_txt = self.files.full_path(u'\u00e9xample2.txt')
    p = aws('s3 cp %s %s' % (local_example1_txt, s3_example1_txt))
    self.assert_no_errors(p)
    # Download the file to the second example2.txt filename.
    p = aws('s3 cp %s %s --quiet' % (s3_example1_txt, local_example2_txt))
    self.assert_no_errors(p)
    with open(local_example2_txt, 'rb') as f:
        self.assertEqual(f.read(), b'example1 contents')
def test_can_handle_empty_section(self):
    """`configure set` copes with a config file holding an empty section.

    Fix: the original bound both aws() results to an unused local ``p``;
    the dead bindings have been removed.
    """
    self.set_config_file_contents(
        '[default]\n'
    )
    aws('configure set preview.cloudfront true',
        env_vars=self.env_vars)
    aws('configure set region us-west-2',
        env_vars=self.env_vars)
    self.assertEqual(
        '[default]\n'
        'region = us-west-2\n'
        '[preview]\n'
        'cloudfront = true\n',
        self.get_config_file_contents(),
    )
def test_transfer_single_large_file(self):
    """Multipart transfer of one large file stays under the memory cap.

    The payload is 80MB (8 bytes * 10 * 1024 * 1024), which forces a
    multipart upload; the original comment incorrectly said 40MB.
    """
    bucket_name = self.create_bucket()
    file_contents = 'abcdabcd' * (1024 * 1024 * 10)
    foo_txt = self.files.create_file('foo.txt', file_contents)
    full_command = 's3 mv %s s3://%s/foo.txt' % (foo_txt, bucket_name)
    p = aws(full_command, collect_memory=True)
    self.assert_no_errors(p)
    self.assert_max_memory_used(p, self.max_mem_allowed, full_command)
    # Verify downloading it back down obeys memory utilization.
    download_full_command = 's3 mv s3://%s/foo.txt %s' % (
        bucket_name, foo_txt)
    p = aws(download_full_command, collect_memory=True)
    self.assert_no_errors(p)
    self.assert_max_memory_used(p, self.max_mem_allowed,
                                download_full_command)
def test_ec2_describe_instances(self):
    """A plain describe-instances call succeeds and returns JSON."""
    p = aws('ec2 describe-instances')
    self.assertEqual(p.rc, 0)
    # We can't know what instances exist, but the response must always
    # carry a Reservations key.
    self.assertIn('Reservations', p.json)
def test_set_with_profile(self):
    """--profile writes the value under a '[profile NAME]' section."""
    p = aws('configure set region us-west-1 --profile testing',
            env_vars=self.env_vars)
    self.assert_no_errors(p)
    expected = (
        '[profile testing]\n'
        'region = us-west-1\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_param_with_bad_json(self):
    """An invalid filter name produces rc 255 and a clear error."""
    p = aws(
        'ec2 describe-instances --filters '
        '\'{"Name": "bad-filter", "Values": ["i-123"]}\'')
    self.assertEqual(p.rc, 255)
    diagnostics = "stdout: %s, stderr: %s" % (p.stdout, p.stderr)
    self.assertIn("The filter 'bad-filter' is invalid", p.stderr,
                  diagnostics)
def test_generate_cli_skeleton_iam(self):
    """The iam create-group skeleton contains exactly the two inputs."""
    p = aws('iam create-group --generate-cli-skeleton')
    self.assertEqual(p.rc, 0)
    skeleton = json.loads(p.stdout)
    self.assertEqual(skeleton, {'Path': '', 'GroupName': ''})
def test_generate_cli_skeleton_sqs(self):
    """The sqs change-message-visibility skeleton has all three inputs."""
    p = aws('sqs change-message-visibility --generate-cli-skeleton')
    self.assertEqual(p.rc, 0)
    skeleton = json.loads(p.stdout)
    self.assertEqual(
        skeleton,
        {'QueueUrl': '', 'ReceiptHandle': '', 'VisibilityTimeout': 0})
def test_generate_cli_skeleton_s3api(self):
    """The s3api delete-object skeleton has the expected empty fields."""
    p = aws('s3api delete-object --generate-cli-skeleton')
    self.assertEqual(p.rc, 0)
    skeleton = json.loads(p.stdout)
    self.assertEqual(
        skeleton, {'Bucket': '', 'Key': '', 'MFA': '', 'VersionId': ''})
def test_set_with_fq_double_dot(self):
    """A fully-qualified 'profile.NAME.key' path writes into the profile."""
    p = aws('configure set profile.testing.region us-west-2',
            env_vars=self.env_vars)
    self.assert_no_errors(p)
    expected = (
        '[profile testing]\n'
        'region = us-west-2\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_help_output(self):
    """`aws help` renders the top-level help page (rc 1 by design).

    Fix: the regex is now a raw string so the ``\\s`` sequences are not
    treated as invalid string escapes (a DeprecationWarning/SyntaxWarning
    on modern Python).
    """
    p = aws('help')
    self.assertEqual(p.rc, 1)
    self.assertIn('AWS', p.stdout)
    self.assertRegexpMatches(
        p.stdout, r'The\s+AWS\s+Command\s+Line\s+Interface')
def test_leftover_args_in_operation(self):
    """Trailing junk arguments are rejected with an 'Unknown option'."""
    p = aws('ec2 describe-instances BADKEY=foo')
    self.assertEqual(p.rc, 255)
    self.assertIn("Unknown option", p.stderr, p.stderr)
def test_unknown_argument(self):
    """A misspelled option name is reported as an unknown option."""
    p = aws('ec2 describe-instances --filterss')
    self.assertEqual(p.rc, 255)
    self.assertIn('Unknown options: --filterss', p.stderr)
def test_help_usage_top_level(self):
    """Running `aws` with no arguments prints usage and an error."""
    p = aws('')
    expected_usage = ('usage: aws [options] <command> '
                      '<subcommand> [parameters]')
    self.assertIn(expected_usage, p.stderr)
    self.assertIn('aws: error', p.stderr)
def test_top_level_options_debug(self):
    """--debug succeeds and emits DEBUG log lines on stderr."""
    p = aws('ec2 describe-instances --debug')
    self.assertEqual(p.rc, 0)
    self.assertIn('DEBUG', p.stderr)
def test_param_json(self):
    """A JSON-formatted --filters value is parsed and accepted."""
    p = aws('ec2 describe-instances --filters '
            '\'{"Name": "instance-id", "Values": ["i-123"]}\'')
    self.assertEqual(p.rc, 0, p.stdout + p.stderr)
    self.assertIn('Reservations', p.json)
def test_subscribe_to_shard_removed(self):
    """kinesis subscribe-to-shard must not be an available operation."""
    result = aws('kinesis subscribe-to-shard help')
    self.assertIn(
        'argument operation: Invalid choice, valid choices are:',
        result.stderr)
def test_help_with_warning_blocks(self):
    """Help pages containing warning blocks render successfully.

    Fix: the regex is now a raw string so ``\\s`` is not an invalid
    string escape (a DeprecationWarning/SyntaxWarning on modern Python).
    """
    p = aws('elastictranscoder create-pipeline help')
    self.assertEqual(p.rc, 0, p.stderr)
    # Check text that appears in the warning block to ensure
    # the block was actually rendered.
    self.assertRegexpMatches(p.stdout, r'To\s+receive\s+notifications')
def test_explicitly_exclude_single_file(self):
    """Excluding the file by its own name skips the upload entirely."""
    full_path = self.files.create_file('foo.txt', 'this is foo.txt')
    p = aws('s3 cp %s s3://random-bucket-name/'
            ' --dryrun --exclude foo.txt' % full_path)
    self.assert_no_files_would_be_uploaded(p)
def test_operation_help_with_required_arg(self):
    """Help for an operation with required args still renders."""
    p = aws('s3api get-object help')
    self.assertEqual(p.rc, 1, p.stderr)
    self.assertIn('get-object', p.stdout)
def test_topic_list_help_output(self):
    """`aws help topics` renders the topic guide index.

    Fix: regexes are now raw strings so the ``\\s`` sequences are not
    invalid string escapes (a DeprecationWarning/SyntaxWarning on
    modern Python).
    """
    p = aws('help topics')
    self.assertEqual(p.rc, 0)
    self.assertRegexpMatches(p.stdout, r'\s+AWS\s+CLI\s+Topic\s+Guide')
    self.assertRegexpMatches(
        p.stdout,
        r'\s+This\s+is\s+the\s+AWS\s+CLI\s+Topic\s+Guide')
def test_topic_help_output(self):
    """`aws help return-codes` renders the return-codes topic page.

    Fix: regexes are now raw strings so the ``\\s`` sequences are not
    invalid string escapes (a DeprecationWarning/SyntaxWarning on
    modern Python).
    """
    p = aws('help return-codes')
    self.assertEqual(p.rc, 0)
    self.assertRegexpMatches(p.stdout, r'\s+AWS\s+CLI\s+Return\s+Codes')
    self.assertRegexpMatches(
        p.stdout,
        r'These\s+are\s+the\s+following\s+return\s+codes')
def test_param_shorthand(self):
    """Shorthand (Name=...,Values=...) filter syntax is accepted."""
    p = aws(
        'ec2 describe-instances --filters Name=instance-id,Values=i-123')
    self.assertEqual(p.rc, 0)
    self.assertIn('Reservations', p.json)
def test_start_conversation_removed(self):
    """lexv2-runtime start-conversation must not be available."""
    result = aws('lexv2-runtime start-conversation help')
    self.assertIn(
        'argument operation: Invalid choice, valid choices are:',
        result.stderr)
def test_param_with_bad_json(self):
    """A JSON filter with an invalid name fails with rc 255."""
    command = ('ec2 describe-instances --filters '
               '\'{"Name": "bad-filter", "Values": ["i-123"]}\'')
    p = aws(command)
    self.assertEqual(p.rc, 255)
    self.assertIn("The filter 'bad-filter' is invalid", p.stderr,
                  "stdout: %s, stderr: %s" % (p.stdout, p.stderr))
def assert_dry_run_success(self, command):
    """Assert the command failed only because the DryRun flag was set."""
    result = aws(command)
    self.assertIn(
        'Request would have succeeded, but DryRun flag is set.',
        result.stderr)
def test_make_requests_to_other_region(self):
    """An explicit --region flag routes the request successfully."""
    p = aws('ec2 describe-instances --region us-west-2')
    self.assertEqual(p.rc, 0)
    self.assertIn('Reservations', p.json)
def test_bad_lc_ctype_env_var_is_handled(self):
    """The CLI tolerates a bad LC_CTYPE value (seen on Mac OS X)."""
    base_env_vars = os.environ.copy()
    base_env_vars['LC_CTYPE'] = 'UTF-8'
    p = aws('iam list-users', env_vars=base_env_vars)
    self.assertEqual(p.rc, 0)
def test_help_usage_operation_level(self):
    """Missing required operation args produce the usage line on stderr."""
    p = aws('ec2 run-instances')
    expected_usage = ('usage: aws [options] <command> '
                      '<subcommand> [parameters]')
    self.assertIn(expected_usage, p.stderr)
def test_version(self):
    """--version succeeds and reports 'aws-cli' on stderr."""
    p = aws('--version')
    self.assertEqual(p.rc, 0)
    self.assertTrue(p.stderr.startswith('aws-cli'), p.stderr)
def test_traceback_printed_when_debug_on(self):
    """--debug surfaces the full traceback plus DEBUG log lines."""
    p = aws('ec2 describe-instances --filters BADKEY=foo --debug')
    self.assertIn('Traceback (most recent call last):', p.stderr,
                  p.stderr)
    # DEBUG statements should also be present.
    self.assertIn('DEBUG', p.stderr, p.stderr)
def test_set_with_profile(self):
    """--profile writes the value under a '[profile NAME]' header."""
    aws('configure set region us-west-1 --profile testing',
        env_vars=self.env_vars)
    expected = (
        '[profile testing]\n'
        'region = us-west-1\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_pagination_with_text_output(self):
    """Paginated text output completes without error."""
    p = aws('iam list-users --output text')
    self.assertEqual(p.rc, 0)
def test_set_with_fq_single_dot(self):
    """A single-dotted 'section.key' path writes into that section."""
    aws('configure set preview.cloudsearch true', env_vars=self.env_vars)
    expected = (
        '[preview]\n'
        'cloudsearch = true\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_service_help_output(self):
    """`aws ec2 help` renders the service help page (rc 1 by design)."""
    p = aws('ec2 help')
    self.assertEqual(p.rc, 1)
    self.assertIn('Amazon EC2', p.stdout)
def test_set_with_fq_double_dot(self):
    """'profile.NAME.key' writes the value under '[profile NAME]'."""
    aws('configure set profile.testing.region us-west-2',
        env_vars=self.env_vars)
    expected = (
        '[profile testing]\n'
        'region = us-west-2\n')
    self.assertEqual(expected, self.get_config_file_contents())
def test_set_with_config_file_no_exist(self):
    """`configure set` creates the config file when it does not exist."""
    aws('configure set region us-west-1', env_vars=self.env_vars)
    expected = (
        '[default]\n'
        'region = us-west-1\n')
    self.assertEqual(expected, self.get_config_file_contents())
def _run_successful_aws_command(command_string):
    """Run an aws command and assert success with an empty stderr."""
    result = aws(command_string)
    assert_equal(result.rc, 0)
    assert_equal(result.stderr, '')
def test_operation_help_with_required_option(self):
    """Help for an op with required options renders without stderr noise."""
    p = aws('cloudsearchdomain search help')
    self.assertEqual(p.rc, 0, p.stderr)
    self.assertIn('search', p.stdout)
    # And nothing on stderr about missing options.
    self.assertEqual(p.stderr, '')
def test_generate_cli_skeleton_iam(self):
    """The iam create-group skeleton has exactly Path and GroupName."""
    p = aws('iam create-group --generate-cli-skeleton')
    self.assertEqual(p.rc, 0)
    skeleton = json.loads(p.stdout)
    self.assertEqual(skeleton, {'Path': '', 'GroupName': ''})