class TestCPCommand(BaseAWSCommandParamsTest):
    """Verify which S3 operations ``aws s3 cp`` issues for each transfer type."""

    prefix = 's3 cp '

    def setUp(self):
        super(TestCPCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestCPCommand, self).tearDown()
        self.files.remove_all()

    def test_operations_used_in_upload(self):
        filename = self.files.create_file('foo.txt', 'mycontent')
        self.parsed_responses = [
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, filename)
        self.run_cmd(cmdline, expected_rc=0)
        # A single-file upload should translate to exactly one PutObject.
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')

    def test_operations_used_in_download_file(self):
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"', 'Body': six.BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix,
                                                self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # A single-file download is a HeadObject (size check) followed
        # by the GetObject that fetches the body.
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')

    def test_operations_used_in_recursive_download(self):
        self.parsed_responses = [
            {'ETag': '"foo-1"', 'Contents': [], 'CommonPrefixes': []},
        ]
        cmdline = '%s s3://bucket/key.txt %s --recursive' % (
            self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # The listing came back empty, so the only request made is the
        # ListObjects itself — no GetObject calls should follow.
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjects')
def setUp(self):
    """Build the botocore plumbing and scratch-file helper for each test."""
    self.session = botocore.session.get_session()
    self.service = self.session.get_service('s3')
    self.endpoint = self.service.get_endpoint('us-east-1')
    self.files = FileCreator()
    self.extra_setup()
class BaseS3CLICommand(unittest.TestCase):
    """Base class for aws s3 command.

    This contains convenience functions to make writing these tests
    easier and more streamlined.
    """

    def setUp(self):
        # Scratch files plus a live s3 endpoint for us-east-1.
        self.files = FileCreator()
        self.session = botocore.session.get_session()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.extra_setup()

    def extra_setup(self):
        # Subclasses can use this to define extra setup steps.
        pass

    def tearDown(self):
        self.files.remove_all()
        self.extra_teardown()

    def extra_teardown(self):
        # Subclasses can use this to define extra teardown steps.
        pass

    def create_bucket(self):
        """Create a bucket and register it for deletion at test end."""
        bucket_name = _create_bucket(self.session)
        self.addCleanup(self.delete_bucket, bucket_name)
        return bucket_name

    def put_object(self, bucket_name, key_name, contents=''):
        """Put a key into a bucket, asserting success and scheduling cleanup."""
        operation = self.service.get_operation('PutObject')
        http = operation.call(self.endpoint, bucket=bucket_name,
                              key=key_name, body=contents)[0]
        self.assertEqual(http.status_code, 200)
        self.addCleanup(self.delete_key, bucket_name, key_name)

    def delete_bucket(self, bucket_name):
        """Empty the bucket, then delete the bucket itself."""
        self.remove_all_objects(bucket_name)
        operation = self.service.get_operation('DeleteBucket')
        response = operation.call(self.endpoint, bucket=bucket_name)[0]
        self.assertEqual(response.status_code, 204, response.content)

    def remove_all_objects(self, bucket_name):
        """Delete every key currently in the bucket."""
        operation = self.service.get_operation('ListObjects')
        pages = operation.paginate(self.endpoint, bucket=bucket_name)
        parsed = pages.build_full_result()
        # An empty bucket produces a result with no 'Contents' key, so
        # fall back to an empty list instead of raising a KeyError.
        key_names = [obj['Key'] for obj in parsed.get('Contents', [])]
        for key_name in key_names:
            self.delete_key(bucket_name, key_name)

    def delete_key(self, bucket_name, key_name):
        """Delete a single key, asserting the delete succeeded."""
        operation = self.service.get_operation('DeleteObject')
        response = operation.call(self.endpoint, bucket=bucket_name,
                                  key=key_name)[0]
        self.assertEqual(response.status_code, 204)

    def get_key_contents(self, bucket_name, key_name):
        """Return the body of a key decoded as utf-8."""
        operation = self.service.get_operation('GetObject')
        http, parsed = operation.call(
            self.endpoint, bucket=bucket_name, key=key_name)
        self.assertEqual(http.status_code, 200)
        return parsed['Body'].read().decode('utf-8')

    def key_exists(self, bucket_name, key_name):
        """Return True if a HeadObject on the key succeeds."""
        operation = self.service.get_operation('HeadObject')
        http, parsed = operation.call(
            self.endpoint, bucket=bucket_name, key=key_name)
        return http.status_code == 200

    def list_buckets(self):
        """Return the parsed list of buckets owned by the account."""
        operation = self.service.get_operation('ListBuckets')
        http, parsed = operation.call(self.endpoint)
        self.assertEqual(http.status_code, 200)
        return parsed['Buckets']

    def content_type_for_key(self, bucket_name, key_name):
        """Return the ContentType that HeadObject reports for a key."""
        operation = self.service.get_operation('HeadObject')
        http, parsed = operation.call(
            self.endpoint, bucket=bucket_name, key=key_name)
        self.assertEqual(http.status_code, 200)
        return parsed['ContentType']

    def assert_no_errors(self, p):
        """Assert that a finished aws CLI process ran cleanly.

        ``p`` is a completed process object exposing ``rc``, ``stdout``
        and ``stderr``.
        """
        self.assertEqual(
            p.rc, 0,
            "Non zero rc (%s) received: %s" % (p.rc, p.stdout + p.stderr))
        self.assertNotIn("Error:", p.stdout)
        self.assertNotIn("failed:", p.stdout)
        self.assertNotIn("client error", p.stdout)
        self.assertNotIn("server error", p.stdout)
class BaseS3CLICommand(unittest.TestCase):
    """Base class for aws s3 command.

    This contains convenience functions to make writing these tests
    easier and more streamlined.
    """

    def setUp(self):
        # Scratch files plus a live s3 endpoint for us-east-1.
        self.files = FileCreator()
        self.session = botocore.session.get_session()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.extra_setup()

    def extra_setup(self):
        # Subclasses can use this to define extra setup steps.
        pass

    def tearDown(self):
        self.files.remove_all()
        self.extra_teardown()

    def extra_teardown(self):
        # Subclasses can use this to define extra teardown steps.
        pass

    def create_bucket(self):
        """Create a bucket and register it for deletion at test end."""
        bucket_name = _create_bucket(self.session)
        self.addCleanup(self.delete_bucket, bucket_name)
        return bucket_name

    def put_object(self, bucket_name, key_name, contents=''):
        """Put a key into a bucket, asserting success and scheduling cleanup."""
        operation = self.service.get_operation('PutObject')
        http = operation.call(self.endpoint, bucket=bucket_name,
                              key=key_name, body=contents)[0]
        self.assertEqual(http.status_code, 200)
        self.addCleanup(self.delete_key, bucket_name, key_name)

    def delete_bucket(self, bucket_name):
        """Empty the bucket, then delete the bucket itself."""
        self.remove_all_objects(bucket_name)
        operation = self.service.get_operation('DeleteBucket')
        response = operation.call(self.endpoint, bucket=bucket_name)[0]
        self.assertEqual(response.status_code, 204, response.content)

    def remove_all_objects(self, bucket_name):
        """Delete every key currently in the bucket."""
        operation = self.service.get_operation('ListObjects')
        pages = operation.paginate(self.endpoint, bucket=bucket_name)
        parsed = pages.build_full_result()
        # An empty bucket produces a result with no 'Contents' key, so
        # fall back to an empty list instead of raising a KeyError.
        key_names = [obj['Key'] for obj in parsed.get('Contents', [])]
        for key_name in key_names:
            self.delete_key(bucket_name, key_name)

    def delete_key(self, bucket_name, key_name):
        """Delete a single key, asserting the delete succeeded."""
        operation = self.service.get_operation('DeleteObject')
        response = operation.call(self.endpoint, bucket=bucket_name,
                                  key=key_name)[0]
        self.assertEqual(response.status_code, 204)

    def get_key_contents(self, bucket_name, key_name):
        """Return the body of a key decoded as utf-8."""
        operation = self.service.get_operation('GetObject')
        http, parsed = operation.call(self.endpoint,
                                      bucket=bucket_name, key=key_name)
        self.assertEqual(http.status_code, 200)
        return parsed['Body'].read().decode('utf-8')

    def key_exists(self, bucket_name, key_name):
        """Return True if a HeadObject on the key succeeds."""
        operation = self.service.get_operation('HeadObject')
        http, parsed = operation.call(self.endpoint,
                                      bucket=bucket_name, key=key_name)
        return http.status_code == 200

    def list_buckets(self):
        """Return the parsed list of buckets owned by the account."""
        operation = self.service.get_operation('ListBuckets')
        http, parsed = operation.call(self.endpoint)
        self.assertEqual(http.status_code, 200)
        return parsed['Buckets']

    def content_type_for_key(self, bucket_name, key_name):
        """Return the ContentType that HeadObject reports for a key."""
        operation = self.service.get_operation('HeadObject')
        http, parsed = operation.call(self.endpoint,
                                      bucket=bucket_name, key=key_name)
        self.assertEqual(http.status_code, 200)
        return parsed['ContentType']

    def assert_no_errors(self, p):
        """Assert that a finished aws CLI process ran cleanly.

        ``p`` is a completed process object exposing ``rc``, ``stdout``
        and ``stderr``.
        """
        self.assertEqual(
            p.rc, 0,
            "Non zero rc (%s) received: %s" % (p.rc, p.stdout + p.stderr))
        self.assertNotIn("Error:", p.stdout)
        self.assertNotIn("failed:", p.stdout)
        self.assertNotIn("client error", p.stdout)
        self.assertNotIn("server error", p.stdout)
def setUp(self):
    # Run the shared command-params setup, then add a scratch-file
    # helper; tearDown is responsible for removing the created files.
    super(TestCPCommand, self).setUp()
    self.files = FileCreator()
class TestCPCommand(BaseAWSCommandParamsTest):
    """Verify which S3 operations ``aws s3 cp`` issues and with what params."""

    prefix = 's3 cp '

    def setUp(self):
        super(TestCPCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestCPCommand, self).tearDown()
        self.files.remove_all()

    def _assert_single_call(self, operation_name):
        # Shared check: exactly one request was made and it was the
        # expected operation.
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, operation_name)

    def test_operations_used_in_upload(self):
        filename = self.files.create_file('foo.txt', 'mycontent')
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, filename)
        self.run_cmd(cmdline, expected_rc=0)
        # A single-file upload should translate to exactly one PutObject.
        self._assert_single_call('PutObject')

    def test_key_name_added_when_only_bucket_provided(self):
        filename = self.files.create_file('foo.txt', 'mycontent')
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]
        cmdline = '%s %s s3://bucket/' % (self.prefix, filename)
        self.run_cmd(cmdline, expected_rc=0)
        # With no key in the destination URI, the source filename is
        # used as the key for the single PutObject call.
        self._assert_single_call('PutObject')
        params = self.operations_called[0][1]
        self.assertEqual(params['key'], 'foo.txt')
        self.assertEqual(params['bucket'], 'bucket')

    def test_trailing_slash_appended(self):
        filename = self.files.create_file('foo.txt', 'mycontent')
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]
        # Here we're saying s3://bucket instead of s3://bucket/
        # This should still work the same as if we added the trailing slash.
        cmdline = '%s %s s3://bucket' % (self.prefix, filename)
        self.run_cmd(cmdline, expected_rc=0)
        self._assert_single_call('PutObject')
        params = self.operations_called[0][1]
        self.assertEqual(params['key'], 'foo.txt')
        self.assertEqual(params['bucket'], 'bucket')

    def test_operations_used_in_download_file(self):
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"', 'Body': six.BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix,
                                                self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # A single-file download is a HeadObject (size check) followed
        # by the GetObject that fetches the body.
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')

    def test_operations_used_in_recursive_download(self):
        self.parsed_responses = [
            {'ETag': '"foo-1"', 'Contents': [], 'CommonPrefixes': []},
        ]
        cmdline = '%s s3://bucket/key.txt %s --recursive' % (
            self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # The listing came back empty, so ListObjects is the only call
        # made — no GetObject requests follow.
        self._assert_single_call('ListObjects')