def test_s3_single_file_delete(self):
    """Deleting a single s3 object should not trigger a HeadObject call.

    The generator can build the FileStat for a delete without asking s3
    for size/last-modified, so ``head_object`` must never be invoked.
    """
    call_input = {
        'src': {'path': self.file1, 'type': 's3'},
        'dest': {'path': '', 'type': 'local'},
        'dir_op': False,
        'use_src_name': True,
    }
    self.client = mock.Mock()
    generated = list(FileGenerator(self.client, 'delete').call(call_input))
    self.assertEqual(len(generated), 1)
    expected = FileStat(src=self.file1, dest='text1.txt',
                        compare_key='text1.txt',
                        size=None, last_update=None,
                        src_type='s3', dest_type='local',
                        operation_name='delete')
    compare_files(self, generated[0], expected)
    # No metadata lookup should have happened for a delete.
    self.client.head_object.assert_not_called()
def test_warn_bad_symlink(self):
    """
    This tests to make sure it fails when following bad symlinks.

    A broken symlink should be skipped by the generator, but a warning
    must be placed on the generator's result queue.
    """
    abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
    input_local_dir = {'src': {'path': abs_root, 'type': 'local'},
                       'dest': {'path': self.bucket, 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    # Previously a throwaway FileGenerator was created and called here
    # with its result discarded; that dead duplicate call was removed.
    file_gen = FileGenerator(self.client, '', True)
    file_stats = file_gen.call(input_local_dir)
    all_filenames = self.filenames + self.symlink_files
    all_filenames.sort()
    result_list = [file_stat.src for file_stat in file_stats]
    self.assertEqual(len(result_list), len(all_filenames))
    # Just check to make sure the right local files are generated.
    for result, name in zip(result_list, all_filenames):
        self.assertEqual(result, six.text_type(os.path.abspath(name)))
    # The bad symlink must have produced a warning on the result queue.
    self.assertFalse(file_gen.result_queue.empty())
def test_s3_single_file_404(self):
    """
    Test the error message for a 404 ClientError for a single file listing.

    The raised error should surface both the HTTP code and the key name
    so the user can tell which object was missing.
    """
    input_s3_file = {
        'src': {'path': self.file1, 'type': 's3'},
        'dest': {'path': 'text1.txt', 'type': 'local'},
        'dir_op': False,
        'use_src_name': False,
    }
    # Removed an unused local ``params`` dict that was never referenced.
    self.client = mock.Mock()
    self.client.head_object.side_effect = \
        ClientError(
            {'Error': {'Code': '404', 'Message': 'Not Found'}},
            'HeadObject',
        )
    file_gen = FileGenerator(self.client, '')
    files = file_gen.call(input_s3_file)
    # The error should include 404 and should include the key name.
    with self.assertRaisesRegex(ClientError, '404.*text1.txt'):
        list(files)
def test_warn_bad_symlink(self):
    """
    This tests to make sure it fails when following bad symlinks.

    A broken symlink should be skipped by the generator, but a warning
    must be placed on the generator's result queue.
    """
    abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
    input_local_dir = {'src': {'path': abs_root, 'type': 'local'},
                       'dest': {'path': self.bucket, 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    # Previously a throwaway FileGenerator was created and called here
    # with its result discarded; that dead duplicate call was removed.
    file_gen = FileGenerator(self.service, self.endpoint, '', True)
    file_stats = file_gen.call(input_local_dir)
    all_filenames = self.filenames + self.symlink_files
    all_filenames.sort()
    result_list = [file_stat.src for file_stat in file_stats]
    self.assertEqual(len(result_list), len(all_filenames))
    # Just check to make sure the right local files are generated.
    for result, name in zip(result_list, all_filenames):
        self.assertEqual(result, six.text_type(os.path.abspath(name)))
    # The bad symlink must have produced a warning on the result queue.
    self.assertFalse(file_gen.result_queue.empty())
def test_s3_file(self):
    """
    Generate a single s3 file.

    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    # Removed an unused local ``params`` dict that was never referenced.
    file_gen = FileGenerator(self.service, self.endpoint, '')
    result_list = list(file_gen.call(input_s3_file))
    self.assertEqual(len(result_list), 1)
    # size/last_update are echoed from the generated result since s3
    # supplies them and their exact values are not under test here.
    file_stat = FileStat(src=self.file1, dest='text1.txt',
                         compare_key='text1.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3', dest_type='local',
                         operation_name='')
    compare_files(self, result_list[0], file_stat)
def test_s3_file(self):
    """
    Generate a single s3 file.

    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    # Removed an unused local ``params`` dict that was never referenced.
    # Stub the HeadObject response the generator will receive.
    self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                              "LastModified": "2014-01-09T20:45:49.000Z"}]
    self.patch_make_request()
    file_gen = FileGenerator(self.client, '')
    result_list = list(file_gen.call(input_s3_file))
    self.assertEqual(len(result_list), 1)
    # size/last_update are echoed from the generated result since s3
    # supplies them and their exact values are not under test here.
    file_stat = FileStat(src=self.file1, dest='text1.txt',
                         compare_key='text1.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3', dest_type='local',
                         operation_name='')
    compare_files(self, result_list[0], file_stat)
def test_s3_file(self):
    """
    Generate a single s3 file.

    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    # Removed an unused local ``params`` dict that was never referenced.
    # Stub the HeadObject response the generator will receive.
    self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                              "LastModified": "2014-01-09T20:45:49.000Z"}]
    self.patch_make_request()
    file_gen = FileGenerator(self.client, '')
    result_list = list(file_gen.call(input_s3_file))
    self.assertEqual(len(result_list), 1)
    # size/last_update are echoed from the generated result since s3
    # supplies them and their exact values are not under test here.
    file_stat = FileStat(src=self.file1, dest='text1.txt',
                         compare_key='text1.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3', dest_type='local',
                         operation_name='')
    compare_files(self, result_list[0], file_stat)
def test_s3_file(self):
    """
    Generate a single s3 file.

    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    # Removed an unused local ``params`` dict that was never referenced.
    file_gen = FileGenerator(self.service, self.endpoint, '')
    result_list = list(file_gen.call(input_s3_file))
    self.assertEqual(len(result_list), 1)
    # size/last_update are echoed from the generated result since s3
    # supplies them and their exact values are not under test here.
    file_stat = FileStat(src=self.file1, dest='text1.txt',
                         compare_key='text1.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3', dest_type='local',
                         operation_name='')
    compare_files(self, result_list[0], file_stat)
def test_s3_single_file_404(self):
    """
    Test the error message for a 404 ClientError for a single file listing.

    The raised error should surface both the HTTP code and the key name
    so the user can tell which object was missing.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    # Removed an unused local ``params`` dict that was never referenced.
    self.client = mock.Mock()
    # botocore's ClientError takes (error_response_dict, operation_name);
    # the previous positional-int construction would itself raise TypeError.
    self.client.head_object.side_effect = \
        ClientError(
            {'Error': {'Code': '404', 'Message': 'Not Found'}},
            'HeadObject',
        )
    file_gen = FileGenerator(self.client, '')
    files = file_gen.call(input_s3_file)
    # The error should include 404 and should include the key name.
    # assertRaisesRegex replaces the deprecated assertRaisesRegexp alias.
    with self.assertRaisesRegex(ClientError, '404.*text1.txt'):
        list(files)
def test_s3_single_file_delete(self):
    """Deleting a single s3 object should not trigger a HeadObject call.

    The generator can build the FileStat for a delete without asking s3
    for size/last-modified, so ``head_object`` must never be invoked.
    """
    call_input = {
        'src': {'path': self.file1, 'type': 's3'},
        'dest': {'path': '', 'type': 'local'},
        'dir_op': False,
        'use_src_name': True,
    }
    self.client = mock.Mock()
    generated = list(FileGenerator(self.client, 'delete').call(call_input))
    self.assertEqual(len(generated), 1)
    expected = FileStat(src=self.file1, dest='text1.txt',
                        compare_key='text1.txt',
                        size=None, last_update=None,
                        src_type='s3', dest_type='local',
                        operation_name='delete')
    compare_files(self, generated[0], expected)
    # No metadata lookup should have happened for a delete.
    self.client.head_object.assert_not_called()