def test_list_local_files_with_unicode_chars(self):
    """Local listing yields unicode file names in the expected order."""
    p = os.path.join
    # Top-level entries, including combining-accent and non-ASCII names.
    for name in (u"a", u"a\u0300", u"a\u0300-1", u"a\u03001", u"z",
                 u"\u00e6"):
        open(p(self.directory, name), "w").close()
    # One subdirectory whose name also carries a combining accent.
    os.mkdir(p(self.directory, u"a\u0300a"))
    for name in (u"a", u"z", u"\u00e6"):
        open(p(self.directory, u"a\u0300a", name), "w").close()
    file_generator = FileGenerator(None, None, None, None)
    values = [entry[0] for entry in
              file_generator.list_files(self.directory, dir_op=True)]
    expected_order = [
        os.path.join(self.directory, el) for el in [
            u"a",
            u"a\u0300",
            u"a\u0300-1",
            u"a\u03001",
            u"a\u0300a%sa" % os.path.sep,
            u"a\u0300a%sz" % os.path.sep,
            u"a\u0300a%s\u00e6" % os.path.sep,
            u"z",
            u"\u00e6",
        ]
    ]
    self.assertEqual(values, expected_order)
def test_error_raised_on_decoding_error(self, listdir_mock):
    """An undecodable directory entry raises FileDecodingError."""
    file_generator = FileGenerator(None, None, None, None)
    # b'\xe2\x9c\x93' is the utf-8 encoding of U+2713; it cannot be
    # decoded with an ASCII default encoding.
    listdir_mock.return_value = [b'\xe2\x9c\x93']
    with self.assertRaises(FileDecodingError):
        list(file_generator.list_files(self.directory, dir_op=True))
def test_s3_file(self):
    """
    Generate a single s3 file.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    file_gen = FileGenerator(self.service, self.endpoint, '')
    # call() returns a generator; materialize it so size/last_update
    # from the generated stat can seed the reference object below.
    result_list = list(file_gen.call(input_s3_file))
    file_stat = FileStat(src=self.file1, dest='text1.txt',
                         compare_key='text1.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3',
                         dest_type='local', operation_name='')
    ref_list = [file_stat]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
def test_s3_file(self):
    """
    Generate a single s3 file.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    # Stub the HeadObject response used to size the single file.
    self.parsed_responses = [
        {"ETag": "abcd", "ContentLength": 100,
         "LastModified": "2014-01-09T20:45:49.000Z"}]
    self.patch_make_request()
    file_gen = FileGenerator(self.client, '')
    result_list = list(file_gen.call(input_s3_file))
    file_stat = FileStat(src=self.file1, dest='text1.txt',
                         compare_key='text1.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3',
                         dest_type='local', operation_name='')
    ref_list = [file_stat]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
def test_warn_bad_symlink(self):
    """
    This tests to make sure it fails when following bad symlinks.
    """
    abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
    input_local_dir = {'src': {'path': abs_root, 'type': 'local'},
                       'dest': {'path': self.bucket, 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    # Keep a handle on the generator instance so its result_queue can
    # be inspected after iteration (the original built a second,
    # throwaway generator whose results were immediately discarded).
    file_gen = FileGenerator(self.service, self.endpoint, '', True)
    file_stats = file_gen.call(input_local_dir)
    all_filenames = self.filenames + self.symlink_files
    all_filenames.sort()
    result_list = []
    for file_stat in file_stats:
        result_list.append(getattr(file_stat, 'src'))
    self.assertEqual(len(result_list), len(all_filenames))
    # Just check to make sure the right local files are generated.
    for i in range(len(result_list)):
        filename = six.text_type(os.path.abspath(all_filenames[i]))
        self.assertEqual(result_list[i], filename)
    # A warning about the bad symlink should have been queued.
    self.assertFalse(file_gen.result_queue.empty())
def test_no_warning(self):
    """A readable, existing regular file triggers no warning."""
    generator = FileGenerator(self.service, self.endpoint, '', False)
    self.files.create_file("foo.txt", contents="foo")
    path = os.path.join(self.root, "foo.txt")
    self.assertFalse(generator.triggers_warning(path))
    # Nothing should have been queued for the user.
    self.assertTrue(generator.result_queue.empty())
def test_warn_bad_symlink(self):
    """
    This tests to make sure it fails when following bad symlinks.
    """
    abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
    input_local_dir = {'src': {'path': abs_root, 'type': 'local'},
                       'dest': {'path': self.bucket, 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    # Keep the generator instance so its result_queue can be checked
    # after iteration (the original created a second, throwaway
    # FileGenerator whose output was immediately overwritten).
    file_gen = FileGenerator(self.client, '', True)
    file_stats = file_gen.call(input_local_dir)
    all_filenames = self.filenames + self.symlink_files
    all_filenames.sort()
    result_list = []
    for file_stat in file_stats:
        result_list.append(getattr(file_stat, 'src'))
    self.assertEqual(len(result_list), len(all_filenames))
    # Just check to make sure the right local files are generated.
    for i in range(len(result_list)):
        filename = six.text_type(os.path.abspath(all_filenames[i]))
        self.assertEqual(result_list[i], filename)
    # A warning about the bad symlink should have been queued.
    self.assertFalse(file_gen.result_queue.empty())
def test_no_warning(self):
    """A readable, existing regular file triggers no warning."""
    generator = FileGenerator(self.client, '', False)
    self.files.create_file("foo.txt", contents="foo")
    path = os.path.join(self.root, "foo.txt")
    self.assertFalse(generator.triggers_warning(path))
    # Nothing should have been queued for the user.
    self.assertTrue(generator.result_queue.empty())
def test_normalize_sort_backslash(self):
    """Backslash-separated paths sort as if separated by '/'."""
    names = ['xyz123456789', 'xyz1\\test', 'xyz\\test']
    # After normalization the directory-like entries come first.
    expected = [names[2], names[1], names[0]]
    generator = FileGenerator(None, None, None)
    generator.normalize_sort(names, '\\', '/')
    self.assertEqual(names, expected)
def test_list_local_files_with_unicode_chars(self):
    """Local listing yields unicode file names in the expected order."""
    p = os.path.join
    # Top-level entries, including combining-accent and non-ASCII names.
    for name in (u'a', u'a\u0300', u'a\u0300-1', u'a\u03001', u'z',
                 u'\u00e6'):
        open(p(self.directory, name), 'w').close()
    # One subdirectory whose name also carries a combining accent.
    os.mkdir(p(self.directory, u'a\u0300a'))
    for name in (u'a', u'z', u'\u00e6'):
        open(p(self.directory, u'a\u0300a', name), 'w').close()
    file_generator = FileGenerator(None, None, None)
    values = [entry[0] for entry in
              file_generator.list_files(self.directory, dir_op=True)]
    expected_order = [
        os.path.join(self.directory, el) for el in [
            u"a",
            u"a\u0300",
            u"a\u0300-1",
            u"a\u03001",
            u"a\u0300a%sa" % os.path.sep,
            u"a\u0300a%sz" % os.path.sep,
            u"a\u0300a%s\u00e6" % os.path.sep,
            u"z",
            u"\u00e6",
        ]
    ]
    self.assertEqual(values, expected_order)
def test_s3_single_file_404(self):
    """
    Test the error message for a 404 ClientError for a single file
    listing.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    self.client = mock.Mock()
    # HeadObject fails with a 404 for the missing key.
    self.client.head_object.side_effect = ClientError(
        {'Error': {'Code': '404', 'Message': 'Not Found'}},
        'HeadObject',
    )
    file_gen = FileGenerator(self.client, '')
    files = file_gen.call(input_s3_file)
    # The error should include 404 and should include the key name.
    with self.assertRaisesRegex(ClientError, '404.*text1.txt'):
        list(files)
def test_s3_single_file_delete(self):
    """Deleting a single s3 object needs no HeadObject round trip."""
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': False, 'use_src_name': True}
    self.client = mock.Mock()
    generator = FileGenerator(self.client, 'delete')
    results = list(generator.call(input_s3_file))
    self.assertEqual(len(results), 1)
    expected = FileStat(src=self.file1, dest='text1.txt',
                        compare_key='text1.txt',
                        size=None, last_update=None,
                        src_type='s3', dest_type='local',
                        operation_name='delete')
    compare_files(self, results[0], expected)
    # No metadata lookup should have been made for a delete.
    self.client.head_object.assert_not_called()
def test_error_raised_on_decoding_error(self, listdir_mock):
    """An undecodable directory entry raises FileDecodingError."""
    file_generator = FileGenerator(None, None, None)
    # b'\xe2\x9c\x93' is the utf-8 encoding of U+2713; it cannot be
    # decoded with an ASCII default encoding.
    listdir_mock.return_value = [b'\xe2\x9c\x93']
    with self.assertRaises(FileDecodingError):
        list(file_generator.list_files(self.directory, dir_op=True))
def test_skip_symlink(self):
    """File symlinks are ignored when symlink following is disabled."""
    name = 'foo.txt'
    self.files.create_file(os.path.join(self.files.rootdir, name),
                           contents='foo.txt contents')
    link = os.path.join(self.files.rootdir, 'symlink')
    os.symlink(name, link)
    generator = FileGenerator(self.client, '', False)
    self.assertTrue(generator.should_ignore_file(link))
def test_no_exists(self):
    """A nonexistent path triggers a skip warning."""
    generator = FileGenerator(self.service, self.endpoint, '', False)
    missing = os.path.join(self.root, 'file')
    self.assertTrue(generator.triggers_warning(missing))
    warning_message = generator.result_queue.get()
    self.assertEqual(
        warning_message.message,
        ("warning: Skipping file %s. File does not exist." % missing))
def test_no_skip_symlink_dir(self):
    """Directory symlinks are not ignored when following symlinks."""
    path = os.path.join(self.files.rootdir, 'dir/')
    os.mkdir(path)
    sym_path = os.path.join(self.files.rootdir, 'symlink')
    os.symlink(path, sym_path)
    filegenerator = FileGenerator(self.client, '', True)
    # Neither the link nor the real directory should be skipped.
    self.assertFalse(filegenerator.should_ignore_file(sym_path))
    self.assertFalse(filegenerator.should_ignore_file(path))
def test_normalize_sort(self):
    """OS-separator paths sort as if separated by '/'."""
    sep = os.path.sep
    names = ['xyz123456789', 'xyz1' + sep + 'test', 'xyz' + sep + 'test']
    # After normalization the directory-like entries come first.
    expected = [names[2], names[1], names[0]]
    generator = FileGenerator(None, None, None)
    generator.normalize_sort(names, sep, '/')
    self.assertEqual(names, expected)
def test_no_exists(self):
    """A nonexistent path triggers a skip warning."""
    generator = FileGenerator(self.client, '', False)
    missing = os.path.join(self.root, 'file')
    self.assertTrue(generator.triggers_warning(missing))
    warning_message = generator.result_queue.get()
    self.assertEqual(
        warning_message.message,
        ("warning: Skipping file %s. File does not exist." % missing))
def test_no_skip_symlink(self):
    """File symlinks are not ignored when following symlinks."""
    name = 'foo.txt'
    target = self.files.create_file(
        os.path.join(self.files.rootdir, name),
        contents='foo.txt contents')
    link = os.path.join(self.files.rootdir, 'symlink')
    os.symlink(target, link)
    generator = FileGenerator(self.service, self.endpoint, '', True)
    # Neither the link nor the real file should be skipped.
    self.assertFalse(generator.should_ignore_file(link))
    self.assertFalse(generator.should_ignore_file(target))
def test_list_files_is_in_sorted_order(self):
    """Recursive local listing comes back lexicographically sorted."""
    p = os.path.join
    for name in ("test-123.txt", "test-321.txt",
                 "test123.txt", "test321.txt"):
        open(p(self.directory, name), "w").close()
    os.mkdir(p(self.directory, "test"))
    open(p(self.directory, "test", "foo.txt"), "w").close()
    file_generator = FileGenerator(None, None, None, None)
    values = [entry[0] for entry in
              file_generator.list_files(self.directory, dir_op=True)]
    self.assertEqual(values, sorted(values))
def test_error_raised_on_decoding_error(self, listdir_mock):
    """An undecodable directory entry is skipped with a queued warning."""
    file_generator = FileGenerator(None, None, None)
    # b'\xe2\x9c\x93' is the utf-8 encoding of U+2713; it cannot be
    # decoded with an ASCII default encoding.
    listdir_mock.return_value = [b'\xe2\x9c\x93']
    list(file_generator.list_files(self.directory, dir_op=True))
    # Ensure the message was added to the result queue and is
    # being skipped.
    self.assertFalse(file_generator.result_queue.empty())
    warning = file_generator.result_queue.get()
    self.assertIn("warning: Skipping file ", warning.message)
    self.assertIn("Please check your locale settings.", warning.message)
def test_is_special_file_warning(self):
    """A special file (here a unix socket) triggers a skip warning."""
    file_gen = FileGenerator(self.client, '', False)
    file_path = os.path.join(self.files.rootdir, 'foo')
    # Use socket for special file.
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
    finally:
        # Close the socket so the descriptor does not leak across tests.
        sock.close()
    self.assertTrue(return_val)
    warning_message = file_gen.result_queue.get()
    self.assertEqual(warning_message.message,
                     ("warning: Skipping file %s. File is character "
                      "special device, block special device, FIFO, or "
                      "socket." % file_path))
def test_no_read_access(self):
    """An unreadable file triggers a skip warning."""
    generator = FileGenerator(self.client, '', False)
    self.files.create_file("foo.txt", contents="foo")
    path = os.path.join(self.root, "foo.txt")
    # Simulate a permission failure when the file is probed for
    # readability.
    open_function = 'awscli.customizations.s3.filegenerator._open'
    with mock.patch(open_function) as mocked_open:
        mocked_open.side_effect = OSError()
        self.assertTrue(generator.triggers_warning(path))
    warning_message = generator.result_queue.get()
    self.assertEqual(warning_message.message,
                     ("warning: Skipping file %s. File/Directory is "
                      "not readable." % path))
def test_is_special_file_warning(self):
    """A special file (here a unix socket) triggers a skip warning."""
    file_gen = FileGenerator(self.service, self.endpoint, '', False)
    file_path = os.path.join(self.files.rootdir, 'foo')
    # Use socket for special file.
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
    finally:
        # Close the socket so the descriptor does not leak across tests.
        sock.close()
    self.assertTrue(return_val)
    warning_message = file_gen.result_queue.get()
    self.assertEqual(warning_message.message,
                     ("warning: Skipping file %s. File is character "
                      "special device, block special device, FIFO, or "
                      "socket." % file_path))
def test_no_read_access(self):
    """An unreadable file triggers a skip warning."""
    generator = FileGenerator(self.service, self.endpoint, '', False)
    self.files.create_file("foo.txt", contents="foo")
    path = os.path.join(self.root, "foo.txt")
    # Simulate a permission failure when the file is probed for
    # readability.
    open_function = 'awscli.customizations.s3.filegenerator._open'
    with mock.patch(open_function) as mocked_open:
        mocked_open.side_effect = OSError()
        self.assertTrue(generator.triggers_warning(path))
    warning_message = generator.result_queue.get()
    self.assertEqual(warning_message.message,
                     ("warning: Skipping file %s. File/Directory is "
                      "not readable." % path))
def test_list_files_is_in_sorted_order(self):
    """Recursive local listing comes back lexicographically sorted."""
    p = os.path.join
    for name in ('test-123.txt', 'test-321.txt',
                 'test123.txt', 'test321.txt'):
        open(p(self.directory, name), 'w').close()
    os.mkdir(p(self.directory, 'test'))
    open(p(self.directory, 'test', 'foo.txt'), 'w').close()
    file_generator = FileGenerator(None, None, None, None)
    values = [entry[0] for entry in
              file_generator.list_files(self.directory, dir_op=True)]
    self.assertEqual(values, sorted(values))
def test_list_files_is_in_sorted_order(self):
    """Recursive listing is sorted with os.sep normalized to '/'."""
    p = os.path.join
    for name in ('test-123.txt', 'test-321.txt',
                 'test123.txt', 'test321.txt'):
        open(p(self.directory, name), 'w').close()
    os.mkdir(p(self.directory, 'test'))
    open(p(self.directory, 'test', 'foo.txt'), 'w').close()
    file_generator = FileGenerator(None, None, None)
    values = [entry[0] for entry in
              file_generator.list_files(self.directory, dir_op=True)]
    # Sort order is defined on the separator-normalized paths so the
    # result matches s3 key ordering.
    expected = sorted(values, key=lambda item: item.replace(os.sep, '/'))
    self.assertEqual(values, expected)
def test_page_size(self):
    """With page_size=1 the first page yields exactly one object."""
    input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
    generator = FileGenerator(self.service, self.endpoint, '',
                              page_size=1).call(input_s3_file)
    # Pull only the first generated item off the stream.
    result_list = list(itertools.islice(generator, 1))
    expected = FileStat(src=self.file2,
                        dest='another_directory' + os.sep + 'text2.txt',
                        compare_key='another_directory/text2.txt',
                        size=21,
                        last_update=result_list[0].last_update,
                        src_type='s3',
                        dest_type='local', operation_name='')
    # Ensure only one item is returned from ``ListObjects``
    self.assertEqual(len(result_list), 1)
    compare_files(self, result_list[0], expected)
def test_s3_directory(self):
    """
    Generates s3 files under a common prefix. Also it ensures that
    zero size files are ignored.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
    files = FileGenerator(self.service, self.endpoint,
                          '').call(input_s3_file)
    result_list = list(files)
    file_stat = FileStat(src=self.file2,
                         dest='another_directory' + os.sep + 'text2.txt',
                         compare_key='another_directory/text2.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3',
                         dest_type='local', operation_name='')
    file_stat2 = FileStat(src=self.file1,
                          dest='text1.txt',
                          compare_key='text1.txt',
                          size=result_list[1].size,
                          last_update=result_list[1].last_update,
                          src_type='s3',
                          dest_type='local', operation_name='')
    ref_list = [file_stat, file_stat2]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
def test_follow_bad_symlink(self):
    """
    This tests to make sure it fails when following bad symlinks.
    """
    abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
    input_local_dir = {'src': {'path': abs_root, 'type': 'local'},
                       'dest': {'path': self.bucket, 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    file_stats = FileGenerator(self.service, self.endpoint, '',
                               True).call(input_local_dir)
    result_list = []
    rc = 0
    try:
        for file_stat in file_stats:
            result_list.append(getattr(file_stat, 'src'))
        # Only reached if iteration completed without raising.
        rc = 1
    except OSError:
        pass
    # Error shows up as ValueError in Python 3.
    except ValueError:
        pass
    # rc stays 0 only when the bad symlink aborted the iteration.
    self.assertEqual(0, rc)
def test_local_file(self):
    """
    Generate a single local file.
    """
    input_local_file = {'src': {'path': self.local_file,
                                'type': 'local'},
                        'dest': {'path': 'bucket/text1.txt',
                                 'type': 's3'},
                        'dir_op': False, 'use_src_name': False}
    files = FileGenerator(self.client, '').call(input_local_file)
    result_list = list(files)
    # Size and mtime come straight from the local filesystem.
    size, last_update = get_file_stat(self.local_file)
    file_stat = FileStat(src=self.local_file,
                         dest='bucket/text1.txt',
                         compare_key='text1.txt',
                         size=size, last_update=last_update,
                         src_type='local',
                         dest_type='s3', operation_name='')
    ref_list = [file_stat]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
def test_local_directory(self):
    """
    Generate an entire local directory.
    """
    input_local_dir = {'src': {'path': self.local_dir, 'type': 'local'},
                       'dest': {'path': 'bucket/', 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    files = FileGenerator(self.service, self.endpoint,
                          '').call(input_local_dir)
    result_list = list(files)
    size, last_update = get_file_stat(self.local_file)
    file_info = FileInfo(src=self.local_file,
                         dest='bucket/text1.txt',
                         compare_key='text1.txt',
                         size=size, last_update=last_update,
                         src_type='local',
                         dest_type='s3', operation_name='',
                         service=None, endpoint=None)
    path = self.local_dir + 'another_directory' + os.sep \
        + 'text2.txt'
    size, last_update = get_file_stat(path)
    file_info2 = FileInfo(src=path,
                          dest='bucket/another_directory/text2.txt',
                          compare_key='another_directory/text2.txt',
                          size=size, last_update=last_update,
                          src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
    # Nested files are generated before the top-level file.
    ref_list = [file_info2, file_info]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
def test_s3_directory(self):
    """
    Generates s3 files under a common prefix. Also it ensures that
    zero size files are ignored.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
    generator = FileGenerator(self.service, self.endpoint, '')
    result_list = list(generator.call(input_s3_file))
    expected = [
        FileStat(src=self.file2,
                 dest='another_directory' + os.sep + 'text2.txt',
                 compare_key='another_directory/text2.txt',
                 size=21,
                 last_update=result_list[0].last_update,
                 src_type='s3',
                 dest_type='local', operation_name=''),
        FileStat(src=self.file1,
                 dest='text1.txt',
                 compare_key='text1.txt',
                 size=15,
                 last_update=result_list[1].last_update,
                 src_type='s3',
                 dest_type='local', operation_name=''),
    ]
    self.assertEqual(len(result_list), 2)
    compare_files(self, result_list[0], expected[0])
    compare_files(self, result_list[1], expected[1])
def test_follow_symlink(self):
    """With symlink following on, linked files are generated too."""
    # First remove the bad symlink.
    os.remove(os.path.join(self.root, 'symlink_2'))
    abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
    input_local_dir = {'src': {'path': abs_root, 'type': 'local'},
                       'dest': {'path': self.bucket, 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    file_stats = FileGenerator(self.client, '',
                               True).call(input_local_dir)
    expected = sorted(self.filenames + self.symlink_files)
    generated = [getattr(stat, 'src') for stat in file_stats]
    self.assertEqual(len(generated), len(expected))
    # Just check to make sure the right local files are generated.
    for produced, name in zip(generated, expected):
        self.assertEqual(produced, six.text_type(os.path.abspath(name)))
def test_s3_delete_directory(self):
    """
    Generates s3 files under a common prefix. Also it ensures that
    the directory itself is included because it is a delete command.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
    params = {'region': 'us-east-1'}
    files = FileGenerator(self.service, self.endpoint, 'delete',
                          params).call(input_s3_file)
    result_list = list(files)
    ref_list = [
        # The prefix itself is included for a delete operation.
        FileInfo(src=self.bucket + '/another_directory/',
                 dest='another_directory' + os.sep,
                 compare_key='another_directory/',
                 size=result_list[0].size,
                 last_update=result_list[0].last_update,
                 src_type='s3',
                 dest_type='local', operation_name='delete',
                 service=self.service, endpoint=self.endpoint),
        FileInfo(src=self.file2,
                 dest='another_directory' + os.sep + 'text2.txt',
                 compare_key='another_directory/text2.txt',
                 size=result_list[1].size,
                 last_update=result_list[1].last_update,
                 src_type='s3',
                 dest_type='local', operation_name='delete',
                 service=self.service, endpoint=self.endpoint),
        FileInfo(src=self.file1,
                 dest='text1.txt',
                 compare_key='text1.txt',
                 size=result_list[2].size,
                 last_update=result_list[2].last_update,
                 src_type='s3',
                 dest_type='local', operation_name='delete',
                 service=self.service, endpoint=self.endpoint),
    ]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
def test_s3_file(self):
    """
    Generate a single s3 file.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    expected_file_size = 15
    generator = FileGenerator(self.service, self.endpoint, '')
    result_list = list(generator.call(input_s3_file))
    expected = FileStat(src=self.file1, dest='text1.txt',
                        compare_key='text1.txt',
                        size=expected_file_size,
                        last_update=result_list[0].last_update,
                        src_type='s3',
                        dest_type='local', operation_name='')
    self.assertEqual(len(result_list), 1)
    compare_files(self, result_list[0], expected)
def test_s3_single_file_404(self):
    """
    Test the error message for a 404 ClientError for a single file
    listing.
    """
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
    self.client = mock.Mock()
    # NOTE(review): positional ClientError construction matches the
    # ClientError signature this test was written against — confirm it
    # still holds for the imported ClientError type.
    self.client.head_object.side_effect = \
        ClientError(404, 'Not Found', '404', 'HeadObject', 404)
    file_gen = FileGenerator(self.client, '')
    files = file_gen.call(input_s3_file)
    # The error should include 404 and should include the key name.
    # assertRaisesRegexp is a deprecated alias removed in Python 3.12.
    with self.assertRaisesRegex(ClientError, '404.*text1.txt'):
        list(files)
def test_s3_delete_directory(self):
    """
    Generates s3 files under a common prefix. Also it ensures that
    the directory itself is included because it is a delete command.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
    params = {'region': 'us-east-1'}
    generator = FileGenerator(self.service, self.endpoint, 'delete',
                              params)
    result_list = list(generator.call(input_s3_file))
    expected_list = [
        # The prefix itself is included for a delete operation.
        FileInfo(src=self.bucket + '/another_directory/',
                 dest='another_directory' + os.sep,
                 compare_key='another_directory/',
                 size=0,
                 last_update=result_list[0].last_update,
                 src_type='s3',
                 dest_type='local', operation_name='delete',
                 service=self.service, endpoint=self.endpoint),
        FileInfo(src=self.file2,
                 dest='another_directory' + os.sep + 'text2.txt',
                 compare_key='another_directory/text2.txt',
                 size=21,
                 last_update=result_list[1].last_update,
                 src_type='s3',
                 dest_type='local', operation_name='delete',
                 service=self.service, endpoint=self.endpoint),
        FileInfo(src=self.file1,
                 dest='text1.txt',
                 compare_key='text1.txt',
                 size=15,
                 last_update=result_list[2].last_update,
                 src_type='s3',
                 dest_type='local', operation_name='delete',
                 service=self.service, endpoint=self.endpoint),
    ]
    self.assertEqual(len(result_list), 3)
    for actual, expected in zip(result_list, expected_list):
        compare_files(self, actual, expected)
def test_s3_directory(self):
    """
    Generates s3 files under a common prefix. Also it ensures that
    zero size files are ignored.
    Note: Size and last update are not tested because s3 generates them.
    """
    input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
    # Patch the stubbed ListObjects response BEFORE the generator is
    # consumed; the original relied on call() being lazy and patched
    # after it, which is fragile.
    self.parsed_responses = [{
        "CommonPrefixes": [],
        "Contents": [
            {"Key": "another_directory/text2.txt",
             "Size": 100,
             "LastModified": "2014-01-09T20:45:49.000Z"},
            {"Key": "text1.txt",
             "Size": 10,
             "LastModified": "2013-01-09T20:45:49.000Z"}]}]
    self.patch_make_request()
    files = FileGenerator(self.client, '').call(input_s3_file)
    result_list = list(files)
    file_stat = FileStat(src=self.file2,
                         dest='another_directory' + os.sep + 'text2.txt',
                         compare_key='another_directory/text2.txt',
                         size=result_list[0].size,
                         last_update=result_list[0].last_update,
                         src_type='s3',
                         dest_type='local', operation_name='')
    file_stat2 = FileStat(src=self.file1,
                          dest='text1.txt',
                          compare_key='text1.txt',
                          size=result_list[1].size,
                          last_update=result_list[1].last_update,
                          src_type='s3',
                          dest_type='local', operation_name='')
    ref_list = [file_stat, file_stat2]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
def test_s3_single_file_delete(self):
    """Deleting a single s3 object needs no HeadObject round trip."""
    input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': False, 'use_src_name': True}
    self.client = mock.Mock()
    generator = FileGenerator(self.client, 'delete')
    results = list(generator.call(input_s3_file))
    self.assertEqual(len(results), 1)
    expected = FileStat(src=self.file1, dest='text1.txt',
                        compare_key='text1.txt',
                        size=None, last_update=None,
                        src_type='s3', dest_type='local',
                        operation_name='delete')
    compare_files(self, results[0], expected)
    # No metadata lookup should have been made for a delete.
    self.client.head_object.assert_not_called()
def test_no_follow_symlink(self):
    """With symlink following off, only real files are generated."""
    abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
    input_local_dir = {'src': {'path': abs_root, 'type': 'local'},
                       'dest': {'path': self.bucket, 'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    file_infos = FileGenerator(self.service, self.endpoint, '',
                               False).call(input_local_dir)
    self.filenames.sort()
    generated = [getattr(info, 'src') for info in file_infos]
    self.assertEqual(len(generated), len(self.filenames))
    # Just check to make sure the right local files are generated.
    for produced, name in zip(generated, self.filenames):
        self.assertEqual(produced, six.text_type(os.path.abspath(name)))
def test_list_files_with_invalid_timestamp(self, stat_mock):
    """A file whose mtime cannot be read falls back to EPOCH_TIME."""
    # The mocked stat reports size 9 and an unusable (None) timestamp.
    stat_mock.return_value = 9, None
    open(os.path.join(self.directory, 'test'), 'w').close()
    generator = FileGenerator(None, None, None)
    listed = list(generator.list_files(self.directory, dir_op=True))
    self.assertIs(listed[0][1]['LastModified'], EPOCH_TIME)
def test_warning(self):
    """A dangling symlink is ignored even when following symlinks."""
    link = os.path.join(self.files.rootdir, 'badsymlink')
    os.symlink('non-existent-file', link)
    generator = FileGenerator(self.service, self.endpoint, '', True)
    self.assertTrue(generator.should_ignore_file(link))