Example #1
    def test_s3_file(self):
        """
        Generate a single s3 file
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                         'dest': {'path': 'text1.txt', 'type': 'local'},
                         'dir_op': False, 'use_src_name': False}
        params = {'region': 'us-east-1'}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        self.patch_make_request()

        file_gen = FileGenerator(self.client, '')
        files = file_gen.call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_stat = FileStat(src=self.file1, dest='text1.txt',
                             compare_key='text1.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='')

        ref_list = [file_stat]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
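Every example on this page asserts through a shared compare_files helper and builds its expected records as FileStat or FileInfo objects, none of which are shown here. Below is a minimal sketch consistent with how the tests use them; the attribute names are taken from the constructor calls above, and everything else is an assumption rather than aws-cli's actual code.

    class FileStat(object):
        # Plain record of one src -> dest transfer candidate (sketch).
        def __init__(self, src, dest=None, compare_key=None, size=None,
                     last_update=None, src_type=None, dest_type=None,
                     operation_name=None):
            self.src = src
            self.dest = dest
            self.compare_key = compare_key
            self.size = size
            self.last_update = last_update
            self.src_type = src_type
            self.dest_type = dest_type
            self.operation_name = operation_name

    def compare_files(testcase, result_file, ref_file):
        # Assert that two records describe the same file, field by field
        # (sketch of the helper the examples import from the test utils).
        testcase.assertEqual(result_file.src, ref_file.src)
        testcase.assertEqual(result_file.dest, ref_file.dest)
        testcase.assertEqual(result_file.compare_key, ref_file.compare_key)
        testcase.assertEqual(result_file.size, ref_file.size)
        testcase.assertEqual(result_file.last_update, ref_file.last_update)
        testcase.assertEqual(result_file.src_type, ref_file.src_type)
        testcase.assertEqual(result_file.dest_type, ref_file.dest_type)
        testcase.assertEqual(result_file.operation_name,
                             ref_file.operation_name)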
Example #2
 def test_local_directory(self):
     """
     Generate an entire local directory.
     """
     input_local_dir = {'src': {'path': self.local_dir,
                                'type': 'local'},
                        'dest': {'path': 'bucket/',
                                 'type': 's3'},
                        'dir_op': True, 'use_src_name': True}
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.service,
                           self.endpoint, '').call(input_local_dir)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                          compare_key='text1.txt', size=size,
                          last_update=last_update, src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
     path = self.local_dir + 'another_directory' + os.sep \
         + 'text2.txt'
     size, last_update = get_file_stat(path)
     file_info2 = FileInfo(src=path,
                           dest='bucket/another_directory/text2.txt',
                           compare_key='another_directory/text2.txt',
                           size=size, last_update=last_update,
                           src_type='local',
                           dest_type='s3', operation_name='',
                           service=None, endpoint=None)
     ref_list = [file_info2, file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
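The reference list above expects another_directory/text2.txt before the top-level text1.txt, which implies a lexical, depth-first walk of the local directory. A sketch of a traversal with that ordering (an illustration of the assumed order, not awscli's implementation):

    import os

    def list_local_files(directory):
        # Yield file paths in lexical, depth-first order, so the contents
        # of 'another_directory' come before the top-level 'text1.txt'.
        for name in sorted(os.listdir(directory)):
            path = os.path.join(directory, name)
            if os.path.isdir(path):
                for sub_path in list_local_files(path):
                    yield sub_path
            else:
                yield path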
Example #3
    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_stat = FileStat(src=self.file2,
                             dest='another_directory' + os.sep +
                             'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='')
        file_stat2 = FileStat(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='')

        ref_list = [file_stat, file_stat2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #4
    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '', params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file2,
                             dest='another_directory' + os.sep + 'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)
        file_info2 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='',
                              service=None, endpoint=None)

        ref_list = [file_info, file_info2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #5
    def test_s3_file(self):
        #
        # Generate a single s3 file
        # Note: Size and last update are not tested because s3 generates them.
        #
        input_s3_file = {
            'src': {
                'path': self.file1,
                'type': 's3'
            },
            'dest': {
                'path': 'text1.txt',
                'type': 'local'
            },
            'dir_op': False,
            'use_src_name': False
        }
        expected_file_size = 15
        result_list = list(
            FileGenerator(self.service, self.endpoint, '').call(input_s3_file))
        file_stat = FileStat(src=self.file1,
                             dest='text1.txt',
                             compare_key='text1.txt',
                             size=expected_file_size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local',
                             operation_name='')

        expected_list = [file_stat]
        self.assertEqual(len(result_list), 1)
        compare_files(self, result_list[0], expected_list[0])
Example #6
    def test_s3_file(self):
        """
        Generate a single s3 file
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                         'dest': {'path': 'text1.txt', 'type': 'local'},
                         'dir_op': False, 'use_src_name': False}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '', params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file1, dest='text1.txt',
                             compare_key='text1.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #7
    def test_s3_file(self):
        """
        Generate a single s3 file
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.file1, "type": "s3"},
            "dest": {"path": "text1.txt", "type": "local"},
            "dir_op": False,
            "use_src_name": False,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #8
 def test_local_directory(self):
     """
     Generate an entire local directory.
     """
     input_local_dir = {'src': {'path': self.local_dir,
                                'type': 'local'},
                        'dest': {'path': 'bucket/',
                                 'type': 's3'},
                        'dir_op': True, 'use_src_name': True}
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.service, self.endpoint,
                           '', params).call(input_local_dir)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                          compare_key='text1.txt', size=size,
                          last_update=last_update, src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
     path = self.local_dir + 'another_directory' + os.sep \
         + 'text2.txt'
     size, last_update = get_file_stat(path)
     file_info2 = FileInfo(src=path,
                           dest='bucket/another_directory/text2.txt',
                           compare_key='another_directory/text2.txt',
                           size=size, last_update=last_update,
                           src_type='local',
                           dest_type='s3', operation_name='',
                           service=None, endpoint=None)
     ref_list = [file_info2, file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Example #9
    def test_s3_directory(self):
        #
        # Generates s3 files under a common prefix. It also ensures that
        # zero-size files are ignored.
        # Note: Size and last update are not tested because s3 generates them.
        #
        input_s3_file = {'src': {'path': self.bucket+'/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        result_list = list(
            FileGenerator(self.service, self.endpoint, '').call(
                input_s3_file))
        file_stat = FileStat(src=self.file2,
                             dest='another_directory' + os.sep + 'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=21,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='')
        file_stat2 = FileStat(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=15,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='')

        expected_result = [file_stat, file_stat2]
        self.assertEqual(len(result_list), 2)
        compare_files(self, result_list[0], expected_result[0])
        compare_files(self, result_list[1], expected_result[1])
Example #10
    def test_s3_file(self):
        """
        Generate a single s3 file
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                         'dest': {'path': 'text1.txt', 'type': 'local'},
                         'dir_op': False, 'use_src_name': False}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file1, dest='text1.txt',
                             compare_key='text1.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #13
 def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {
         "src": {"path": self.local_file, "type": "local"},
         "dest": {"path": "bucket/text1.txt", "type": "s3"},
         "dir_op": False,
         "use_src_name": False,
     }
     params = {"region": "us-east-1"}
     files = FileGenerator(self.service, self.endpoint, "", params).call(input_local_file)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(
         src=self.local_file,
         dest="bucket/text1.txt",
         compare_key="text1.txt",
         size=size,
         last_update=last_update,
         src_type="local",
         dest_type="s3",
         operation_name="",
         service=None,
         endpoint=None,
     )
     ref_list = [file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
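The local-file examples unpack size, last_update = get_file_stat(path) to build the expected record. A plausible minimal implementation of that helper (a sketch; the real one in awscli's s3 utilities may differ, for instance in timezone handling):

    import datetime
    import os

    def get_file_stat(path):
        # Return (size_in_bytes, last_modified_datetime) for a local file.
        stats = os.stat(path)
        last_update = datetime.datetime.fromtimestamp(stats.st_mtime)
        return stats.st_size, last_update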
Example #14
    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included, because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            'src': {
                'path': self.bucket + '/',
                'type': 's3'
            },
            'dest': {
                'path': '',
                'type': 'local'
            },
            'dir_op': True,
            'use_src_name': True
        }
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint, 'delete',
                              params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=result_list[0].size,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[2].size,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
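Throughout the delete examples, each expected dest is just the S3 key with '/' translated to the local path separator, so the zero-byte marker key 'another_directory/' maps to 'another_directory' + os.sep. A sketch of that mapping (the helper name is hypothetical):

    import os

    def find_dest_path(dest_root, compare_key):
        # 'another_directory/text2.txt' -> 'another_directory\\text2.txt'
        # on Windows; unchanged on POSIX systems.
        return dest_root + compare_key.replace('/', os.sep)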
Example #15
 def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {
         'src': {
             'path': self.local_file,
             'type': 'local'
         },
         'dest': {
             'path': 'bucket/text1.txt',
             'type': 's3'
         },
         'dir_op': False,
         'use_src_name': False
     }
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.client, '').call(input_local_file)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_stat = FileStat(src=self.local_file,
                          dest='bucket/text1.txt',
                          compare_key='text1.txt',
                          size=size,
                          last_update=last_update,
                          src_type='local',
                          dest_type='s3',
                          operation_name='')
     ref_list = [file_stat]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Example #16
 def test_s3_single_file_delete(self):
     input_s3_file = {
         'src': {
             'path': self.file1,
             'type': 's3'
         },
         'dest': {
             'path': '',
             'type': 'local'
         },
         'dir_op': False,
         'use_src_name': True
     }
     self.client = mock.Mock()
     file_gen = FileGenerator(self.client, 'delete')
     result_list = list(file_gen.call(input_s3_file))
     self.assertEqual(len(result_list), 1)
     compare_files(
         self, result_list[0],
         FileStat(src=self.file1,
                  dest='text1.txt',
                  compare_key='text1.txt',
                  size=None,
                  last_update=None,
                  src_type='s3',
                  dest_type='local',
                  operation_name='delete'))
     self.client.head_object.assert_not_called()
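The head_object.assert_not_called() check pins down the point of this example: a single-file delete needs neither size nor last_update, so the generator can skip the HeadObject round trip. A sketch of that branch, assuming the non-delete path fetches metadata with head_object as the stubbed responses in Example #1 suggest:

    def stat_s3_object(client, bucket, key, operation_name):
        # For deletes the metadata is never consulted, so avoid the
        # extra HeadObject request entirely (sketch).
        if operation_name == 'delete':
            return None, None
        response = client.head_object(Bucket=bucket, Key=key)
        return response['ContentLength'], response['LastModified']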
Example #17
 def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {'src': {'path': self.local_file,
                                 'type': 'local'},
                         'dest': {'path': 'bucket/text1.txt',
                                  'type': 's3'},
                         'dir_op': False, 'use_src_name': False}
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.service,
                           self.endpoint, '', params).call(input_local_file)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                          compare_key='text1.txt', size=size,
                          last_update=last_update, src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
     ref_list = [file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Example #18
 def test_page_size(self):
     input_s3_file = {
         'src': {
             'path': self.bucket + '/',
             'type': 's3'
         },
         'dest': {
             'path': '',
             'type': 'local'
         },
         'dir_op': True,
         'use_src_name': True
     }
     file_gen = FileGenerator(self.service, self.endpoint, '',
                              page_size=1).call(input_s3_file)
     limited_file_gen = itertools.islice(file_gen, 1)
     result_list = list(limited_file_gen)
     file_stat = FileStat(src=self.file2,
                          dest='another_directory' + os.sep + 'text2.txt',
                          compare_key='another_directory/text2.txt',
                          size=21,
                          last_update=result_list[0].last_update,
                          src_type='s3',
                          dest_type='local',
                          operation_name='')
     # Ensure only one item is returned from ``ListObjects``
     self.assertEqual(len(result_list), 1)
     compare_files(self, result_list[0], file_stat)
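page_size=1 caps how many keys each ListObjects request may return, and the itertools.islice stops after the first yielded result, so only a single page is ever fetched. With botocore, such a knob is typically forwarded through the paginator's PaginationConfig; a sketch under that assumption:

    def list_objects(client, bucket, prefix, page_size=None):
        # Yield each object under ``prefix``, requesting at most
        # ``page_size`` keys per ListObjects call (sketch).
        paginator = client.get_paginator('list_objects')
        pages = paginator.paginate(
            Bucket=bucket, Prefix=prefix,
            PaginationConfig={'PageSize': page_size})
        for page in pages:
            for content in page.get('Contents', []):
                yield content['Key'], content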
Example #19
    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included, because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.bucket + "/", "type": "s3"},
            "dest": {"path": "", "type": "local"},
            "dir_op": True,
            "use_src_name": True,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "delete", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(
            src=self.bucket + "/another_directory/",
            dest="another_directory" + os.sep,
            compare_key="another_directory/",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )
        file_info2 = FileInfo(
            src=self.file2,
            dest="another_directory" + os.sep + "text2.txt",
            compare_key="another_directory/text2.txt",
            size=result_list[1].size,
            last_update=result_list[1].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )
        file_info3 = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[2].size,
            last_update=result_list[2].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #20
    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            'src': {
                'path': self.bucket + '/',
                'type': 's3'
            },
            'dest': {
                'path': '',
                'type': 'local'
            },
            'dir_op': True,
            'use_src_name': True
        }
        params = {'region': 'us-east-1'}
        # ``call`` returns a generator, so nothing touches the client
        # until the loop below; queuing the stubbed responses afterwards
        # therefore still works.
        files = FileGenerator(self.client, '').call(input_s3_file)

        self.parsed_responses = [{
            "CommonPrefixes": [],
            "Contents": [{
                "Key": "another_directory/text2.txt",
                "Size": 100,
                "LastModified": "2014-01-09T20:45:49.000Z"
            }, {
                "Key": "text1.txt",
                "Size": 10,
                "LastModified": "2013-01-09T20:45:49.000Z"
            }]
        }]
        self.patch_make_request()
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_stat = FileStat(src=self.file2,
                             dest='another_directory' + os.sep + 'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local',
                             operation_name='')
        file_stat2 = FileStat(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='')

        ref_list = [file_stat, file_stat2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #21
    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included, because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        self.parsed_responses = [{
            "CommonPrefixes": [], "Contents": [
                {"Key": "another_directory/", "Size": 0,
                 "LastModified": "2012-01-09T20:45:49.000Z"},
                {"Key": "another_directory/text2.txt", "Size": 100,
                 "LastModified": "2014-01-09T20:45:49.000Z"},
                {"Key": "text1.txt", "Size": 10,
                 "LastModified": "2013-01-09T20:45:49.000Z"}]}]
        self.patch_make_request()
        files = FileGenerator(self.client, 'delete').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_stat1 = FileStat(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=result_list[0].size,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete')
        file_stat2 = FileStat(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete')
        file_stat3 = FileStat(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[2].size,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete')

        ref_list = [file_stat1, file_stat2, file_stat3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
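Examples #1, #20 and #21 queue canned dicts in self.parsed_responses and call self.patch_make_request() before consuming the generator; that helper comes from the aws-cli test base class and is not shown on this page. A rough stand-in built on unittest.mock (a sketch only; the real helper patches botocore's request layer rather than individual client methods):

    from unittest import mock

    class StubbedClientMixin(object):
        def setUp(self):
            self.parsed_responses = []
            self.client = mock.Mock()

        def patch_make_request(self):
            # Hand out each queued dict, in order, as the parsed result
            # of whichever stubbed API call happens next (sketch).
            responses = iter(self.parsed_responses)

            def _next_response(*args, **kwargs):
                return next(responses)

            self.client.head_object.side_effect = _next_response
            self.client.list_objects.side_effect = _next_response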
Example #22
 def test_local_directory(self):
     """
     Generate an entire local directory.
     """
     input_local_dir = {
         "src": {"path": self.local_dir, "type": "local"},
         "dest": {"path": "bucket/", "type": "s3"},
         "dir_op": True,
         "use_src_name": True,
     }
     params = {"region": "us-east-1"}
     files = FileGenerator(self.service, self.endpoint, "", params).call(input_local_dir)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(
         src=self.local_file,
         dest="bucket/text1.txt",
         compare_key="text1.txt",
         size=size,
         last_update=last_update,
         src_type="local",
         dest_type="s3",
         operation_name="",
         service=None,
         endpoint=None,
     )
     path = self.local_dir + "another_directory" + os.sep + "text2.txt"
     size, last_update = get_file_stat(path)
     file_info2 = FileInfo(
         src=path,
         dest="bucket/another_directory/text2.txt",
         compare_key="another_directory/text2.txt",
         size=size,
         last_update=last_update,
         src_type="local",
         dest_type="s3",
         operation_name="",
         service=None,
         endpoint=None,
     )
     ref_list = [file_info2, file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Example #23
 def test_s3_single_file_delete(self):
     input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                      'dest': {'path': '', 'type': 'local'},
                      'dir_op': False, 'use_src_name': True}
     self.client = mock.Mock()
     file_gen = FileGenerator(self.client, 'delete')
     result_list = list(file_gen.call(input_s3_file))
     self.assertEqual(len(result_list), 1)
     compare_files(
         self,
         result_list[0],
         FileStat(src=self.file1, dest='text1.txt',
                  compare_key='text1.txt',
                  size=None, last_update=None,
                  src_type='s3', dest_type='local',
                  operation_name='delete')
     )
     self.client.head_object.assert_not_called()
Example #24
 def test_page_size(self):
     input_s3_file = {'src': {'path': self.bucket+'/', 'type': 's3'},
                      'dest': {'path': '', 'type': 'local'},
                      'dir_op': True, 'use_src_name': True}
     file_gen = FileGenerator(self.service, self.endpoint, '',
                              page_size=1).call(input_s3_file)
     limited_file_gen = itertools.islice(file_gen, 1)
     result_list = list(limited_file_gen)
     file_stat = FileStat(src=self.file2,
                          dest='another_directory' + os.sep + 'text2.txt',
                          compare_key='another_directory/text2.txt',
                          size=21,
                          last_update=result_list[0].last_update,
                          src_type='s3',
                          dest_type='local', operation_name='')
     # Ensure only one item is returned from ``ListObjects``
     self.assertEqual(len(result_list), 1)
     compare_files(self, result_list[0], file_stat)
Example #25
    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included, because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        files = FileGenerator(self.service, self.endpoint,
                              'delete').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=result_list[0].size,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[2].size,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #26
    def test_s3_file(self):
        #
        # Generate a single s3 file
        # Note: Size and last update are not tested because s3 generates them.
        #
        input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                         'dest': {'path': 'text1.txt', 'type': 'local'},
                         'dir_op': False, 'use_src_name': False}
        expected_file_size = 15
        result_list = list(
            FileGenerator(self.client, '').call(input_s3_file))
        file_stat = FileStat(src=self.file1, dest='text1.txt',
                             compare_key='text1.txt',
                             size=expected_file_size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='')

        expected_list = [file_stat]
        self.assertEqual(len(result_list), 1)
        compare_files(self, result_list[0], expected_list[0])
Example #27
    def test_s3_delete_directory(self):
        #
        # Generates s3 files under a common prefix. It also ensures that
        # the directory itself is included, because this is a delete command.
        # Note: Size and last update are not tested because s3 generates them.
        #
        input_s3_file = {
            'src': {
                'path': self.bucket + '/',
                'type': 's3'
            },
            'dest': {
                'path': '',
                'type': 'local'
            },
            'dir_op': True,
            'use_src_name': True
        }
        params = {'region': 'us-east-1'}
        result_list = list(
            FileGenerator(self.service, self.endpoint, 'delete',
                          params).call(input_s3_file))

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=0,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=21,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=15,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)

        expected_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), 3)
        compare_files(self, result_list[0], expected_list[0])
        compare_files(self, result_list[1], expected_list[1])
        compare_files(self, result_list[2], expected_list[2])
Example #28
    def test_s3_delete_directory(self):
        #
        # Generates s3 files under a common prefix. It also ensures that
        # the directory itself is included, because this is a delete command.
        # Note: Size and last update are not tested because s3 generates them.
        #
        input_s3_file = {'src': {'path': self.bucket+'/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        params = {'region': 'us-east-1'}
        result_list = list(
            FileGenerator(self.service, self.endpoint,
                          'delete', params).call(
                input_s3_file))

        file_info1 = FileInfo(
            src=self.bucket + '/another_directory/',
            dest='another_directory' + os.sep,
            compare_key='another_directory/',
            size=0,
            last_update=result_list[0].last_update,
            src_type='s3',
            dest_type='local', operation_name='delete',
            service=self.service, endpoint=self.endpoint)
        file_info2 = FileInfo(
            src=self.file2,
            dest='another_directory' + os.sep + 'text2.txt',
            compare_key='another_directory/text2.txt',
            size=21,
            last_update=result_list[1].last_update,
            src_type='s3',
            dest_type='local', operation_name='delete',
            service=self.service,
            endpoint=self.endpoint)
        file_info3 = FileInfo(
            src=self.file1,
            dest='text1.txt',
            compare_key='text1.txt',
            size=15,
            last_update=result_list[2].last_update,
            src_type='s3',
            dest_type='local', operation_name='delete',
            service=self.service,
            endpoint=self.endpoint)

        expected_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), 3)
        compare_files(self, result_list[0], expected_list[0])
        compare_files(self, result_list[1], expected_list[1])
        compare_files(self, result_list[2], expected_list[2])