Code example #1
 def test_compare_lastmod_copy(self):
     """
     Confirms that the last-modified time comparison works for copy operations.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     future_time = time + datetime.timedelta(0, 3)
     src_file = FileInfo(src='', dest='',
                         compare_key='comparator_test.py', size=10,
                         last_update=future_time, src_type='s3',
                         dest_type='s3', operation='copy')
     dest_file = FileInfo(src='', dest='',
                          compare_key='comparator_test.py', size=10,
                          last_update=time, src_type='s3',
                          dest_type='s3', operation='')
     src_files.append(src_file)
     dest_files.append(dest_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     ref_list.append(src_file)
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
Code example #2
File: test_comparator.py Project: smikes/aws-cli
    def test_compare_size_only_src_older_than_dest(self):
        """
        Confirm that files with the same size but different update times are not
        synced when `size_only` is set.
        """
        time_dst = datetime.datetime.now()
        time_src = time_dst + datetime.timedelta(days=1)

        src_file = FileInfo(src='',
                            dest='',
                            compare_key='test.py',
                            size=10,
                            last_update=time_src,
                            src_type='local',
                            dest_type='s3',
                            operation_name='upload',
                            service=None,
                            endpoint=None)

        dst_file = FileInfo(src='',
                            dest='',
                            compare_key='test.py',
                            size=10,
                            last_update=time_dst,
                            src_type='s3',
                            dest_type='local',
                            operation_name='',
                            service=None,
                            endpoint=None)

        files = self.comparator.call(iter([src_file]), iter([dst_file]))
        self.assertEqual(sum(1 for _ in files), 0)
Code example #3
 def test_compare_key_less(self):
     """
     Confirm the appropriate action is taken when the source compare key
     is less than the destination compare key.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     src_file = FileInfo(src='', dest='',
                         compare_key='bomparator_test.py', size=10,
                         last_update=time, src_type='local',
                         dest_type='s3', operation='upload')
     dest_file = FileInfo(src='', dest='',
                          compare_key='comparator_test.py', size=10,
                          last_update=time, src_type='s3',
                          dest_type='local', operation='')
     src_files.append(src_file)
     dest_files.append(dest_file)
     dest_file.operation = 'delete'
     ref_list.append(src_file)
     ref_list.append(dest_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
Code example #4
    def test_bucket(self):
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'

        file_info = FileInfo(
            src=self.bucket,
            operation_name='make_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint,
        )
        S3Handler(self.session, self.params).call([file_info])
        buckets_list = []
        for bucket in list_buckets(self.session):
            buckets_list.append(bucket['Name'])
        self.assertIn(self.bucket[:-1], buckets_list)

        file_info = FileInfo(src=self.bucket,
                             operation_name='remove_bucket',
                             size=0,
                             service=self.service,
                             endpoint=self.endpoint)
        S3Handler(self.session, self.params).call([file_info])
        buckets_list = []
        for bucket in list_buckets(self.session):
            buckets_list.append(bucket['Name'])
        self.assertNotIn(self.bucket[:-1], buckets_list)
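Note: `self.bucket` is created above with a trailing `'/'`, so the assertions slice it with `self.bucket[:-1]` to recover the bare bucket name that `list_buckets` reports.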
Code example #5
    def test_compare_exact_timestamps_src_older(self):
        """
        Confirm that same-sized files are synced when
        the source is older than the destination and
        `exact_timestamps` is set.
        """
        time_src = datetime.datetime.now() - datetime.timedelta(days=1)
        time_dst = datetime.datetime.now()

        src_file = FileInfo(src='',
                            dest='',
                            compare_key='test.py',
                            size=10,
                            last_update=time_src,
                            src_type='s3',
                            dest_type='local',
                            operation_name='download',
                            service=None,
                            endpoint=None)

        dst_file = FileInfo(src='',
                            dest='',
                            compare_key='test.py',
                            size=10,
                            last_update=time_dst,
                            src_type='local',
                            dest_type='s3',
                            operation_name='',
                            service=None,
                            endpoint=None)

        files = self.comparator.call(iter([src_file]), iter([dst_file]))
        self.assertEqual(sum(1 for _ in files), 1)
Code example #6
File: test_s3handler.py Project: Frame02/aws-cli
 def test_enqueue_multipart_download_stream(self):
     """
     This test ensures the right calls are made in ``_enqueue_tasks()``
     if the file should be a multipart download.
     """
     s3handler = S3StreamHandler(
         self.session, self.params,
         runtime_config=runtime_config(multipart_threshold=5))
     s3handler.executor = mock.Mock()
     fileinfo = FileInfo('filename', operation_name='download',
                         is_stream=True)
     with mock.patch('awscli.customizations.s3.s3handler'
                     '.S3StreamHandler._enqueue_range_download_tasks') as \
             mock_enqueue_range_tasks:
         with mock.patch('awscli.customizations.s3.fileinfo.FileInfo'
                         '.set_size_from_s3') as mock_set_size_from_s3:
             # Set the file size to something larger than the multipart
             # threshold.
             fileinfo.size = 100
             # Run the main enqueue function.
             s3handler._enqueue_tasks([fileinfo])
             # Assert that the size of the ``FileInfo`` object was set
             # if we are downloading a stream.
             self.assertTrue(mock_set_size_from_s3.called)
             # Ensure that this download would have been a multipart
             # download.
             self.assertTrue(mock_enqueue_range_tasks.called)
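With `multipart_threshold=5` and the stream's size resolving to 100 bytes, the handler has to take the ranged-download path, which is exactly what the two assertions verify.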
Code example #7
    def test_bucket(self):
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'
        orig_number_buckets = len(list_buckets(self.session))

        file_info = FileInfo(
            src=self.bucket,
            operation_name='make_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        S3Handler(self.session, self.params).call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets + 1, number_buckets)

        file_info = FileInfo(
            src=self.bucket,
            operation_name='remove_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        S3Handler(self.session, self.params).call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets, number_buckets)
Code example #8
File: test_s3handler.py Project: zsjohny/aws-cli
    def test_bucket(self):
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'

        file_info = FileInfo(src=self.bucket,
                             operation_name='make_bucket',
                             size=0,
                             client=self.client,
                             source_client=self.source_client)
        S3Handler(self.session, self.params).call([file_info])
        buckets_list = []
        for bucket in self.client.list_buckets().get('Buckets', []):
            buckets_list.append(bucket['Name'])
        self.assertIn(self.bucket[:-1], buckets_list)

        file_info = FileInfo(src=self.bucket,
                             operation_name='remove_bucket',
                             size=0,
                             client=self.client,
                             source_client=self.source_client)
        S3Handler(self.session, self.params).call([file_info])
        buckets_list = []
        for bucket in self.client.list_buckets().get('Buckets', []):
            buckets_list.append(bucket['Name'])
        self.assertNotIn(self.bucket[:-1], buckets_list)
Code example #9
 def test_local_directory(self):
     """
     Generate an entire local directory.
     """
     input_local_dir = {'src': {'path': self.local_dir,
                                'type': 'local'},
                        'dest': {'path': 'bucket/',
                                 'type': 's3'},
                        'dir_op': True, 'use_src_name': True}
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.service,
                           self.endpoint, '').call(input_local_dir)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                          compare_key='text1.txt', size=size,
                          last_update=last_update, src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
     path = self.local_dir + 'another_directory' + os.sep \
         + 'text2.txt'
     size, last_update = get_file_stat(path)
     file_info2 = FileInfo(src=path,
                           dest='bucket/another_directory/text2.txt',
                           compare_key='another_directory/text2.txt',
                           size=size, last_update=last_update,
                           src_type='local',
                           dest_type='s3', operation_name='',
                           service=None, endpoint=None)
     ref_list = [file_info2, file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Code example #10
File: test_fileinfo.py Project: wenhuanhuang/aws-cli
 def test_set_size_from_s3(self):
     file_info = FileInfo(src="bucket/key", endpoint=None)
     with mock.patch(
             'awscli.customizations.s3.fileinfo.operate') as op_mock:
         op_mock.return_value = ({'ContentLength': 5}, None)
         file_info.set_size_from_s3()
     self.assertEqual(file_info.size, 5)
Code example #11
 def test_can_submit(self):
     fileinfo = FileInfo(src=self.source_bucket + '/' + self.source_key,
                         dest=self.bucket + '/' + self.key,
                         operation_name='copy')
     self.assertTrue(self.transfer_request_submitter.can_submit(fileinfo))
     fileinfo.operation_name = 'foo'
     self.assertFalse(self.transfer_request_submitter.can_submit(fileinfo))
Code example #12
 def test_empty_src(self):
     """
     Confirm the appropriate action is taken when there are no more source
     files to process.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     dest_file = FileInfo(
         src="",
         dest="",
         compare_key="comparator_test.py",
         size=10,
         last_update=time,
         src_type="s3",
         dest_type="local",
         operation_name="",
         service=None,
         endpoint=None,
     )
     dest_files.append(dest_file)
     dest_file.operation_name = "delete"
     ref_list.append(dest_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
Code example #13
    def test_list_objects(self):
        """
        Tests the ability to list objects, common prefixes, and buckets.
        If an error occurs, the test fails, as this is only a printing
        operation.
        """
        prefix_name = self.bucket + '/'
        file_info = FileInfo(
            src=prefix_name,
            operation_name='list_objects',
            size=0,
            service=self.service,
            endpoint=self.endpoint,
        )
        params = {'region': 'us-east-1'}
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])

        file_info = FileInfo(
            src='',
            operation_name='list_objects',
            size=0,
            service=self.service,
            endpoint=self.endpoint,
        )
        params = {'region': 'us-east-1'}
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
Code example #14
File: test_comparator.py Project: AsherBond/aws-cli
 def test_compare_key_greater(self):
     """
     Confirm the appropriate action is taken when the source compare key
     is greater than the destination compare key.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     src_file = FileInfo(src='', dest='',
                         compare_key='domparator_test.py', size=10,
                         last_update=time, src_type='local',
                         dest_type='s3', operation_name='upload',
                         service=None, endpoint=None)
     dest_file = FileInfo(src='', dest='',
                          compare_key='comparator_test.py', size=10,
                          last_update=time, src_type='s3',
                          dest_type='local', operation_name='',
                          service=None, endpoint=None)
     src_files.append(src_file)
     dest_files.append(dest_file)
     src_file.operation_name = 'upload'
     dest_file.operation_name = 'delete'
     ref_list.append(dest_file)
     ref_list.append(src_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
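This test and Code example #3 together exercise the merge step of the comparator: both input iterators arrive sorted by compare key, and the comparator consumes whichever side currently has the smaller key, yielding an upload for a source-only key and (when deletes are enabled) a delete for a destination-only key. A minimal sketch of such a merge, written here for illustration and not taken from aws-cli:

def merge_file_lists(src_iter, dest_iter):
    # Hypothetical sketch of the comparator's merge logic, inferred from
    # the ordering tests; the real comparator also applies the configured
    # sync strategy and only emits deletes when --delete is set.
    src = next(src_iter, None)
    dest = next(dest_iter, None)
    while src is not None or dest is not None:
        if dest is None or (src is not None and
                            src.compare_key < dest.compare_key):
            src.operation_name = 'upload'   # key exists only at the source
            yield src
            src = next(src_iter, None)
        elif src is None or src.compare_key > dest.compare_key:
            dest.operation_name = 'delete'  # key exists only at the destination
            yield dest
            dest = next(dest_iter, None)
        else:
            # Equal keys: defer to the sync strategy (size/timestamp
            # checks), then advance both streams.
            src = next(src_iter, None)
            dest = next(dest_iter, None)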
Code example #15
    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that the
        directory itself is included, because this is a delete command.
        Note: size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            'src': {
                'path': self.bucket + '/',
                'type': 's3'
            },
            'dest': {
                'path': '',
                'type': 'local'
            },
            'dir_op': True,
            'use_src_name': True
        }
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint, 'delete',
                              params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=result_list[0].size,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[2].size,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Code example #16
 def test_empty_src(self):
     """
     Confirm the appropriate action is taken when there are no more source
     files to process.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     dest_file = FileInfo(src='',
                          dest='',
                          compare_key='comparator_test.py',
                          size=10,
                          last_update=time,
                          src_type='s3',
                          dest_type='local',
                          operation_name='',
                          service=None,
                          endpoint=None)
     dest_files.append(dest_file)
     dest_file.operation_name = 'delete'
     ref_list.append(dest_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
Code example #17
 def test_compare_key_equal(self):
     """
     Confirms that compare key checking works: identical files are not synced.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     src_file = FileInfo(src='',
                         dest='',
                         compare_key='comparator_test.py',
                         size=10,
                         last_update=time,
                         src_type='local',
                         dest_type='s3',
                         operation_name='upload',
                         service=None,
                         endpoint=None)
     dest_file = FileInfo(src='',
                          dest='',
                          compare_key='comparator_test.py',
                          size=10,
                          last_update=time,
                          src_type='s3',
                          dest_type='local',
                          operation_name='',
                          service=None,
                          endpoint=None)
     src_files.append(src_file)
     dest_files.append(dest_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
Code example #18
    def test_compare_exact_timestamps_same_age_diff_size(self):
        """
        Confirm that files of differing sizes are synced when
        the source and destination are the same age and
        `exact_timestamps` is set.
        """
        time_both = datetime.datetime.now()

        src_file = FileInfo(src='',
                            dest='',
                            compare_key='test.py',
                            size=20,
                            last_update=time_both,
                            src_type='s3',
                            dest_type='local',
                            operation_name='download',
                            service=None,
                            endpoint=None)

        dst_file = FileInfo(src='',
                            dest='',
                            compare_key='test.py',
                            size=10,
                            last_update=time_both,
                            src_type='local',
                            dest_type='s3',
                            operation_name='',
                            service=None,
                            endpoint=None)

        files = self.comparator.call(iter([src_file]), iter([dst_file]))
        self.assertEqual(sum(1 for _ in files), 1)
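This test, together with Code examples #2 and #5, pins down the sync predicate for equal compare keys: with `size_only` set, only sizes are compared; with `exact_timestamps` set, a same-sized file is synced whenever the timestamps differ in either direction; by default, a file is synced when the sizes differ or the source is newer. A hypothetical predicate consistent with these tests (illustrative only; the real logic lives in aws-cli's sync strategies):

def should_sync(src, dest, size_only=False, exact_timestamps=False):
    # Hypothetical predicate inferred from the comparator tests; not the
    # actual aws-cli implementation.
    same_size = src.size == dest.size
    if size_only:
        # Timestamps are ignored entirely.
        return not same_size
    if exact_timestamps:
        # Any timestamp difference triggers a sync for same-sized files.
        return not same_size or src.last_update != dest.last_update
    # Default behavior: sync when sizes differ or the source is newer.
    return not same_size or src.last_update > dest.last_update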
Code example #19
 def test_enqueue_multipart_download_stream(self):
     """
     This test ensures the right calls are made in ``_enqueue_tasks()``
     if the file should be a multipart download.
     """
     s3handler = S3StreamHandler(self.session, self.params,
                                 multi_threshold=5)
     s3handler.executor = mock.Mock()
     fileinfo = FileInfo('filename', operation_name='download',
                         is_stream=True)
     with mock.patch('awscli.customizations.s3.s3handler'
                     '.S3StreamHandler._enqueue_range_download_tasks') as \
             mock_enqueue_range_tasks:
         with mock.patch('awscli.customizations.s3.fileinfo.FileInfo'
                         '.set_size_from_s3') as mock_set_size_from_s3:
             # Set the file size to something larger than the multipart
             # threshold.
             fileinfo.size = 100
             # Run the main enqueue function.
             s3handler._enqueue_tasks([fileinfo])
             # Assert that the size of the ``FileInfo`` object was set
             # if we are downloading a stream.
             self.assertTrue(mock_set_size_from_s3.called)
             # Ensure that this download would have been a multipart
             # download.
             self.assertTrue(mock_enqueue_range_tasks.called)
Code example #20
    def test_s3_delete_directory(self):
        #
        # Generates s3 files under a common prefix. It also ensures that the
        # directory itself is included, because this is a delete command.
        # Note: size and last update are not tested because s3 generates them.
        #
        input_s3_file = {
            'src': {
                'path': self.bucket + '/',
                'type': 's3'
            },
            'dest': {
                'path': '',
                'type': 'local'
            },
            'dir_op': True,
            'use_src_name': True
        }
        params = {'region': 'us-east-1'}
        result_list = list(
            FileGenerator(self.service, self.endpoint, 'delete',
                          params).call(input_s3_file))

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=0,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=21,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=15,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='delete',
                              service=self.service,
                              endpoint=self.endpoint)

        expected_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), 3)
        compare_files(self, result_list[0], expected_list[0])
        compare_files(self, result_list[1], expected_list[1])
        compare_files(self, result_list[2], expected_list[2])
Code example #21
File: test_fileinfo.py Project: avinashkolla/AWSCLI
class TestIsGlacierCompatible(unittest.TestCase):
    def setUp(self):
        self.file_info = FileInfo('bucket/key')
        self.file_info.associated_response_data = {'StorageClass': 'GLACIER'}

    def test_operation_is_glacier_compatible(self):
        self.file_info.operation_name = 'delete'
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_download_operation_is_not_glacier_compatible(self):
        self.file_info.operation_name = 'download'
        self.assertFalse(self.file_info.is_glacier_compatible())

    def test_copy_operation_is_not_glacier_compatible(self):
        self.file_info.operation_name = 'copy'
        self.assertFalse(self.file_info.is_glacier_compatible())

    def test_operation_is_glacier_compatible_for_non_glacier(self):
        self.file_info.operation_name = 'download'
        self.file_info.associated_response_data = {'StorageClass': 'STANDARD'}
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_move_operation_is_not_glacier_compatible_for_s3_source(self):
        self.file_info.operation_name = 'move'
        self.file_info.src_type = 's3'
        self.assertFalse(self.file_info.is_glacier_compatible())

    def test_move_operation_is_glacier_compatible_for_local_source(self):
        self.file_info.operation_name = 'move'
        self.file_info.src_type = 'local'
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_response_is_not_glacier(self):
        self.file_info.associated_response_data = {'StorageClass': 'STANDARD'}
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_response_missing_storage_class(self):
        self.file_info.associated_response_data = {'Key': 'Foo'}
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_task_info_glacier_compatibility(self):
        task_info = TaskInfo('bucket/key', 's3', 'remove_bucket', None)
        self.assertTrue(task_info.is_glacier_compatible())

    def test_restored_object_is_glacier_compatible(self):
        self.file_info.operation_name = 'download'
        self.file_info.associated_response_data = {
            'StorageClass': 'GLACIER',
            'Restore': 'ongoing-request="false", expiry-date="..."'
        }
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_ongoing_restore_is_not_glacier_compatible(self):
        self.file_info.operation_name = 'download'
        self.file_info.associated_response_data = {
            'StorageClass': 'GLACIER',
            'Restore': 'ongoing-request="true", expiry-date="..."'
        }
        self.assertFalse(self.file_info.is_glacier_compatible())
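Taken together, the tests above specify a decision table for `is_glacier_compatible`: operations that must read the object's data out of S3 (download, copy, and move with an s3 source) are incompatible with an unrestored GLACIER object, while deletes and local-sourced moves are fine, and a completed restore makes the object readable again. A minimal sketch consistent with these tests; this is an illustrative assumption, not the actual aws-cli implementation:

def is_glacier_compatible(self):
    # Hypothetical sketch inferred from the tests above, not the real
    # aws-cli method.
    if self.associated_response_data.get('StorageClass') != 'GLACIER':
        return True
    # A finished restore ('ongoing-request="false"') makes the data
    # readable again.
    restore = self.associated_response_data.get('Restore', '')
    if 'ongoing-request="false"' in restore:
        return True
    # Operations that read the object's data need a restored copy.
    if self.operation_name in ('download', 'copy'):
        return False
    if self.operation_name == 'move' and self.src_type == 's3':
        return False
    # e.g. 'delete' never touches the object's data.
    return True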
Code example #22
 def test_can_submit(self):
     fileinfo = FileInfo(src=self.filename,
                         dest=None,
                         operation_name='delete',
                         src_type='local')
     self.assertTrue(self.transfer_request_submitter.can_submit(fileinfo))
     fileinfo.operation_name = 'foo'
     self.assertFalse(self.transfer_request_submitter.can_submit(fileinfo))
Code example #23
File: test_s3handler.py Project: MAS150MD200/aws-cli
 def test_can_submit(self):
     fileinfo = FileInfo(
         src=self.filename, dest=None, operation_name='delete',
         src_type='local')
     self.assertTrue(
         self.transfer_request_submitter.can_submit(fileinfo))
     fileinfo.operation_name = 'foo'
     self.assertFalse(
         self.transfer_request_submitter.can_submit(fileinfo))
Code example #24
File: test_s3handler.py Project: MAS150MD200/aws-cli
 def test_can_submit(self):
     fileinfo = FileInfo(
         src=self.source_bucket+'/'+self.source_key,
         dest=self.bucket+'/'+self.key, operation_name='copy')
     self.assertTrue(
         self.transfer_request_submitter.can_submit(fileinfo))
     fileinfo.operation_name = 'foo'
     self.assertFalse(
         self.transfer_request_submitter.can_submit(fileinfo))
Code example #25
 def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {'src': {'path': self.local_file,
                                 'type': 'local'},
                         'dest': {'path': 'bucket/text1.txt',
                                  'type': 's3'},
                         'dir_op': False, 'use_src_name': False}
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.service,
                           self.endpoint, '').call(input_local_file)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                          compare_key='text1.txt', size=size,
                          last_update=last_update, src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
     ref_list = [file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Code example #26
    def _inject_info(self, file_base):
        file_info_attr = {}
        file_info_attr['src'] = file_base.src
        file_info_attr['dest'] = file_base.dest
        file_info_attr['compare_key'] = file_base.compare_key
        file_info_attr['size'] = file_base.size
        file_info_attr['last_update'] = file_base.last_update
        file_info_attr['src_type'] = file_base.src_type
        file_info_attr['dest_type'] = file_base.dest_type
        file_info_attr['operation_name'] = file_base.operation_name
        file_info_attr['parameters'] = self._parameters
        file_info_attr['is_stream'] = self._is_stream
        file_info_attr['associated_response_data'] = file_base.response_data

        # This is a bit quirky. The below conditional hinges on the --delete
        # flag being set, which only occurs during a sync command. The source
        # client in a sync delete refers to the source of the sync rather than
        # the source of the delete. What this means is that the client that
        # gets called during the delete process would point to the wrong region.
        # Normally this doesn't matter because DNS will re-route the request
        # to the correct region. In the case of s3v4 signing, however, this
        # would result in a failed delete. The conditional below fixes this
        # issue by swapping clients only in the case of a sync delete since
        # swapping which client is used in the delete function would then break
        # moving under s3v4.
        if (file_base.operation_name == 'delete'
                and self._parameters.get('delete')):
            file_info_attr['client'] = self._source_client
            file_info_attr['source_client'] = self._client
        else:
            file_info_attr['client'] = self._client
            file_info_attr['source_client'] = self._source_client

        return FileInfo(**file_info_attr)
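Put differently, the swap only fires for deletes generated by `sync --delete`; a standalone delete (e.g. `aws s3 rm`) never sets the `delete` parameter, so it keeps the normal client/source_client pairing, and non-delete operations are unaffected either way.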
Code example #27
    def test_s3_file(self):
        """
        Generate a single s3 file.
        Note: size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.file1, 'type': 's3'},
                         'dest': {'path': 'text1.txt', 'type': 'local'},
                         'dir_op': False, 'use_src_name': False}
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint,
                              '').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file1, dest='text1.txt',
                             compare_key='text1.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local', operation_name='',
                             service=None, endpoint=None)

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Code example #28
File: test_s3handler.py Project: zsjohny/aws-cli
 def test_download(self):
     # Confirm that the files do not exist.
     for filename in self.loc_files:
         self.assertFalse(os.path.exists(filename))
     # Create file info objects to perform download.
     tasks = []
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(src=self.s3_files[i],
                      src_type='s3',
                      dest=self.loc_files[i],
                      dest_type='local',
                      last_update=time,
                      operation_name='download',
                      size=0,
                      client=self.client))
     # Perform the download.
     self.s3_handler.call(tasks)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure the contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is another test.')
Code example #29
 def test_multi_download(self):
     tasks = []
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(
                 src=self.s3_files[i],
                 src_type='s3',
                 dest=self.loc_files[i],
                 dest_type='local',
                 last_update=time,
                 operation_name='download',
                 size=15,
                 service=self.service,
                 endpoint=self.endpoint,
             ))
     # Perform the multipart download.
     self.s3_handler_multi.call(tasks)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure the contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is another test.')
Code example #30
 def test_enqueue_upload_stream(self):
     self.parameters['is_stream'] = True
     self.s3_transfer_handler.call(
         [FileInfo(src='-', dest='bucket/key', operation_name='upload')])
     self.assertEqual(self.transfer_manager.upload.call_count, 1)
     upload_call_kwargs = self.transfer_manager.upload.call_args[1]
     self.assertIsInstance(upload_call_kwargs['fileobj'], NonSeekableStream)
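A note on the assertion: `src='-'` denotes standard input, which cannot be rewound, so the handler hands the transfer manager a `NonSeekableStream` wrapper; presumably this is what keeps the upload from trying to seek on the stream.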
Code example #31
 def test_can_submit(self):
     fileinfo = FileInfo(src=self.bucket + '/' + self.key,
                         dest=self.filename,
                         operation_name='download')
     self.assertTrue(self.transfer_request_submitter.can_submit(fileinfo))
     self.cli_params['is_stream'] = False
     self.assertFalse(self.transfer_request_submitter.can_submit(fileinfo))
Code example #32
    def test_enqueue_local_deletes(self):
        fileinfos = []
        num_transfers = 5
        for _ in range(num_transfers):
            fileinfos.append(
                FileInfo(src='myfile',
                         dest=None,
                         operation_name='delete',
                         src_type='local'))

        self.s3_transfer_handler.call(fileinfos)
        # The number of processed results will be equal to
        # num_transfers * 2 + 1. The 2 represents the QueuedResult and
        # SuccessResult/FailureResult for each transfer, and the 1
        # represents the TotalFinalSubmissionResult; here that is
        # 5 * 2 + 1 = 11.
        self.assertEqual(len(self.processed_results), 11)

        # Make sure that the results are as expected by checking just one
        # of them
        first_submitted_result = self.processed_results[0]
        self.assertEqual(first_submitted_result.transfer_type, 'delete')
        self.assertTrue(first_submitted_result.src.endswith('myfile'))

        # Also make sure that transfer manager's delete() was never called
        self.assertEqual(self.transfer_manager.delete.call_count, 0)
Code example #33
File: test_s3handler.py Project: warpaul/aws-cli
 def test_multi_download_fail(self):
     """
     This test ensures that a multipart download can handle a
     standard error exception stemming from an operation
     being performed on a nonexistent bucket. The existing file
     should be downloaded properly, but the other will not be.
     """
     tasks = []
     wrong_s3_files = [
         self.bucket + '/text1.txt',
         self.bucket[:-1] + '/another_directory/text2.txt'
     ]
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(src=wrong_s3_files[i],
                      src_type='s3',
                      dest=self.loc_files[i],
                      dest_type='local',
                      last_update=time,
                      operation_name='download',
                      size=15,
                      service=self.service,
                      endpoint=self.endpoint))
     # Perform the multipart download.
     self.s3_handler_multi.call(tasks)
     # Confirm that the files now exist.
     self.assertTrue(os.path.exists(self.loc_files[0]))
     # The second file should not exist.
     self.assertFalse(os.path.exists(self.loc_files[1]))
     # Ensure that contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
Code example #34
File: test_s3handler.py Project: warpaul/aws-cli
 def test_move(self):
     # Create file info objects to perform move.
     tasks = []
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(src=self.s3_files[i],
                      src_type='s3',
                      dest=self.loc_files[i],
                      dest_type='local',
                      last_update=time,
                      operation_name='move',
                      size=0,
                      service=self.service,
                      endpoint=self.endpoint))
     # Perform the move.
     self.s3_handler.call(tasks)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure the contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is another test.')
     # Ensure the objects are no longer in the bucket.
     self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
Code example #35
File: test_s3handler.py Project: warpaul/aws-cli
 def test_upload_fail(self):
     """
     One of the uploads in this test will fail because
     the second s3 destination's bucket does not exist.
     """
     self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
     fail_s3_files = [
         self.bucket + '/text1.txt',
         self.bucket[:-1] + '/another_directory/text2.txt'
     ]
     files = [self.loc_files[0], self.loc_files[1]]
     tasks = []
     for i in range(len(files)):
         tasks.append(
             FileInfo(src=self.loc_files[i],
                      dest=fail_s3_files[i],
                      compare_key=None,
                      src_type='local',
                      dest_type='s3',
                      operation_name='upload',
                      size=0,
                      last_update=None,
                      service=self.service,
                      endpoint=self.endpoint))
     self.s3_handler.call(tasks)
     # Confirm only one of the files was uploaded.
     self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
Code example #36
 def test_compare_key_less(self):
     """
      Confirm the appropriate action is taken when the source compare key
     is less than the destination compare key.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     src_file = FileInfo(
         src="",
         dest="",
         compare_key="bomparator_test.py",
         size=10,
         last_update=time,
         src_type="local",
         dest_type="s3",
         operation_name="upload",
         service=None,
         endpoint=None,
     )
     dest_file = FileInfo(
         src="",
         dest="",
         compare_key="comparator_test.py",
         size=10,
         last_update=time,
         src_type="s3",
         dest_type="local",
         operation_name="",
         service=None,
         endpoint=None,
     )
     src_files.append(src_file)
     dest_files.append(dest_file)
      dest_file.operation_name = "delete"
     ref_list.append(src_file)
     ref_list.append(dest_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
Code example #37
File: test_fileinfo.py Project: roymmcm/aws-cli
class TestIsGlacierCompatible(unittest.TestCase):
    def setUp(self):
        self.file_info = FileInfo("bucket/key")
        self.file_info.associated_response_data = {"StorageClass": "GLACIER"}

    def test_operation_is_glacier_compatible(self):
        self.file_info.operation_name = "delete"
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_download_operation_is_not_glacier_compatible(self):
        self.file_info.operation_name = "download"
        self.assertFalse(self.file_info.is_glacier_compatible())

    def test_copy_operation_is_not_glacier_compatible(self):
        self.file_info.operation_name = "copy"
        self.assertFalse(self.file_info.is_glacier_compatible())

    def test_operation_is_glacier_compatible_for_non_glacier(self):
        self.file_info.operation_name = "download"
        self.file_info.associated_response_data = {"StorageClass": "STANDARD"}
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_move_operation_is_not_glacier_compatible_for_s3_source(self):
        self.file_info.operation_name = "move"
        self.file_info.src_type = "s3"
        self.assertFalse(self.file_info.is_glacier_compatible())

    def test_move_operation_is_glacier_compatible_for_local_source(self):
        self.file_info.operation_name = "move"
        self.file_info.src_type = "local"
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_response_is_not_glacier(self):
        self.file_info.associated_response_data = {"StorageClass": "STANDARD"}
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_response_missing_storage_class(self):
        self.file_info.associated_response_data = {"Key": "Foo"}
        self.assertTrue(self.file_info.is_glacier_compatible())

    def test_task_info_glacier_compatibility(self):
        task_info = TaskInfo("bucket/key", "s3", "remove_bucket", None)
        self.assertTrue(task_info.is_glacier_compatible())
Code example #38
File: test_fileinfo.py Project: gonzalez/aws-cli
 def test_set_size_from_s3(self):
     file_info = FileInfo(src="bucket/key", endpoint=None)
     with mock.patch('awscli.customizations.s3.fileinfo.operate') as op_mock:
         op_mock.return_value = ({'ContentLength': 5}, None)
         file_info.set_size_from_s3()
     self.assertEqual(file_info.size, 5)
Code example #39
File: test_fileinfo.py Project: roymmcm/aws-cli
 def setUp(self):
     self.file_info = FileInfo("bucket/key")
     self.file_info.associated_response_data = {"StorageClass": "GLACIER"}
Code example #40
 def setUp(self):
     self.file_info = FileInfo('bucket/key')
     self.file_info.associated_response_data = {'StorageClass': 'GLACIER'}
Code example #41
 def test_set_size_from_s3(self):
     client = mock.Mock()
     client.head_object.return_value = {'ContentLength': 5}
     file_info = FileInfo(src="bucket/key", client=client)
     file_info.set_size_from_s3()
     self.assertEqual(file_info.size, 5)
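This last test implies the shape of `set_size_from_s3`: a HeadObject call whose `ContentLength` becomes the file's size. A minimal sketch under that assumption (`find_bucket_key` is the aws-cli helper that splits a `'bucket/key'` source; treat the exact call shape as an assumption rather than the verbatim implementation):

from awscli.customizations.s3.utils import find_bucket_key

def set_size_from_s3(self):
    # Hypothetical sketch inferred from the mocked tests above.
    bucket, key = find_bucket_key(self.src)
    response = self.client.head_object(Bucket=bucket, Key=key)
    self.size = int(response['ContentLength'])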