def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {
         'src': {
             'path': self.local_file,
             'type': 'local'
         },
         'dest': {
             'path': 'bucket/text1.txt',
             'type': 's3'
         },
         'dir_op': False,
         'use_src_name': False
     }
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.session, '', params).call(input_local_file)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(src=self.local_file,
                          dest='bucket/text1.txt',
                          compare_key='text1.txt',
                          size=size,
                          last_update=last_update,
                          src_type='local',
                          dest_type='s3',
                          operation='')
     ref_list = [file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Example #2
 def call(self, files):
     """
     This is the generalized function to yield the ``FileInfo`` objects.
     ``dir_op`` and ``use_src_name`` flags affect which files are used and
     ensure the proper destination paths and compare keys are formed.
     """
     src = files['src']
     dest = files['dest']
     src_type = src['type']
     dest_type = dest['type']
     function_table = {'s3': self.list_objects, 'local': self.list_files}
     sep_table = {'s3': '/', 'local': os.sep}
     source = src['path']
     file_list = function_table[src_type](source, files['dir_op'])
     for src_path, size, last_update in file_list:
         if files['dir_op']:
             rel_path = src_path[len(src['path']):]
         else:
             rel_path = src_path.split(sep_table[src_type])[-1]
         compare_key = rel_path.replace(sep_table[src_type], '/')
         if files['use_src_name']:
             dest_path = dest['path']
             dest_path += rel_path.replace(sep_table[src_type],
                                           sep_table[dest_type])
         else:
             dest_path = dest['path']
         yield FileInfo(src=src_path, dest=dest_path,
                        compare_key=compare_key, size=size,
                        last_update=last_update, src_type=src_type,
                        dest_type=dest_type, operation=self.operation)
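The path arithmetic in call() is easy to get wrong, so here is a minimal standalone sketch of the same logic for a recursive local-to-s3 copy (dir_op=True, use_src_name=True); the sample paths are invented for illustration and are not taken from the tests:

    import os

    src_root = '/tmp/photos' + os.sep                # src['path'] for a dir_op
    src_path = src_root + 'albums' + os.sep + 'cat.jpg'

    rel_path = src_path[len(src_root):]              # 'albums/cat.jpg' (os.sep-joined on Windows)
    compare_key = rel_path.replace(os.sep, '/')      # compare keys always use '/'
    dest_path = 'bucket/' + compare_key              # 'bucket/albums/cat.jpg'

    print(compare_key, dest_path)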
Example #3
    def _inject_info(self, file_base):
        file_info_attr = {}
        file_info_attr['src'] = file_base.src
        file_info_attr['dest'] = file_base.dest
        file_info_attr['compare_key'] = file_base.compare_key
        file_info_attr['size'] = file_base.size
        file_info_attr['last_update'] = file_base.last_update
        file_info_attr['src_type'] = file_base.src_type
        file_info_attr['dest_type'] = file_base.dest_type
        file_info_attr['operation_name'] = file_base.operation_name
        file_info_attr['parameters'] = self._parameters
        file_info_attr['is_stream'] = self._is_stream
        file_info_attr['associated_response_data'] = file_base.response_data

        # This is a bit quirky. The conditional below hinges on the --delete
        # flag being set, which only occurs during a sync command. The source
        # client in a sync delete refers to the source of the sync rather than
        # the source of the delete, meaning the client called during the
        # delete process would point to the wrong region. Normally this does
        # not matter because DNS re-routes the request to the correct region.
        # Under s3v4 signing, however, it would result in a failed delete. The
        # conditional below fixes this by swapping clients only for a sync
        # delete, since unconditionally swapping the client used in the delete
        # function would break moves under s3v4.
        if (file_base.operation_name == 'delete' and
                self._parameters.get('delete')):
            file_info_attr['client'] = self._source_client
            file_info_attr['source_client'] = self._client
        else:
            file_info_attr['client'] = self._client
            file_info_attr['source_client'] = self._source_client

        return FileInfo(**file_info_attr)
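Condensed, the swap above amounts to a small decision function (a sketch with illustrative names, not code from the handler itself):

    def pick_clients(operation_name, parameters, client, source_client):
        # Swap only for a sync delete: operation_name is 'delete' while the
        # --delete flag is present in the parsed parameters.
        if operation_name == 'delete' and parameters.get('delete'):
            return source_client, client
        return client, source_client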
Example #4
 def test_multi_download_fail(self):
     """
     This test ensures that a multipart download can handle a
     standard error exception stemming from an operation
     being performed on a nonexistant bucket.  The existing file
     should be downloaded properly but the other will not.
     """
     tasks = []
     wrong_s3_files = [
         self.bucket + '/text1.txt',
         self.bucket[:-1] + '/another_directory/text2.txt'
     ]
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(src=wrong_s3_files[i],
                      src_type='s3',
                      dest=self.loc_files[i],
                      dest_type='local',
                      last_update=time,
                      operation='download',
                      size=15))
      # Perform the multipart download.
     self.s3_handler_multi.call(tasks)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure that contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as filename:
         self.assertNotEqual(filename.read(), b'This is a test.')
Example #5
 def test_empty_dest(self):
     """
      Confirm the appropriate action is taken when there are no
      destination files left to compare against.
     """
     src_files = []
     dest_files = []
     ref_list = []
     result_list = []
     time = datetime.datetime.now()
     src_file = FileInfo(src='',
                         dest='',
                          compare_key='comparator_test.py',
                         size=10,
                         last_update=time,
                         src_type='local',
                         dest_type='s3',
                         operation_name='upload',
                         service=None,
                         endpoint=None)
     src_files.append(src_file)
     ref_list.append(src_file)
     files = self.comparator.call(iter(src_files), iter(dest_files))
     for filename in files:
         result_list.append(filename)
     self.assertEqual(result_list, ref_list)
Example #6
 def test_download(self):
     # Confirm that the files do not exist.
     for filename in self.loc_files:
         self.assertFalse(os.path.exists(filename))
     # Create file info objects to perform download.
     tasks = []
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(src=self.s3_files[i],
                      src_type='s3',
                      dest=self.loc_files[i],
                      dest_type='local',
                      last_update=time,
                      operation='download',
                      size=0))
     # Perform the download.
     self.s3_handler.call(tasks)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure the contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is another test.')
Example #7
 def test_move(self):
     # Create file info objects to perform move.
     tasks = []
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(src=self.s3_files[i],
                      src_type='s3',
                      dest=self.loc_files[i],
                      dest_type='local',
                      last_update=time,
                      operation='move',
                      size=0))
     # Perform the move.
     self.s3_handler.call(tasks)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure the contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is another test.')
      # Ensure the moved objects are gone; only one object should remain.
     self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
Example #8
    def test_s3_file(self):
        """
        Generate a single s3 file.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            'src': {
                'path': self.file1,
                'type': 's3'
            },
            'dest': {
                'path': 'text1.txt',
                'type': 'local'
            },
            'dir_op': False,
            'use_src_name': False
        }
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.session, '', params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file1,
                             dest='text1.txt',
                             compare_key='text1.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local',
                             operation='')

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #9
 def test_multi_download(self):
     tasks = []
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(FileInfo(
             src=self.s3_files[i], src_type='s3',
             dest=self.loc_files[i], dest_type='local',
             last_update=time, operation_name='download',
             size=15, client=self.client))
     mock_stream = mock.Mock()
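      # Each 5-byte chunk is followed by b'' so the read loop for each
      # ranged GetObject sees end-of-stream after consuming its part.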
     mock_stream.read.side_effect = [
         b'This ', b'', b'is a ', b'', b'test.', b'',
         b'This ', b'', b'is a ', b'', b'test.', b''
     ]
     self.parsed_responses = [
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': mock_stream},
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': mock_stream},
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': mock_stream},
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': mock_stream},
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': mock_stream},
         {'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
          'Body': mock_stream}
     ]
     ref_calls = [
         ('GetObject',
          {'Bucket': self.bucket, 'Key': 'text1.txt',
           'Range': 'bytes=0-4'}),
         ('GetObject',
          {'Bucket': self.bucket, 'Key': 'text1.txt',
           'Range': 'bytes=5-9'}),
         ('GetObject',
          {'Bucket': self.bucket, 'Key': 'text1.txt',
           'Range': 'bytes=10-'}),
         ('GetObject',
          {'Bucket': self.bucket, 'Key': 'another_directory/text2.txt',
           'Range': 'bytes=0-4'}),
         ('GetObject',
          {'Bucket': self.bucket, 'Key': 'another_directory/text2.txt',
           'Range': 'bytes=5-9'}),
         ('GetObject',
          {'Bucket': self.bucket, 'Key': 'another_directory/text2.txt',
           'Range': 'bytes=10-'}),
     ]
      # Perform the multipart download.
     self.assert_operations_for_s3_handler(self.s3_handler_multi, tasks,
                                           ref_calls)
     # Confirm that the files now exist.
     for filename in self.loc_files:
         self.assertTrue(os.path.exists(filename))
     # Ensure the contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
     with open(self.loc_files[1], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
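The expected Range headers follow from splitting a 15-byte object into 5-byte chunks (the chunk size is inferred from the ranges above, not asserted directly). A minimal sketch of that arithmetic:

    def byte_ranges(size, chunksize):
        # Yield HTTP Range header values for a ranged multipart download;
        # the last range is left open-ended to absorb any remainder.
        start = 0
        while start < size:
            end = start + chunksize - 1
            if end >= size - 1:
                yield 'bytes=%d-' % start
            else:
                yield 'bytes=%d-%d' % (start, end)
            start += chunksize

    assert list(byte_ranges(15, 5)) == ['bytes=0-4', 'bytes=5-9', 'bytes=10-']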
Example #10
    def test_bucket(self):
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'
        orig_number_buckets = len(list_buckets(self.session))

        file_info = FileInfo(src=self.bucket, operation='make_bucket', size=0)
        self.s3_handler.call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets + 1, number_buckets)

        file_info = FileInfo(src=self.bucket,
                             operation='remove_bucket',
                             size=0)
        self.s3_handler.call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets, number_buckets)
Example #11
 def test_can_submit(self):
     fileinfo = FileInfo(src=self.filename,
                         dest=None,
                         operation_name='delete',
                         src_type='local')
     self.assertTrue(self.transfer_request_submitter.can_submit(fileinfo))
     fileinfo.operation_name = 'foo'
     self.assertFalse(self.transfer_request_submitter.can_submit(fileinfo))
Example #12
 def test_multi_upload(self):
     """
     This test only checks that the multipart upload process works.
     It confirms that the parts are properly formatted but does not
     perform any tests past checking the parts are uploaded correctly.
     """
     files = [self.loc_files[0]]
     tasks = []
     for i in range(len(files)):
         tasks.append(
             FileInfo(src=self.loc_files[i],
                      dest=self.s3_files[i],
                      size=15,
                      operation_name='upload',
                      client=self.client))
     self.parsed_responses = [{
         'UploadId': 'foo'
     }, {
         'ETag': '"120ea8a25e5d487bf68b5f7096440019"'
     }, {
         'ETag': '"120ea8a25e5d487bf68b5f7096440019"'
     }, {}]
     ref_calls = [('CreateMultipartUpload', {
         'Bucket': 'mybucket',
         'ContentType': 'text/plain',
         'Key': 'text1.txt',
         'ACL': 'private'
     }),
                  ('UploadPart', {
                      'Body': mock.ANY,
                      'Bucket': 'mybucket',
                      'PartNumber': 1,
                      'UploadId': 'foo',
                      'Key': 'text1.txt'
                  }),
                  ('UploadPart', {
                      'Body': mock.ANY,
                      'Bucket': 'mybucket',
                      'PartNumber': 2,
                      'UploadId': 'foo',
                      'Key': 'text1.txt'
                  }),
                  ('CompleteMultipartUpload', {
                      'MultipartUpload': {
                          'Parts': [{
                              'PartNumber': 1,
                              'ETag': mock.ANY
                          }, {
                              'PartNumber': 2,
                              'ETag': mock.ANY
                          }]
                      },
                      'Bucket': 'mybucket',
                      'UploadId': 'foo',
                      'Key': 'text1.txt'
                  })]
     self.assert_operations_for_s3_handler(self.s3_handler_multi, tasks,
                                           ref_calls)
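The closing CompleteMultipartUpload call above is just the collected part numbers and ETags reassembled into one payload; a sketch of that assembly step (the function name is illustrative):

    def build_complete_payload(bucket, key, upload_id, etags):
        # Part numbers are 1-based and must pair with the ETag returned by
        # the corresponding UploadPart call.
        parts = [{'PartNumber': i + 1, 'ETag': etag}
                 for i, etag in enumerate(etags)]
        return {'Bucket': bucket, 'Key': key, 'UploadId': upload_id,
                'MultipartUpload': {'Parts': parts}}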
Example #13
 def test_local_directory(self):
     """
     Generate an entire local directory.
     """
     input_local_dir = {
         'src': {
             'path': self.local_dir,
             'type': 'local'
         },
         'dest': {
             'path': 'bucket/',
             'type': 's3'
         },
         'dir_op': True,
         'use_src_name': True
     }
     params = {'region': 'us-east-1'}
     files = FileGenerator(self.session, '', params).call(input_local_dir)
     result_list = []
     for filename in files:
         result_list.append(filename)
     size, last_update = get_file_stat(self.local_file)
     file_info = FileInfo(src=self.local_file,
                          dest='bucket/text1.txt',
                          compare_key='text1.txt',
                          size=size,
                          last_update=last_update,
                          src_type='local',
                          dest_type='s3',
                          operation='')
     path = self.local_dir + 'another_directory' + os.sep \
         + 'text2.txt'
     size, last_update = get_file_stat(path)
     file_info2 = FileInfo(src=path,
                           dest='bucket/another_directory/text2.txt',
                           compare_key='another_directory/text2.txt',
                           size=size,
                           last_update=last_update,
                           src_type='local',
                           dest_type='s3',
                           operation='')
     ref_list = [file_info2, file_info]
     self.assertEqual(len(result_list), len(ref_list))
     for i in range(len(result_list)):
         compare_files(self, result_list[i], ref_list[i])
Example #14
    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            'src': {
                'path': self.bucket + '/',
                'type': 's3'
            },
            'dest': {
                'path': '',
                'type': 'local'
            },
            'dir_op': True,
            'use_src_name': True
        }
        params = {'region': 'us-east-1'}
        files = FileGenerator(self.service, self.endpoint, '',
                              params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(src=self.file2,
                             dest='another_directory' + os.sep + 'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=result_list[0].size,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local',
                             operation_name='')
        file_info2 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='')

        ref_list = [file_info, file_info2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #15
 def test_list_objects(self):
     """
     Tests the ability to list objects, common prefixes, and buckets.
      If an error occurs, the test fails, as this is only a printing
      operation.
     """
     prefix_name = self.bucket + '/'
     file_info = FileInfo(
         src=prefix_name, operation_name='list_objects', size=0,
         service=self.service, endpoint=self.endpoint)
     params = {'region': 'us-east-1'}
     s3_handler = S3Handler(self.session, params)
     s3_handler.call([file_info])
     file_info = FileInfo(
         src='', operation_name='list_objects', size=0,
         service=self.service, endpoint=self.endpoint)
     s3_handler = S3Handler(self.session, params)
     s3_handler.call([file_info])
Example #16
 def test_remove_bucket(self):
     file_info = FileInfo(src=self.bucket,
                          operation_name='remove_bucket',
                          size=0,
                          client=self.client)
     s3_handler = S3Handler(self.session, self.params)
     ref_calls = [('DeleteBucket', {'Bucket': self.bucket})]
     self.assert_operations_for_s3_handler(s3_handler, [file_info],
                                           ref_calls)
Example #17
    def test_compare_lastmod_download(self):
        """
        Confirms compare time works for downloads.
        """
        src_files = []
        dest_files = []
        ref_list = []
        result_list = []
        time = datetime.datetime.now()
        future_time = time + datetime.timedelta(0, 3)
        src_file = FileInfo(src='', dest='',
                            compare_key='comparator_test.py', size=10,
                            last_update=time, src_type='s3',
                            dest_type='local', operation='download')
        dest_file = FileInfo(src='', dest='',
                             compare_key='comparator_test.py', size=10,
                             last_update=future_time, src_type='local',
                             dest_type='s3', operation='')
        src_files.append(src_file)
        dest_files.append(dest_file)
        files = self.comparator.call(iter(src_files), iter(dest_files))
        ref_list.append(src_file)
        for filename in files:
            result_list.append(filename)
        self.assertEqual(result_list, ref_list)

        # If the source is newer than the destination, do not download.
        src_file = FileInfo(src='', dest='',
                            compare_key='comparator_test.py', size=10,
                            last_update=future_time, src_type='s3',
                            dest_type='local', operation='download')
        dest_file = FileInfo(src='', dest='',
                             compare_key='comparator_test.py', size=10,
                             last_update=time, src_type='local',
                             dest_type='s3', operation='')
        src_files = []
        dest_files = []
        src_files.append(src_file)
        dest_files.append(dest_file)
        files = self.comparator.call(iter(src_files), iter(dest_files))
        result_list = []
        for filename in files:
            result_list.append(filename)
        self.assertEqual(result_list, [])
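The two blocks above pin down the download rule: the file is yielded unless the source is strictly newer than the destination. Stated as a standalone predicate (a sketch of the decision the assertions encode, not the comparator's actual code):

    import datetime

    def should_download(src_last_update, dest_last_update):
        # Skip only when the source (s3) object is strictly newer than the
        # local destination; otherwise the comparator yields the file.
        return src_last_update <= dest_last_update

    now = datetime.datetime.now()
    later = now + datetime.timedelta(0, 3)
    assert should_download(now, later) is True    # first block: yielded
    assert should_download(later, now) is False   # second block: skipped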
Example #18
    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory itself is included because this is a delete command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {'src': {'path': self.bucket + '/', 'type': 's3'},
                         'dest': {'path': '', 'type': 'local'},
                         'dir_op': True, 'use_src_name': True}
        files = FileGenerator(self.service, self.endpoint,
                              'delete').call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(src=self.bucket + '/another_directory/',
                              dest='another_directory' + os.sep,
                              compare_key='another_directory/',
                              size=result_list[0].size,
                              last_update=result_list[0].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info2 = FileInfo(src=self.file2,
                              dest='another_directory' + os.sep + 'text2.txt',
                              compare_key='another_directory/text2.txt',
                              size=result_list[1].size,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)
        file_info3 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=result_list[2].size,
                              last_update=result_list[2].last_update,
                              src_type='s3',
                              dest_type='local', operation_name='delete',
                              service=None, endpoint=None)

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
Example #19
 def test_enqueue_range_download_tasks_stream(self):
     s3handler = S3StreamHandler(self.session, self.params, chunksize=100)
     s3handler.executor = mock.Mock()
     fileinfo = FileInfo('filename', operation_name='download',
                         is_stream=True, size=100)
     s3handler._enqueue_range_download_tasks(fileinfo)
      # Ensure that no task to create a local file was submitted.
     submitted_tasks = s3handler.executor.submit.call_args_list
     self.assertNotEqual(type(submitted_tasks[0][0][0]),
                         CreateLocalFileTask)
Example #20
 def test_delete_url_encode(self):
     key = self.bucket + '/a+b/foo'
     tasks = [FileInfo(
         src=key, src_type='s3',
         dest_type='local', operation_name='delete', size=0,
         service=self.service, endpoint=self.endpoint,
     )]
     self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
     self.s3_handler.call(tasks)
     self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
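The key 'a+b/foo' exercises URL encoding: under form encoding a '+' decodes to a space, so a delete that mishandles the escaping would target the wrong key. A quick standard-library illustration (not code from the handler):

    from urllib.parse import quote, unquote, unquote_plus

    key = 'a+b/foo'
    assert quote(key, safe='/') == 'a%2Bb/foo'    # '+' must be percent-escaped
    assert unquote('a%2Bb/foo') == 'a+b/foo'      # round-trips correctly
    assert unquote_plus('a+b/foo') == 'a b/foo'   # naive form-decoding corrupts it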
Example #21
    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            'src': {
                'path': self.bucket + '/',
                'type': 's3'
            },
            'dest': {
                'path': '',
                'type': 'local'
            },
            'dir_op': True,
            'use_src_name': True
        }
        params = {'region': 'us-east-1'}
        result_list = list(
            FileGenerator(self.service, self.endpoint, '',
                          params).call(input_s3_file))
        file_info = FileInfo(src=self.file2,
                             dest='another_directory' + os.sep + 'text2.txt',
                             compare_key='another_directory/text2.txt',
                             size=21,
                             last_update=result_list[0].last_update,
                             src_type='s3',
                             dest_type='local',
                             operation_name='')
        file_info2 = FileInfo(src=self.file1,
                              dest='text1.txt',
                              compare_key='text1.txt',
                              size=15,
                              last_update=result_list[1].last_update,
                              src_type='s3',
                              dest_type='local',
                              operation_name='')

        expected_result = [file_info, file_info2]
        self.assertEqual(len(result_list), 2)
        compare_files(self, result_list[0], expected_result[0])
        compare_files(self, result_list[1], expected_result[1])
Example #22
 def test_multi_download_fail(self):
     """
     This test ensures that a multipart download can handle a
     standard error exception stemming from an operation
     being performed on a nonexistant bucket.  The existing file
     should be downloaded properly but the other will not.
     """
     tasks = []
     wrong_s3_files = [
         self.bucket + '/text1.txt',
         self.bucket[:-1] + '/another_directory/text2.txt'
     ]
     time = datetime.datetime.now()
     for i in range(len(self.s3_files)):
         tasks.append(
             FileInfo(src=wrong_s3_files[i],
                      src_type='s3',
                      dest=self.loc_files[i],
                      dest_type='local',
                      last_update=time,
                      operation_name='download',
                      size=15,
                      client=self.client))
     mock_stream = mock.Mock()
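      # Three chunk/b'' pairs cover the three ranged reads of the first
      # file; the bodiless responses below then fail the second file.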
     mock_stream.read.side_effect = [
         b'This ', b'', b'is a ', b'', b'test.', b''
     ]
     self.parsed_responses = [
         {
             'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
             'Body': mock_stream
         },
         {
             'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
             'Body': mock_stream
         },
         {
             'ETag': '"120ea8a25e5d487bf68b5f7096440019"',
             'Body': mock_stream
         },
          # Responses with no body will raise an error for the second
          # multipart download.
         {},
         {},
         {}
     ]
      # Perform the multipart download.
     stdout, stderr, rc = self.run_s3_handler(self.s3_handler_multi, tasks)
     # Confirm that the files now exist.
     self.assertTrue(os.path.exists(self.loc_files[0]))
     # The second file should not exist.
     self.assertFalse(os.path.exists(self.loc_files[1]))
     # Ensure that contents are as expected.
     with open(self.loc_files[0], 'rb') as filename:
         self.assertEqual(filename.read(), b'This is a test.')
Example #23
    def test_bucket(self):
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'

        file_info = FileInfo(src=self.bucket, operation='make_bucket', size=0)
        self.s3_handler.call([file_info])
        buckets_list = []
        for bucket in list_buckets(self.session):
            buckets_list.append(bucket['Name'])
        self.assertIn(self.bucket[:-1], buckets_list)

        file_info = FileInfo(src=self.bucket,
                             operation='remove_bucket',
                             size=0)
        self.s3_handler.call([file_info])
        buckets_list = []
        for bucket in list_buckets(self.session):
            buckets_list.append(bucket['Name'])
        self.assertNotIn(self.bucket[:-1], buckets_list)
Example #24
    def test_submit_with_extra_args(self):
        fileinfo = FileInfo(src=self.source_bucket + '/' + self.source_key,
                            dest=self.bucket + '/' + self.key)
        # Set an extra argument such as storage_class to make sure CLI
        # params get mapped to request parameters.
        self.cli_params['storage_class'] = 'STANDARD_IA'
        self.transfer_request_submitter.submit(fileinfo)

        copy_call_kwargs = self.transfer_manager.copy.call_args[1]
        self.assertEqual(copy_call_kwargs['extra_args'],
                         {'StorageClass': 'STANDARD_IA'})
Example #25
    def test_download_swallows_exceptions(self):
        handler = S3TransferStreamHandler(
            self.session, self.params, manager=self.transfer_manager)
        file = FileInfo('foo-bucket/bar.txt', '-', is_stream=True,
                        operation_name='download')

        self.transfer_future.result.side_effect = Exception()

        response = handler.call([file])
        self.assertEqual(response.num_tasks_failed, 1)
        self.assertEqual(response.num_tasks_warned, 0)
Example #26
    def test_submit_when_no_guess_content_mime_type(self):
        fileinfo = FileInfo(src=self.source_bucket + '/' + self.source_key,
                            dest=self.bucket + '/' + self.key)
        self.cli_params['guess_mime_type'] = False
        self.transfer_request_submitter.submit(fileinfo)

        copy_call_kwargs = self.transfer_manager.copy.call_args[1]
        ref_subscribers = [ProvideSizeSubscriber, CopyResultSubscriber]
        actual_subscribers = copy_call_kwargs['subscribers']
        self.assertEqual(len(ref_subscribers), len(actual_subscribers))
        for i, actual_subscriber in enumerate(actual_subscribers):
            self.assertIsInstance(actual_subscriber, ref_subscribers[i])
Example #27
 def create_file_info(self, key, associated_response_data=None):
     kwargs = {
         'src': self.bucket + '/' + key,
         'src_type': 's3',
         'dest': self.filename,
         'dest_type': 'local',
         'operation_name': 'download',
         'compare_key': key,
     }
     if associated_response_data is not None:
         kwargs['associated_response_data'] = associated_response_data
     return FileInfo(**kwargs)
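A hypothetical call to the helper above (the key and response data are invented for illustration):

    file_info = self.create_file_info(
        'another_directory/text2.txt',
        associated_response_data={'Size': 21})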
Example #28
 def test_enqueue_download_stream(self):
     self.parameters['is_stream'] = True
     self.s3_transfer_handler.call([
         FileInfo(src='bucket/key',
                  dest='-',
                  compare_key='key',
                  operation_name='download')
     ])
     self.assertEqual(self.transfer_manager.download.call_count, 1)
     download_call_kwargs = self.transfer_manager.download.call_args[1]
     self.assertIsInstance(download_call_kwargs['fileobj'],
                           StdoutBytesWriter)
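A '-' destination routes the downloaded bytes to standard output; a minimal sketch of what such a writer can look like (illustrative only, not the actual StdoutBytesWriter implementation):

    import sys

    class BytesToStdout:
        # File-like object exposing write() for raw bytes.
        def write(self, data):
            return sys.stdout.buffer.write(data)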
Example #29
 def test_exception_when_enqueuing(self):
     fileinfos = [
         FileInfo(src='filename',
                  dest='bucket/key',
                  operation_name='upload')
     ]
     self.transfer_manager.__exit__.side_effect = Exception(
         'some exception')
     command_result = self.s3_transfer_handler.call(fileinfos)
      # The exception should have been raised, causing the command result
      # to report one failed task.
     self.assertEqual(command_result, (1, 0))
Example #30
    def test_enqueue_copies(self):
        fileinfos = []
        num_transfers = 5
        for _ in range(num_transfers):
            fileinfos.append(
                FileInfo(src='sourcebucket/sourcekey',
                         dest='bucket/key',
                         compare_key='key',
                         operation_name='copy'))

        self.s3_transfer_handler.call(fileinfos)
        self.assertEqual(self.transfer_manager.copy.call_count, num_transfers)