def _should_contain_zip_content(value):
    """Raise ValueError(ERROR_MSG) if *value* is not readable zip data.

    Non-bytes input is UTF-8 encoded first; such input is almost
    certainly not a valid zip archive, but we still attempt to parse it
    so the check is authoritative.
    """
    if not isinstance(value, bytes):
        value = value.encode('utf-8')
    buf = six.BytesIO(value)
    try:
        with closing(zipfile.ZipFile(buf)) as archive:
            archive.infolist()
    except zipfile.BadZipfile:
        raise ValueError(ERROR_MSG)
def test_multipart_download_uses_correct_client_calls(self):
    # The downloader must issue a ranged GetObject carrying the
    # bucket/key it was asked to download.
    body = b'foobarbaz'
    s3_client = mock.Mock()
    s3_client.get_object.return_value = {'Body': six.BytesIO(body)}
    downloader = MultipartDownloader(
        s3_client, TransferConfig(), InMemoryOSLayer({}),
        SequentialExecutor)
    downloader.download_file('bucket', 'key', 'filename', len(body), {})
    s3_client.get_object.assert_called_with(
        Range='bytes=0-', Bucket='bucket', Key='key')
def test_get_object_stream_is_retried_and_succeeds(self):
    # A transient socket error on the first GetObject must be retried;
    # the second attempt succeeds and the download completes.
    content_length = 20
    osutil = InMemoryOSLayer({'smallfile': b'hello world'})
    transfer = S3Transfer(self.client, osutil=osutil)
    self.client.head_object.return_value = {
        'ContentLength': content_length}
    self.client.get_object.side_effect = [
        socket.error("fake error"),        # first attempt fails
        {'Body': six.BytesIO(b'foobar')},  # retry succeeds
    ]
    transfer.download_file('bucket', 'key', '/tmp/smallfile')
    self.assertEqual(self.client.get_object.call_count, 2)
def test_io_thread_fails_to_open_triggers_shutdown_error(self):
    # An IOError raised while opening the destination file must
    # propagate back out of download_file().
    s3_client = mock.Mock()
    s3_client.get_object.return_value = {'Body': six.BytesIO(b'asdf')}
    os_layer = mock.Mock(spec=OSUtils)
    os_layer.open.side_effect = IOError("Can't open file")
    downloader = MultipartDownloader(
        s3_client, TransferConfig(), os_layer, SequentialExecutor)
    with self.assertRaisesRegexp(IOError, "Can't open file"):
        downloader.download_file(
            'bucket', 'key', 'filename', len(b'asdf'), {})
def test_multipart_download_with_multiple_parts_and_extra_args(self):
    # Extra args (RequestPayer) must be forwarded on the ranged
    # GetObject call; the stubber enforces the expected params.
    body = b'foobarbaz'
    s3_client = Session().create_client('s3')
    stubber = Stubber(s3_client)
    stubber.add_response(
        'get_object',
        {'Body': six.BytesIO(body)},
        {
            'Range': mock.ANY,
            'Bucket': mock.ANY,
            'Key': mock.ANY,
            'RequestPayer': 'requester',
        })
    stubber.activate()
    downloader = MultipartDownloader(
        s3_client, TransferConfig(), InMemoryOSLayer({}),
        SequentialExecutor)
    downloader.download_file(
        'bucket', 'key', 'filename', len(body),
        {'RequestPayer': 'requester'})
    stubber.assert_no_pending_responses()
def test_io_thread_failure_triggers_shutdown(self):
    # A failure raised from the IO (write) thread must propagate back
    # out of download_file().
    body = b'foobarbaz'
    s3_client = mock.Mock()
    s3_client.get_object.return_value = {'Body': six.BytesIO(body)}
    broken_fileobj = mock.MagicMock()
    broken_fileobj.__enter__.return_value = broken_fileobj
    broken_fileobj.write.side_effect = Exception("fake IO error")
    os_layer = mock.Mock()
    os_layer.open.return_value = broken_fileobj
    downloader = MultipartDownloader(
        s3_client, TransferConfig(), os_layer, SequentialExecutor)
    with self.assertRaisesRegexp(Exception, "fake IO error"):
        downloader.download_file(
            'bucket', 'key', 'filename', len(body), {})
def test_download_futures_fail_triggers_shutdown(self):
    class FailedDownloadParts(SequentialExecutor):
        # Executor whose first submitted future (the download-parts
        # thread) fails immediately with a preset exception.

        def __init__(self, max_workers):
            # Delegate to the parent constructor so any executor state
            # is initialized; the previous override silently dropped it.
            super(FailedDownloadParts, self).__init__(max_workers)
            self.is_first = True

        def submit(self, function):
            future = super(FailedDownloadParts, self).submit(function)
            if self.is_first:
                # This is the download_parts_thread.
                future.set_exception(
                    Exception("fake download parts error"))
                self.is_first = False
            return future

    client = mock.Mock()
    response_body = b'foobarbaz'
    client.get_object.return_value = {'Body': six.BytesIO(response_body)}
    downloader = MultipartDownloader(client, TransferConfig(),
                                     InMemoryOSLayer({}),
                                     FailedDownloadParts)
    # The exception set on the download-parts future must propagate
    # back up via download_file().
    with self.assertRaisesRegexp(Exception, "fake download parts error"):
        downloader.download_file('bucket', 'key', 'filename',
                                 len(response_body), {})
def test_download_file_fowards_extra_args(self):
    extra_args = {
        'SSECustomerKey': 'foo',
        'SSECustomerAlgorithm': 'AES256',
    }
    below_threshold = 20
    osutil = InMemoryOSLayer({'smallfile': b'hello world'})
    transfer = S3Transfer(self.client, osutil=osutil)
    self.client.head_object.return_value = {
        'ContentLength': below_threshold}
    self.client.get_object.return_value = {'Body': six.BytesIO(b'foobar')}
    transfer.download_file('bucket', 'key', '/tmp/smallfile',
                           extra_args=extra_args)
    # The GetObject call must carry the extra_args: retrieving an SSE-C
    # object without the customer key params fails with a 400.
    self.client.get_object.assert_called_with(
        Bucket='bucket', Key='key', SSECustomerAlgorithm='AES256',
        SSECustomerKey='foo')
def open_file_chunk_reader(self, filename, start_byte, size, callback):
    # In-memory fake: hand back the whole stored payload wrapped in a
    # closing() context manager.  start_byte, size, and callback are
    # accepted for interface compatibility but deliberately ignored.
    contents = self.filemap[filename]
    return closing(six.BytesIO(contents))