def test_upload_with_invalid_upload_id_in_tracker_file(self):
    """
    Tests resumable upload with invalid upload ID
    """
    invalid_upload_id = ('http://pub.storage.googleapis.com/?upload_id='
                         'AyzB2Uo74W4EYxyi5dp_-r68jz8rtbvshsv4TX7srJVkJ57CxTY5Dw2')
    tmpdir = self._MakeTempDir()
    invalid_upload_id_tracker_file_name = os.path.join(
        tmpdir, 'invalid_upload_id_tracker')
    # Seed the tracker file with a URI whose upload ID the server
    # will reject.
    with open(invalid_upload_id_tracker_file_name, 'w') as f:
        f.write(invalid_upload_id)
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=invalid_upload_id_tracker_file_name)
    small_src_file_as_string, small_src_file = self.make_small_file()
    # An error should occur, but then the tracker URI should be
    # regenerated and the update should succeed.
    small_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    dst_key.set_contents_from_file(
        small_src_file, res_upload_handler=res_upload_handler)
    self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
    self.assertEqual(small_src_file_as_string,
                     dst_key.get_contents_as_string())
    # The handler must have replaced the bogus tracker URI.
    self.assertNotEqual(invalid_upload_id,
                        res_upload_handler.get_tracker_uri())
def test_upload_with_invalid_upload_id_in_tracker_file(self):
    """
    Tests resumable upload with invalid upload ID
    """
    invalid_upload_id = (
        'http://pub.storage.googleapis.com/?upload_id='
        'AyzB2Uo74W4EYxyi5dp_-r68jz8rtbvshsv4TX7srJVkJ57CxTY5Dw2')
    tmpdir = self._MakeTempDir()
    invalid_upload_id_tracker_file_name = os.path.join(
        tmpdir, 'invalid_upload_id_tracker')
    # Seed the tracker file with a URI whose upload ID the server
    # will reject.
    with open(invalid_upload_id_tracker_file_name, 'w') as f:
        f.write(invalid_upload_id)
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=invalid_upload_id_tracker_file_name)
    small_src_file_as_string, small_src_file = self.make_small_file()
    # An error should occur, but then the tracker URI should be
    # regenerated and the update should succeed.
    small_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    dst_key.set_contents_from_file(small_src_file,
                                   res_upload_handler=res_upload_handler)
    self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
    self.assertEqual(small_src_file_as_string,
                     dst_key.get_contents_as_string())
    # The handler must have replaced the bogus tracker URI.
    self.assertNotEqual(invalid_upload_id,
                        res_upload_handler.get_tracker_uri())
def test_failed_upload_with_persistent_tracker(self):
    """
    Tests that failed resumable upload leaves a correct tracker URI file
    """
    harnass = CallbackTestHarnass()
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=self.tracker_file_name, num_retries=0)
    try:
        self.dst_key.set_contents_from_file(
            self.small_src_file, cb=harnass.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        # We'll get a ResumableUploadException at this point because
        # of CallbackTestHarnass (above). Check that the tracker file was
        # created correctly.
        self.assertEqual(e.disposition,
                         ResumableTransferDisposition.ABORT_CUR_PROCESS)
        self.assertTrue(os.path.exists(self.tracker_file_name))
        # 'with' guarantees the tracker file is closed even if the
        # final assertion fails (the original leaked the handle then).
        with open(self.tracker_file_name) as f:
            uri_from_file = f.readline().strip()
        self.assertEqual(uri_from_file,
                         res_upload_handler.get_tracker_uri())
def test_failed_upload_with_persistent_tracker(self):
    """
    Tests that failed resumable upload leaves a correct tracker URI file
    """
    harness = CallbackTestHarness()
    tracker_file_name = self.make_tracker_file()
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=tracker_file_name, num_retries=0)
    small_src_file_as_string, small_src_file = self.make_small_file()
    small_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    try:
        dst_key.set_contents_from_file(
            small_src_file, cb=harness.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        # We'll get a ResumableUploadException at this point because
        # of CallbackTestHarness (above). Check that the tracker file was
        # created correctly.
        self.assertEqual(e.disposition,
                         ResumableTransferDisposition.ABORT_CUR_PROCESS)
        self.assertTrue(os.path.exists(tracker_file_name))
        # 'with' guarantees the tracker file is closed even if the
        # final assertion fails (the original leaked the handle then).
        with open(tracker_file_name) as f:
            uri_from_file = f.readline().strip()
        self.assertEqual(uri_from_file,
                         res_upload_handler.get_tracker_uri())
def test_upload_with_invalid_upload_id_in_tracker_file(self):
    """
    Tests resumable upload with invalid upload ID
    """
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=self.invalid_upload_id_tracker_file_name)
    # An error should occur, but then the tracker URI should be
    # regenerated and the update should succeed.
    self.dst_key.set_contents_from_file(
        self.small_src_file, res_upload_handler=res_upload_handler)
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
    # The handler must have replaced the bogus tracker URI.
    self.assertNotEqual(self.invalid_upload_id,
                        res_upload_handler.get_tracker_uri())
def test_upload_with_invalid_upload_id_in_tracker_file(self):
    """
    Tests resumable upload with invalid upload ID
    """
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=self.invalid_upload_id_tracker_file_name)
    # An error should occur, but then the tracker URI should be
    # regenerated and the update should succeed.
    self.dst_key.set_contents_from_file(
        self.small_src_file, res_upload_handler=res_upload_handler)
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
    # The handler must have replaced the bogus tracker URI.
    self.assertNotEqual(self.invalid_upload_id,
                        res_upload_handler.get_tracker_uri())
def Execute():
    """Run one upload attempt; return True if it aborted as expected.

    Returns False when set_contents_from_file completes without raising
    a ResumableUploadException, so the caller can retry.
    """
    dst_key = self._MakeKey(set_contents=False)
    dst_key_uri = storage_uri(
        'gs://' + dst_key.bucket.name).clone_replace_name(dst_key.name)
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    try:
        dst_key.set_contents_from_file(
            test_file, cb=harness.call,
            res_upload_handler=res_upload_handler)
        return False
    except ResumableUploadException as e:
        self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
        # The local file must still be its original size.
        test_file.seek(0, os.SEEK_END)
        self.assertEqual(test_file_size, test_file.tell())
        self.assertNotEqual(
            e.message.find('md5 signature doesn\'t match etag'), -1)
        # The aborted upload must not have left a partial object behind.
        try:
            dst_key_uri.get_key()
            self.fail('Did not get expected InvalidUriError')
        except InvalidUriError:
            pass
        return True
def test_upload_with_file_content_change_during_upload(self):
    """
    Tests resumable upload on a file that changes one byte of content
    (so, size stays the same) while upload in progress
    """
    test_file_size = 500 * 1024  # 500 KB.
    test_file = self.build_input_file(test_file_size)[1]
    harness = CallbackTestHarness(fail_after_n_bytes=test_file_size/2,
                                  fp_to_change=test_file,
                                  # Write to byte 1, as the CallbackTestHarness writes
                                  # 3 bytes. This will result in the data on the server
                                  # being different than the local file.
                                  fp_change_pos=1)
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    dst_key = self._MakeKey(set_contents=False)
    bucket_uri = storage_uri('gs://' + dst_key.bucket.name)
    dst_key_uri = bucket_uri.clone_replace_name(dst_key.name)
    try:
        dst_key.set_contents_from_file(
            test_file, cb=harness.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
        # Ensure the file size didn't change.
        test_file.seek(0, os.SEEK_END)
        self.assertEqual(test_file_size, test_file.tell())
        self.assertNotEqual(
            e.message.find('md5 signature doesn\'t match etag'), -1)
        # Ensure the bad data wasn't left around.
        try:
            dst_key_uri.get_key()
            self.fail('Did not get expected InvalidUriError')
        except InvalidUriError:
            pass
def test_upload_with_file_content_change_during_upload(self):
    """
    Tests resumable upload on a file that changes one byte of content
    (so, size stays the same) while upload in progress
    """
    test_file_size = 500 * 1024  # 500 KB.
    test_file = self.build_test_input_file(test_file_size)[1]
    harnass = CallbackTestHarnass(
        fail_after_n_bytes=test_file_size / 2,
        fp_to_change=test_file,
        # Writing at file_size-5 won't change file
        # size because CallbackTestHarnass only
        # writes 3 bytes.
        fp_change_pos=test_file_size - 5)
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    try:
        self.dst_key.set_contents_from_file(
            test_file, cb=harnass.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
        # Ensure the file size didn't change.
        test_file.seek(0, os.SEEK_END)
        self.assertEqual(test_file_size, test_file.tell())
        self.assertNotEqual(
            e.message.find('md5 signature doesn\'t match etag'), -1)
        # Ensure the bad data wasn't left around.
        try:
            self.dst_key_uri.get_key()
            self.fail('Did not get expected InvalidUriError')
        except InvalidUriError:
            pass
def test_upload_retains_metadata(self):
    """
    Tests that resumable upload correctly sets passed metadata
    """
    res_upload_handler = ResumableUploadHandler()
    headers = {
        'Content-Type': 'text/plain',
        'Content-Encoding': 'gzip',
        'x-goog-meta-abc': 'my meta',
        'x-goog-acl': 'public-read'
    }
    self.dst_key.set_contents_from_file(
        self.small_src_file, headers=headers,
        res_upload_handler=res_upload_handler)
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
    self.dst_key.open_read()
    self.assertEqual('text/plain', self.dst_key.content_type)
    self.assertEqual('gzip', self.dst_key.content_encoding)
    self.assertTrue('abc' in self.dst_key.metadata)
    self.assertEqual('my meta', str(self.dst_key.metadata['abc']))
    acl = self.dst_key.get_acl()
    for entry in acl.entries.entry_list:
        if str(entry.scope) == '<AllUsers>':
            # Bug fix: check the permission of the matched entry,
            # not the hard-coded entry_list[1], whose position in the
            # ACL is not guaranteed.
            self.assertEqual('READ', str(entry.permission))
            return
    self.fail('No <AllUsers> scope found')
def test_upload_with_file_size_change_between_starts(self):
    """
    Tests resumable upload on a file that changes sizes between initial
    upload start and restart
    """
    harnass = CallbackTestHarnass(
        fail_after_n_bytes=self.larger_src_file_size / 2)
    # Set up first process' ResumableUploadHandler not to do any
    # retries (initial upload request will establish expected size to
    # upload server).
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=self.tracker_file_name, num_retries=0)
    try:
        self.dst_key.set_contents_from_file(
            self.larger_src_file, cb=harnass.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        # First abort (from harnass-forced failure) should be
        # ABORT_CUR_PROCESS.
        self.assertEqual(e.disposition,
                         ResumableTransferDisposition.ABORT_CUR_PROCESS)
        # Ensure a tracker file survived.
        self.assertTrue(os.path.exists(self.tracker_file_name))
def test_multiple_in_process_failures_then_succeed_with_tracker_file(self):
    """
    Tests resumable upload that fails completely in one process,
    then when restarted completes, using a tracker file
    """
    # Set up test harnass that causes more failures than a single
    # ResumableUploadHandler instance will handle, writing enough data
    # before the first failure that some of it survives that process run.
    harnass = CallbackTestHarnass(
        fail_after_n_bytes=self.larger_src_file_size / 2,
        num_times_to_fail=2)
    res_upload_handler = ResumableUploadHandler(
        tracker_file_name=self.tracker_file_name, num_retries=1)
    self.larger_src_file.seek(0)
    try:
        self.dst_key.set_contents_from_file(
            self.larger_src_file, cb=harnass.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        self.assertEqual(e.disposition,
                         ResumableTransferDisposition.ABORT_CUR_PROCESS)
        # Ensure a tracker file survived.
        self.assertTrue(os.path.exists(self.tracker_file_name))
def test_empty_file_upload(self):
    """
    Tests uploading an empty file (exercises boundary conditions).
    """
    handler = ResumableUploadHandler()
    self.dst_key.set_contents_from_file(self.empty_src_file,
                                        res_upload_handler=handler)
    # A zero-byte source must produce a zero-byte object.
    self.assertEqual(0, self.dst_key.size)
def test_upload_without_persistent_tracker(self):
    """
    Tests a single resumable upload, with no tracker URI persistence
    """
    handler = ResumableUploadHandler()
    self.dst_key.set_contents_from_file(self.small_src_file,
                                        res_upload_handler=handler)
    # Verify both the reported size and the round-tripped content.
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
def test_multiple_in_process_failures_then_succeed(self):
    """
    Tests resumable upload that fails twice in one process, then completes
    """
    handler = ResumableUploadHandler(num_retries=3)
    self.dst_key.set_contents_from_file(self.small_src_file,
                                        res_upload_handler=handler)
    # Ensure uploaded object has correct content.
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
def test_empty_file_upload(self):
    """
    Tests uploading an empty file (exercises boundary conditions).
    """
    handler = ResumableUploadHandler()
    src_fp = StringIO.StringIO('')
    src_fp.seek(0)
    key = self._MakeKey(set_contents=False)
    key.set_contents_from_file(src_fp, res_upload_handler=handler)
    # A zero-byte source must produce a zero-byte object.
    self.assertEqual(0, key.size)
def test_upload_without_persistent_tracker(self):
    """
    Tests a single resumable upload, with no tracker URI persistence
    """
    handler = ResumableUploadHandler()
    content, src_fp = self.make_small_file()
    src_fp.seek(0)
    key = self._MakeKey(set_contents=False)
    key.set_contents_from_file(src_fp, res_upload_handler=handler)
    # Verify both the reported size and the round-tripped content.
    self.assertEqual(SMALL_KEY_SIZE, key.size)
    self.assertEqual(content, key.get_contents_as_string())
def test_upload_with_syntactically_invalid_tracker_uri(self):
    """
    Tests resumable upload with a syntactically invalid tracker URI
    """
    handler = ResumableUploadHandler(
        tracker_file_name=self.syntactically_invalid_tracker_file_name)
    # An error should be printed about the invalid URI, but then it
    # should run the update successfully.
    self.dst_key.set_contents_from_file(self.small_src_file,
                                        res_upload_handler=handler)
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
def test_multiple_in_process_failures_then_succeed(self):
    """
    Tests resumable upload that fails twice in one process, then completes
    """
    handler = ResumableUploadHandler(num_retries=3)
    content, src_fp = self.make_small_file()
    src_fp.seek(0)
    key = self._MakeKey(set_contents=False)
    key.set_contents_from_file(src_fp, res_upload_handler=handler)
    # Ensure uploaded object has correct content.
    self.assertEqual(SMALL_KEY_SIZE, key.size)
    self.assertEqual(content, key.get_contents_as_string())
def test_upload_with_unwritable_tracker_file(self):
    """
    Tests resumable upload with an unwritable tracker file
    """
    # Make dir where tracker_file lives temporarily unwritable.
    save_mod = os.stat(self.tmp_dir).st_mode
    try:
        os.chmod(self.tmp_dir, 0)
        res_upload_handler = ResumableUploadHandler(
            tracker_file_name=self.tracker_file_name)
    except ResumableUploadException as e:
        self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
        self.assertNotEqual(
            e.message.find('Couldn\'t write URI tracker file'), -1)
    finally:
        # Restore original protection of dir where tracker_file lives.
        # The original test omitted this and leaked the chmod(0),
        # leaving the temp dir unwritable for subsequent tests.
        os.chmod(self.tmp_dir, save_mod)
def test_broken_pipe_recovery(self):
    """
    Tests handling of a Broken Pipe (which interacts with an httplib bug)
    """
    broken_pipe = IOError(errno.EPIPE, "Broken pipe")
    harnass = CallbackTestHarnass(exception=broken_pipe)
    handler = ResumableUploadHandler(num_retries=1)
    self.dst_key.set_contents_from_file(self.small_src_file,
                                        cb=harnass.call,
                                        res_upload_handler=handler)
    # Ensure uploaded object has correct content.
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
def test_non_retryable_exception_handling(self):
    """
    Tests a resumable upload that fails with a non-retryable exception
    """
    harnass = CallbackTestHarnass(
        exception=OSError(errno.EACCES, 'Permission denied'))
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    try:
        self.dst_key.set_contents_from_file(
            self.small_src_file, cb=harnass.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected OSError')
    except OSError as e:
        # Ensure the error was re-raised unchanged; use the symbolic
        # errno constant rather than the magic number 13 so the check
        # visibly matches the exception injected above.
        self.assertEqual(e.errno, errno.EACCES)
def test_retryable_exception_recovery(self):
    """
    Tests handling of a retryable exception
    """
    # Test one of the RETRYABLE_EXCEPTIONS.
    retryable = ResumableUploadHandler.RETRYABLE_EXCEPTIONS[0]
    harnass = CallbackTestHarnass(exception=retryable)
    handler = ResumableUploadHandler(num_retries=1)
    self.dst_key.set_contents_from_file(self.small_src_file,
                                        cb=harnass.call,
                                        res_upload_handler=handler)
    # Ensure uploaded object has correct content.
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
def _GetTransferHandlers(self, uri, key, file_size, upload):
    """
    Selects upload/download and callback handlers.

    We use a callback handler that shows a simple textual progress
    indicator if file_size is above the configurable threshold.

    We use a resumable transfer handler if file_size is >= the
    configurable threshold and resumable transfers are supported by
    the given provider. boto supports resumable downloads for all
    providers, but resumable uploads are currently only supported by GS.

    Returns a (cb, num_cb, transfer_handler) tuple; all three are None
    for transfers below the threshold.
    """
    config = boto.config
    resumable_threshold = config.getint('GSUtil', 'resumable_threshold',
                                        ONE_MB)
    if file_size >= resumable_threshold:
        cb = self._FileCopyCallbackHandler(upload).call
        num_cb = int(file_size / ONE_MB)
        resumable_tracker_dir = config.get(
            'GSUtil', 'resumable_tracker_dir',
            os.path.expanduser('~' + os.sep + '.gsutil'))
        if not os.path.exists(resumable_tracker_dir):
            os.makedirs(resumable_tracker_dir)
        # Flatten path separators out of the tracker file name; use a
        # raw string (r'[/\\]') instead of the double-escaped
        # '[/\\\\]' form for readability — both match '/' or '\'.
        if upload:
            # Encode the src bucket and key into the tracker file name.
            res_tracker_file_name = re.sub(
                r'[/\\]', '_',
                'resumable_upload__%s__%s.url' % (key.bucket.name,
                                                  key.name))
        else:
            # Encode the fully-qualified src file name into the
            # tracker file name.
            res_tracker_file_name = re.sub(
                r'[/\\]', '_',
                'resumable_download__%s.etag' %
                os.path.realpath(uri.object_name))
        # os.path.join instead of manual '%s%s%s' % (dir, os.sep, name).
        tracker_file = os.path.join(resumable_tracker_dir,
                                    res_tracker_file_name)
        if upload:
            if uri.scheme == 'gs':
                transfer_handler = ResumableUploadHandler(tracker_file)
            else:
                transfer_handler = None
        else:
            transfer_handler = ResumableDownloadHandler(tracker_file)
    else:
        transfer_handler = None
        cb = None
        num_cb = None
    return (cb, num_cb, transfer_handler)
def test_upload_with_content_length_header_set(self):
    """
    Tests resumable upload on a file when the user supplies a
    Content-Length header. This is used by gsutil, for example,
    to set the content length when gzipping a file.
    """
    res_upload_handler = ResumableUploadHandler()
    try:
        self.dst_key.set_contents_from_file(
            self.small_src_file, res_upload_handler=res_upload_handler,
            headers={'Content-Length': self.small_src_file_size})
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        # Resumable uploads compute the length themselves; a
        # caller-supplied Content-Length must trigger a hard abort.
        self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
        self.assertNotEqual(
            e.message.find('Attempt to specify Content-Length header'), -1)
def test_upload_with_file_size_change_between_starts(self):
    """
    Tests resumable upload on a file that changes sizes between initial
    upload start and restart
    """
    harness = CallbackTestHarness(fail_after_n_bytes=LARGE_KEY_SIZE / 2)
    tracker_file_name = self.make_tracker_file()
    # First process' handler does no retries: the initial upload
    # request establishes the expected size with the upload server,
    # then the harness-forced failure aborts this process.
    handler = ResumableUploadHandler(
        tracker_file_name=tracker_file_name, num_retries=0)
    larger_src_file_as_string, larger_src_file = self.make_large_file()
    larger_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    try:
        dst_key.set_contents_from_file(
            larger_src_file, cb=harness.call, res_upload_handler=handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        # First abort (from harness-forced failure) should be
        # ABORT_CUR_PROCESS.
        self.assertEqual(e.disposition,
                         ResumableTransferDisposition.ABORT_CUR_PROCESS)
        # A tracker file must have survived the abort.
        self.assertTrue(os.path.exists(tracker_file_name))
    # Retry with a different-sized source file. Wait 1 second between
    # attempts, giving the upload server a chance to save state so it
    # can answer the changed file size with a 500 on the next attempt.
    time.sleep(1)
    try:
        largest_src_file = self.build_input_file(LARGEST_KEY_SIZE)[1]
        largest_src_file.seek(0)
        dst_key.set_contents_from_file(
            largest_src_file, res_upload_handler=handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        # This abort should be a hard abort (file size changing during
        # transfer).
        self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
        self.assertNotEqual(e.message.find('file size changed'), -1,
                            e.message)
def test_retryable_exception_recovery(self):
    """
    Tests handling of a retryable exception
    """
    # Test one of the RETRYABLE_EXCEPTIONS.
    retryable = ResumableUploadHandler.RETRYABLE_EXCEPTIONS[0]
    harness = CallbackTestHarness(exception=retryable)
    handler = ResumableUploadHandler(num_retries=1)
    content, src_fp = self.make_small_file()
    src_fp.seek(0)
    key = self._MakeKey(set_contents=False)
    key.set_contents_from_file(src_fp, cb=harness.call,
                               res_upload_handler=handler)
    # Ensure uploaded object has correct content.
    self.assertEqual(SMALL_KEY_SIZE, key.size)
    self.assertEqual(content, key.get_contents_as_string())
def test_failed_and_restarted_upload_with_persistent_tracker(self):
    """
    Tests resumable upload that fails once and then completes,
    with tracker file
    """
    harnass = CallbackTestHarnass()
    handler = ResumableUploadHandler(
        tracker_file_name=self.tracker_file_name, num_retries=1)
    self.dst_key.set_contents_from_file(self.small_src_file,
                                        cb=harnass.call,
                                        res_upload_handler=handler)
    # Ensure uploaded object has correct content.
    self.assertEqual(self.small_src_file_size, self.dst_key.size)
    self.assertEqual(self.small_src_file_as_string,
                     self.dst_key.get_contents_as_string())
    # Ensure tracker file deleted.
    self.assertFalse(os.path.exists(self.tracker_file_name))
def test_upload_with_unwritable_tracker_file(self):
    """
    Tests resumable upload with an unwritable tracker file
    """
    tmp_dir = self._MakeTempDir()
    tracker_file_name = self.make_tracker_file(tmp_dir)
    # Temporarily strip all permissions from the directory holding the
    # tracker file, so the handler cannot write to it.
    saved_mode = os.stat(tmp_dir).st_mode
    try:
        os.chmod(tmp_dir, 0)
        ResumableUploadHandler(tracker_file_name=tracker_file_name)
    except ResumableUploadException as e:
        self.assertEqual(e.disposition,
                         ResumableTransferDisposition.ABORT)
        self.assertNotEqual(
            e.message.find('Couldn\'t write URI tracker file'), -1)
    finally:
        # Restore original protection of dir where tracker_file lives.
        os.chmod(tmp_dir, saved_mode)
def put_file(self, obj_path, file_path, tracker_path):
    """Upload file_path to the bucket under obj_path.

    Shows a progress bar while transferring. For gs:// URIs a
    ResumableUploadHandler persists transfer state in tracker_path;
    other schemes upload non-resumably.
    """
    filename = os.path.basename(file_path)
    file_size = os.path.getsize(file_path)
    with make_progress_bar(filename, file_size) as pbar:

        def callback(total_xfer, total_size):
            # Only the transferred-so-far count drives the bar.
            pbar.update(total_xfer)

        key = self.bucket.new_key(obj_path)
        if self.__uri.scheme != 'gs':
            key.set_contents_from_filename(file_path, cb=callback,
                                           num_cb=NUM_CB)
        else:
            key.set_contents_from_filename(
                file_path, cb=callback, num_cb=NUM_CB,
                res_upload_handler=ResumableUploadHandler(tracker_path))
def test_upload_with_file_size_change_during_upload(self):
    """
    Tests resumable upload on a file that changes sizes while upload
    in progress
    """
    # Create a file we can change during the upload.
    test_file_size = 500 * 1024  # 500 KB.
    test_file = self.build_test_input_file(test_file_size)[1]
    # Writing at fp_change_pos == file size appends bytes, growing the
    # file mid-transfer.
    harnass = CallbackTestHarnass(fp_to_change=test_file,
                                  fp_change_pos=test_file_size)
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    try:
        self.dst_key.set_contents_from_file(
            test_file, cb=harnass.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
        self.assertNotEqual(
            e.message.find('File changed during upload'), -1)
def test_upload_with_syntactically_invalid_tracker_uri(self):
    """
    Tests resumable upload with a syntactically invalid tracker URI
    """
    tmp_dir = self._MakeTempDir()
    bad_tracker_path = os.path.join(tmp_dir, 'synt_invalid_uri_tracker')
    with open(bad_tracker_path, 'w') as fp:
        fp.write('ftp://example.com')
    handler = ResumableUploadHandler(tracker_file_name=bad_tracker_path)
    content, src_fp = self.make_small_file()
    # An error should be printed about the invalid URI, but then it
    # should run the update successfully.
    src_fp.seek(0)
    key = self._MakeKey(set_contents=False)
    key.set_contents_from_file(src_fp, res_upload_handler=handler)
    self.assertEqual(SMALL_KEY_SIZE, key.size)
    self.assertEqual(content, key.get_contents_as_string())
def test_multiple_in_process_failures_then_succeed_with_tracker_file(self):
    """
    Tests resumable upload that fails completely in one process,
    then when restarted completes, using a tracker file
    """
    # The harness forces more failures than a single
    # ResumableUploadHandler instance will handle, but writes enough
    # data before the first failure that some of it survives that
    # process run.
    harness = CallbackTestHarness(fail_after_n_bytes=LARGE_KEY_SIZE / 2,
                                  num_times_to_fail=2)
    tracker_file_name = self.make_tracker_file()
    handler = ResumableUploadHandler(
        tracker_file_name=tracker_file_name, num_retries=1)
    larger_src_file_as_string, larger_src_file = self.make_large_file()
    larger_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    try:
        dst_key.set_contents_from_file(
            larger_src_file, cb=harness.call, res_upload_handler=handler)
        self.fail('Did not get expected ResumableUploadException')
    except ResumableUploadException as e:
        self.assertEqual(e.disposition,
                         ResumableTransferDisposition.ABORT_CUR_PROCESS)
        # A tracker file must have survived the abort.
        self.assertTrue(os.path.exists(tracker_file_name))
    # Try it one more time; this time should succeed.
    larger_src_file.seek(0)
    dst_key.set_contents_from_file(
        larger_src_file, cb=harness.call, res_upload_handler=handler)
    self.assertEqual(LARGE_KEY_SIZE, dst_key.size)
    self.assertEqual(larger_src_file_as_string,
                     dst_key.get_contents_as_string())
    self.assertFalse(os.path.exists(tracker_file_name))
    # Ensure some of the file was uploaded both before and after failure.
    self.assertTrue(
        len(harness.transferred_seq_before_first_failure) > 1 and
        len(harness.transferred_seq_after_first_failure) > 1)