def test_round_trip():
    """Upload a file via multipart upload, download it back through the
    entity API, and verify the content matches the original byte-for-byte.

    Cleans up the created entity and the bogus source file on a best-effort
    basis; cleanup failures are printed, not raised, so they cannot mask a
    real test failure.
    """
    fhid = None
    # A little over MIN_PART_SIZE forces the upload to use at least two parts.
    filepath = utils.make_bogus_binary_file(
        multipart_upload_module.MIN_PART_SIZE + 777771)
    print('Made bogus file: ', filepath)
    try:
        fhid = multipart_upload(syn, filepath)
        print('FileHandle: {fhid}'.format(fhid=fhid))

        # Download the file and compare it with the original
        junk = File(filepath, parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        # mkstemp returns a raw OS file descriptor; close it immediately so
        # it doesn't leak — only the path is needed for the download below.
        os.close(tmp_f)
        schedule_for_cleanup(tmp_path)
        junk.update(syn._downloadFileEntity(junk, tmp_path))
        assert filecmp.cmp(filepath, junk.path)
    finally:
        # Best-effort cleanup of the entity (if it was ever created) and the
        # local bogus file.
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_round_trip():
    """Upload a file via multipart upload, download the resulting file
    handle, and verify the content matches the original byte-for-byte.

    Cleans up the created entity and the bogus source file on a best-effort
    basis; cleanup failures are printed, not raised, so they cannot mask a
    real test failure.
    """
    fhid = None
    # A little over MIN_PART_SIZE forces the upload to use at least two parts.
    filepath = utils.make_bogus_binary_file(
        multipart_upload_module.MIN_PART_SIZE + 777771)
    try:
        fhid = multipart_upload(syn, filepath)

        # Download the file and compare it with the original
        junk = File(parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        # mkstemp returns a raw OS file descriptor; close it immediately so
        # it doesn't leak — only the path is needed for the download below.
        os.close(tmp_f)
        schedule_for_cleanup(tmp_path)
        junk['path'] = syn._downloadFileHandle(fhid, junk['id'], 'FileEntity', tmp_path)
        assert filecmp.cmp(filepath, junk.path)
    finally:
        # Best-effort cleanup of the entity (if it was ever created) and the
        # local bogus file.
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_single_thread_upload():
    """Verify multipart upload still succeeds when forced into
    single-threaded mode via the global config flag.
    """
    synapseclient.config.single_threaded = True
    try:
        # Just over two parts worth of data exercises the multi-part path.
        filepath = utils.make_bogus_binary_file(
            multipart_upload_module.MIN_PART_SIZE * 2 + 1)
        # Register the bogus file for removal — previously it was leaked.
        schedule_for_cleanup(filepath)
        assert_is_not_none(multipart_upload(syn, filepath))
    finally:
        # Always restore the global flag so later tests run multi-threaded.
        synapseclient.config.single_threaded = False
def test_single_thread_upload():
    """Verify multipart upload still succeeds when forced into
    single-threaded mode via the global config flag.
    """
    synapseclient.config.single_threaded = True
    try:
        # Just over two parts worth of data exercises the multi-part path.
        filepath = utils.make_bogus_binary_file(
            multipart_upload_module.MIN_PART_SIZE * 2 + 1)
        # Register the bogus file for removal — previously it was leaked.
        schedule_for_cleanup(filepath)
        assert_is_not_none(multipart_upload(syn, filepath))
    finally:
        # Always restore the global flag so later tests run multi-threaded.
        synapseclient.config.single_threaded = False
def test_upload_chunk__expired_url():
    """An expired pre-signed URL (HTTP 403 from the chunk PUT) should warn
    and restart the upload rather than fail outright.
    """
    upload_parts = [
        {'uploadPresignedUrl': 'https://www.fake.url/fake/news', 'partNumber': 420},
        {'uploadPresignedUrl': 'https://www.google.com', 'partNumber': 421},
        {'uploadPresignedUrl': 'https://rito.pls/', 'partNumber': 422},
        {'uploadPresignedUrl': 'https://never.lucky.gg', 'partNumber': 423},
    ]
    # Every chunk PUT fails as if the pre-signed URL had expired.
    expired_url_error = SynapseHTTPError(
        "useless message", response=MagicMock(status_code=403))
    # A started-and-completed upload so multipart_upload can finish.
    started_upload = DictObject({
        'partsState': '0',
        'uploadId': '1',
        'state': 'COMPLETED',
        'resultFileHandleId': '1',
    })
    with patch.object(multipart_upload, "_put_chunk", side_effect=expired_url_error) as mocked_put_chunk, \
            patch.object(warnings, "warn") as mocked_warn, \
            patch.object(multipart_upload, '_start_multipart_upload', return_value=started_upload), \
            patch.object(multipart_upload, "_get_presigned_urls", return_value=upload_parts):
        file_size = 1 * MB
        filepath = make_bogus_binary_file(n=file_size)
        try:
            multipart_upload.multipart_upload(syn, filepath)
        finally:
            if os.path.isfile(filepath):
                os.remove(filepath)

        mocked_warn.assert_called_with(
            'The pre-signed upload URL has expired. Restarting upload...\n')
        # 4 URLs, 7 retries.
        assert mocked_warn.call_count == 28
        # assert _put_chunk was called at least once
        assert_greater_equal(len(mocked_put_chunk.call_args_list), 1)
def test_randomly_failing_parts():
    """Multipart upload must survive randomly failing chunk uploads by
    retrying, and the downloaded result must match the original file.

    Temporarily overrides module-level upload settings and monkey-patches
    ``_put_chunk``; both are restored in ``finally`` so other tests are
    unaffected.
    """
    FAILURE_RATE = 1.0 / 3.0
    fhid = None

    # Save the module-level settings we override so they can be restored —
    # previously these leaked into every test that ran afterwards.
    original_min_part_size = multipart_upload_module.MIN_PART_SIZE
    original_max_retries = multipart_upload_module.MAX_RETRIES
    multipart_upload_module.MIN_PART_SIZE = 5 * MB
    multipart_upload_module.MAX_RETRIES = 20

    filepath = utils.make_bogus_binary_file(
        multipart_upload_module.MIN_PART_SIZE * 2 + 777771)
    print('Made bogus file: ', filepath)

    normal_put_chunk = None

    def _put_chunk_or_fail_randomly(url, chunk, verbose=False):
        # Simulate a flaky connection: fail roughly one chunk PUT in three.
        if random.random() < FAILURE_RATE:
            raise IOError("Ooops! Artificial upload failure for testing.")
        else:
            return normal_put_chunk(url, chunk, verbose)

    # Mock _put_chunk to fail randomly
    normal_put_chunk = multipart_upload_module._put_chunk
    multipart_upload_module._put_chunk = _put_chunk_or_fail_randomly

    try:
        fhid = multipart_upload(syn, filepath)
        print('FileHandle: {fhid}'.format(fhid=fhid))

        # Download the file and compare it with the original
        junk = File(parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        # mkstemp returns a raw OS file descriptor; close it immediately so
        # it doesn't leak — only the path is needed for the download below.
        os.close(tmp_f)
        schedule_for_cleanup(tmp_path)
        junk['path'] = syn._downloadFileHandle(fhid, junk['id'], 'FileEntity', tmp_path)
        assert filecmp.cmp(filepath, junk.path)
    finally:
        # Un-mock _put_chunk and restore the overridden module settings.
        if normal_put_chunk:
            multipart_upload_module._put_chunk = normal_put_chunk
        multipart_upload_module.MIN_PART_SIZE = original_min_part_size
        multipart_upload_module.MAX_RETRIES = original_max_retries
        # Best-effort cleanup of the entity and the local bogus file.
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_randomly_failing_parts():
    """Multipart upload must survive randomly failing chunk uploads by
    retrying, and the downloaded result must match the original file.

    Temporarily overrides module-level upload settings and monkey-patches
    ``_put_chunk``; both are restored in ``finally`` so other tests are
    unaffected.
    """
    FAILURE_RATE = 1.0 / 3.0
    fhid = None

    # Save the module-level settings we override so they can be restored —
    # previously these leaked into every test that ran afterwards.
    original_min_part_size = multipart_upload_module.MIN_PART_SIZE
    original_max_retries = multipart_upload_module.MAX_RETRIES
    multipart_upload_module.MIN_PART_SIZE = 5 * MB
    multipart_upload_module.MAX_RETRIES = 20

    filepath = utils.make_bogus_binary_file(
        multipart_upload_module.MIN_PART_SIZE * 2 + 777771)
    print('Made bogus file: ', filepath)

    normal_put_chunk = None

    def _put_chunk_or_fail_randomly(url, chunk, verbose=False):
        # Simulate a flaky connection: fail roughly one chunk PUT in three.
        if random.random() < FAILURE_RATE:
            raise IOError("Ooops! Artificial upload failure for testing.")
        else:
            return normal_put_chunk(url, chunk, verbose)

    # Mock _put_chunk to fail randomly
    normal_put_chunk = multipart_upload_module._put_chunk
    multipart_upload_module._put_chunk = _put_chunk_or_fail_randomly

    try:
        fhid = multipart_upload(syn, filepath)
        print('FileHandle: {fhid}'.format(fhid=fhid))

        # Download the file and compare it with the original
        junk = File(filepath, parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        # mkstemp returns a raw OS file descriptor; close it immediately so
        # it doesn't leak — only the path is needed for the download below.
        os.close(tmp_f)
        schedule_for_cleanup(tmp_path)
        junk.update(syn._downloadFileEntity(junk, tmp_path))
        assert filecmp.cmp(filepath, junk.path)
    finally:
        # Un-mock _put_chunk and restore the overridden module settings.
        if normal_put_chunk:
            multipart_upload_module._put_chunk = normal_put_chunk
        multipart_upload_module.MIN_PART_SIZE = original_min_part_size
        multipart_upload_module.MAX_RETRIES = original_max_retries
        # Best-effort cleanup of the entity and the local bogus file.
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())