def download_file(
    client,
    download_request: DownloadRequest,
    *,
    max_concurrent_parts: int = None,
):
    """
    Main driver for the multi-threaded download. Uses an Executor, either set
    externally onto a thread local by an outside process, or created as needed
    otherwise.

    :param client: A synapseclient
    :param download_request: A DownloadRequest object specifying what
                             Synapse file to download
    :param max_concurrent_parts: The maximum number of file parts to download
                                 concurrently when downloading this file
    """
    # we obtain an executor from a thread local if we are in the context of a Synapse sync
    # and want to re-use the same threadpool as was created for that
    executor = getattr(_thread_local, 'executor', None)
    shutdown_after = False
    if not executor:
        shutdown_after = True
        executor = get_executor(client.max_threads)

    max_concurrent_parts = max_concurrent_parts or client.max_threads

    try:
        downloader = _MultithreadedDownloader(client, executor, max_concurrent_parts)
        downloader.download_file(download_request)
    finally:
        # if we created the executor for the purposes of processing this download we also
        # shut it down. if it was passed in from the outside then it's managed by the caller
        if shutdown_after:
            executor.shutdown()
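# A minimal usage sketch (not part of the code above): an outside process, such
# as a Synapse sync, can set a shared executor onto the thread local so that
# multiple calls to download_file() re-use one pool. The download_many() name
# and the ThreadPoolExecutor choice here are illustrative assumptions.
from concurrent.futures import ThreadPoolExecutor

def download_many(client, download_requests):
    executor = ThreadPoolExecutor(max_workers=client.max_threads)
    _thread_local.executor = executor  # picked up by download_file via getattr
    try:
        for request in download_requests:
            # shutdown_after stays False inside download_file, so the pool survives
            download_file(client, request)
    finally:
        del _thread_local.executor
        executor.shutdown()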
def test_upload__error(self, syn):
    """Verify that if an item upload fails the error is raised in the main thread
    and any running Futures are cancelled"""
    item_1 = _SyncUploadItem(File(path='/tmp/foo', parentId='syn123'), [], [], {})
    item_2 = _SyncUploadItem(File(path='/tmp/bar', parentId='syn123'), [], [], {})
    items = [item_1, item_2]

    def syn_store_side_effect(entity, *args, **kwargs):
        if entity.path == item_1.entity.path:
            raise ValueError()
        return Mock()

    uploader = _SyncUploader(syn, get_executor())
    original_abort = uploader._abort

    def abort_side_effect(futures):
        return original_abort(futures)

    with patch.object(syn, 'store') as mock_syn_store, \
            patch.object(uploader, '_abort') as mock_abort:
        mock_syn_store.side_effect = syn_store_side_effect
        mock_abort.side_effect = abort_side_effect

        with pytest.raises(ValueError):
            uploader.upload(items)

        # the upload should have been aborted, passing along the Future
        # of the other, still-pending item
        mock_abort.assert_called_once_with([ANY])
        assert isinstance(mock_abort.call_args_list[0][0][0][0], Future)
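# A hypothetical sketch of the _abort contract the test above exercises: cancel
# any still-pending futures so a failure in one upload stops the rest. Only the
# method name and its (futures) signature come from the test; the real body may
# differ.
def _abort(self, futures):
    for future in futures:
        if not future.done():
            future.cancel()  # has no effect on a future that is already running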
@contextmanager
def _executor(max_threads, shutdown_wait):
    """Yields an executor for running some asynchronous code, either obtaining
    the executor from the shared_executor or otherwise creating one.

    :param max_threads: the maximum number of threads a created executor should use
    :param shutdown_wait: whether the shutdown of a created executor should wait
                          for its running tasks to complete
    """
    executor = getattr(_thread_local, 'executor', None)

    shutdown_after = False
    if not executor:
        shutdown_after = True
        executor = pool_provider.get_executor(thread_count=max_threads)

    try:
        yield executor
    finally:
        if shutdown_after:
            executor.shutdown(wait=shutdown_wait)
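# Example usage of the context manager above. The _transfer_one() worker and the
# parameter values are illustrative assumptions; only _executor itself comes from
# the code above.
def transfer_all(requests):
    with _executor(max_threads=8, shutdown_wait=True) as executor:
        futures = [executor.submit(_transfer_one, request) for request in requests]
        for future in futures:
            future.result()  # re-raise any error from the worker threads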
def test_upload(self, mock_os_isfile, syn):
    """Ensure that an upload including multiple items which depend on each other
    through provenance are all uploaded and in the expected order."""
    mock_os_isfile.return_value = True

    item_1 = _SyncUploadItem(
        File(path='/tmp/foo', parentId='syn123'),
        [],  # used
        [],  # executed
        {},  # annotations
    )
    item_2 = _SyncUploadItem(
        File(path='/tmp/bar', parentId='syn123'),
        ['/tmp/foo'],  # used
        [],  # executed
        {},  # annotations
    )
    item_3 = _SyncUploadItem(
        File(path='/tmp/baz', parentId='syn123'),
        ['/tmp/bar'],  # used
        [],  # executed
        {},  # annotations
    )

    items = [
        item_1,
        item_2,
        item_3,
    ]

    convert_provenance_calls = 2 * len(items)
    convert_provenance_condition = threading.Condition()

    mock_stored_entities = {
        item_1.entity.path: Mock(),
        item_2.entity.path: Mock(),
        item_3.entity.path: Mock(),
    }

    uploader = _SyncUploader(syn, get_executor())

    convert_provenance_original = uploader._convert_provenance

    def patched_convert_provenance(provenance, finished_items):
        # we hack the convert_provenance method as a way of ensuring that
        # the first item doesn't finish storing until the items that depend on it
        # have finished one trip through the wait loop. that way we ensure that
        # our locking logic is being exercised.
        nonlocal convert_provenance_calls
        with convert_provenance_condition:
            convert_provenance_calls -= 1
            if convert_provenance_calls == 0:
                convert_provenance_condition.notify_all()
        return convert_provenance_original(provenance, finished_items)

    def syn_store_side_effect(entity, *args, **kwargs):
        if entity.path == item_1.entity.path:
            with convert_provenance_condition:
                if convert_provenance_calls > 0:
                    convert_provenance_condition.wait_for(
                        lambda: convert_provenance_calls == 0
                    )
        return mock_stored_entities[entity.path]

    with patch.object(uploader, '_convert_provenance') as mock_convert_provenance, \
            patch.object(syn, 'store') as mock_syn_store:
        mock_convert_provenance.side_effect = patched_convert_provenance
        mock_syn_store.side_effect = syn_store_side_effect

        uploader.upload(items)

    # all three of our items should have been stored
    stored = [args[0][0].path for args in mock_syn_store.call_args_list]
    assert [i.entity.path for i in items] == stored
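# A self-contained illustration (stdlib only) of the Condition-based handshake
# the test above relies on: workers decrement a counter under the lock and
# notify, while another thread blocks in wait_for() until the counter hits zero.
import threading

pending = 3
condition = threading.Condition()

def finish_one():
    global pending
    with condition:
        pending -= 1
        if pending == 0:
            condition.notify_all()  # wake every thread blocked in wait_for()

def wait_until_done():
    with condition:
        # wait_for releases the lock while blocked and re-checks the predicate
        # each time it is notified
        condition.wait_for(lambda: pending == 0)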
def test_get_executor_for_multiple_thread(self):
    with _patch_config(False):
        assert isinstance(get_executor(), ThreadPoolExecutor)
def test_get_executor_for_single_thread(self):
    with _patch_config(True):
        assert isinstance(get_executor(), SingleThreadExecutor)
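# The tests above expect get_executor() to return a SingleThreadExecutor when
# the config is single-threaded. A plausible minimal implementation (an
# assumption, not the library's actual code) runs each callable inline on the
# calling thread and hands back an already-completed Future:
from concurrent.futures import Future

class SingleThreadExecutor:
    def submit(self, fn, *args, **kwargs):
        future = Future()
        try:
            future.set_result(fn(*args, **kwargs))
        except Exception as ex:
            future.set_exception(ex)
        return future

    def shutdown(self, wait=True):
        pass  # nothing to clean up; every submission ran synchronously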