from functools import partial
from multiprocessing import Pool
from queue import Queue
from threading import Thread


def run_multi_thread(dir):
    proteins = get_proteins()
    download = partial(download_link, dir)
    # Map the download helper over a pool of 4 workers.
    with Pool(4) as p:
        p.map(download, proteins)
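# Despite its name, run_multi_thread builds a process pool when Pool comes from
# multiprocessing. If threads are intended (the downloads are I/O-bound), the
# thread-backed pool from the standard library is a drop-in replacement.
# A minimal sketch; run_multi_thread_pool is a hypothetical name, not part of
# the original code:
from multiprocessing.dummy import Pool as ThreadPool


def run_multi_thread_pool(dir):
    proteins = get_proteins()
    download = partial(download_link, dir)
    with ThreadPool(4) as p:
        p.map(download, proteins)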
def run_multiprocessing(mydir):
    proteins = get_proteins()
    download = partial(download_link, mydir)
    # Distribute the downloads across 16 worker processes.
    with Pool(16) as pool:
        pool.map(download, proteins)
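# The run_workers variants below rely on a DownloadWorker class that is not
# defined in this file. A minimal sketch, assuming it is a threading.Thread
# subclass that consumes (directory, protein) pairs from the shared queue and
# reuses the same download_link helper:
class DownloadWorker(Thread):
    def __init__(self, queue):
        super().__init__()
        self.queue = queue

    def run(self):
        while True:
            directory, protein = self.queue.get()
            try:
                download_link(directory, protein)
            finally:
                # Mark the job done even if the download failed, so that
                # queue.join() in the producer can return.
                self.queue.task_done()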
def run_workers(dir):
    proteins = get_proteins()
    queue = Queue()
    # Start 16 consumer threads that pull (directory, protein) jobs off the queue.
    for n in range(16):
        worker = DownloadWorker(queue)
        worker.daemon = True
        worker.start()
    for p in proteins:
        queue.put((dir, p))
    # Block until every queued job has been processed.
    queue.join()
def run_workers(dir):
    proteins = get_proteins()
    queue = Queue()
    for n in range(4):
        worker = DownloadWorker(queue)
        # Daemon threads stop when the thread that started them (main) stops.
        worker.daemon = True
        worker.start()
    for p in proteins:
        queue.put((dir, p))
    queue.join()
def run_workers(dir):
    proteins = get_proteins()
    queue = Queue()
    for n in range(30):
        worker = DownloadWorker(queue)
        # Don't wait for this thread to exit; stop it when the main thread exits.
        worker.daemon = True
        worker.start()
    for p in proteins:
        queue.put((dir, p))
    queue.join()
def run_single_thread(dir):
    proteins = get_proteins()
    print(len(proteins))
    print(proteins)
    # Sequential baseline: download one protein at a time.
    for p in proteins:
        download_link(dir, p)
def run_multiprocessing(mydir):
    proteins = get_proteins()
    download = partial(download_link, mydir)
    with Pool(8) as pl:
        pl.map(download, proteins)
def run_sequentially(dir):
    for p in get_proteins():
        download_link(dir, p)
def run_single(dir):
    proteins = get_proteins()
    for pr in proteins:
        download_link(dir, pr)
def run_parallel(dir):
    proteins = get_proteins()
    download = partial(download_link, dir)
    # Up to 10 downloads at a time.
    with Pool(10) as pool:
        pool.map(download, proteins)
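# get_proteins() and download_link() are assumed rather than defined here.
# A hypothetical driver, assuming get_proteins() returns a list of protein
# identifiers and download_link(directory, protein) fetches one file into
# directory; everything below beyond the functions above is illustrative only:
if __name__ == "__main__":
    import sys
    import time

    target_dir = sys.argv[1] if len(sys.argv) > 1 else "."
    start = time.time()
    # Pool-based variants must be launched from under this __main__ guard so
    # that child processes spawned on Windows/macOS don't re-run the module's
    # top-level code.
    run_multiprocessing(target_dir)
    print(f"Finished in {time.time() - start:.2f}s")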