コード例 #1
0
def run_multi_thread(dir):
    """Download every protein into *dir* using a pool of 4 workers.

    NOTE(review): despite the function name, this uses ``Pool`` — in the
    sibling examples that is the (multi)processing pool API, not raw
    threads. Confirm which ``Pool`` is imported at the top of the file.

    Parameters:
        dir: destination directory passed through to ``download_link``.
    """
    proteins = get_proteins()
    # Pre-bind the directory so the pool only has to hand each worker a protein.
    download = partial(download_link, dir)
    with Pool(4) as p:
        p.map(download, proteins)
コード例 #2
0
def run_multiprocessing(mydir):
    """Download every protein into *mydir* in parallel with a 16-worker pool.

    Parameters:
        mydir: destination directory passed through to ``download_link``.
    """
    proteins = get_proteins()
    # Fix the target directory; the pool then maps each protein to a worker.
    download = partial(download_link, mydir)
    with Pool(16) as pool:
        pool.map(download, proteins)
コード例 #3
0
def run_workers(dir):
    """Fan protein downloads out to 16 daemon worker threads via a queue."""
    proteins = get_proteins()
    queue = Queue()
    # Spin up the workers first; each consumes (dir, protein) tasks.
    workers = [DownloadWorker(queue) for _ in range(16)]
    for worker in workers:
        # Daemonize so these threads die with the main thread instead of
        # blocking interpreter shutdown.
        worker.daemon = True
        worker.start()
    for protein in proteins:
        queue.put((dir, protein))
    # Block until every queued task has been marked done by a worker.
    queue.join()
コード例 #4
0
def run_workers(dir):
    """Download all proteins into *dir* using 4 queue-fed daemon workers."""
    proteins = get_proteins()
    queue = Queue()
    for n in range(4):
        worker = DownloadWorker(queue)
        # Stop when the thread that started it (main) stops
        worker.daemon = True
        worker.start()
    for p in proteins:
        # Each task is a (directory, protein) pair consumed by a worker.
        queue.put((dir, p))
    # Wait until every enqueued download has been processed.
    queue.join()
コード例 #5
0
def run_workers(dir):
    """Download all proteins into *dir* using 30 queue-fed daemon workers.

    Parameters:
        dir: destination directory, queued alongside each protein for
            the ``DownloadWorker`` threads to consume.
    """
    proteins = get_proteins()
    queue = Queue()
    for n in range(30):
        worker = DownloadWorker(queue)
        # Don't wait until this thread exits, stop it when main thread exits
        worker.daemon = True
        worker.start()
    for p in proteins:
        queue.put((dir, p))
    # Block until workers have marked every queued task done.
    queue.join()
コード例 #6
0
def run_single_thread(dir):
    """Download every protein into *dir* sequentially, one at a time.

    Parameters:
        dir: destination directory passed through to ``download_link``.
    """
    # Leftover debug prints of the protein list removed.
    proteins = get_proteins()
    for p in proteins:
        download_link(dir, p)
コード例 #7
0
def run_multiprocessing(mydir):
    """Download all proteins into *mydir* in parallel with an 8-worker pool."""
    proteins = get_proteins()
    # Bind the destination directory up front; the pool supplies each protein.
    fetch = partial(download_link, mydir)
    with Pool(8) as workers:
        workers.map(fetch, proteins)
コード例 #8
0
def run_sequentially(dir):
    """Download each protein into *dir*, one after another."""
    proteins = get_proteins()
    for protein in proteins:
        download_link(dir, protein)
コード例 #9
0
def run_single(dir):
    """Sequentially download every protein returned by get_proteins()."""
    for protein in get_proteins():
        download_link(dir, protein)
コード例 #10
0
def run_parallel(dir):
    """Download all proteins into *dir* in parallel with a 10-worker pool."""
    proteins = get_proteins()
    # Bind the directory so the pool only passes each protein to a worker.
    download = partial(download_link, dir)
    # 10 at a time
    with Pool(10) as pool:
        pool.map(download, proteins)