def download_many():
    """Download every configured URL concurrently on the asyncio event loop.

    Returns:
        int: the number of completed downloads (one per URL).
    """
    urls = user_conf()

    async def _run_all():
        # asyncio.wait() no longer accepts bare coroutines (deprecated in 3.8,
        # removed in 3.11); gather() schedules them as tasks and awaits them all.
        return await asyncio.gather(*(download_one(url) for url in urls))

    # asyncio.run() creates, runs, and closes the loop, replacing the deprecated
    # get_event_loop() / run_until_complete() / close() sequence.
    res = asyncio.run(_run_all())
    return len(res)
def download_many():
    """Download each configured item in its own thread, blocking until all finish.

    Returns:
        None.
    """
    items = user_conf()
    threads = []
    for item in items:
        t = threading.Thread(target=download_one, args=(item,))
        t.start()
        threads.append(t)
    # Plain loop instead of a throwaway list comprehension: join() is called
    # purely for its side effect (waiting for each worker thread).
    for t in threads:
        t.join()
def download_many():
    """Download each configured item in its own process, blocking until all finish.

    Returns:
        None.
    """
    items = user_conf()
    workers = []
    for item in items:
        p = Process(target=download_one, args=(item,))
        p.start()
        workers.append(p)
    # Plain loop instead of a throwaway list comprehension: join() is called
    # only for its side effect — waiting for the time-consuming calculation.
    for p in workers:
        p.join()
def download_many():
    """Download all configured items in a process pool, one worker per item.

    Returns:
        int: the number of items processed.
    """
    items = user_conf()
    if not items:
        # ProcessPoolExecutor(0) raises ValueError; nothing to do anyway.
        return 0
    with futures.ProcessPoolExecutor(len(items)) as executor:
        # map() yields lazily; list() drains it so any worker exception
        # is re-raised here rather than silently dropped.
        res = executor.map(download_one, items)
        return len(list(res))
def download_many():
    """Spawn one gevent greenlet per configured item and wait for all of them."""
    items = user_conf()
    greenlets = []
    for item in items:
        greenlets.append(gevent.spawn(download_one, item))
    gevent.joinall(greenlets)
def download_many():
    """Submit one thread-pool task per configured item and collect the results.

    Returns:
        list: results of download_one for each item, in completion order.
    """
    items = user_conf()
    if not items:
        # ThreadPoolExecutor requires max_workers > 0; nothing to download.
        return []
    with futures.ThreadPoolExecutor(len(items)) as executor:
        to_do = [executor.submit(download_one, item) for item in items]
        # as_completed() yields futures as they finish; result() re-raises
        # any exception raised inside the worker.
        return [future.result() for future in futures.as_completed(to_do)]