async def main():
    download_dir = setup_download_dir()
    # A 3-second connect/read timeout via ClientTimeout replaces the deprecated
    # conn_timeout/read_timeout keyword arguments.
    timeout = aiohttp.ClientTimeout(connect=3, sock_read=3)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        links = get_links(CLIENT_ID)
        logger.info("There are {} images to download".format(len(links)))
        tasks = [async_download_link(session, download_dir, link) for link in links]
        await asyncio.gather(*tasks, return_exceptions=True)
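# The coroutine above still needs an event loop to run on. A minimal entry
# point, assuming Python 3.7+ where asyncio.run() is available:
if __name__ == '__main__':
    asyncio.run(main())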
def main():
    ts = time.time()
    download_dir = setup_download_dir()
    links = get_links(CLIENT_ID)
    logger.info("There are {} images to download".format(len(links)))
    with ThreadPoolExecutor(max_workers=20) as executor:
        download = partial(download_link, download_dir)
        executor.map(download, links, timeout=30)
    logger.info("Took {}".format(timedelta(seconds=time.time() - ts)))
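# A caveat with executor.map(): it returns a lazy iterator, so exceptions raised
# inside download_link (and the 30-second timeout) only surface when the results
# are consumed. A rough sketch of a variant that reports failures, reusing the
# same helpers (main_with_error_reporting is a hypothetical name, not part of
# the original code):
def main_with_error_reporting():
    download_dir = setup_download_dir()
    links = get_links(CLIENT_ID)
    download = partial(download_link, download_dir)
    with ThreadPoolExecutor(max_workers=20) as executor:
        futures = {executor.submit(download, link): link for link in links}
        for future, link in futures.items():
            try:
                future.result(timeout=30)
            except Exception:
                logger.exception('Failed to download {}'.format(link))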
def main():
    ts = time.time()
    download_dir = setup_download_dir()
    links = get_links(CLIENT_ID)
    logger.info("There are {} images to download".format(len(links)))
    # Download every image sequentially in the main thread.
    for link in links:
        download_link(download_dir, link)
    logger.info("Took {}".format(timedelta(seconds=time.time() - ts)))
def main():
    ts = time.time()
    links = get_links(CLIENT_ID)
    logger.info("There are {} images to download".format(len(links)))
    download_dir = setup_download_dir()
    download = partial(download_link, download_dir)
    # Fan the downloads out across four worker processes.
    with Pool(4) as pool:
        pool.map(download, links)
    logger.info("Took {}".format(timedelta(seconds=time.time() - ts)))
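# Because multiprocessing may start fresh interpreter processes (the default on
# Windows, and on macOS since Python 3.8), the Pool version should only be
# launched from behind an import guard; otherwise each worker re-executes the
# module at import time:
if __name__ == '__main__':
    main()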
def main():
    ts = time.time()
    download_dir = setup_download_dir()
    links = get_links(CLIENT_ID)
    logger.info("There are {} images to download".format(len(links)))
    queue = Queue()
    # Workers: start 8 daemon threads that pull jobs off the queue.
    for x in range(8):
        worker = DownloadWorker(queue)
        worker.daemon = True
        worker.start()
    # Producer: enqueue one (download_dir, link) job per image.
    for link in links:
        logger.info('Queueing {}'.format(link))
        queue.put((download_dir, link))
    # Block until every queued job has been marked done by a worker.
    queue.join()
    logger.info("Took {}".format(timedelta(seconds=time.time() - ts)))
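# DownloadWorker is defined elsewhere in the full listing; a minimal sketch of
# the worker this code expects (a Thread subclass that consumes
# (download_dir, link) tuples forever) might look like this:
from threading import Thread

class DownloadWorker(Thread):
    def __init__(self, queue):
        super().__init__()
        self.queue = queue

    def run(self):
        while True:
            # Block until a job arrives, run it, then mark it done so that
            # queue.join() in main() can return once all jobs finish.
            directory, link = self.queue.get()
            try:
                download_link(directory, link)
            finally:
                self.queue.task_done()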