    pool = futures.ThreadPoolExecutor(MAX_CONCURRENT_REQUESTS)
    pending = {}
    for date in dates:
        job = pool.submit(get_picture_url, date)
        pending[job] = date
    urls = []
    count = 0
    # get results as jobs are done
    for job in futures.as_completed(pending, timeout=GLOBAL_TIMEOUT):
        try:
            url = job.result()
        except NoPictureForDate as exc:
            if verbose:
                print('*** {!r} ***'.format(exc))
            continue
        count += 1
        if verbose:
            print(format(count, '3d'), end=' ')
            print(url.split('/')[-1])
        else:
            print(url)
        urls.append(url)
    return urls


if __name__ == '__main__':
    main(sys.argv[1:], get_picture_urls)
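The structure of that function does not depend on the HTTP details: submit one callable per work item to a ThreadPoolExecutor, keep the resulting futures, and harvest them with futures.as_completed as each one finishes (futures.as_completed raises futures.TimeoutError if the whole batch takes longer than the timeout). Below is a minimal, self-contained sketch of that pattern; slow_double, the worker count, and the timeout value are placeholders invented for this example, not part of the original script.

import time
from concurrent import futures

MAX_WORKERS = 3  # placeholder value for this sketch


def slow_double(n):
    # stand-in for a blocking call such as the HTTP request in the listing
    time.sleep(0.1)
    return n * 2


def demo():
    with futures.ThreadPoolExecutor(MAX_WORKERS) as pool:
        # map each future back to its input, as the listing maps jobs to dates
        pending = {pool.submit(slow_double, n): n for n in range(5)}
        results = []
        # futures.as_completed yields futures in completion order,
        # not submission order
        for job in futures.as_completed(pending, timeout=10):
            results.append((pending[job], job.result()))
    return results


if __name__ == '__main__':
    print(demo())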
    count = 0
    # get results as jobs are done
    for job in asyncio.as_completed(tasks, timeout=GLOBAL_TIMEOUT):
        try:
            url = yield from job
        except NoPictureForDate as exc:
            if verbose:
                print('*** {!r} ***'.format(exc))
            continue
        except aiohttp.ClientResponseError as exc:
            print('****** {!r} ******'.format(exc))
            continue
        count += 1
        if verbose:
            print(format(count, '3d'), end=' ')
            print(url.split('/')[-1])
        else:
            print(url)
        urls.append(url)
    return urls


def run_loop(dates, verbose=False):
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(get_picture_urls(dates, verbose))


if __name__ == '__main__':
    main(sys.argv[1:], run_loop)
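The listing above uses the pre-Python 3.5 coroutine style: each result is driven with yield from, and the event loop is obtained and run explicitly. On Python 3.7 and later the same flow is usually written with native async/await syntax and asyncio.run. The sketch below shows that translation only in outline; fake_get_picture_url, the placeholder GLOBAL_TIMEOUT, and the sample dates are stand-ins invented for this example, not part of the original script.

import asyncio
import random

GLOBAL_TIMEOUT = 5.0  # placeholder value for this sketch


class NoPictureForDate(Exception):
    """Stand-in for the exception raised by the real download coroutine."""


async def fake_get_picture_url(date):
    # stand-in for the real coroutine: pretend some dates have no picture
    await asyncio.sleep(random.random() / 10)
    if date.endswith('13'):
        raise NoPictureForDate(date)
    return 'http://example.com/{}.jpg'.format(date)


async def harvest_urls(dates, verbose=False):
    coros = [fake_get_picture_url(date) for date in dates]
    urls = []
    for job in asyncio.as_completed(coros, timeout=GLOBAL_TIMEOUT):
        try:
            url = await job  # native syntax: await replaces yield from
        except NoPictureForDate as exc:
            if verbose:
                print('*** {!r} ***'.format(exc))
            continue
        urls.append(url)
    return urls


if __name__ == '__main__':
    sample_dates = ['2015-01-12', '2015-01-13', '2015-01-14']
    # asyncio.run (Python 3.7+) replaces get_event_loop()/run_until_complete
    print(asyncio.run(harvest_urls(sample_dates, verbose=True)))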