async def _pull_images(
    self,
    images: Iterable[str],
    prog_bar: bool = True,
    force: bool = False,
):
    """Pull all *images* concurrently, optionally behind an ASCII progress bar.

    :param images: image names/tags to pull.
    :param prog_bar: when True, wrap the pulls in a tqdm progress bar.
    :param force: forwarded to ``_pull_image`` to force a re-pull.
    """
    pull_coros = [self._pull_image(img, force=force) for img in images]
    bar = None
    if prog_bar:
        bar = tqdm.as_completed(
            pull_coros,
            total=len(pull_coros),
            ncols=WRAP_LINES,
            desc="Pulling images",
            ascii=True,
            position=0,
            leave=True,
            bar_format="{desc}: {n}/{total}|{bar}|",
        )
    for fut in (bar if bar is not None else pull_coros):
        await fut
    if bar is not None:
        bar.close()  # type: ignore
    # Yield to the event loop briefly so the next echo starts on the
    # line underneath the status bar instead of on top of it.
    await asyncio.sleep(0.05)
    # TODO: Check whether print(flush=True) could replace this and drop
    # the dependency on utils.
    utils.echo()
    await self.close_aclient()
async def __init_db(self):
    """Build the imagehash database by hashing every portal concurrently.

    Returns the list of non-``None`` hash results produced by
    ``__getPortalHash`` over ``self.polist``.
    """
    print('[!] 正在构建imagehash数据库...')
    hash_tasks = [
        asyncio.create_task(self.__getPortalHash(idx, portal))
        for idx, portal in enumerate(self.polist)
    ]
    hashes = []
    for finished in tqdm.as_completed(hash_tasks):
        outcome = await finished
        # Failed/empty hashes come back as None and are dropped.
        if outcome is not None:
            hashes.append(outcome)
    print(f'[!] 数据库构建完成,包含{len(hashes)}个图像imagehash结果。')
    return hashes
async def main():
    """Entry point: check a list of WordPress hosts for a vulnerable plugin version."""
    args = parse_args()  # parsed CLI arguments
    log = Logger(filename=args.output, colors=not args.no_colors,
                 verbosity=args.verbosity)
    # Build the target URL list from either a single URL or a URL file.
    targets = []
    if args.url:
        targets = [args.url]
    if args.url_list:
        if not os.path.isfile(args.url_list):
            log.error(f"File not found: {args.url_list}")
            return
        targets = loadlist(args.url_list)
    host_count = len(targets)
    log.info(f"{host_count} hosts will be checked")
    session = HTTPSession()
    log.info("Finding vulnerables hosts ...")
    requests = [
        session.get(f"{u}/wp-content/plugins/{args.slug}/readme.txt")
        for u in targets
    ]
    vulnerable_count = 0
    for fut in tqdm.as_completed(requests, ascii=BARCURSOR, bar_format=BARFORMAT):
        try:
            resp = await fut
            version = get_version(resp)
            if not version:
                log.fail(f"{resp.host} - plugin not found")
            # NOTE(review): presumably get_version returns a comparable
            # version object — confirm it is not a plain string, since
            # lexicographic compare misorders e.g. "1.10" < "1.9".
            elif version < args.version:
                log.success(
                    f"{resp.host} - {args.slug} version is vulnerable: {version}")
                vulnerable_count += 1
            else:
                log.partial(f"{resp.host} - {args.slug} is not vulnerable: {version}")
        except Exception as exc:
            log.error(exc)
    log.info(f"{vulnerable_count} hosts have vulnerable versions of {args.slug}")
async def PortalsIterator(client, tiles):
    """Asynchronously yield a ``PortalParser`` for every portal entity in *tiles*.

    Fetches all tile payloads concurrently and walks each payload's
    ``result.map`` entries as they complete; tiles whose fetch returned
    ``None`` are skipped.

    :param client: async HTTP client, used as an async context manager.
    :param tiles: tile identifiers passed to ``getTileData``.
    """
    async with client:
        pending = [
            asyncio.create_task(getTileData(client, tile)) for tile in tiles
        ]
        for finished in tqdm.as_completed(pending):
            tile_data = await finished
            if tile_data is None:
                continue
            # Fix: iterate .values() — the original walked .items() and
            # discarded the key (perf/idiom smell, same behavior).
            for val in tile_data['result']['map'].values():
                for entity in val['gameEntities']:
                    # Entity type code starting with 'p' marks a portal.
                    if entity[2][0] == 'p':
                        yield PortalParser(entity[0], entity[2])
async def main():
    """Exercise tqdm's asyncio support: nested bars, coroutine send(), as_completed."""
    N = int(1e6)
    # Async-iterate a synchronous trange wrapped in an outer async tqdm;
    # the break guard is never hit (row stays < N) and just bounds the loop.
    async for row in tqdm(trange(N, desc="inner"), desc="outer"):
        if row >= N:
            break
    with tqdm(count(), desc="coroutine", total=N + 2) as pbar:
        async for row in pbar:
            if row == N:
                # send() forwards into the wrapped iterator — here it pushes
                # the stream to a negative value so the next branch triggers.
                pbar.send(-10)
            elif row < 0:
                assert row == -9
                break
    # should be under 10 seconds
    for i in tqdm.as_completed(list(map(asyncio.sleep, [1] * 10)),
                               desc="as_completed"):
        await i
async def main():
    """Exercise tqdm's asyncio support: nested bars, coroutine send(), as_completed."""
    N = int(1e6)
    # Async-iterate a synchronous trange wrapped in an outer async tqdm;
    # the break guard is never hit (row stays < N) and just bounds the loop.
    async for row in tqdm(trange(N, desc="inner"), desc="outer"):
        if row >= N:
            break
    with tqdm(count(), desc="coroutine", total=N + 2) as pbar:
        async for row in pbar:
            if row == N:
                # send() forwards into the wrapped iterator — here it pushes
                # the stream to a negative value so the next branch triggers.
                pbar.send(-10)
            elif row < 0:
                assert row == -9
                break
    # should be ~1sec rather than ~50s due to async scheduling:
    # the sleeps (1.00s down to 0.01s) overlap instead of running serially.
    for i in tqdm.as_completed(
            [asyncio.sleep(0.01 * i) for i in range(100, 0, -1)],
            desc="as_completed"):
        await i
async def download_from_csv(self, portals_csv):
    """Download every portal image listed in *portals_csv*.

    Reads the CSV off the event loop, downloads all images concurrently,
    and reports how many downloads failed (``__download_img`` returning
    ``False`` marks a failure).

    :param portals_csv: path of the CSV file describing the portals.
    """
    portals_list = await asyncio.to_thread(PortalsCSV.read_csv, portals_csv)
    async with httpx.AsyncClient(headers=self.headers, limits=self.limits) as client:
        print(f'[!] 正在根据({portals_csv})下载Portal图像...')
        tasks = [
            asyncio.create_task(self.__download_img(client, portal))
            for portal in portals_list
        ]
        # Count failures directly instead of collecting 1-based indices and
        # testing them with any() — that only worked because enumerate()
        # started at 1; an index of 0 would have been silently treated as
        # "no failure".
        failed = 0
        for coro in tqdm.as_completed(tasks):
            if await coro is False:
                failed += 1
        print(f'[!] 图像下载完成。')
        if failed:
            print(f"[!] 有{failed}个图像下载失败,请重新执行下载剩余图像。")
async def __search_course(self):
    """Run every scraping function concurrently and collect course results.

    Each scraping function receives the shared aiohttp session; truthy
    results get integer ``year``/``month`` fields parsed from their
    ``last_updated`` text before being collected.
    """
    results = []
    async with aiohttp.ClientSession(headers={"User-Agent": self.__useragent},
                                     timeout=aiohttp.ClientTimeout(10)) \
            as session:
        tasks = tuple(func(session) for func in self.__scraping_funcs)
        # NOTE(review): the `loop` kwarg is forwarded to asyncio.as_completed,
        # which dropped that parameter in Python 3.10 — confirm the target
        # Python version still accepts it.
        for func in tqdm.as_completed(tasks, loop=self.loop):
            result = await func
            if result:
                # Text presumably looks like "Last updated M/YYYY" — split off
                # the prefix, then the slash-separated numbers. TODO confirm
                # the upstream format.
                last_updated = [
                    int(i) for i in result['last_updated'].split(
                        'Last updated ')[1].split('/')
                ]
                # Heuristic: if the first number has fewer digits than the
                # second, assume the order is month/year and flip so the wider
                # (year) value lands in 'year' below.
                if len(str(last_updated[0])) < len(str(last_updated[1])):
                    last_updated = last_updated[::-1]
                result['year'], result['month'] = last_updated
                results.append(result)
    return results
async def _upload_blob() -> None:
    """Upload all pending files to the blob container, with a progress bar."""
    async with ContainerClient.from_connection_string(  # type: ignore[attr-defined]
        _connection_string(subscription, resource_group, storage_name),
        container_name,
    ) as container_client:

        class UploadTasks:
            """Sized iterator that lazily spawns one upload task per path."""

            def __init__(self, pending: List[pathlib.Path],
                         max_open_files: int = 100):
                self._pos = 0
                self._pending = pending
                # Caps how many files may be open simultaneously.
                self._gate = asyncio.Semaphore(max_open_files)

            # Forward reference not possible on Python 3.6:
            def __iter__(self):  # type: ignore[no-untyped-def]
                return self

            def __next__(self) -> asyncio.Task:
                # Explicit bounds check instead of catching IndexError.
                if self._pos >= len(self._pending):
                    raise StopIteration
                path = self._pending[self._pos]
                self._pos += 1
                return asyncio.create_task(
                    _upload_file(
                        container_client,
                        path,
                        source_folder,
                        self._gate,
                    ))

            def __len__(self) -> int:
                return len(self._pending)

        for task in tqdm.as_completed(
                UploadTasks(paths_to_upload),
                bar_format="{l_bar} {bar} | Uploaded {n_fmt}/{total_fmt}",
        ):
            await task
async def pull_images(images, force_pull):
    """Pull *images* through aiodocker while showing an ASCII status bar.

    :param images: image names/tags to pull.
    :param force_pull: forwarded to ``pull_image`` to force a re-pull.
    """
    docker_client = aiodocker.Docker()
    pending = [pull_image(image, docker_client, force_pull) for image in images]
    progress = tqdm.as_completed(
        pending,
        total=len(pending),
        ncols=100,
        desc="Pulling images",
        ascii=True,
        bar_format="{desc}: {n}/{total}|{bar}|",
    )
    for fut in progress:
        await fut
    progress.close()
    # Brief yield so the next echo starts on the line underneath the
    # status bar instead of on top of it.
    await asyncio.sleep(0.05)
    typer.echo()
    await docker_client.close()