async def bulk_get_request(datas, port):
    """Asynchronously issue GET requests for every generated subdomain URL.

    :param datas: list of per-subdomain dicts (each gains a 'url' entry via
                  gen_new_datas)
    :param port: port specification expanded by get_ports()
    :return: new_datas, updated in place by request_callback
    """
    ports = get_ports(port)
    new_datas = gen_new_datas(datas, ports)
    logger.log('INFOR', '正在异步进行子域的GET请求')
    limit_open_conn = get_limit_conn()
    conn = aiohttp.TCPConnector(ttl_dns_cache=300,
                                ssl=config.verify_ssl,
                                limit=limit_open_conn,
                                limit_per_host=config.limit_per_host)
    header = None
    if config.fake_header:
        header = utils.gen_fake_header()
    async with ClientSession(connector=conn, headers=header) as session:
        tasks = []
        for i, data in enumerate(new_datas):
            url = data.get('url')
            task = asyncio.ensure_future(fetch(session, url))
            # The done-callback records each request's outcome into new_datas.
            task.add_done_callback(functools.partial(request_callback,
                                                     index=i,
                                                     datas=new_datas))
            tasks.append(task)
        # Only parse when the task list is not empty
        if tasks:
            # Wait for every task to complete; errors are aggregated into
            # the result list by the callback.
            futures = asyncio.as_completed(tasks)
            # NOTE(review): tqdm's ncols expects an int column width;
            # ncols=True renders a 1-column bar — confirm intent.
            for future in tqdm.tqdm(futures,
                                    total=len(tasks),
                                    desc='Progress',
                                    smoothing=1.0,
                                    ncols=True):
                try:
                    await future
                except Exception:
                    # Fix: a failed request previously raised out of the
                    # loop and aborted the whole batch. The callback has
                    # already recorded the failure, so swallow it here.
                    pass
    logger.log('INFOR', '完成异步进行子域的GET请求')
    return new_datas
async def bulk_get_request(datas, port):
    """Run GET requests against all subdomain URLs concurrently.

    :param datas: list of per-subdomain dicts
    :param port: port specification expanded by get_ports()
    :return: new_datas, filled in by request_callback as tasks finish
    """
    logger.log('INFOR', f'正在异步进行子域的GET请求')
    ports = get_ports(port)
    new_datas = gen_new_datas(datas, ports)
    header = utils.gen_fake_header() if config.fake_header else None
    # Async DNS resolver with user-configured nameservers
    resolver = AsyncResolver(nameservers=config.resolver_nameservers)
    connector = aiohttp.TCPConnector(ssl=config.verify_ssl,
                                     limit=config.limit_open_conn,
                                     limit_per_host=config.limit_per_host,
                                     resolver=resolver)
    semaphore = asyncio.Semaphore(utils.get_semaphore())
    async with ClientSession(connector=connector, headers=header) as session:
        pending = []
        for index, item in enumerate(new_datas):
            target = item.get('url')
            job = asyncio.ensure_future(fetch(session, target, semaphore))
            # Each finished task writes its result back via request_callback.
            job.add_done_callback(functools.partial(request_callback,
                                                    index=index,
                                                    datas=new_datas))
            pending.append(job)
        if pending:  # parse only when there is at least one task
            await asyncio.wait(pending)  # block until every task completes
    logger.log('INFOR', f'完成异步进行子域的GET请求')
    return new_datas
async def bulk_get_request(datas, port):
    """Asynchronously issue GET requests for every generated subdomain URL.

    :param datas: list of per-subdomain dicts (each gains a 'url' entry via
                  gen_new_datas)
    :param port: port specification expanded by get_ports()
    :return: new_datas merged with gathered results by deal_results()
    """
    ports = get_ports(port)
    new_datas = gen_new_datas(datas, ports)
    logger.log('INFOR', '正在异步进行子域的GET请求')
    limit_open_conn = config.limit_open_conn
    # Single check replaces the redundant None/elif pair: None (the default)
    # and any non-integer config value both fail isinstance(..., int).
    if not isinstance(limit_open_conn, int):
        limit_open_conn = utils.get_semaphore()
    # Async DNS resolver with user-configured nameservers
    resolver = AsyncResolver(nameservers=config.resolver_nameservers)
    conn = aiohttp.TCPConnector(ssl=config.verify_ssl,
                                limit=limit_open_conn,
                                limit_per_host=config.limit_per_host,
                                resolver=resolver)
    semaphore = asyncio.Semaphore(limit_open_conn)
    header = None
    if config.fake_header:
        header = utils.gen_fake_header()
    async with ClientSession(connector=conn, headers=header) as session:
        # No per-task callback here, so the loop index is unnecessary.
        tasks = [asyncio.ensure_future(fetch(session, data.get('url'),
                                             semaphore))
                 for data in new_datas]
        if tasks:  # parse only when there is at least one task
            # return_exceptions=True aggregates errors into the result list
            results = await asyncio.gather(*tasks, return_exceptions=True)
            new_datas = deal_results(new_datas, results)
    logger.log('INFOR', '完成异步进行子域的GET请求')
    return new_datas
def get_session():
    """Create a requests Session preconfigured from settings.

    :return: requests.Session with a fake header, the configured SSL
             verification flag and redirect limit applied
    """
    session = requests.Session()
    session.headers = utils.gen_fake_header()
    session.verify = settings.request_ssl_verify
    session.max_redirects = settings.request_redirect_limit
    return session
def get_header(self):
    """Get the request header.

    :return: a freshly generated fake header when enabled in setting,
             otherwise this object's own header
    """
    if setting.enable_fake_header:
        return utils.gen_fake_header()
    return self.header
def get_header(self):
    """Get the request header.

    Attempts to generate a fake header; a dict result is cached on the
    instance and returned, otherwise the existing header is kept.

    :return: the fake header dict, or self.header as a fallback
    """
    fake = utils.gen_fake_header()
    if not isinstance(fake, dict):
        return self.header
    self.header = fake
    return fake
async def bulk_get_request(datas, port):
    """Asynchronously issue GET requests for every generated subdomain URL.

    :param datas: list of per-subdomain dicts (each gains a 'url' entry via
                  gen_new_datas)
    :param port: port specification expanded by get_ports()
    :return: new_datas, updated in place by request_callback
    """
    ports = get_ports(port)
    new_datas = gen_new_datas(datas, ports)
    logger.log('INFOR', '正在异步进行子域的GET请求')
    limit_open_conn = config.limit_open_conn
    # Single check replaces the redundant None/elif pair: None (the default)
    # and any non-integer config value both fail isinstance(..., int).
    if not isinstance(limit_open_conn, int):
        limit_open_conn = utils.get_semaphore()
    # Async DNS resolver with user-configured nameservers
    resolver = AsyncResolver(nameservers=config.resolver_nameservers)
    conn = aiohttp.TCPConnector(ssl=config.verify_ssl,
                                limit=limit_open_conn,
                                limit_per_host=config.limit_per_host,
                                resolver=resolver)
    semaphore = asyncio.Semaphore(limit_open_conn)
    header = None
    if config.fake_header:
        header = utils.gen_fake_header()
    async with ClientSession(connector=conn, headers=header) as session:
        tasks = []
        for i, data in enumerate(new_datas):
            url = data.get('url')
            task = asyncio.ensure_future(fetch(session, url, semaphore))
            # The done-callback records each result/error into new_datas.
            task.add_done_callback(functools.partial(request_callback,
                                                     index=i,
                                                     datas=new_datas))
            tasks.append(task)
        if tasks:  # parse only when there is at least one task
            # Wait for every task; errors are aggregated by the callback.
            futures = asyncio.as_completed(tasks)
            # NOTE(review): tqdm's ncols expects an int column width;
            # ncols=True renders a 1-column bar — confirm intent.
            for future in tqdm.tqdm(futures,
                                    total=len(tasks),
                                    desc='Progress',
                                    smoothing=1.0,
                                    ncols=True):
                try:
                    await future
                except Exception:
                    # Fix: narrowed from bare `except:`, which would also
                    # swallow CancelledError and KeyboardInterrupt. The
                    # callback already recorded this task's failure.
                    pass
    logger.log('INFOR', '完成异步进行子域的GET请求')
    return new_datas
def get_header():
    """Get the request header.

    :return: a fake header dict when config.fake_header is enabled,
             otherwise None
    """
    if config.fake_header:
        return utils.gen_fake_header()
    return None
def get_header():
    """Get the request header.

    :return: a fake header dict when setting.fake_header is enabled,
             otherwise None
    """
    return utils.gen_fake_header() if setting.fake_header else None