async def bulk_request(data, port):
    """Request every generated subdomain URL concurrently.

    :param list data: subdomain data to be requested
    :param any port: port specification used to build the target URLs
    :return list: requested entries merged with the pre-filtered entries
    """
    ports = get_ports(port)
    no_req_data = utils.get_filtered_data(data)
    to_req_data = gen_req_data(data, ports)
    method = settings.request_method.upper()
    logger.log('INFOR', f'Use {method} method to request')
    logger.log('INFOR', 'Async subdomains request in progress')
    connector = get_connector()
    headers = utils.get_random_header()
    # async with guarantees the session is closed even when awaiting a
    # task raises — the original created the session bare and only closed
    # it with a trailing await, leaking it on any exception.
    async with ClientSession(connector=connector, headers=headers) as session:
        tasks = []
        for num, req in enumerate(to_req_data):
            url = req.get('url')
            task = asyncio.ensure_future(fetch(session, method, url))
            # request_callback folds each response (or error) back into
            # to_req_data at its original index
            task.add_done_callback(
                functools.partial(request_callback, index=num, datas=to_req_data))
            tasks.append(task)
        # Only await when at least one task was scheduled
        if tasks:
            futures = asyncio.as_completed(tasks)
            for future in tqdm.tqdm(futures,
                                    total=len(tasks),
                                    desc='Request Progress',
                                    ncols=80):
                await future
    return to_req_data + no_req_data
async def bulk_request(data, port):
    """Asynchronously request all generated subdomain URLs.

    NOTE(review): this is a second definition of bulk_request in the same
    file and will shadow the earlier one at import time — confirm which
    version is intended.

    :param list data: subdomain data to be requested
    :param any port: port specification used to build the target URLs
    :return list: requested entries merged with the pre-filtered entries
    """
    ports = get_ports(port)
    no_req_data = utils.get_filtered_data(data)
    to_req_data = gen_req_data(data, ports)
    method = setting.request_method
    logger.log('INFOR', f'请求使用{method}方法')
    logger.log('INFOR', f'正在进行异步子域请求')
    connector = get_connector()
    header = get_header()
    async with ClientSession(connector=connector, headers=header) as session:
        tasks = []
        for idx, item in enumerate(to_req_data):
            future = asyncio.ensure_future(fetch(session, item.get('url')))
            # The done-callback writes each result (errors included) back
            # into to_req_data at its original index
            future.add_done_callback(
                functools.partial(request_callback, index=idx, datas=to_req_data))
            tasks.append(future)
        # Only parse when the task list is non-empty
        if tasks:
            # Await every task as it completes; errors are aggregated into
            # the result list by the callback
            progress = tqdm.tqdm(asyncio.as_completed(tasks),
                                 total=len(tasks),
                                 desc='Request Progress',
                                 ncols=80)
            for coro in progress:
                await coro
    return to_req_data + no_req_data
def run_request(domain, data, port):
    """
    HTTP request entrance

    :param str domain: domain to be requested
    :param list data: subdomains data to be requested
    :param any port: range of ports to be requested
    :return list: result
    """
    logger.log('INFOR', f'Start requesting subdomains of {domain}')
    data = utils.set_id_none(data)
    ports = get_port_seq(port)
    filtered_data = utils.get_filtered_data(data)
    req_urls = gen_req_urls(data, ports)
    # bulk_request is a coroutine function; the original bound the
    # un-awaited coroutine object to resp_list, so no request ever ran.
    # Drive it to completion on a fresh event loop.
    # NOTE(review): the visible bulk_request definitions take (data, port),
    # so port is forwarded here — confirm the expected argument shape
    # against the intended version of bulk_request.
    resp_list = asyncio.run(bulk_request(req_urls, port))
    new_data = gen_new_data(data, resp_list)
    data = new_data + filtered_data
    count = utils.count_alive(data)
    logger.log('INFOR', f'Found that {domain} has {count} alive subdomains')
    return data