Beispiel #1
0
async def bulk_get_request(datas, port):
    """
    Perform asynchronous GET requests against the subdomain URL list.

    :param datas: subdomain data set to request
    :param port: port setting used to expand the target URL list
    :return: the data set updated with the aggregated request results
    """
    ports = get_ports(port)
    new_datas = gen_new_datas(datas, ports)
    logger.log('INFOR', f'正在异步进行子域的GET请求')

    # Use the configured limit only when it is a usable integer; a None or
    # non-int value falls back to the utility default. (A single isinstance
    # check covers both original branches, since None is not an int.)
    limit_open_conn = config.limit_open_conn
    if not isinstance(limit_open_conn, int):
        limit_open_conn = utils.get_semaphore()
    # Async DNS resolver with custom nameservers
    resolver = AsyncResolver(nameservers=config.resolver_nameservers)
    conn = aiohttp.TCPConnector(ssl=config.verify_ssl,
                                limit=limit_open_conn,
                                limit_per_host=config.limit_per_host,
                                resolver=resolver)

    semaphore = asyncio.Semaphore(limit_open_conn)
    header = None
    if config.fake_header:
        header = utils.gen_fake_header()
    async with ClientSession(connector=conn, headers=header) as session:
        tasks = []
        for data in new_datas:  # index was unused; iterate values directly
            url = data.get('url')
            task = asyncio.ensure_future(fetch(session, url, semaphore))
            tasks.append(task)
        if tasks:  # only resolve when at least one task was scheduled
            # Wait for all tasks; exceptions are aggregated into the results
            results = await asyncio.gather(*tasks, return_exceptions=True)
            new_datas = deal_results(new_datas, results)

    logger.log('INFOR', f'完成异步进行子域的GET请求')
    return new_datas
Beispiel #2
0
def get_limit_conn():
    """
    Resolve the open-connection limit from config.

    Falls back to the utility default when the configured value is
    missing (None) or not an integer. The two original branches were
    redundant: None is not an int, so one isinstance check covers both.

    :return: connection limit (int)
    """
    limit_open_conn = config.limit_open_conn
    if not isinstance(limit_open_conn, int):  # None or non-int: use default
        limit_open_conn = utils.get_semaphore()
    return limit_open_conn
Beispiel #3
0
 async def main(self, domain, rx_queue):
     """
     Brute-force subdomains of *domain* and push the result set to rx_queue.

     :param str domain: domain to brute force
     :param rx_queue: queue that receives self.results (also on interrupt)
     """
     if not self.fuzz:  # fuzz mode skips wildcard-resolution detection
         self.enable_wildcard, self.wildcard_ips, self.wildcard_ttl = detect_wildcard(
             domain)
     tasks = self.gen_tasks(domain)
     logger.log('INFOR', f'正在爆破{domain}的域名')
     # Removed an unused local (`sem = asyncio.Semaphore(...)`) — concurrency
     # is bounded by the pool's childconcurrency, not a semaphore.
     # NOTE(review): tqdm's ncols normally takes an int column width; True is
     # coerced to 1 here — confirm this is intended.
     for task in tqdm.tqdm(tasks,
                           desc='Progress',
                           smoothing=1.0,
                           ncols=True):
         # A fresh worker pool is created per task batch
         async with aiomultiprocess.Pool(
                 processes=self.processes,
                 initializer=init_worker,
                 childconcurrency=self.coroutine) as pool:
             try:
                 results = await pool.map(resolve.aiodns_query_a, task)
             except KeyboardInterrupt:
                 logger.log('ALERT', '爆破终止正在退出')
                 pool.terminate()  # close pool, stop workers, drop pending work
                 # Persist and publish whatever was collected before exiting
                 self.save_json()
                 self.gen_result()
                 rx_queue.put(self.results)
                 return
             else:
                 self.deal_results(results)
         self.save_json()
         self.gen_result()
         rx_queue.put(self.results)
Beispiel #4
0
async def bulk_get_request(datas, port):
    """
    Issue asynchronous GET requests for every subdomain URL.

    :param datas: subdomain data set to request
    :param port: port setting used to expand the URL list
    :return: the data set, updated in place by the request callbacks
    """
    logger.log('INFOR', f'正在异步进行子域的GET请求')
    new_datas = gen_new_datas(datas, get_ports(port))
    header = utils.gen_fake_header() if config.fake_header else None
    # Async DNS resolver with custom nameservers
    resolver = AsyncResolver(nameservers=config.resolver_nameservers)
    connector = aiohttp.TCPConnector(ssl=config.verify_ssl,
                                     limit=config.limit_open_conn,
                                     limit_per_host=config.limit_per_host,
                                     resolver=resolver)
    semaphore = asyncio.Semaphore(utils.get_semaphore())
    async with ClientSession(connector=connector, headers=header) as session:
        tasks = []
        for index, item in enumerate(new_datas):
            future = asyncio.ensure_future(
                fetch(session, item.get('url'), semaphore))
            # Each finished request writes its result back via the callback
            future.add_done_callback(
                functools.partial(request_callback, index=index,
                                  datas=new_datas))
            tasks.append(future)
        if tasks:  # only wait when at least one task was scheduled
            await asyncio.wait(tasks)  # block until every task finishes
    logger.log('INFOR', f'完成异步进行子域的GET请求')
    return new_datas
async def bulk_get_request(datas, port):
    """
    Issue asynchronous GET requests for every subdomain URL, showing a
    progress bar while awaiting completion.

    :param datas: subdomain data set to request
    :param port: port setting used to expand the URL list
    :return: the data set, updated in place by the request callbacks
    """
    ports = get_ports(port)
    new_datas = gen_new_datas(datas, ports)
    logger.log('INFOR', f'正在异步进行子域的GET请求')

    # Use the configured limit only when it is a usable integer; a None or
    # non-int value falls back to the utility default.
    limit_open_conn = config.limit_open_conn
    if not isinstance(limit_open_conn, int):
        limit_open_conn = utils.get_semaphore()
    # Async DNS resolver with custom nameservers
    resolver = AsyncResolver(nameservers=config.resolver_nameservers)
    conn = aiohttp.TCPConnector(ssl=config.verify_ssl,
                                limit=limit_open_conn,
                                limit_per_host=config.limit_per_host,
                                resolver=resolver)

    semaphore = asyncio.Semaphore(limit_open_conn)
    header = None
    if config.fake_header:
        header = utils.gen_fake_header()
    async with ClientSession(connector=conn, headers=header) as session:
        tasks = []
        for i, data in enumerate(new_datas):
            url = data.get('url')
            task = asyncio.ensure_future(fetch(session, url, semaphore))
            # Each finished request writes its result back via the callback
            task.add_done_callback(functools.partial(request_callback,
                                                     index=i,
                                                     datas=new_datas))
            tasks.append(task)
        if tasks:  # only wait when at least one task was scheduled
            # Await tasks as they complete; per-request results are handled
            # by the callbacks, so request errors are deliberately ignored.
            futures = asyncio.as_completed(tasks)
            # NOTE(review): tqdm's ncols normally takes an int column width;
            # True is coerced to 1 here — confirm this is intended.
            for future in tqdm.tqdm(futures,
                                    total=len(tasks),
                                    desc='Progress',
                                    smoothing=1.0,
                                    ncols=True):
                try:
                    await future
                except Exception:
                    # Was a bare `except:`, which also swallowed
                    # CancelledError/KeyboardInterrupt — narrowed so
                    # cancellation and interrupts propagate.
                    pass

    logger.log('INFOR', f'完成异步进行子域的GET请求')
    return new_datas
Beispiel #6
0
async def aiodns_query_a(hostname, semaphore=None):
    """
    Asynchronously query the A record of a hostname.

    :param str hostname: hostname to resolve
    :param semaphore: asyncio.Semaphore bounding concurrent queries
                      (a default-sized one is created when None)
    :return: tuple of (hostname, answers) or the query exception propagates
    """
    if semaphore is None:
        # Bug fix: utils.get_semaphore() yields an int limit elsewhere in
        # this codebase (it is passed to asyncio.Semaphore), so using its
        # return value directly in `async with` would fail — wrap it in an
        # asyncio.Semaphore here.
        semaphore = asyncio.Semaphore(utils.get_semaphore())
    async with semaphore:
        resolver = aiodns_resolver()
        answers = await resolver.query(hostname, 'A')
        return hostname, answers
Beispiel #7
0
async def bulk_query_a(datas):
    """
    Query A records for every not-yet-resolved subdomain in the data set.

    :param datas: data set to resolve
    :return: the data set, updated in place by the resolve callbacks
    """
    logger.log('INFOR', '正在异步查询子域的A记录')
    semaphore = asyncio.Semaphore(utils.get_semaphore())
    tasks = []
    for index, item in enumerate(datas):
        if item.get('ips'):
            continue  # already has addresses; skip re-resolution
        name = item.get('subdomain')
        future = asyncio.ensure_future(aiodns_query_a(name, semaphore))
        # Each finished query writes its answers back via the callback
        future.add_done_callback(
            functools.partial(resolve_callback, index=index, datas=datas))
        tasks.append(future)
    if tasks:  # only wait when at least one query was scheduled
        await asyncio.wait(tasks)  # block until every query finishes
    logger.log('INFOR', '完成异步查询子域的A记录')
    return datas
Beispiel #8
0
def get_limit_conn():
    """
    Resolve the open-connection limit from settings.

    Falls back to the utility default when the configured value is
    missing (None) or not an integer — the original only handled None,
    so a non-int setting (e.g. a string) would leak through; the
    isinstance check matches the config-based variant of this helper.

    :return: connection limit (int)
    """
    limit_open_conn = settings.limit_open_conn
    if not isinstance(limit_open_conn, int):  # None or non-int: use default
        limit_open_conn = utils.get_semaphore()
    return limit_open_conn