Example #1
0
def pppFoxScan(filename):
    """Read scan targets from *filename* (one URL per line) and scan each.

    Each line is stripped, prefixed with an http header when missing
    (``base.addHttpHeader``), and pushed onto ``config.ppp_queue``.  Every
    queued target that passes the blacklist check is crawled with crawlergo
    and handed to ``threadPoolScan``.

    :param filename: path to a text file containing one target URL per line
    """
    print("Start pppFoxScan,filename is {}".format(filename))
    try:
        with open(filename, 'r') as f:
            # Iterate the file lazily instead of materializing readlines().
            for line in f:
                target = base.addHttpHeader(line.strip())
                config.ppp_queue.put(target)
    except OSError as e:
        # Missing/unreadable target file: report it and continue with
        # whatever (possibly nothing) made it into the queue.
        print(e)
    while not config.ppp_queue.empty():
        current_target = config.ppp_queue.get()
        # Scan the collected targets one by one; the per-target report file
        # is named after the MD5 of the target URL.
        currentfilename = hashlib.md5(
            current_target.encode("utf-8")).hexdigest()
        if base.checkBlackList(current_target):
            # Crawl the target with crawlergoGet; discovered page URLs are
            # collected in the req_pool set, including the target itself.
            req_pool = crawlergoMain.crawlergoGet(current_target)
            req_pool.add(current_target)
            threadPoolScan(req_pool, currentfilename, current_target)
        else:
            print("扫描网址在黑名单内,退出")
    print("pppFoxScan End~")
    return
Example #2
0
def foxScanDetail(target):
    """Collect subdomains of *target* and scan every URL crawlergo discovers.

    Crawled URLs are submitted to a thread pool in batches of five; the pool
    is now managed with a ``with`` block so its worker threads are shut down
    when the scan finishes (the original leaked the executor).

    :param target: root target (domain/URL) to expand and scan
    """
    filename = hashlib.md5(target.encode("utf-8")).hexdigest()
    print("Start attsrc foxScan {}\nfilename : {}\n".format(target, filename))
    base.subScan(target, filename)
    # Subdomain collection above fills config.target_queue.
    with ThreadPoolExecutor(config.ThreadNum) as thread:
        while not config.target_queue.empty():
            current_target = config.target_queue.get()
            # Scan each collected target in turn.
            if base.checkBlackList(current_target):
                req_pool = crawlergoMain.crawlergoGet(current_target)
                req_pool.add(current_target)
                all_task = []
                while req_pool:
                    # Pop URLs out of req_pool one by one and submit each
                    # for scanning under its own MD5-derived report name.
                    temp_url = req_pool.pop()
                    current_filename = hashlib.md5(
                        temp_url.encode("utf-8")).hexdigest()
                    all_task.append(
                        thread.submit(threadPoolDetailScan, temp_url,
                                      current_filename))
                    # Drain in batches of 5 (or when the pool empties) to
                    # bound the number of in-flight scans.
                    if len(all_task) == 5 or not req_pool:
                        wait(all_task, return_when=ALL_COMPLETED)
                        all_task = []
    print("InPuT T4rGet {} Sc3n EnD#".format(target))
    return
Example #3
0
def foxScan(target):
    """Full scan pipeline for *target*.

    Steps: subdomain collection (``base.subScan``), ARL scan, then for each
    queued target: crawlergo crawl, WAF detection, result save, and a
    thread-pool scan.  The bare ``except: pass`` — which also swallowed
    ``SystemExit``/``KeyboardInterrupt`` — is narrowed to ``Exception`` and
    logged, so one failing target no longer aborts or silently vanishes.

    :param target: root target (domain/URL) to expand and scan
    """
    filename = hashlib.md5(target.encode("utf-8")).hexdigest()
    print(
        f"{Hx_config.yellow}{Hx_config.green}Start attsrc foxScan {target}\nfilename : (unknown)\n{Hx_config.end}"
    )
    base.subScan(target, filename)
    # Materialize the queue into a list for the ARL scan, then collect subdomains.
    _ = base.from_queue_to_list(Hx_config.target_queue)
    base.ArlScan(name=target, target=_)  # Start the ARL scan; the first argument is the file name.
    print(
        f"{Hx_config.yellow}InPuT T4rGet {target} Sc3n Start!{Hx_config.end}")
    while not Hx_config.target_queue.empty():
        current_target = base.addHttpHeader(Hx_config.target_queue.get())
        try:
            if base.checkBlackList(current_target):
                # Scan each collected target in turn.
                req_pool = crawlergoMain.crawlergoGet(
                    current_target)  # Set of URL paths discovered by crawlergo.
                req_pool.add(current_target)  # Include the target itself.
                req_pool = WAF(req_pool).run_detect()
                base.save(
                    req_pool,
                    filepath=f"{Hx_config.Crawlergo_save_path}{target}.txt",
                    host=current_target)
                tempFilename = hashlib.md5(
                    current_target.encode("utf-8")).hexdigest()
                # Scan the crawled URL pool under the per-target report name.
                threadPoolScan(req_pool, tempFilename, target)
        except Exception as e:
            # Best-effort per-target: log the failure and move on to the
            # next queued target instead of swallowing it silently.
            print(e)
    print(f"{Hx_config.yellow}InPuT T4rGet {target} Sc3n EnD#{Hx_config.end}")
    return
Example #4
0
def oneFoxScan(target):
    """Scan a single target URL (no subdomain expansion).

    Skips the scan entirely when the blacklist check fails; always prints
    the end banner.

    :param target: the URL to crawl and scan
    """
    allowed = base.checkBlackList(target)
    if allowed:
        target = base.addHttpHeader(target)
        filename = hashlib.md5(target.encode("utf-8")).hexdigest()
        print("Start foxScan {}\nfilename : {}\n".format(target, filename))
        # crawlergoGet dynamically crawls the page; the resulting URL set
        # (plus the target itself) is handed to the thread-pool scanner.
        url_pool = crawlergoMain.crawlergoGet(target)
        url_pool.add(target)
        threadPoolScan(url_pool, filename, target)
    print("InPuT T4rGet {} Sc3n EnD#".format(target))
    return
Example #5
0
def foxScan(target):
    """Collect subdomains of *target* and scan each discovered host.

    Fix: the report filename passed to ``threadPoolScan`` is now the MD5 of
    the *current* target rather than of the original input — previously every
    subdomain's results were written under the same name (the other variants
    of this routine all hash the current target).

    :param target: root target (domain/URL) to expand and scan
    """
    filename = hashlib.md5(target.encode("utf-8")).hexdigest()
    print("Start attsrc foxScan {}\nfilename : {}\n".format(target, filename))
    base.subScan(target, filename)
    # Subdomain collection above fills config.target_queue.
    while not config.target_queue.empty():
        current_target = config.target_queue.get()
        if base.checkBlackList(current_target):
            # Scan each collected target in turn: crawl it with crawlergoGet
            # and gather the discovered page URLs in req_pool.
            req_pool = crawlergoMain.crawlergoGet(current_target)
            req_pool.add(current_target)
            current_filename = hashlib.md5(
                current_target.encode("utf-8")).hexdigest()
            threadPoolScan(req_pool, current_filename)
    print("InPuT T4rGet {} Sc3n EnD#".format(target))
    return
Example #6
0
def oneFoxScan(target):
    """Scan one target URL with crawlergo plus the thread-pool scanner.

    Blacklisted targets are refused with a message; the end banner is
    printed either way.

    :param target: the URL to crawl and scan
    """
    if not base.checkBlackList(target):
        print("扫描网址在黑名单内,退出")
    else:
        target = base.addHttpHeader(target)
        filename = hashlib.md5(target.encode("utf-8")).hexdigest()
        print(
            f"{Hx_config.yellow}Start foxScan {target}\nfilename : (unknown)\n{Hx_config.end}"
        )
        # Dynamically crawl the target page; discovered URLs (plus the
        # target itself) are collected and scanned.
        url_pool = crawlergoMain.crawlergoGet(target)
        url_pool.add(target)
        threadPoolScan(url_pool, filename, target)
    print(f"{Hx_config.yellow}InPuT T4rGet {target} Sc3n EnD#{Hx_config.end}")
    return
Example #7
0
def foxScan():
    """Demo scan: seed the target queue with two fixed hosts and scan each.

    Stands in for real subdomain collection; each queued host that passes
    the blacklist check is crawled with crawlergo and thread-pool scanned
    under an MD5-derived report name.
    """
    # Seed the queue in place of subdomain collection.
    for seed in ('127.0.0.1', 'http://testphp.vulnweb.com/'):
        config.target_queue.put(seed)

    while not config.target_queue.empty():
        host = config.target_queue.get()
        if base.checkBlackList(host):
            # Crawl each collected target; discovered page URLs (plus the
            # host itself) accumulate in url_pool.
            url_pool = crawlergoMain.crawlergoGet(host)
            url_pool.add(host)
            digest = hashlib.md5(host.encode("utf-8")).hexdigest()
            threadPoolScan(url_pool, digest, "aa")
    print("InPuT T4rGet {} Sc3n EnD#".format("aa"))
    return