Example No. 1
def count_status():
    red = getredis()
    while True:
        try:
            time.sleep(int(scan_set.get("status_flush_time", 30)))
            burpdata_undo = red.llen("burpdata")
            vuln = red.llen("vuln_all")
            data = red.hmget("count_all", "doned", "request", "block_host",
                             "request_fail")
            burpdata_doned, request, block_host, request_fail = list(
                map(lambda x: x.decode(), data))
            reverse_count = 0
            res, resdata = query_reverse("myscan_total")
            if res:
                reverse_count = int(resdata.get("total"))
            logger.warning(
                "do/undo:{}/{} req_succ:{}/fail:{} blockhost:{} vuln:{}/reverse:{}"
                .format(burpdata_doned, burpdata_undo, request, request_fail,
                        block_host, vuln, reverse_count),
                text="STATUS")
        except KeyboardInterrupt:
            logger.warning("Ctrl+C was pressed, aborting program")
            break
        except Exception as ex:
            logger.warning("Count status module got an error: {}".format(ex))
Example No. 2
def is_wildcard_dns(domain, istopdomain=False, level=1):
    '''
    domain: e.g. baidu.com or www.baidu.com
    istopdomain: True  --> domain is a bare top domain such as baidu.com
                 False --> domain is a subdomain such as www.baidu.com

    return:
    True:  the domain has a wildcard DNS record
    False: the domain has no wildcard DNS record
    None:  error
    '''
    if not istopdomain:
        domain = ".".join(domain.split(".")[1:])
        if domain == "":
            return None  # nothing is left after stripping the leftmost label
    red = getredis()
    key = getmd5(domain)
    if red.sismember("dns_wildcard_true", key):
        return True
    if red.sismember("dns_wildcard_false", key):
        return False
    try:
        r = dns.resolver.Resolver(configure=False)
        r.nameservers = others.dns_servers
        answers = r.query('myscan-not-%s-test.%s' %
                          (get_random_str(4).lower(), domain))
        ips = ', '.join(sorted([answer.address for answer in answers]))
        if level == 1:
            # the random name resolved; confirm with a second random lookup
            return is_wildcard_dns(domain, istopdomain=True, level=2)
        elif level == 2:
            red.sadd("dns_wildcard_true", key)
            return True
    except Exception as e:
        red.sadd("dns_wildcard_false", key)
        return False
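The probe above asks the resolver for a random, almost certainly unregistered subdomain; a self-contained sketch of the same idea with dnspython 2.x (resolver IP, prefix length and the example domain are placeholders, and resolve() is used where the snippet uses the older query()):

import random
import string

import dns.exception
import dns.resolver


def resolves_random_subdomain(domain, nameservers=("8.8.8.8",)):
    """Return True if a random, almost certainly unregistered subdomain resolves."""
    prefix = "".join(random.choices(string.ascii_lowercase, k=8))
    resolver = dns.resolver.Resolver(configure=False)
    resolver.nameservers = list(nameservers)
    try:
        answers = resolver.resolve("{}.{}".format(prefix, domain), "A")
        return bool(list(answers))
    except dns.exception.DNSException:
        return False


# A wildcard record is suspected when two independent random names both resolve:
# is_wildcard = resolves_random_subdomain("example.com") and resolves_random_subdomain("example.com")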
Example No. 3
def run_python_plugin():
    red = getredis()
    try:
        while True:
            try:
                workdata = red.lpop("plugin_data_py")
                if workdata:
                    workdata_ = pickle.loads(workdata)
                    signal.signal(signal.SIGALRM, handler_plugin)
                    signal.alarm(int(scan_set.get("poc_timeout")))

                    # workdata_ will look like this
                    # {
                    #     "id": id,
                    #     "poc": poc,
                    # }
                    logger.debug("Python plugin get one data")
                    p = python_plugin(workdata_)
                    p.run()
                    signal.alarm(0)
                else:
                    time.sleep(random.uniform(1, 2))
            except Exception as ex:
                traceback.print_exc()
                logger.warning("Run_python scan got an error: {}".format(ex))
    except KeyboardInterrupt:
        logger.warning("Ctrl+C was pressed, aborting program")
    except Exception as ex:
        traceback.print_exc()
        logger.warning("Run_python main scan got an error: {}".format(ex))
Example No. 4
def writeresults():
    red = getredis()
    total_write = 0
    if "." not in cmd_line_options.html_output:
        cmd_line_options.html_output = cmd_line_options.html_output + ".html"
    while True:
        try:
            results = []
            while True:
                id = red.lpop("vuln_all_write")
                if id:
                    pickle_data = red.get(id)
                    if pickle_data:
                        results.append(pickle.loads(pickle_data))
                else:
                    if results:
                        for result in results:
                            total_write += 1
                            current = int(total_write /
                                          scan_set.get("max_html_output", 10))
                            outfilename = "{}{}.html".format(
                                '.'.join(
                                    cmd_line_options.html_output.split(".")
                                    [:-1]), current)
                            check(outfilename)
                            out = htmlexport([result], outfilename)
                            out.save()
                        results = []
                    time.sleep(5)
        except KeyboardInterrupt:
            logger.warning("Ctrl+C was pressed, aborting program")
            break
        except Exception as ex:
            traceback.print_exc()
            logger.warning(ex)
Example No. 5
def count_status():
    red = getredis()
    while True:
        try:
            time.sleep(int(scan_set.get("status_flush_time", 30)))
            burpdata_undo = red.llen("burpdata")
            if scan_set.get("random_test", False):
                # consumers take items with red.spop("work_data_py_set")
                unactive = red.scard("work_data_py_set")
            else:
                # consumers take items with red.lpop("work_data_py")
                unactive = red.llen("work_data_py")
            vuln = red.llen("vuln_all")
            data = red.hmget("count_all", "doned", "request", "block_host",
                             "request_fail", "active")
            burpdata_doned, request, block_host, request_fail, active = list(
                map(lambda x: x.decode(), data))
            reverse_count = 0
            res, resdata = query_reverse("myscan_total")
            if res:
                reverse_count = int(resdata.get("total"))

            if cmd_line_options.command == "hostscan":
                logger.warning(
                    "do/undo/active/unactive:{}/{}/{}/{}  vuln:{}/reverse:{}".
                    format(burpdata_doned, burpdata_undo, active, unactive,
                           vuln, reverse_count),
                    text="STATUS")
            elif cmd_line_options.command == "webscan":
                if cmd_line_options.allow_plugin:
                    undoplugin = red.llen("plugin_data_py")
                    logger.warning(
                        "do/undo/active/unactive/undoplugin:{}/{}/{}/{}/{} req_total/fail:{}/{} blockhost:{} vuln:{}/reverse:{}"
                        .format(burpdata_doned, burpdata_undo, active,
                                unactive, undoplugin, request, request_fail,
                                block_host, vuln, reverse_count),
                        text="STATUS")
                else:
                    logger.warning(
                        "do/undo/active/unactive:{}/{}/{}/{} req_total/fail:{}/{} blockhost:{} vuln:{}/reverse:{}"
                        .format(burpdata_doned, burpdata_undo, active,
                                unactive, request, request_fail, block_host,
                                vuln, reverse_count),
                        text="STATUS")
        except KeyboardInterrupt:
            logger.warning("Ctrl+C was pressed, aborting program")
            break
        except Exception as ex:
            logger.warning("Count status module got an error: {}".format(ex))
            traceback.print_exc()
Example No. 6
 def find_ip(self):
     if is_ipaddr(self.domain):
         return self.domain
     red = getredis()
     key = getmd5("domain_to_ip_" + self.domain)
     res = red.get(key)
     if res:
         return res.decode()
     mythread(self.query, copy.deepcopy(others.dns_servers), 6)
     data = ",".join(list(self.msg))
     red.set(key, data)
     return data
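find_ip is essentially a read-through cache keyed by a hash of the domain; a minimal sketch of that pattern with redis-py (connection details, key prefix, TTL and the gethostbyname stand-in are assumptions, not taken from this project):

import hashlib
import socket

import redis

red = redis.Redis(host="127.0.0.1", port=6379, db=0)


def cached_domain_to_ip(domain, ttl=3600):
    key = "domain_to_ip_" + hashlib.md5(domain.encode()).hexdigest()
    cached = red.get(key)
    if cached:
        return cached.decode()
    ip = socket.gethostbyname(domain)  # stand-in for the project's threaded DNS query
    red.set(key, ip, ex=ttl)           # expire so stale records eventually refresh
    return ip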
Example No. 7
def request(**kwargs_sour):
    kwargs = copy.deepcopy(kwargs_sour)
    red = getredis()

    # print("start:",kwargs)
    if not kwargs.get("verify", None):
        kwargs["verify"] = False
    if not kwargs.get("timeout", None):
        kwargs["timeout"] = 8
    if not kwargs.get("headers", None):
        kwargs["headers"] = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0"
        }
    if cmd_line_options.proxy:
        kwargs["proxies"] = cmd_line_options.proxy
    if cmd_line_options.timeout:
        kwargs["timeout"] = cmd_line_options.timeout
    # print("end:",kwargs)
    if kwargs.get('data', None):
        if isinstance(kwargs.get("data"), str):
            kwargs["data"] = kwargs["data"].encode("utf-8", "ignore")
    r = None
    red.hincrby("count_all", "request", amount=1)
    h, p = gethostportfromurl(kwargs.get("url"))
    block = block_info(h, p)
    # retry
    for x in range(cmd_line_options.retry + 1):
        try:
            r = requests.request(**kwargs)
            block.push_result_status(0)
            break
        except requests.exceptions.ConnectTimeout:
            pass
            # logger.debug("request connect timeout :{}".format(kwargs["url"]))
        except requests.exceptions.ReadTimeout:
            pass
            # logger.debug("request read timeout :{}".format(kwargs["url"]))
        except Exception as ex:
            # print(kwargs)
            logger.debug("Request error url:{} error:{}".format(
                kwargs["url"], ex))
        block.push_result_status(1)
        sleep(uniform(0, 0.2))
    if r is not None:
        if scan_set.get("search_open", False):
            s = searchmsg(r)
            s.verify()
            s.saveresult()
    else:
        red.hincrby("count_all", "request_fail", amount=1)
    return r
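A stripped-down, self-contained variant of this retry wrapper around requests.request (the default header, timeout, retry count and back-off are illustrative; the Redis counters and block bookkeeping are omitted):

import time

import requests


def http_request(retries=2, **kwargs):
    kwargs.setdefault("verify", False)
    kwargs.setdefault("timeout", 8)
    kwargs.setdefault("headers", {"User-Agent": "Mozilla/5.0"})
    for _ in range(retries + 1):
        try:
            return requests.request(**kwargs)
        except requests.exceptions.RequestException:
            # covers ConnectTimeout, ReadTimeout, connection errors, etc.
            time.sleep(0.2)
    return None


# r = http_request(method="GET", url="http://example.com/")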
Example No. 8
 def output(self, msg, insert=False):
     msg = "/".join(self.url.split("/")[:3]) + " " + msg
     msgmd5 = getmd5(msg)[10:18]
     red = getredis()
     if not insert:
         if not red.sismember("myscan_max_output", msgmd5):
             return True  # allowed to output
         else:
             # logger.debug("sql boolean module: {} has reached the output limit of {}, stop testing/outputting".format(msg, self.verify_count))
             return False  # must not output again
     else:
         # red.hincrby("myscan_max_output", msgmd5, amount=1)
         red.sadd("myscan_max_output", msgmd5)
Example No. 9
def set_conn():
    try:
        redis_conn()
        red = getredis()
        if not red.ping():
            error_msg = "Redis ping error, will exit program"
            logger.warning(error_msg)
            sys.exit()
        else:
            logger.info("Redis ping success")
    except Exception as ex:
        error_msg = "Connect to redis got an error {}: please use --redis pass@host:port:db; if pass is empty, use --redis @host:port:db".format(ex)
        logger.warning(error_msg)
        sys.exit()
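A self-contained equivalent of this startup check using redis-py directly (host, port, db and password are placeholders for whatever --redis would supply):

import sys

import redis


def check_redis(host="127.0.0.1", port=6379, db=0, password=None):
    red = redis.Redis(host=host, port=port, db=db, password=password)
    try:
        red.ping()  # raises on connection or authentication failure
    except redis.RedisError as ex:
        print("Redis connection failed: {}".format(ex))
        sys.exit(1)
    return red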
Example No. 10
    def __init__(self, dictdata):
        self.dictdata = dictdata
        self.red = getredis()
        # self.red = ""
        data = copy.deepcopy(dictdata)
        self.url = data.get("url")
        self.request = data.get("request")
        self.request_bodyoffset = int(self.dictdata.get("request").get("bodyoffset"))
        self.response_bodyoffset = int(self.dictdata.get("response").get("bodyoffset"))

        self.response = data.get("response")
        self.keys = {
            "perfile": "doned_perfile",
            "perfolder": "doned_perfolder",
            "perscheme": "doned_perscheme",
        }
Example No. 11
    def can_output(self, msg, insert=False):
        '''
        msg: should be url + some identifying name
        '''

        msgmd5 = getmd5(msg)
        red = getredis()
        if not insert:
            if not red.sismember("myscan_max_output", msgmd5):
                return True  # allowed to output
            else:
                logger.debug("{} has already been output once, stop testing/outputting".format(msg))
                return False  # must not output again
        else:
            # red.hincrby("myscan_max_output", msgmd5, amount=1)
            red.sadd("myscan_max_output", msgmd5)
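Both output helpers implement the same report-once gate on a Redis set; a standalone sketch of that pattern (connection, set name and the example message are illustrative):

import hashlib

import redis

red = redis.Redis()


def can_output(msg, insert=False, setname="myscan_max_output"):
    digest = hashlib.md5(msg.encode()).hexdigest()
    if not insert:
        # True means this message has never been reported before
        return not red.sismember(setname, digest)
    # mark the message as reported so later checks return False
    red.sadd(setname, digest)


# if can_output("http://example.com sqli"):
#     ...report the finding...
#     can_output("http://example.com sqli", insert=True)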
Example No. 12
def start_input():
    if cmd_line_options.command == "hostscan":
        try:
            red = getredis()
            if cmd_line_options.input_nmaptext:
                datas = get_data_from_textfile(cmd_line_options.input_nmaptext)
                logger.info("input {} lines from nmap_text".format(len(datas)))
                for data in datas:
                    red.lpush("hostdata", json.dumps(data))
            if cmd_line_options.input_jsonfile:
                datas = get_data_from_jsonfile(cmd_line_options.input_jsonfile)
                logger.info("input {} lines from nmap_json".format(len(datas)))
                for data in datas:
                    red.lpush("hostdata", json.dumps(data))
        except Exception as ex:
            traceback.print_exc()
            logger.warning("input target to hostdata get error:{}".format(ex))
Example No. 13
def cleandb():
    # red = redis.StrictRedis(connection_pool=conn.redis)
    red = getredis()
    if None in red.hmget("count_all", "doned", "request", "block_host", "request_fail"):
        count_all = {
            "block_host": 0,  # number of blocked host_port pairs
            'doned': 0,  # number of burpdata entries already processed
            "request": 0,  # number of requests made
            "request_fail": 0,  # number of failed requests
        }
        red.hmset("count_all", count_all)
    if cmd_line_options.clean:
        red.flushall()
        count_all = {
            "block_host": 0,  # number of blocked host_port pairs
            'doned': 0,  # number of burpdata entries already processed
            "request": 0,  # number of requests made
            "request_fail": 0,  # number of failed requests
        }
        red.hmset("count_all", count_all)
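The counters live in one Redis hash; a small sketch of initialising and bumping such a hash with redis-py (hset with mapping= is the non-deprecated spelling of hmset; the connection is a placeholder):

import redis

red = redis.Redis()

# create the counters only if any field is missing
if None in red.hmget("count_all", "doned", "request", "request_fail", "block_host"):
    red.hset("count_all", mapping={
        "doned": 0,
        "request": 0,
        "request_fail": 0,
        "block_host": 0,
    })

red.hincrby("count_all", "request", 1)  # one more request sent
print(red.hgetall("count_all"))         # keys and values come back as bytes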
Example No. 14
 def saveresult(self, result_data, info):
     red = getredis()
     if not result_data.get("createtime", None):
         result_data["createtime"] = time.strftime("%Y-%m-%d %H:%M:%S",
                                                   time.localtime())
     parsehash = hash(
         str(result_data.get("detail").get("parse")) +
         result_data.get("url") + result_data.get("name"))
     hosthash = "saerch_" + str(
         hash(parse.urlparse(result_data.get("url")).netloc.split(":")[0]))
     if not red.sismember(hosthash, parsehash):
         self.output(info.get("vulmsg"), insert=True)
         red.sadd(hosthash, parsehash)
         logger.critical(result_data)
         random_id = get_random_str(9)
         red.set("result_" + random_id, pickle.dumps(result_data))
         red.lpush("vuln_" + result_data["name"].replace(" ", "_"),
                   "result_" + random_id)
         red.lpush("vuln_all", "result_" + random_id)
         red.lpush("vuln_all_write",
                   "result_" + random_id)  # 保存结果到html,save线程取
Example No. 15
def run_python_poc():
    red = getredis()
    try:
        while True:
            try:
                if scan_set.get("random_test", False):
                    workdata = red.spop("work_data_py_set")
                else:
                    # red.lpush("work_data_py", pickledata)
                    workdata = red.lpop("work_data_py")

                if workdata:
                    workdata_ = pickle.loads(workdata)
                    signal.signal(signal.SIGALRM, handler)
                    signal.alarm(int(scan_set.get("poc_timeout")))

                    # workdata_ will look like this
                    # {
                    #     "id": id,
                    #     "data": None,  # None for perscheme
                    #     "poc": poc,
                    #     "type": "perscheme"
                    # }
                    logger.debug("Python poc get one data, type:" +
                                 workdata_.get("type"))
                    p = python_poc(workdata_)
                    p.run()
                    signal.alarm(0)
                else:
                    time.sleep(random.uniform(1, 2))
            except Exception as ex:
                traceback.print_exc()
                logger.warning("Run_python scan got an error: {}".format(ex))
    except KeyboardInterrupt:
        logger.warning("Ctrl+C was pressed, aborting program")
    except Exception as ex:
        traceback.print_exc()
        logger.warning("Run_python main scan got an error: {}".format(ex))
Example No. 16
def run_python():
    red = getredis()
    try:
        while True:
            try:
                workdata = red.lpop("work_data_py")
                if workdata:
                    workdata = pickle.loads(workdata)
                    logger.debug("Python poc get one data, type:" +
                                 workdata.get("type"))
                    p = python_poc(workdata)
                    p.run()
                else:
                    time.sleep(random.uniform(1, 2))
            except Exception as ex:
                traceback.print_exc()
                logger.warning("Run_python process got an error: {}".format(ex))
    except KeyboardInterrupt:
        logger.warning("Ctrl+C was pressed, aborting program")
    except Exception as ex:
        traceback.print_exc()
        logger.warning("Run_python main process got an error: {}".format(ex))
Example No. 17
def is_cdn_domain(domain):
    '''
    return True ,False
    '''
    red = getredis()
    key = getmd5(domain)
    if red.sismember("domain_cdn_true", key):
        return True
    if red.sismember("domain_cdn_false", key):
        return False
    try:
        r = dns.resolver.Resolver(configure=False)
        r.nameservers = others.dns_servers
        # if a random, non-existent subdomain still resolves, the domain is
        # treated as wildcard/CDN-backed
        answers = r.query('myscan-not-%s-test.%s' %
                          (get_random_str(4).lower(), domain))
        if answers:
            red.sadd("domain_cdn_true", key)
            return True
        red.sadd("domain_cdn_false", key)
        return False
    except Exception:
        red.sadd("domain_cdn_false", key)
        return False
Example No. 18
def writeresults():
    red = getredis()
    while True:
        try:
            results = []
            while True:
                id = red.lpop("vuln_all_write")
                if id:
                    pickle_data = red.get(id)
                    if pickle_data:
                        results.append(pickle.loads(pickle_data))
                else:
                    if results:
                        check()
                        out = htmlexport(results)
                        out.save()
                        results = []
                    time.sleep(5)
        except KeyboardInterrupt:
            logger.warning("Ctrl+C was pressed, aborting program")
            break
        except Exception as ex:
            traceback.print_exc()
            logger.warning(ex)
Example No. 19
 def __init__(self, workdata):
     self.workdata = workdata
     self.red = getredis()
Example No. 20
 def __init__(self, dictdata):
     self.dictdata = dictdata
     self.redis_key = "plugin_extractdomain"
     self.red = getredis()
Example No. 21
def start():
    logger.info("Myscan Python Moudle Listen ...")
    red = getredis()
    try:
        while True:
            try:
                if cmd_line_options.command == "webscan":
                    data = red.rpop("burpdata")
                    if data:
                        red.hincrby("count_all", "doned", amount=1)
                        logger.debug("Get one data from burpdata")
                        dictdata = None
                        try:
                            dictdata = json.loads(data)
                        except Exception as ex:
                            logger.warning(
                                "Process burpdata to json get error:" +
                                str(ex))
                            continue
                        if dictdata is not None:

                            # assign an id to this dictdata
                            id = get_random_str(10) + str(get_random_num(5))
                            toredisdatas = []
                            # plugins enabled
                            if cmd_line_options.allow_plugin:
                                for pluginhash, plugin_info in cmd_line_options.allow_plugin.items():
                                    toredisdatas.append(
                                        ("plugin_data_py",
                                         pickle.dumps({
                                             "id": id,
                                             "pochash": pluginhash,
                                             "poc": plugin_info.get("poc"),
                                         })))

                            is_filter = dictdata.get("filter")
                            host = dictdata.get("url").get("host")
                            port = dictdata.get("url").get("port")
                            block = block_info(host, port)
                            if allow_host(host) and not block.is_block():
                                # whether passive search mode is enabled
                                if scan_set.get("search_open", False):
                                    s = searchmsg(dictdata)
                                    s.verify()
                                    # s.saveresult()

                                data_parser = dictdata_parser(dictdata)
                                # perfile
                                if cmd_line_options.pocs_perfile:
                                    if not is_filter or not data_parser.is_perfile_doned(
                                    ):
                                        logger.debug(
                                            data_parser.getperfile(
                                            ).capitalize() +
                                            " is_perfile_doned res:False")
                                        for poc in cmd_line_options.pocs_perfile:
                                            toredisdatas.append(
                                                ("work_data_py",
                                                 pickle.dumps({
                                                     "id": id,
                                                     "data": data_parser.getperfile(),
                                                     "poc": poc,
                                                     "type": "perfile",
                                                 })))
                                    else:
                                        logger.debug(
                                            data_parser.getperfile(
                                            ).capitalize() +
                                            " is_perfile_doned res:True")
                                # perfolder
                                if cmd_line_options.pocs_perfoler:
                                    if not is_filter:
                                        folders = data_parser.getperfolders()
                                    else:
                                        folders = data_parser.is_perfolder_doned(
                                        )

                                    if folders != []:
                                        for folder in folders:
                                            for poc in cmd_line_options.pocs_perfoler:
                                                # red.lpush("work_data_py", pickle.dumps({
                                                #     "data": folder,
                                                #     "dictdata": dictdata,
                                                #     "type": "perfolder"
                                                # }))
                                                toredisdatas.append(
                                                    ("work_data_py",
                                                     pickle.dumps({
                                                         "id": id,
                                                         "data": folder,
                                                         "poc": poc,
                                                         "type": "perfolder",
                                                     })))
                                # scheme
                                if cmd_line_options.pocs_perscheme:
                                    if not is_filter or not data_parser.is_perscheme_doned(
                                    ):
                                        logger.debug(
                                            data_parser.getperfile(
                                            ).capitalize() +
                                            " is_perscheme_doned res:False")
                                        for poc in cmd_line_options.pocs_perscheme:
                                            toredisdatas.append((
                                                "work_data_py",
                                                pickle.dumps({
                                                    "id": id,
                                                    "data": None,  # no data field is needed for perscheme
                                                    "poc": poc,
                                                    "type": "perscheme",
                                                })))
                                    else:
                                        logger.debug(
                                            data_parser.getperfile(
                                            ).capitalize() +
                                            " is_perscheme_doned res:True")

                            else:
                                logger.debug("Host block:" + host)
                            # dispatch the queued items
                            if toredisdatas:
                                # create a new hash keyed by this id
                                red.hmset(id, {
                                    'data': data,
                                    'count': len(toredisdatas)
                                })
                                for key, pickledata in toredisdatas:
                                    if key == "plugin_data_py":
                                        red.lpush("plugin_data_py", pickledata)
                                    else:
                                        if scan_set.get("random_test", False):
                                            red.sadd("work_data_py_set",
                                                     pickledata)
                                        else:
                                            red.lpush("work_data_py",
                                                      pickledata)
                    else:
                        time.sleep(random.uniform(0, 1))
                elif cmd_line_options.command == "hostscan":
                    data = red.rpop("hostdata")
                    if data:
                        red.hincrby("count_all", "doned", amount=1)
                        logger.debug("Get one data from hostdata")
                        dictdata = None
                        try:
                            dictdata = json.loads(data)
                        except Exception as ex:
                            logger.warning(
                                "Process hostdata to json get error:" +
                                str(ex))
                            continue
                        if dictdata is not None:
                            # plugins enabled
                            if cmd_line_options.plugins:
                                plugin(dictdata)
                            if "all" in cmd_line_options.disable:
                                continue
                            is_filter = dictdata.get("filter")
                            host = dictdata.get("addr")
                            port = dictdata.get("port")
                            block = block_info(host, port)
                            id = get_random_str(10) + str(get_random_num(5))
                            if allow_host(host):
                                toredisdatas = []
                                if is_filter:
                                    if not block.is_block():
                                        block.block_it()
                                    else:
                                        continue
                                for poc in cmd_line_options.pocs_perserver:
                                    toredisdatas.append(
                                        pickle.dumps({
                                            "id": id,
                                            "data": None,  # no data field is needed for perserver
                                            "poc": poc,
                                            "type": "perserver",
                                        }))
                                if toredisdatas:
                                    red.hmset(id, {
                                        'data': data,
                                        'count': len(toredisdatas)
                                    })
                                for pickledata in toredisdatas:
                                    if scan_set.get("random_test", False):
                                        red.sadd("work_data_py_set",
                                                 pickledata)
                                    else:
                                        red.lpush("work_data_py", pickledata)
                    else:
                        time.sleep(random.uniform(1, 2))

            except Exception as ex:
                logger.debug("Run start get error:{}".format(ex))
                traceback.print_exc()
                continue
    except KeyboardInterrupt:
        logger.warning("Ctrl+C was pressed, aborting program")
Example No. 22
 def __init__(self, dictdata):
     self.dictdata = dictdata
     self.dict_host_ip = {}
     self.hosts = set()
     self.red = getredis()
Example No. 23
def start():
    logger.info("Myscan Python Moudle Listen ...")
    red = getredis()
    try:
        while True:
            data = red.lpop("burpdata")
            if data:
                red.hincrby("count_all", "doned", amount=1)
                logger.debug("Get one data from burpdata")
                dictdata = ""
                try:
                    dictdata = json.loads(data)
                except Exception as ex:
                    logger.warning("Process burpdata to json get error:" +
                                   str(ex))
                    continue
                if dictdata != "":
                    # plugins enabled
                    if cmd_line_options.plugins:
                        plugin(dictdata)
                    if "all" in cmd_line_options.disable:
                        continue
                    is_filter = dictdata.get("filter")
                    host = dictdata.get("url").get("host")
                    port = dictdata.get("url").get("port")
                    block = block_info(host, port)
                    if allow_host(host) and not block.is_block():
                        # whether passive search mode is enabled
                        if scan_set.get("search_open", False):
                            s = searchmsg(dictdata)
                            s.verify()
                            s.saveresult()
                        data_parser = dictdata_parser(dictdata)
                        # perfile
                        if cmd_line_options.pocs_perfile:
                            if not is_filter or not data_parser.is_perfile_doned(
                            ):
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perfile_doned res:False")
                                red.lpush(
                                    "work_data_py",
                                    pickle.dumps({
                                        "data": data_parser.getperfile(),
                                        "dictdata": dictdata,
                                        "type": "perfile",
                                    }))
                            else:
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perfile_doned res:True")
                        # perfolder
                        if cmd_line_options.pocs_perfoler:
                            if not is_filter:
                                folders = data_parser.getperfolders()
                            else:
                                folders = data_parser.is_perfolder_doned()

                            if folders != []:
                                for folder in folders:
                                    red.lpush(
                                        "work_data_py",
                                        pickle.dumps({
                                            "data": folder,
                                            "dictdata": dictdata,
                                            "type": "perfolder"
                                        }))
                        # scheme
                        if cmd_line_options.pocs_perscheme:
                            if not is_filter or not data_parser.is_perscheme_doned(
                            ):
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perscheme_doned res:False")
                                red.lpush(
                                    "work_data_py",
                                    pickle.dumps({
                                        "dictdata": dictdata,  # no data field is needed for perscheme
                                        "type": "perscheme",
                                    }))
                            else:
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perscheme_doned res:True")

                    else:
                        logger.debug("Host block:" + host)
            else:
                time.sleep(random.uniform(1, 2))
    except KeyboardInterrupt:
        logger.warning("Ctrl+C was pressed, aborting program")
Example No. 24
 def __init__(self, host, port):
     self.red = getredis()
     self.host_port = "{}_{}".format(host, port)
     self.count_res_key = "count_res_{}".format(self.host_port)  # list
     self.block_key = "block"  # set
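Only the constructor is shown; a purely hypothetical sketch of how these keys could back the push_result_status()/is_block() calls seen in Examples No. 7 and No. 21 (window size, failure threshold and method bodies are assumptions, not taken from this project):

import redis

red = redis.Redis()


class BlockInfo:
    def __init__(self, host, port, window=20, max_fails=15):
        self.host_port = "{}_{}".format(host, port)
        self.count_res_key = "count_res_{}".format(self.host_port)  # list of recent 0/1 results
        self.block_key = "block"                                    # set of blocked host_port values
        self.window = window
        self.max_fails = max_fails

    def push_result_status(self, status):
        # record the latest result (0 = success, 1 = failure) and keep only the window
        red.lpush(self.count_res_key, status)
        red.ltrim(self.count_res_key, 0, self.window - 1)
        recent = [int(x) for x in red.lrange(self.count_res_key, 0, -1)]
        if sum(recent) >= self.max_fails:
            red.sadd(self.block_key, self.host_port)

    def is_block(self):
        return red.sismember(self.block_key, self.host_port)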