Example #1
0
def request(**kwargs_sour):
    """Issue an HTTP request with project defaults, retries and block bookkeeping.

    The caller's kwargs are deep-copied so they are never mutated.  Defaults
    (verify=False, timeout=8, a browser User-Agent) are filled in for falsy or
    missing values; command-line proxy/timeout always override.  Each attempt's
    outcome is recorded via block_info, and overall request/fail counters are
    kept in redis.

    Returns the requests.Response on success, or None when all
    cmd_line_options.retry + 1 attempts failed.
    """
    kwargs = copy.deepcopy(kwargs_sour)
    red = getredis()

    # Fill defaults for anything the caller left falsy/unset.
    if not kwargs.get("verify", None):
        kwargs["verify"] = False
    if not kwargs.get("timeout", None):
        kwargs["timeout"] = 8
    if not kwargs.get("headers", None):
        kwargs["headers"] = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0"
        }
    # Command-line proxy/timeout win over caller-supplied values.
    if cmd_line_options.proxy:
        kwargs["proxies"] = cmd_line_options.proxy
    if cmd_line_options.timeout:
        kwargs["timeout"] = cmd_line_options.timeout
    # Encode a str body to bytes explicitly (avoids requests' implicit encode).
    if kwargs.get('data', None):
        if isinstance(kwargs.get("data"), str):
            kwargs["data"] = kwargs["data"].encode("utf-8", "ignore")
    r = None
    red.hincrby("count_all", "request", amount=1)
    h, p = gethostportfromurl(kwargs.get("url"))
    block = block_info(h, p)
    # Retry loop: one initial try plus cmd_line_options.retry retries.
    for _ in range(cmd_line_options.retry + 1):
        try:
            r = requests.request(**kwargs)
            block.push_result_status(0)  # record success for block heuristics
            break
        except requests.exceptions.ConnectTimeout:
            pass
        except requests.exceptions.ReadTimeout:
            pass
        except Exception as ex:
            logger.debug("Request error url:{} error:{}".format(
                kwargs["url"], ex))
        # Only reached on failure (success breaks out above).
        block.push_result_status(1)
        sleep(uniform(0, 0.2))
    if r is not None:  # bugfix: identity comparison instead of `!= None`
        if scan_set.get("search_open", False):
            s = searchmsg(r)
            s.verify()
            s.saveresult()
    else:
        red.hincrby("count_all", "request_fail", amount=1)
    return r
Example #2
0
def start():
    """Main dispatch loop: pop raw scan data from redis and fan work out.

    webscan mode consumes the "burpdata" queue, hostscan mode consumes
    "hostdata".  Each item gets a random task id; a redis hash keyed by that
    id tracks the raw data and the number of pending work units.  Work units
    are pickled and pushed to "work_data_py" (or added to the
    "work_data_py_set" set when random_test is on); plugin units go to
    "plugin_data_py".  Runs until Ctrl+C.
    """
    logger.info("Myscan Python Module Listen ...")  # bugfix: typo "Moudle"
    red = getredis()
    try:
        while True:
            try:
                if cmd_line_options.command == "webscan":
                    data = red.rpop("burpdata")
                    if data:
                        red.hincrby("count_all", "doned", amount=1)
                        logger.debug("Get one data from burpdata")
                        dictdata = None
                        try:
                            dictdata = json.loads(data)
                        except Exception as ex:
                            logger.warning(
                                "Process burpdata to json get error:" +
                                str(ex))
                            continue
                        if dictdata is not None:
                            # Assign a random task id to this piece of data.
                            # (renamed from `id`, which shadows the builtin)
                            task_id = get_random_str(10) + str(get_random_num(5))
                            toredisdatas = []
                            # Queue one plugin work unit per enabled plugin.
                            if cmd_line_options.allow_plugin:
                                for pluginhash, plugin_info in cmd_line_options.allow_plugin.items():
                                    toredisdatas.append(
                                        ("plugin_data_py",
                                         pickle.dumps({
                                             "id": task_id,
                                             "pochash": pluginhash,
                                             "poc": plugin_info.get("poc")
                                         })))

                            is_filter = dictdata.get("filter")
                            host = dictdata.get("url").get("host")
                            port = dictdata.get("url").get("port")
                            block = block_info(host, port)
                            if allow_host(host) and not block.is_block():
                                # Passive search mode, if enabled.
                                if scan_set.get("search_open", False):
                                    s = searchmsg(dictdata)
                                    s.verify()

                                data_parser = dictdata_parser(dictdata)
                                # perfile pocs
                                if cmd_line_options.pocs_perfile:
                                    if not is_filter or not data_parser.is_perfile_doned():
                                        logger.debug(
                                            data_parser.getperfile().capitalize() +
                                            " is_perfile_doned res:False")
                                        for poc in cmd_line_options.pocs_perfile:
                                            toredisdatas.append(
                                                ("work_data_py",
                                                 pickle.dumps({
                                                     "id": task_id,
                                                     "data": data_parser.getperfile(),
                                                     "poc": poc,
                                                     "type": "perfile"
                                                 })))
                                    else:
                                        logger.debug(
                                            data_parser.getperfile().capitalize() +
                                            " is_perfile_doned res:True")
                                # perfolder pocs
                                if cmd_line_options.pocs_perfoler:
                                    if not is_filter:
                                        folders = data_parser.getperfolders()
                                    else:
                                        folders = data_parser.is_perfolder_doned()

                                    if folders != []:
                                        for folder in folders:
                                            for poc in cmd_line_options.pocs_perfoler:
                                                toredisdatas.append(
                                                    ("work_data_py",
                                                     pickle.dumps({
                                                         "id": task_id,
                                                         "data": folder,
                                                         "poc": poc,
                                                         "type": "perfolder"
                                                     })))
                                # perscheme pocs
                                if cmd_line_options.pocs_perscheme:
                                    if not is_filter or not data_parser.is_perscheme_doned():
                                        logger.debug(
                                            data_parser.getperfile().capitalize() +
                                            " is_perscheme_doned res:False")
                                        for poc in cmd_line_options.pocs_perscheme:
                                            toredisdatas.append(
                                                ("work_data_py",
                                                 pickle.dumps({
                                                     "id": task_id,
                                                     "data": None,  # perscheme pocs carry no data payload
                                                     "poc": poc,
                                                     "type": "perscheme"
                                                 })))
                                    else:
                                        logger.debug(
                                            data_parser.getperfile().capitalize() +
                                            " is_perscheme_doned res:True")

                            else:
                                logger.debug("Host block:" + host)
                            # Dispatch the collected work units.
                            if toredisdatas:
                                # Track pending-unit count under the task id.
                                red.hmset(task_id, {
                                    'data': data,
                                    'count': len(toredisdatas)
                                })
                                for key, pickledata in toredisdatas:
                                    if key == "plugin_data_py":
                                        red.lpush("plugin_data_py", pickledata)
                                    elif scan_set.get("random_test", False):
                                        red.sadd("work_data_py_set", pickledata)
                                    else:
                                        red.lpush("work_data_py", pickledata)
                    else:
                        time.sleep(random.uniform(0, 1))
                elif cmd_line_options.command == "hostscan":
                    data = red.rpop("hostdata")
                    if data:
                        red.hincrby("count_all", "doned", amount=1)
                        logger.debug("Get one data from hostdata")
                        dictdata = None
                        try:
                            dictdata = json.loads(data)
                        except Exception as ex:
                            logger.warning(
                                "Process hostdata to json get error:" +
                                str(ex))
                            continue
                        if dictdata is not None:
                            # Run plugins first, if any are configured.
                            if cmd_line_options.plugins:
                                plugin(dictdata)
                            if "all" in cmd_line_options.disable:
                                continue
                            is_filter = dictdata.get("filter")
                            host = dictdata.get("addr")
                            port = dictdata.get("port")
                            block = block_info(host, port)
                            task_id = get_random_str(10) + str(get_random_num(5))
                            if allow_host(host):
                                toredisdatas = []
                                if is_filter:
                                    # Only scan each filtered host:port once.
                                    if not block.is_block():
                                        block.block_it()
                                    else:
                                        continue
                                for poc in cmd_line_options.pocs_perserver:
                                    toredisdatas.append(
                                        pickle.dumps({
                                            "id": task_id,
                                            "data": None,  # perserver pocs carry no data payload
                                            "poc": poc,
                                            "type": "perserver"
                                        }))
                                # bugfix: hmset was issued twice here, the second
                                # time unconditionally, creating empty task
                                # hashes when there was no work to dispatch.
                                if toredisdatas:
                                    red.hmset(task_id, {
                                        'data': data,
                                        'count': len(toredisdatas)
                                    })
                                for pickledata in toredisdatas:
                                    if scan_set.get("random_test", False):
                                        red.sadd("work_data_py_set", pickledata)
                                    else:
                                        red.lpush("work_data_py", pickledata)
                    else:
                        time.sleep(random.uniform(1, 2))

            except Exception as ex:
                logger.debug("Run start get error:{}".format(ex))
                traceback.print_exc()
                continue
    except KeyboardInterrupt:
        logger.warning("Ctrl+C was pressed ,aborted program")
Example #3
0
def start():
    """Dispatch loop for burp passive-scan data (single-queue variant).

    Pops items from "burpdata", optionally runs plugins and passive search,
    then pushes per-poc work units (perfile / perfolder / perscheme) onto the
    "work_data_py" queue.  Runs until Ctrl+C.
    """
    logger.info("Myscan Python Module Listen ...")  # bugfix: typo "Moudle"
    red = getredis()
    try:
        while True:
            # NOTE(review): lpop here with lpush producers makes this queue
            # LIFO; confirm that ordering is intended.
            data = red.lpop("burpdata")
            if data:
                red.hincrby("count_all", "doned", amount=1)
                logger.debug("Get one data from burpdata")
                dictdata = None  # bugfix: use None as the sentinel, not ""
                try:
                    dictdata = json.loads(data)
                except Exception as ex:
                    logger.warning("Process burpdata to json get error:" +
                                   str(ex))
                    continue
                if dictdata is not None:
                    # Run plugins first, if any are configured.
                    if cmd_line_options.plugins:
                        plugin(dictdata)
                    if "all" in cmd_line_options.disable:
                        continue
                    is_filter = dictdata.get("filter")
                    host = dictdata.get("url").get("host")
                    port = dictdata.get("url").get("port")
                    block = block_info(host, port)
                    if allow_host(host) and not block.is_block():
                        # Passive search mode, if enabled.
                        if scan_set.get("search_open", False):
                            s = searchmsg(dictdata)
                            s.verify()
                            s.saveresult()
                        data_parser = dictdata_parser(dictdata)
                        # perfile pocs
                        if cmd_line_options.pocs_perfile:
                            if not is_filter or not data_parser.is_perfile_doned():
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perfile_doned res:False")
                                red.lpush(
                                    "work_data_py",
                                    pickle.dumps({
                                        "data": data_parser.getperfile(),
                                        "dictdata": dictdata,
                                        "type": "perfile"
                                    }))
                            else:
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perfile_doned res:True")
                        # perfolder pocs
                        if cmd_line_options.pocs_perfoler:
                            if not is_filter:
                                folders = data_parser.getperfolders()
                            else:
                                folders = data_parser.is_perfolder_doned()

                            if folders != []:
                                for folder in folders:
                                    red.lpush(
                                        "work_data_py",
                                        pickle.dumps({
                                            "data": folder,
                                            "dictdata": dictdata,
                                            "type": "perfolder"
                                        }))
                        # perscheme pocs
                        if cmd_line_options.pocs_perscheme:
                            if not is_filter or not data_parser.is_perscheme_doned():
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perscheme_doned res:False")
                                red.lpush(
                                    "work_data_py",
                                    pickle.dumps({
                                        "dictdata": dictdata,  # no data payload for perscheme
                                        "type": "perscheme"
                                    }))
                            else:
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perscheme_doned res:True")

                    else:
                        logger.debug("Host block:" + host)
            else:
                time.sleep(random.uniform(1, 2))
    except KeyboardInterrupt:
        logger.warning("Ctrl+C was pressed ,aborted program")