Ejemplo n.º 1
0
def send_mail(content):
    """Mail *content* to every registered notification recipient.

    SMTP settings are read from the 'mail' settings document; the
    recipient list is built from every document in the notice collection.
    Failures are logged, never raised.
    """
    smtp_config = setting_col.find_one({'key': 'mail'})
    recipients = []
    for entry in notice_col.find({}):
        recipients.append(entry.get('mail'))
    try:
        sent = mail_notice(smtp_config, recipients, content)
    except smtplib.SMTPException as error:
        logger.critical('Error: 无法发送邮件 {}'.format(error))
    else:
        if sent:
            logger.info('邮件发送成功')
        else:
            logger.critical('Error: 无法发送邮件')
Ejemplo n.º 2
0
    def run(self):
        """
        Run all of the registered functions.

        Marks the registry as running, then invokes every registered init
        function. A failure in one function is logged (with traceback) but
        does not prevent the remaining functions from running.
        """

        self._run = True

        # ``iteritems`` is Python-2 only; ``items`` works everywhere and the
        # rest of this codebase is Python 3.
        for app_name, f in self._init_funcs.items():
            try:
                f()
            except Exception:
                # A bare ``except:`` would also swallow SystemExit and
                # KeyboardInterrupt; Exception keeps interrupts working.
                logger.critical(app_name + ' server side failed to initialize with the exception:')
                logger.critical(traceback.format_exc())
Ejemplo n.º 3
0
def send_mail(content):
    """Send *content* by mail and by the `kmg` element IM command.

    Fixes vs. the original:
    * the second ``try:`` had no ``except``/``finally`` clause (SyntaxError);
    * ``loger`` typo would have raised NameError on the failure path;
    * ``os.system`` returns the exit status (0 on success), so the original
      truthiness test had the success/failure branches inverted.
    """
    smtp_config = setting_col.find_one({'key': 'mail'})
    receivers = [data.get('mail') for data in notice_col.find({})]
    # NOTE(review): *content* is interpolated into a shell command string —
    # shell-injection risk if content can be attacker-controlled; consider
    # subprocess.run([...], shell=False) instead.
    elementcmd = "kmg imu zengshuai \'[GitHub警告]<br />" + content + "\'"
    try:
        if mail_notice(smtp_config, receivers, content):
            logger.info('邮件发送成功')
        else:
            os.system("kmg imu zengshuai 'haweye email Error'")
            logger.critical('Error: 无法发送邮件')
    except smtplib.SMTPException as error:
        logger.critical('Error: 无法发送邮件 {}'.format(error))
    try:
        # Exit status 0 means the command succeeded.
        if os.system(elementcmd) == 0:
            logger.info('element 信息发送成功')
        else:
            os.system("kmg imu zengshuai 'haweye element Error'")
            logger.critical('Error: element信息发送失败')
    except OSError as error:
        logger.critical('Error: element信息发送失败 {}'.format(error))
Ejemplo n.º 4
0
def load_applications():
    """
    Load all of the applications server side code.
    This is done by importing all of the packages inside
    the applications directory.
    Note: code which is registered using init.register()
    isn't executed yet.
    """

    path = os.path.dirname(__file__)
    for module in os.listdir(path):
        # Skip private entries and plain files; applications are packages.
        if module.startswith('_') or not os.path.isdir(os.sep.join([path, module])):
            continue

        try:
            importlib.import_module('.' + module, 'parasite.applications')
        except Exception:
            # A bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
            # Exception still isolates one broken app from the rest while
            # letting interrupts propagate.
            logger.critical(module + ' server side failed to initialize with the exception:')
            logger.critical(traceback.format_exc())
Ejemplo n.º 5
0
    def target_directory(self, target_mode):
        """Resolve the path to scan for *target_mode*.

        A file target is returned untouched; a folder target is normalised
        to an absolute path with a trailing slash; any other mode logs a
        critical error and terminates the process.
        """
        if target_mode == TARGET_MODE_FOLDER:
            directory = self.target
        elif target_mode == TARGET_MODE_FILE:
            return self.target
        else:
            logger.critical(
                '[PARSE-ARGS] exception target mode ({mode})'.format(
                    mode=target_mode))
            exit()

        logger.debug('[PARSE-ARGS] target directory: {directory}'.format(
            directory=directory))
        directory = os.path.abspath(directory)
        # Guarantee a trailing slash on the returned folder path.
        if directory.endswith('/'):
            return directory
        return u'{t}/'.format(t=directory)
Ejemplo n.º 6
0
    def target_mode(self):
        """
        Parse target mode (git/file/folder/compress)
        :return: str
        """
        mode = None
        # URL-looking targets are git repositories.
        for prefix in ('http://', 'https://', 'ssh://'):
            if self.target.startswith(prefix):
                mode = TARGET_MODE_GIT

        # Local paths override the git guess.
        if os.path.isfile(self.target):
            mode = TARGET_MODE_FILE
        if os.path.isdir(self.target):
            mode = TARGET_MODE_FOLDER

        if mode is None:
            logger.critical('[PARSE-ARGS] [-t <target>] can\'t empty!')
            exit()
        logger.debug(
            '[PARSE-ARGS] Target Mode: {mode}'.format(mode=mode))
        return mode
Ejemplo n.º 7
0
    def target_directory(self, target_mode):
        """Resolve the path to scan for *target_mode*.

        A file target is returned as-is (after an optional unzip); a folder
        target is normalised to an absolute path ending in '/'; any other
        mode logs a critical error and terminates the process.
        """
        target_directory = None
        if target_mode == TARGET_MODE_FOLDER:
            target_directory = self.target
        elif target_mode == TARGET_MODE_FILE:
            target_directory = self.target

            # Check whether the target is a zip archive; if so, unpack it
            # and scan the extracted directory instead.
            if os.path.splitext(target_directory)[-1] == '.zip':
                try:
                    logger.info("[CLI] Target {} is zip, try to unzip.".format(
                        target_directory))
                    target_directory = un_zip(target_directory)

                except zipfile.BadZipFile:
                    # Not a real zip: fall back to scanning the original file.
                    logger.warning(
                        "[CLI] file {} not zip".format(target_directory))

                except OSError:
                    logger.warning(
                        "[CLI] file {} unzip error".format(target_directory))

            return target_directory
        else:
            logger.critical(
                '[PARSE-ARGS] exception target mode ({mode})'.format(
                    mode=target_mode))
            exit()

        logger.debug('[PARSE-ARGS] target directory: {directory}'.format(
            directory=target_directory))
        target_directory = os.path.abspath(target_directory)
        # Guarantee a trailing slash on the returned folder path.
        if target_directory[-1] == '/':
            return target_directory
        else:
            return u'{t}/'.format(t=target_directory)
Ejemplo n.º 8
0
    def set_trace(self, *args, **kwargs):
        """Start a pdb debugger available via telnet, and optionally email people the endpoint

        The endpoint will always be seen in the py.test runner output.

        Keyword Args:
            recipients: A list where, if set, an email will be sent to email addresses
                in this list.
            subject: If set, an optional custom email subject

        """
        host, port = self.sock.getsockname()
        # NOTE(review): the announced host is store.my_ip_address, not the
        # socket's bound host — presumably the externally reachable IP; confirm.
        endpoint = 'host {} port {}'.format(store.my_ip_address, port)

        recipients = kwargs.pop('recipients', None)
        if recipients:
            # write and send an email
            subject = kwargs.pop('subject', 'RDB Breakpoint: Manually invoked')
            body = dedent("""\
            A py.test run encountered an error. The remote debugger is running
            on {} (TCP), waiting for telnet connection.
            """).format(endpoint)

            try:
                smtp_server = smtp_conf['server']
                smtp = smtplib.SMTP(smtp_server)
                msg = MIMEText(body)
                msg['Subject'] = subject
                msg['To'] = ', '.join(recipients)
                smtp.sendmail('*****@*****.**', recipients,
                              msg.as_string())
            except socket.error:
                # Email is best-effort; the endpoint is still announced below.
                logger.critical("Couldn't send email")

        # Announce the endpoint loudly so it shows up in the runner output.
        msg = 'Remote debugger listening on {}'.format(endpoint)
        logger.critical(msg)
        write_line(msg, red=True, bold=True)
        # Block until a single telnet client connects, then wire pdb's
        # stdin/stdout — and the process-wide sys streams — to that client.
        self.sock.listen(1)
        (client_socket, address) = self.sock.accept()
        client_fh = client_socket.makefile('rw')
        Pdb.__init__(self,
                     completekey='tab',
                     stdin=client_fh,
                     stdout=client_fh)
        sys.stdout = sys.stdin = client_fh
        # Runs the interactive debugger session; returns when it ends.
        Pdb.set_trace(self, *args, **kwargs)
        msg = 'Debugger on {} shut down'.format(endpoint)
        logger.critical(msg)
        write_line(msg, green=True, bold=True)
Ejemplo n.º 9
0
    def set_trace(self, *args, **kwargs):
        """Start a pdb debugger available via telnet, and optionally email people the endpoint

        The endpoint will always be seen in the py.test runner output.

        Keyword Args:
            recipients: A list where, if set, an email will be sent to email addresses
                in this list.
            subject: If set, an optional custom email subject

        """
        host, port = self.sock.getsockname()
        # NOTE(review): the announced host is store.my_ip_address, not the
        # socket's bound host — presumably the externally reachable IP; confirm.
        endpoint = 'host {} port {}'.format(store.my_ip_address, port)

        recipients = kwargs.pop('recipients', None)
        if recipients:
            # write and send an email
            subject = kwargs.pop('subject', 'RDB Breakpoint: Manually invoked')
            body = dedent("""\
            A py.test run encountered an error. The remote debugger is running
            on {} (TCP), waiting for telnet connection.
            """).format(endpoint)

            try:
                smtp_server = smtp_conf['server']
                smtp = smtplib.SMTP(smtp_server)
                msg = MIMEText(body)
                msg['Subject'] = subject
                msg['To'] = ', '.join(recipients)
                smtp.sendmail('*****@*****.**', recipients, msg.as_string())
            except socket.error:
                # Email is best-effort; the endpoint is still announced below.
                logger.critical("Couldn't send email")

        # Announce the endpoint loudly so it shows up in the runner output.
        msg = 'Remote debugger listening on {}'.format(endpoint)
        logger.critical(msg)
        write_line(msg, red=True, bold=True)
        # Block until a single telnet client connects, then wire pdb's
        # stdin/stdout — and the process-wide sys streams — to that client.
        self.sock.listen(1)
        (client_socket, address) = self.sock.accept()
        client_fh = client_socket.makefile('rw')
        Pdb.__init__(self, completekey='tab', stdin=client_fh, stdout=client_fh)
        sys.stdout = sys.stdin = client_fh
        # Runs the interactive debugger session; returns when it ends.
        Pdb.set_trace(self, *args, **kwargs)
        msg = 'Debugger on {} shut down'.format(endpoint)
        logger.critical(msg)
        write_line(msg, green=True, bold=True)
Ejemplo n.º 10
0
def search(query, page, g, github_username):
    """Crawl one page of GitHub code-search results for *query* and store
    new leakage records, queueing mail/webhook notifications.

    :param query: dict with at least 'tag' and 'keyword'
    :param page: zero-based result page index
    :param g: authenticated PyGithub client
    :param github_username: account whose rate-limit counter is tracked
    """
    mail_notice_list = []
    webhook_notice_list = []
    logger.info('开始抓取: tag is {} keyword is {}, page is {}'.format(
        query.get('tag'), query.get('keyword'), page + 1))
    try:
        repos = g.search_code(query=query.get('keyword'),
                              sort="indexed",
                              order="desc")
        # Persist the remaining search quota for this account.
        github_col.update_one({'username': github_username}, {
            '$set': {
                'rate_remaining': int(g.get_rate_limit().search.remaining)
            }
        })

    except Exception as error:
        # Most likely an API rate-limit / abuse error; give up on this page.
        logger.critical(error)
        logger.critical("触发限制啦")
        return
    try:
        for repo in repos.get_page(page):
            # Heartbeat: record the crawler's pid and last-seen timestamp.
            setting_col.update_one({'key': 'task'}, {
                '$set': {
                    'key': 'task',
                    'pid': os.getpid(),
                    'last': timestamp()
                }
            },
                                   upsert=True)
            # Only process results we have not stored yet (keyed by blob sha).
            if not result_col.count({'_id': repo.sha}):
                try:
                    code = str(repo.content).replace('\n', '')
                except:
                    code = ''
                leakage = {
                    'link': repo.html_url,
                    'project': repo.repository.full_name,
                    'project_url': repo.repository.html_url,
                    '_id': repo.sha,
                    'language': repo.repository.language,
                    'username': repo.repository.owner.login,
                    'avatar_url': repo.repository.owner.avatar_url,
                    'filepath': repo.path,
                    'filename': repo.name,
                    'security': 0,
                    'ignore': 0,
                    'tag': query.get('tag'),
                    'code': code,
                }
                try:
                    leakage['affect'] = get_affect_assets(repo.decoded_content)
                except Exception as error:
                    logger.critical('{} {}'.format(error, leakage.get('link')))
                    leakage['affect'] = []
                # Stop early when the per-request rate limit is exhausted.
                if int(repo.raw_headers.get('x-ratelimit-remaining')) == 0:
                    logger.critical('剩余使用次数: {}'.format(
                        repo.raw_headers.get('x-ratelimit-remaining')))
                    return
                last_modified = datetime.datetime.strptime(
                    repo.last_modified, '%a, %d %b %Y %H:%M:%S %Z')
                leakage['datetime'] = last_modified
                leakage['timestamp'] = last_modified.timestamp()
                # Skip links matching any blacklist entry.
                # NOTE(review): the log message says 白名单 (whitelist) but the
                # collection is a blacklist — wording looks inverted; confirm.
                in_blacklist = False
                for blacklist in blacklist_col.find({}):
                    if blacklist.get('text').lower() in leakage.get(
                            'link').lower():
                        logger.warning('{} 包含白名单中的 {}'.format(
                            leakage.get('link'), blacklist.get('text')))
                        in_blacklist = True
                if in_blacklist:
                    continue
                # Skip projects explicitly marked as ignored.
                if result_col.count({
                        "project": leakage.get('project'),
                        "ignore": 1
                }):
                    continue
                # Only notify for project/filepath pairs not seen before.
                if not result_col.count({
                        "project": leakage.get('project'),
                        "filepath": leakage.get("filepath"),
                        "security": 0
                }):
                    mail_notice_list.append(
                        '上传时间:{} 地址: <a href={}>{}/{}</a>'.format(
                            leakage.get('datetime'), leakage.get('link'),
                            leakage.get('project'), leakage.get('filename')))
                    webhook_notice_list.append('[{}/{}]({}) 上传于 {}'.format(
                        leakage.get('project').split('.')[-1],
                        leakage.get('filename'), leakage.get('link'),
                        leakage.get('datetime')))
                try:
                    result_col.insert_one(leakage)
                    logger.info(leakage.get('project'))
                except errors.DuplicateKeyError:
                    logger.info('已存在')

                logger.info('抓取关键字:{} {}'.format(query.get('tag'),
                                                 leakage.get('link')))
    except Exception as error:
        # On unexpected API errors (other than 404) rotate to a fresh GitHub
        # account and re-schedule this same page at the back of the queue.
        if 'Not Found' not in error.data:
            g, github_username = new_github()
            search.schedule(args=(query, page, g, github_username),
                            delay=huey.pending_count() +
                            huey.scheduled_count())
        logger.critical(error)
        logger.error('抓取: tag is {} keyword is {}, page is {} 失败'.format(
            query.get('tag'), query.get('keyword'), page + 1))

        return
    logger.info('抓取: tag is {} keyword is {}, page is {} 成功'.format(
        query.get('tag'), query.get('keyword'), page + 1))
    # Record crawl status/statistics for this query tag.
    query_col.update_one({'tag': query.get('tag')}, {
        '$set': {
            'last': int(time.time()),
            'status': 1,
            'reason': '抓取第{}页成功'.format(page),
            'api_total': repos.totalCount,
            'found_total': result_col.count({'tag': query.get('tag')})
        }
    })
    # Send mail only when the mail integration is enabled and there is news.
    if setting_col.count({
            'key': 'mail',
            'enabled': True
    }) and len(mail_notice_list):
        main_content = '<h2>规则名称: {}</h2><br>{}'.format(
            query.get('tag'), '<br>'.join(mail_notice_list))
        send_mail(main_content)
    logger.info(len(webhook_notice_list))
    webhook_notice(query.get('tag'), webhook_notice_list)
Ejemplo n.º 11
0
    def __init__(self,
                 target,
                 formatter,
                 output,
                 special_rules=None,
                 language=None,
                 black_path=None,
                 a_sid=None):
        """Parse and normalise scan arguments.

        :param target: scan target (path or repository URL)
        :param formatter: output formatter name
        :param output: output destination ('' when falsy)
        :param special_rules: comma-separated rule names (e.g. '110001,CVI_110002')
        :param language: comma-separated language list
        :param black_path: comma-separated path blacklist
        :param a_sid: scan task id
        """
        self.target = target
        self.formatter = formatter
        self.output = output if output else ""

        # Normalise special rule names to 'CVI_<id>.py' and keep only those
        # that actually exist on disk.
        if special_rules is not None and special_rules != '':
            self.special_rules = []
            extension = '.py'
            start_name = 'CVI_'

            if ',' in special_rules:
                # check rule name
                s_rules = special_rules.split(',')
                for sr in s_rules:
                    if extension not in sr:
                        sr += extension
                    if start_name not in sr:
                        sr = start_name + sr

                    if self._check_rule_name(sr):
                        self.special_rules.append(sr)
                    else:
                        logger.critical(
                            '[PARSE-ARGS] Rule {sr} not exist'.format(sr=sr))
            else:
                special_rules = start_name + special_rules + extension

                if self._check_rule_name(special_rules):
                    self.special_rules = [special_rules]
                else:
                    logger.critical(
                        '[PARSE-ARGS] Exception special rule name(e.g: CVI-110001): {sr}'
                        .format(sr=special_rules))
        else:
            self.special_rules = None

        # check black path list
        if black_path is not None and black_path != "":
            if ',' in black_path:
                self.black_path_list = [
                    x.strip() for x in black_path.split(',') if x != ""
                ]
            else:
                # BUG FIX: a single path (no comma) used to be discarded with
                # a spurious "parse error" warning; accept it the same way a
                # single language value is accepted below.
                self.black_path_list = [black_path.strip()]
            logger.info("[INIT][PARSE_ARGS] Black Path list is {}".format(
                self.black_path_list))
        else:
            self.black_path_list = []

        # check and deal language
        if language is not None and language != "":
            self.language = []

            if ',' in language:
                self.language = [
                    x.strip() for x in language.split(',') if x != ""
                ]
                logger.info("[INIT][PARSE_ARGS] Language is {}".format(
                    self.language))
            else:
                self.language = [language.strip()]
                logger.info("[INIT][PARSE_ARGS] Only one Language {}.".format(
                    self.language))
        else:
            # Keep the attribute defined so later reads cannot raise
            # AttributeError when no language is supplied.
            self.language = []

        self.sid = a_sid
Ejemplo n.º 12
0
def scan(target_directory, a_sid=None, s_sid=None, special_rules=None, language=None, framework=None, file_count=0,
         extension_count=0, files=None, tamper_name=None, is_unconfirm=False):
    """Run every enabled rule against *target_directory*, persist results,
    print summary tables, and report completion data for task *s_sid*.

    Returns False when no rules are loaded, True otherwise.
    """
    r = Rule(language)
    vulnerabilities = r.vulnerabilities
    rules = r.rules(special_rules)
    find_vulnerabilities = []
    newcore_function_list = {}

    # Collect the per-rule scan results into find_vulnerabilities.
    def store(result):
        if result is not None and isinstance(result, list) is True:
            for res in result:
                res.file_path = res.file_path
                find_vulnerabilities.append(res)
        else:
            logger.debug('[SCAN] [STORE] Not found vulnerabilities on this rule!')

    # Coroutine wrapper: scan one rule and store what it finds.
    async def start_scan(target_directory, rule, files, language, tamper_name):

        result = scan_single(target_directory, rule, files, language, tamper_name, is_unconfirm, newcore_function_list)
        store(result)

    if len(rules) == 0:
        logger.critical('no rules!')
        return False
    logger.info('[PUSH] {rc} Rules'.format(rc=len(rules)))
    # NOTE(review): push_rules is never appended to, so the diff_rules
    # computation below always yields an empty list — confirm intent.
    push_rules = []
    scan_list = []

    for idx, single_rule in enumerate(sorted(rules.keys())):

        # init rule class
        r = getattr(rules[single_rule], single_rule)
        rule = r()

        # Skip disabled rules unless it is the only rule requested.
        if rule.status is False and len(rules) != 1:
            logger.info('[CVI_{cvi}] [STATUS] OFF, CONTINUE...'.format(cvi=rule.svid))
            continue
        # SR(Single Rule)
        logger.debug("""[PUSH] [CVI_{cvi}] {idx}.{vulnerability}({language})""".format(
            cvi=rule.svid,
            idx=idx,
            vulnerability=rule.vulnerability,
            language=rule.language
        ))
        # result = scan_single(target_directory, rule, files, language, tamper_name)
        scan_list.append(start_scan(target_directory, rule, files, language, tamper_name))
        # store(result)

    # Run all rule scans concurrently on the event loop.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.gather(*scan_list))

    loop.stop()

    # print
    data = []
    data2 = []
    table = PrettyTable(
        ['#', 'CVI', 'Rule(ID/Name)', 'Lang/CVE-id', 'Target-File:Line-Number',
         'Commit(Author)', 'Source Code Content', 'Analysis'])

    table.align = 'l'
    trigger_rules = []
    for idx, x in enumerate(find_vulnerabilities):
        trigger = '{fp}:{ln}'.format(fp=x.file_path, ln=x.line_number)
        commit = u'@{author}'.format(author=x.commit_author)
        try:
            code_content = x.code_content[:50].strip()
        except AttributeError as e:
            # code_content was bytes; decode before truncating.
            code_content = x.code_content.decode('utf-8')[:100].strip()
        row = [idx + 1, x.id, x.rule_name, x.language, trigger, commit,
               code_content.replace('\r\n', ' ').replace('\n', ' '), x.analysis]
        row2 = [idx + 1, x.chain]

        is_unconfirm_result = False
        if "unconfirmed" in x.analysis.lower():
            is_unconfirm_result = True

        # save to database
        sr = ScanResultTask(scan_task_id=a_sid, result_id=idx + 1, cvi_id=x.id, language=x.language,
                            vulfile_path=trigger, source_code=code_content.replace('\r\n', ' ').replace('\n', ' '),
                            result_type=x.analysis, is_unconfirm=is_unconfirm_result)

        sr.save()

        # Persist each node of the vulnerability chain as a result-flow row.
        for chain in x.chain:
            if type(chain) == tuple:
                ResultFlow = get_resultflow_class(int(a_sid))
                rf = ResultFlow(vul_id=idx + 1, node_type=chain[0], node_content=chain[1],
                                node_path=chain[2], node_lineno=chain[3])
                rf.save()

        data.append(row)
        data2.append(row2)

        table.add_row(row)

        if x.id not in trigger_rules:
            logger.debug(' > trigger rule (CVI-{cvi})'.format(cvi=x.id))
            trigger_rules.append(x.id)

        # clear
        x.chain = ""

    diff_rules = list(set(push_rules) - set(trigger_rules))
    vn = len(find_vulnerabilities)
    if vn == 0:
        logger.info('[SCAN] Not found vulnerability!')
    else:
        logger.info("[SCAN] Trigger Rules: {tr} Vulnerabilities ({vn})\r\n{table}".format(tr=len(trigger_rules),
                                                                                          vn=len(find_vulnerabilities),
                                                                                          table=table))

        # Print the full chain for every vulnerability.
        logger.info("[SCAN] Vulnerabilities Chain list: ")
        for d in data2:
            logger.info("[SCAN] Vul {}".format(d[0]))
            for c in d[1]:
                logger.info("[Chain] {}".format(c))
                if type(c) is not tuple:
                    continue
                show_context(c[2], c[3])

            logger.info("[SCAN] ending\r\n" + '-' * (shutil.get_terminal_size().columns - 16))

        if len(diff_rules) > 0:
            logger.info(
                '[SCAN] Not Trigger Rules ({l}): {r}'.format(l=len(diff_rules), r=','.join(diff_rules)))

    # show detail about newcore function list
    table2 = PrettyTable(
        ['#', 'NewFunction', 'OriginFunction', 'Related Rules id'])

    table2.align = 'l'
    idy = 0
    for new_function_name in newcore_function_list:
        # add new evil func in database
        for svid in newcore_function_list[new_function_name]["svid"]:
            if new_function_name:

                nf = NewEvilFunc(svid=svid, scan_task_id=get_scan_id(), func_name=new_function_name,
                                 origin_func_name=newcore_function_list[new_function_name]["origin_func_name"])
                nf.save()

        table2.add_row([idy + 1, new_function_name, newcore_function_list[new_function_name]["origin_func_name"], newcore_function_list[new_function_name]["svid"]])
        idy += 1

    if len(newcore_function_list) > 0:
        logger.info("[SCAN] New evil Function list by NewCore:\r\n{}".format(table2))

    # completed running data
    if s_sid is not None:
        Running(s_sid).data({
            'code': 1001,
            'msg': 'scan finished',
            'result': {
                'vulnerabilities': [x.__dict__ for x in find_vulnerabilities],
                'language': ",".join(language),
                'framework': framework,
                'extension': extension_count,
                'file': file_count,
                'push_rules': len(rules),
                'trigger_rules': len(trigger_rules),
                'target_directory': target_directory
            }
        })
    return True
Ejemplo n.º 13
0
def crawl(query, page):
    """Crawl one page of Gitee code-search results for *query* and store
    new leakage records, queueing mail/webhook notifications.

    :param query: dict with at least 'tag' and 'keyword'
    :param page: zero-based result page index
    """
    mail_notice_list = []
    webhook_notice_list = []
    search_url = 'https://search.gitee.com/?skin=rec&type=code&q={1}&sort=last_indexed' \
                 '&pageno={0}'
    session = gitee_login()

    logger.info('Gitee开始抓取: tag is {} keyword is {}, page is {}'.format(
        query.get('tag'), query.get('keyword'), page + 1))
    totalCount = 0
    # Effectively processes a single page (page+1); the range is one wide.
    for page in range(page + 1, page + 2):
        try:
            logger.info("Gitee ------ 启动抓取: {}".format(
                search_url.format(page, query.get('keyword'))))
            resp = session.get(search_url.format(page, query.get('keyword')))
            logger.info("Gitee 启动抓取: {}".format(
                search_url.format(page, query.get('keyword'))))
            # Parse the search-result HTML and walk each hit node.
            tree = etree.HTML(resp.text)
            nodes = tree.xpath('//*[@id="hits-list"]/div[@class="item"]')
            for node in nodes:
                logger.info("Gitee 开始抓取节点")
                totalCount += 1
                # i = nodes.index(node) + 1
                leakage = {}
                leakage['affect'] = []
                datetime_ = node.xpath(Gitee.DATETIME)[0].text
                # print(datetime)
                # Fall back to "today" when the date string cannot be parsed.
                datetime_match = re.match("[^\d]*(?P<Date>\d+.*)", datetime_)
                if not datetime_match:
                    leakage['datetime'] = _format_time(
                        datetime.datetime.now().date())
                else:
                    leakage['datetime'] = _format_time(
                        datetime_match.groups("Date")[0])
                leakage['timestamp'] = leakage.get('datetime').timestamp()
                leakage['link'] = cut_tail(
                    node.xpath(Gitee.LINK)[0].attrib['href'])
                leakage['filepath'] = node.xpath(Gitee.LINK)[0].text
                leakage['filename'] = leakage.get("filepath").split("/")[-1]
                # leakage['link'] = 'https://gitee.com' + realative_link
                # Gitee has no blob sha; key records on the md5 of the link.
                leakage['_id'] = _md5(leakage['link'])
                logger.info("Gitee ****** 开始抓取节点 {}".format(
                    leakage['datetime']))
                project_username = node.xpath(Gitee.USERNAME)[0].text
                leakage["vendor"] = "GITEE"
                leakage['username'] = project_username.split("/")[0]
                leakage['project'] = project_username
                leakage['project_url'] = cut_tail(
                    node.xpath(Gitee.USERNAME)[0].attrib['href'])
                logger.info("Gitee 抓取到 {}".format(leakage.get("project_url")))

                # Skip results already stored (by link+datetime or by _id).
                if result_col.find_one({"link": leakage['link'], "datetime": leakage['datetime']}) or \
                        result_col.find_one({'_id': leakage['_id']}):
                    continue

                # Skip links matching any blacklist entry.
                # NOTE(review): the log message says 白名单 (whitelist) but the
                # collection is a blacklist — wording looks inverted; confirm.
                in_blacklist = False
                for blacklist in blacklist_col.find({}):
                    if blacklist.get('text').lower() in leakage.get(
                            'link').lower():
                        logger.warning('{} 包含白名单中的 {}'.format(
                            leakage.get('link'), blacklist.get('text')))
                        in_blacklist = True
                if in_blacklist:
                    continue

                # Skip projects explicitly marked as ignored.
                if result_col.count({
                        "project": leakage.get('project'),
                        "ignore": 1
                }):
                    continue

                # A Gitee hit may be a project only, with no code attached.
                leakage['avatar_url'] = 'https://gitee.com/logo-black.svg'
                raw_code = gitee_raw_code(leakage['link'])
                leakage['code'] = base64.b64encode(
                    raw_code.encode("utf-8")).decode("utf-8")
                try:
                    leakage['affect'] = get_affect_assets(raw_code)
                except Exception as error:
                    logger.critical('{} {}'.format(error, leakage.get('link')))
                    leakage['affect'] = []
                leakage['tag'] = query['tag']
                # leakage['detail'] = etree.tostring(node,encoding='unicode').replace('{{', '<<').\
                # replace('}}', '>>')
                language_node = node.xpath(Gitee.LANGUAGE)
                if language_node:
                    leakage['language'] = language_node[0].text.strip()
                else:
                    leakage['language'] = 'Unknow'
                leakage['security'] = 0
                leakage['ignore'] = 0

                # Only notify for project/filepath pairs not seen before.
                if not result_col.count({
                        "project": leakage.get('project'),
                        "filepath": leakage.get("filepath"),
                        "security": 0
                }):
                    mail_notice_list.append(
                        '上传时间:{} 地址: <a href={}>{}/{}</a>'.format(
                            leakage.get('datetime'), leakage.get('link'),
                            leakage.get('project'), leakage.get('filename')))
                    webhook_notice_list.append('[{}/{}]({}) 上传于 {}'.format(
                        leakage.get('project').split('/')[-1],
                        leakage.get('filename'), leakage.get('link'),
                        leakage.get('datetime')))

                result_col.insert_one(leakage)
                logger.info("Gitee 抓取到的结果: {}".format(
                    leakage.get("project_url")))
        except Exception as e:
            # NOTE(review): raise (e) re-raises immediately, so the
            # print/logging/return below are unreachable dead code —
            # probably a debugging leftover; confirm intended behavior.
            raise (e)
            print(e)
            logger.error("Gitee error is {}".format(e))
            return
        logger.info('Gitee抓取: tag is {} keyword is {}, page is {} 成功'.format(
            query.get('tag'), query.get('keyword'), page + 1))
        # Record crawl status/statistics for this query tag.
        query_col.update_one({'tag': query.get('tag')}, {
            '$set': {
                'last': int(time.time()),
                'status': 1,
                'reason': '抓取第{}页成功'.format(page),
                'api_total': totalCount,
                'found_total': result_col.count({'tag': query.get('tag')})
            }
        })
        # Send mail only when the mail integration is enabled and there is news.
        if setting_col.count({
                'key': 'mail',
                'enabled': True
        }) and len(mail_notice_list):
            main_content = '<h2>规则名称: {}</h2><br>{}'.format(
                query.get('tag'), '<br>'.join(mail_notice_list))
            send_mail(main_content)
        logger.info(len(webhook_notice_list))
        webhook_notice(query.get('tag'), webhook_notice_list)