Example #1
def inital_all_logs():
    # Empty the SQL log tables.
    from xsqlmb.src.ltool.sqlconn import sql_action
    sql_action("delete from waf_alert_log;")
    sql_action("delete from waf_access_log;")
    sql_action("delete from alertlog_detail;")

    # Empty the Mongo log collections; remove() is pymongo's legacy delete call.
    from xsqlmb.src.ltool.mongo import MongoConn
    #MongoConn().db["waf_access_log"].delete()
    #MongoConn().db["waf_alert_log"].delete()
    MongoConn().db["script_log"].remove()
    MongoConn().db["alertlog_detail"].remove()
Example #2
def get_all_info_dependon_auditid(audit_logid):
    from xsqlmb.src.ltool.mongo import MongoConn
    from xsqlmb.api.logstash.cfgs.configs import WAF_ALERT_LOG_DETAILED_SQL_TABLE

    # Fetch the detailed record for this audit id; the collection stores the full
    # alert as a JSON string under the "detaild" key.
    _data = list(MongoConn().db[WAF_ALERT_LOG_DETAILED_SQL_TABLE].find(
        {"audit_logid": audit_logid}))
    if len(_data) > 0:
        return _data[0]["detaild"]
    return None
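
The detailed records are stored with the whole alert serialized by json.dumps under the "detaild" key (see Example #5 below), so callers usually decode the returned string. A minimal usage sketch with a placeholder audit id:

import json

detail = get_all_info_dependon_auditid("some-audit-logid")  # placeholder id
record = json.loads(detail) if detail else None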
Example #3
    def get_auditlogs(self):
        lines = []

        _datas = MongoConn().query_timestamp_datas(table_name=self.table_name)
        if not _datas:
            return []
        for data in _datas:
            lines.append(data["message"] + "\n")

        res = []
        pattern = "---(.*?)---(.*?)--.*"
        middle_content = ""
        temp_auditlog_id, temp_auditlog_signal, temp_auditlog_startline, temp_auditlog_endline = "", "", "", ""
        for line_index in range(len(lines)):
            data = re.match(pattern, lines[line_index])
            if data:

                if middle_content in ["", "\n"]:
                    # First boundary line: no intermediate content has been collected
                    # yet, so just remember where this audit entry starts.
                    temp_auditlog_id = data.group(1)
                    temp_auditlog_signal = data.group(2)
                    temp_auditlog_startline = line_index
                else:
                    # Later boundary line: the intermediate content collected so far
                    # belongs to the previous audit entry, so flush it.
                    temp_auditlog_endline = line_index - 1
                    res.append(
                        dict(
                            audit_logid=temp_auditlog_id,
                            auditlog_signal=temp_auditlog_signal,
                            auditlog_startline=temp_auditlog_startline,
                            auditlog_endline=temp_auditlog_endline,
                            auditlog_content=middle_content,
                        ))
                    # Reset the buffer after flushing, then start tracking the new
                    # entry from the current boundary line.
                    middle_content = ""
                    temp_auditlog_id = data.group(1)
                    temp_auditlog_signal = data.group(2)
                    temp_auditlog_startline = line_index
                continue

            middle_content += lines[line_index]
        # Note: if the audit_logid captured by this trailing fallback does not meet
        # our conditions, the record should be dropped.
        if temp_auditlog_id != "":
            res.append(
                dict(
                    audit_logid=temp_auditlog_id,
                    auditlog_signal=temp_auditlog_signal,
                    auditlog_startline=temp_auditlog_startline,
                    auditlog_endline=temp_auditlog_startline,
                    auditlog_content="",
                ))
        return res
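
The pattern "---(.*?)---(.*?)--.*" above captures an audit id and a section marker from each boundary line. A small sketch with a made-up boundary line (the real ModSecurity boundary format may differ) shows which groups feed audit_logid and auditlog_signal:

import re

pattern = "---(.*?)---(.*?)--.*"
sample = "---Xa12Bc34---A--"   # hypothetical boundary line
match = re.match(pattern, sample)
print(match.group(1))          # Xa12Bc34 -> audit_logid
print(match.group(2))          # A        -> auditlog_signal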
Example #4
def test_save():
    inital_all_logs()

    from xsqlmb.src.ltool.mongo import MongoConn
    _datas = MongoConn().db["waf_alert_log"].find()

    from xsqlmb.api.logstash.scripts.extract_log_f_mongo import ExtractLogFromMongo
    _data = ExtractLogFromMongo(table_name="waf_alert_log").modseclog_to_detaild()

    print(_data)
    with open("test.txt", "w+", encoding="utf-8") as f:
        for x in _datas:
            f.write(x["message"] + "\n")
Example #5
    def modseclog_to_sql(self):
        # from datetime import datetime
        # default_time = datetime(1995, 8, 14)
        nad_datas = self.get_latest_modseclog()

        from django.core.paginator import Paginator
        p = Paginator(nad_datas, self.MAX_INSERT_NUM)  # batch the records page by page
        page_count = p.num_pages  # total number of pages
        success_insert_num = 0

        from xsqlmb.api.logstash.utils.get_table_columns import get_waf_alert_log_columns
        cols = get_waf_alert_log_columns()

        _columns = "`" + "`, `".join(cols) + "`"
        _keys = cols

        for page_num in range(1, page_count + 1):
            nad_list = list(p.page(page_num).object_list)
            try:
                _insert_num = MutiTypesInsets2SqlClass(
                    table_name=WAF_ALERT_LOG_SQL_TABLE).arrays2sql2(
                        nad_list, columns_order=_columns, keys_list=_keys)
                success_insert_num += _insert_num

            finally:
                import json

                # _detailed_list = [ [x["audit_logid"], json.dumps(x)] for x in nad_list ]
                # MutiTypesInsets2SqlClass(table_name=WAF_ALERT_LOG_DETAILED_SQL_TABLE).arrays2sql(
                #     _detailed_list, columns_order="`audit_logid`,`detaild`"
                # )

                _detailed_list = [
                    dict(audit_logid=x["audit_logid"], detaild=json.dumps(x))
                    for x in nad_list
                ]

                from xsqlmb.src.ltool.mongo import MongoConn
                from xsqlmb.api.logstash.cfgs.configs import WAF_ALERT_LOG_DETAILED_SQL_TABLE
                MongoConn().insert_data(WAF_ALERT_LOG_DETAILED_SQL_TABLE,
                                        _detailed_list)

        logging.info("Successfully inserted " + str(success_insert_num) + " new records into the alert-log SQL database")
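
Django's Paginator is used above purely as a batching helper: MAX_INSERT_NUM records per page, one insert call per page. A standalone sketch of that chunking idea (Django must be installed; the sizes here are arbitrary):

from django.core.paginator import Paginator

items = list(range(10))
p = Paginator(items, 4)                    # pages of at most 4 items
for page_num in range(1, p.num_pages + 1):
    batch = list(p.page(page_num).object_list)
    print(page_num, batch)                 # 1 [0, 1, 2, 3] / 2 [4, 5, 6, 7] / 3 [8, 9]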
Example #6
    def get_access_logs(self):
        lines = []
        _datas = MongoConn().query_timestamp_datas(table_name=self.table_name)
        if not _datas:
            return [], 0
        for data in _datas:
            lines.append(data["message"])

        # Strip the syslog sender's log-header marker first; the regular flow follows.
        import json

        res = []
        for _line in lines:
            # Python 3's json.loads takes no encoding argument; escape bare
            # backslashes so the raw log line parses as valid JSON.
            alog = json.loads(_line.replace('\\', "\\\\"))

            ua_dict = get_ua_and_os_from_User_Agent(alog['http_user_agent'])
            _temp = dict(alog, **ua_dict)

            res.append(_temp)

        return res, len(_datas)
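
The _line.replace('\\', "\\\\") step exists because raw log lines can contain bare backslashes, which are not valid inside JSON strings; doubling them first lets json.loads accept the line. A small sketch with a made-up user-agent value:

import json

raw = r'{"http_user_agent": "curl\7.61"}'   # made-up line with a bare backslash
# json.loads(raw) raises json.JSONDecodeError ("Invalid \escape")
fixed = json.loads(raw.replace('\\', '\\\\'))
print(fixed["http_user_agent"])             # curl\7.61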
Example #7
def test_show(table_name='waf_access_log'):
    from xsqlmb.src.ltool.mongo import MongoConn
    MongoConn().show_data(table_name)