# These imports are assumed to live at module level in the original source:
import re
import logging
from uuid import uuid4

def accesslog_to_sql(self, local=False):
    nad_datas = []
    _accesslog_datas = self.get_latest_accsslog()
    for x in _accesslog_datas:
        obj = x.copy()
        try:
            # Keep only the leading part of time_local (drop the timezone offset).
            obj["time_local"] = get_pydt_based_logdt(
                re.match(r"(.*?)\s(.*)", obj["time_local"]).group(1))
            obj["timestamp"] = obj["time_local"]
            # Fall back to a generated UUID when the entry has no request_id.
            if "request_id" not in obj:
                obj["request_id"] = uuid4()
            # nginx logs "-" when no upstream handled the request; substitute a
            # numeric default so the value fits a numeric column. (The original
            # ternaries discarded the real value in the non-"-" branch and
            # compared request_time against the wrong key.)
            if obj["upstream_response_time"] == "-":
                obj["upstream_response_time"] = "0.0"
            if obj["request_time"] == "-":
                obj["request_time"] = "0.0"
        except (AttributeError, KeyError):
            logging.error("Error: access log entry with an unexpected time_local format: "
                          + obj["time_local"])
            continue
        nad_datas.append(obj)
    success_insert_num = self.many_insert2_accesslog(nad_datas)
    logging.info("Inserted [" + str(success_insert_num) + "] new rows into the access-log SQL table")

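# For reference, a minimal sketch of what get_pydt_based_logdt is assumed to do
# (the helper is not shown in this excerpt): parse the nginx time_local prefix,
# e.g. "14/Aug/1995:12:00:00", into a datetime. The helper name _parse_time_local
# and the format string are assumptions based on the default nginx log format.
from datetime import datetime

def _parse_time_local(prefix):
    # "%d/%b/%Y:%H:%M:%S" matches e.g. "14/Aug/1995:12:00:00"
    return datetime.strptime(prefix, "%d/%b/%Y:%H:%M:%S")
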
def modseclog_to_sql(self):
    nad_datas = self.get_latest_modseclog()
    from django.core.paginator import Paginator
    import json
    p = Paginator(nad_datas, self.MAX_INSERT_NUM)  # paginate into insert-sized batches
    page_count = p.num_pages  # total number of pages
    success_insert_num = 0
    from xsqlmb.api.logstash.utils.get_table_columns import get_waf_alert_log_columns
    cols = get_waf_alert_log_columns()
    _columns = "`" + "`, `".join(cols) + "`"
    _keys = cols
    for page_num in range(1, page_count + 1):  # Paginator pages are 1-based
        nad_list = list(p.page(page_num).object_list)
        try:
            _insert_num = MutiTypesInsets2SqlClass(table_name=WAF_ALERT_LOG_SQL_TABLE).arrays2sql2(
                nad_list, columns_order=_columns, keys_list=_keys)
            success_insert_num += len(_insert_num)
        except Exception:
            logging.error("Alert log formatting hit a key/value mismatch or a duplicate insert.")
        finally:
            # Always persist the raw record keyed by audit_logid, even when the
            # structured insert above fails.
            _detailed_list = [
                [x["audit_logid"], json.dumps(x)] for x in nad_list
            ]
            MutiTypesInsets2SqlClass(table_name=WAF_ALERT_LOG_DETAILED_SQL_TABLE).arrays2sql(
                _detailed_list, columns_order="`audit_logid`,`detaild`"
            )
    logging.info("Inserted [" + str(success_insert_num) + "] new rows into the alert-log SQL table")

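# A minimal, self-contained sketch of the batching pattern above. Paginator works
# on plain lists, so no database is needed; _batches is an illustrative name, not
# part of the original code:
from django.core.paginator import Paginator

def _batches(rows, batch_size):
    pages = Paginator(rows, batch_size)
    for page_num in pages.page_range:  # 1-based, inclusive
        yield list(pages.page(page_num).object_list)

# e.g. list(_batches(list(range(10)), 4)) -> [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]
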
def convert_auditlog_detaild(audit_logid, audit_logid_datas=None):
    """
    Collect all log information for the given audit_logid by merging the
    extracted A/B/F/H sections of the ModSecurity audit log.
    :param audit_logid:
    :param audit_logid_datas:
    :return: merged dict, or None when the A/B/F sections are incomplete or extraction fails
    """
    temp = {"audit_logid": audit_logid}
    try:
        alog, blog, flog, hlog = False, False, False, False
        for x in audit_logid_datas:
            if x["auditlog_signal"] == "A":
                temp = dict(temp, **modsec_Alog_extract(re_upgrade_str(x["auditlog_content"])))
                alog = True
            if x["auditlog_signal"] == "B":
                temp = dict(temp, **modsec_Blog_extract(re_upgrade_str(x["auditlog_content"])))
                blog = True
            if x["auditlog_signal"] == "F":
                temp = dict(temp, **modsec_Flog_extract(re_upgrade_str(x["auditlog_content"])))
                flog = True
            if x["auditlog_signal"] == "H":
                temp = dict(temp, **modsec_Hlog_extract(re_upgrade_str(x["auditlog_content"])))
                hlog = True
        if not hlog:
            # No H (trailer) section: fall back to a generic alert message.
            # The category value is kept verbatim; it means "exception capture".
            temp = dict(temp, **dict(msg="Sensitive Url Payload Alert.", category="异常捕获"))
        if alog and blog and flog:
            return temp
    except Exception:
        from xsqlmb.api.logstash.scripts.txt.get_common_logs import logging
        logging.error("Failed to extract alert log! " + str(audit_logid))
    return None

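# For illustration, a hypothetical sketch of what an A-section extractor such as
# modsec_Alog_extract might look like (the real helpers are not shown in this
# excerpt). In the ModSecurity serial audit log, section A is a header line like:
#   [14/Aug/1995:12:00:00 +0000] uniqueid@abc 203.0.113.7 54321 198.51.100.2 443
import re

def _extract_a_section(content):
    m = re.match(
        r"\[(?P<timestamp>[^\]]+)\]\s+(?P<unique_id>\S+)\s+"
        r"(?P<src_ip>\S+)\s+(?P<src_port>\d+)\s+(?P<dst_ip>\S+)\s+(?P<dst_port>\d+)",
        content)
    return m.groupdict() if m else {}
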
def sql_action(sql):
    connection = pymysql.connect(**MPP_CONFIG)
    cursor = connection.cursor()
    ok = True  # report whether the statement actually committed
    try:
        cursor.execute(sql)
        connection.commit()
    except Exception:
        ok = False
        try:
            from xsqlmb.src.cfgs.logConfig import logging
        except ImportError:
            import logging
        logging.error({"query_sql": sql, "stat": 0})
    finally:
        cursor.close()
        connection.close()
    return ok

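# An alternative sketch using pymysql's cursor context manager, which closes the
# cursor automatically; _sql_action_cm is an illustrative name, and error logging
# is omitted for brevity:
import pymysql

def _sql_action_cm(sql, config):
    connection = pymysql.connect(**config)
    try:
        with connection.cursor() as cursor:  # cursor is closed on exit
            cursor.execute(sql)
        connection.commit()
        return True
    finally:
        connection.close()
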
def from_sql_get_data(sql):
    # Connect to the database
    connection = pymysql.connect(**MPP_CONFIG)
    cursor = connection.cursor()
    data = None
    try:
        # execute() is inside the try so the connection is closed even when the
        # query itself fails (the original ran it before the try/finally).
        cursor.execute(sql)
        res = cursor.fetchall()
        data = {"data": res, "heads": [x[0] for x in cursor.description]}
    except Exception:
        try:
            from xsqlmb.src.cfgs.logConfig import logging
        except ImportError:
            import logging
        logging.error({"query_sql": sql, "stat": 0})
    finally:
        cursor.close()
        connection.close()
    return data

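# Both helpers above interpolate complete SQL strings. When a statement includes
# caller-supplied values, pymysql's parameter binding avoids SQL injection; a
# short sketch, where the table and column names are placeholders:
import pymysql

def _query_by_id(config, audit_logid):
    connection = pymysql.connect(**config)
    try:
        with connection.cursor() as cursor:
            cursor.execute("SELECT * FROM waf_alert_log WHERE audit_logid = %s",
                           (audit_logid,))  # pymysql escapes the bound value
            return cursor.fetchall()
    finally:
        connection.close()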