def seclog_condition_search(request):
    """Search security-log entries by condition, paginated 10 rows per page.

    Optional keys in ``request.data``: src_ip, category, split_type
    (default ``'date'``), start_time, end_time, limit, audit_date_value
    (date selector applied after clustering), page (default 1).
    Returns a DRF Response with the rows plus paging metadata.
    """
    data = request.data
    # dict.get replaces the verbose `x if k in data.keys() else default` chains.
    instance = dict(
        src_ip=data.get("src_ip"),
        category=data.get("category"),
        split_type=data.get("split_type", 'date'),
        start_time=data.get("start_time"),
        end_time=data.get("end_time"),
        limit=int(data["limit"]) if "limit" in data else None,
        audit_date_value=data.get("audit_date_value"),
    )
    from wafmanage.utils.db_utils import from_sql_get_data

    _objs = from_sql_get_data(seclog_search_condition(**instance))["data"]
    # Paginate the raw result set.
    pager = int(data.get("page", 1))
    p = Paginator(_objs, 10)
    all_counts = p.count      # total number of rows
    page_count = p.num_pages  # total number of pages
    objs = p.page(pager).object_list
    return Response({
        "search_params": data,
        "res": objs,
        "page_count": page_count,
        "pager": pager,
        "all_counts": all_counts,
    })
def ip_attack_catecount_splitby_datetype(request):
    """Count attack categories per source IP split by date granularity, paginated.

    Optional keys in ``request.data``: src_ip, category, split_type
    (default ``'date'``), start_time, end_time, page (default 1).
    """
    data = request.data
    # dict.get replaces the verbose `x if k in data.keys() else default` chains.
    instance = dict(
        src_ip=data.get("src_ip"),
        category=data.get("category"),
        split_type=data.get("split_type", 'date'),
        start_time=data.get("start_time"),
        end_time=data.get("end_time"),
    )
    from wafmanage.utils.db_utils import from_sql_get_data

    _objs = from_sql_get_data(seclog_search3(**instance))["data"]
    # Paginate the raw rows, 10 per page.
    pager = int(data.get("page", 1))
    p = Paginator(_objs, 10)
    all_counts = p.count      # total number of rows
    page_count = p.num_pages  # total number of pages
    objs = p.page(pager).object_list
    return Response({
        "search_params": data,
        "res": objs,
        "page_count": page_count,
        "pager": pager,
        "all_counts": all_counts,
    })
def jla_search1(request):
    """Aggregate access-log statistics grouped by `type` (default remote_addr), paginated.

    Rejects a `type` value that is not among the grouping columns reported
    by get_jl_accsslog.
    """
    data = request.data
    instance = dict(
        type=data.get("type", "remote_addr"),
        accesslog_table=LocalAccessLogTable,
        daysdelta=int(data.get("daysdelta", 90)),
        limit=int(data.get("limit", 100)),
        start_time=data.get("start_time"),
        end_time=data.get("end_time"),
    )
    query_sql, _types = get_jl_accsslog(**instance)
    if instance["type"] not in _types:
        # BUG FIX: Response's second positional argument is `status`; the
        # original passed the reason dict there. Merge into one payload.
        return Response({"stat": False, "reason": "输入类型错误"})
    from wafmanage.utils.db_utils import from_sql_get_data

    _objs = from_sql_get_data(query_sql)["data"]
    pager = int(data.get("page", 1))
    p = Paginator(_objs, 10)
    all_counts = p.count      # total number of rows
    page_count = p.num_pages  # total number of pages
    objs = p.page(pager).object_list
    return Response({
        "search_params": data,
        "res": objs,
        "page_count": page_count,
        "pager": pager,
        "all_counts": all_counts,
    })
def get_data_from_sqls(key, limit=15):
    """Return the `limit` most frequent values of column *key* in the access-log table."""
    from wafmanage.utils.db_utils import from_sql_get_data

    # NOTE(review): `key` is interpolated into raw SQL — callers must pass a
    # trusted column name, never user-supplied input.
    query_sql = """select {key}, count({key}) as c from {table} group by {key} order by c desc limit {limit};""".format(
        table=LocalAccessLogTable, limit=limit, key=key)
    rows = from_sql_get_data(query_sql)["data"]
    return [row[key] for row in rows]
def accsslog_search(request):
    """Search the access log with optional per-field filters, paginated 10 rows/page.

    All filter keys in ``request.data`` are optional; missing keys fall back
    to the defaults below and are forwarded to accsslog_search2.
    """
    data = request.data
    # dict.get replaces the verbose `x if k in data.keys() else default` chains.
    instance = dict(
        TableName=LocalAccessLogTable,
        request_method=data.get("request_method"),
        request_version=data.get("request_version"),
        remote_addr=data.get("remote_addr"),
        remote_user=data.get("remote_user"),
        request_url=data.get("request_url"),
        device=data.get("device"),
        os=data.get("os"),
        user_agent=data.get("user_agent"),
        status=data.get("status"),
        body_bytes_sent=data.get("body_bytes_sent"),
        # NOTE(review): the original falls back to True (a boolean, not a
        # number) when "limit" is absent — kept as-is; verify accsslog_search2
        # really expects that.
        limit=int(data["limit"]) if "limit" in data else True,
        is_ignore_static=data.get("is_ignore_static", True),
        limit_static=data.get("limit_static", False),
        start_time=data.get("start_time"),
        end_time=data.get("end_time"),
        orderby_dt=data.get("orderby_dt"),
    )
    from wafmanage.utils.db_utils import from_sql_get_data
    from .prescan import accsslog_search2

    _objs = from_sql_get_data(accsslog_search2(**instance))["data"]
    pager = int(data.get("page", 1))
    p = Paginator(_objs, 10)
    all_counts = p.count      # total number of rows
    page_count = p.num_pages  # total number of pages
    objs = p.page(pager).object_list
    return Response({
        "search_params": data,
        "res": objs,
        "page_count": page_count,
        "pager": pager,
        "all_counts": all_counts,
    })
def seclog_search(request):
    """Search the security (ModSecurity) log with optional filters, paginated 10 rows/page.

    Optional keys in ``request.data``: request_method, request_version,
    src_host, src_ip, request_url, category, content_type, resp_code,
    limit (default 100), start_time, end_time, page (default 1).
    """
    data = request.data
    # dict.get replaces the verbose `x if k in data.keys() else default` chains.
    instance = dict(
        request_method=data.get("request_method"),
        request_version=data.get("request_version"),
        src_host=data.get("src_host"),
        src_ip=data.get("src_ip"),
        request_url=data.get("request_url"),
        category=data.get("category"),  # attack category
        content_type=data.get("content_type"),
        resp_code=int(data["resp_code"]) if "resp_code" in data else None,
        limit=int(data.get("limit", 100)),
        start_time=data.get("start_time"),
        end_time=data.get("end_time"),
    )
    from wafmanage.utils.db_utils import from_sql_get_data
    # Only seclog_search2 is used; the unused `seclog_search` import also
    # shadowed this view's own name, so it was dropped.
    from .seclog_search import seclog_search2

    _objs = from_sql_get_data(seclog_search2(**instance))["data"]
    # Paginate the raw rows.
    pager = int(data.get("page", 1))
    p = Paginator(_objs, 10)
    all_counts = p.count      # total number of rows
    page_count = p.num_pages  # total number of pages
    objs = p.page(pager).object_list
    return Response({
        "search_params": data,
        "res": objs,
        "page_count": page_count,
        "pager": pager,
        "all_counts": all_counts,
    })
def update(self, validated_data):
    """Build the filter dict from validated data and run the static-suffix query.

    Returns the raw rows produced by get_static_suffix2's SQL.
    """
    instance = dict(
        request_method=validated_data.get('request_method'),
        request_version=validated_data.get('request_version'),
        remote_addr=validated_data.get('remote_addr'),
        # BUG FIX: originally read key 'language' — a copy-paste slip.
        remote_user=validated_data.get('remote_user'),
        request_url=validated_data.get('request_url'),
        device=validated_data.get('device'),
        os=validated_data.get('os'),
        user_agent=validated_data.get('user_agent'),
        status=validated_data.get('status'),
        # BUG FIX: originally read key 'status' a second time.
        body_bytes_sent=validated_data.get('body_bytes_sent'),
        is_limit10=validated_data.get('is_limit10'),
        is_ignore_static=validated_data.get('is_ignore_static'),
        limit_static=validated_data.get('limit_static'),
    )
    from wafmanage.utils.db_utils import from_sql_get_data
    from .prescan import get_static_suffix2

    res_data = from_sql_get_data(get_static_suffix2(**instance))["data"]
    return res_data
def accesslog_to_sql(self):
    """Copy new access-log documents from MongoDB into MySQL (NginxAccessLogDetail).

    Documents whose request_id already exists in MySQL are skipped. On the
    first document whose time_local format is unrecognized the whole run is
    aborted (original behavior, see note below).
    """
    from utils.django_module import django_setup
    django_setup()
    from phaser1.models import NginxAccessLogDetail
    from wafmanage.utils.db_utils import from_sql_get_data

    try:
        query_sql = "select request_id from accesslog;"
        # Set comprehension: O(1) membership tests instead of scanning a list
        # for every Mongo document.
        have_into_mysql_ids = {
            x["request_id"] for x in from_sql_get_data(query_sql)["data"]
        }
    except Exception:
        # Best-effort: table may not exist yet on first run.
        have_into_mysql_ids = set()

    na_lists = []
    for x in self.db[AccessLogSaveTableName].find():
        obj = x.copy()
        del obj["_id"]  # Mongo's _id is not a column of the MySQL model
        if obj["request_id"] in have_into_mysql_ids:
            continue
        try:
            # Raw string for the regex; keep only the leading timestamp part.
            obj["time_local"] = get_pydt_based_logdt(
                re.match(r"(.*?)\s(.*)", obj["time_local"]).group(1))
        except Exception:
            # logging.warn is a deprecated alias of logging.warning.
            logging.warning("Error:存在AccessLog日志不一样的正则 " + obj["time_local"])
            # NOTE(review): original behavior kept — aborts the entire import
            # on one bad record; consider `continue` to skip just this one.
            return
        na_lists.append(NginxAccessLogDetail(**obj))
    if na_lists:
        NginxAccessLogDetail.objects.bulk_create(na_lists)
        logging.info("3.0: 写入【" + str(len(na_lists)) + "】条访问日志到MYSQL数据库")
def index_view_cates(request):
    """Return attack-category counts derived from the joined ModSecurity log tables."""
    # One large nested-join query; executed verbatim and returned as-is.
    category_count_sql = """select category, count(category) as ccate from (select category from (select tt4.*, modsechinfo.matched_data from (select rulecate.category, t4.* from (select audit_logid, any_value(cate_id) as cate_id, max(ccate) as mc, any_value(hid) as hid, any_value(cn_msg) as cn_msg, any_value(rule_id) as m_rid from (select audit_logid, count(cate_id) as ccate, cate_id, any_value(hid) as hid, any_value(cn_msg) as cn_msg, any_value(rule_id) as rule_id from (select c.*,ruletxt.cn_msg, if(isnull(ruletxt.cate_id), 404, ruletxt.cate_id) as cate_id from (select a11.*, modsechinfo.matched_data,modsechinfo.rule_id from (select a1.*, b1.modseclogphaserhinfo_id as hid from (select * from modseclog where id >0 order by audit_time desc ) as a1 left join modseclog_hloginfo as b1 on a1.id = b1.modseclogdetail_id) as a11 left join modsechinfo on modsechinfo.id = a11.hid) as c left join ruletxt on ruletxt.rule_id=c.rule_id) as main_t group by audit_logid, cate_id ) as t2 group by audit_logid ) as t4 left join rulecate on rulecate.id=t4.cate_id) as tt4 left join modsechinfo on modsechinfo.id = tt4.hid ) as t5 left join modseclog on modseclog.audit_logid=t5.audit_logid ) as t_cate group by category order by ccate desc;"""
    rows = from_sql_get_data(category_count_sql)["data"]
    return Response({"datas": rows})
def jla_search2(request):
    """Aggregate transferred-bytes / visit-count statistics per time window, paginated.

    Optional keys in ``request.data``: limit_bytes (default 10240),
    limit_vtimes (default 10240), remote_addrs, split_type, daysdelta
    (default 90), limit (default 100), start_time, end_time, extra,
    page (default 1).
    """
    data = request.data
    # dict.get replaces the verbose `x if k in data.keys() else default` chains.
    instance = dict(
        limit_bytes=int(data.get("limit_bytes", 10240)),
        limit_vtimes=int(data.get("limit_vtimes", 10240)),
        remote_addrs=data.get("remote_addrs"),
        split_type=data.get("split_type"),
        accesslog_table=LocalAccessLogTable,
        daysdelta=int(data.get("daysdelta", 90)),
        limit=int(data.get("limit", 100)),
        start_time=data.get("start_time"),
        end_time=data.get("end_time"),
        extra=data.get("extra"),
    )
    query_sql = tj_bytes_timedelta(**instance)
    from wafmanage.utils.db_utils import from_sql_get_data

    _objs = from_sql_get_data(query_sql)["data"]
    pager = int(data.get("page", 1))
    p = Paginator(_objs, 10)
    all_counts = p.count      # total number of rows
    page_count = p.num_pages  # total number of pages
    objs = p.page(pager).object_list
    return Response({
        "search_params": data,
        "res": objs,
        "page_count": page_count,
        "pager": pager,
        "all_counts": all_counts,
    })