def sxf_branch_count():
    """Return the number of SXF branches, cached in redis for one day.

    Query args:
        method: SXF API method name, interpolated into SXF_BASE_URL.
        cache_del: when truthy, bypass the redis cache and refetch.

    Returns a JSON Response of shape {"code": 1, "data": <int>}.
    """
    args = request.args.to_dict()
    method = args.get('method')
    url = config.SXF_BASE_URL % (config.SXF_IP, method)
    token = sxf_token()

    # Cache key is derived from the URL with the cache-busting flag stripped,
    # so a forced refresh writes back to the same key normal requests read.
    ru = request.url.replace('&cache_del=1', '')
    redis_key = TokenMaker().generate_token(ru, request.method)
    cache_del = args.get('cache_del')
    if not cache_del:
        cache_data = redis.get(redis_key)
        if cache_data is not None:  # fixed: identity comparison, not != None
            return Response(json.dumps({
                "code": 1,
                "data": demjson.decode(cache_data)
            }, cls=DateEncoder), mimetype='application/json')

    params = {"token": token}
    resp = requests.get(url, params=params, verify=False).json()
    # fixed: len() of the dict directly — .values() was a needless copy
    resp_len = len(resp['data'])

    redis.set(redis_key, demjson.encode(resp_len, strict=False))
    redis.expire(redis_key, 86400)  # cache for 24h
    return Response(json.dumps({
        "code": 1,
        "data": resp_len
    }, cls=DateEncoder), mimetype='application/json')
def sxf_get_everytime():
    """Fetch one SXF metric per day for the last `backcount` days.

    Query args:
        method: SXF API method name.
        target: dot-separated path to drill into each JSON response.
        backcount: how many days back to iterate (one request per day).
        maxCount: passed through to the SXF API.
        cache_del: when truthy, bypass the redis cache.

    Returns {"code": 1, "data": [[date, ...], [value, ...]]} — parallel
    lists of dates and the per-day extracted values, cached for one day.
    """
    # Request parameters
    args = request.args.to_dict()
    method = args.get('method')
    target = args.get('target')
    target = target.split(".") if target else target
    backcount = int(args.get('backcount'))
    today = datetime.datetime.now()
    max_count = args.get('maxCount')
    url = config.SXF_BASE_URL % (config.SXF_IP, method)

    ru = request.url.replace('&cache_del=1', '')
    redis_key = TokenMaker().generate_token(ru, request.method)
    cache_del = args.get('cache_del')
    if not cache_del:
        cache_data = redis.get(redis_key)
        if cache_data is not None:  # fixed: identity comparison, not != None
            return Response(json.dumps({
                "code": 1,
                "data": demjson.decode(cache_data)
            }, cls=DateEncoder), mimetype='application/json')

    token = sxf_token()
    resp_time_list = []
    count_resp_list = []
    # One request per day, walking from `backcount` days ago up to yesterday
    # (equivalent to the original manual `while backcount: ... backcount -= 1`).
    for days_back in range(backcount, 0, -1):
        target_deal = copy.deepcopy(target)
        from_time_pre = today - datetime.timedelta(days=days_back)
        from_time = int(from_time_pre.timestamp())
        to_time = int((today - datetime.timedelta(days=days_back - 1)).timestamp())
        params = {
            "token": token,
            "fromActionTime": str(from_time),
            "toActionTime": str(to_time),
            "maxCount": max_count
        }
        resp = requests.get(url, params=params, verify=False).json()
        # Drill down the dotted target path into the nested response.
        while target_deal:
            resp = resp.get(target_deal.pop(0))
        resp_time_list.append(from_time_pre.date())
        count_resp_list.append(resp)

    redis.set(redis_key,
              demjson.encode([resp_time_list, count_resp_list], strict=False))
    redis.expire(redis_key, 86400)  # cache for 24h
    return Response(json.dumps({
        "code": 1,
        "data": [resp_time_list, count_resp_list]
    }, cls=DateEncoder), mimetype='application/json')
def general_query():
    """Generic DB query endpoint: look up a SQL view by serviceId, bind
    request parameters into it, run it against the configured datasource
    (MySQL or Oracle) and return up to 200 rows, optionally cached.

    Query args:
        serviceId: API name, optionally suffixed "@<post_process_fn>".
        plus any parameters declared in the Cwebapi record.

    Returns {"code": 1, "data": rows} or {"code": -1, "data": "连接失败"}
    on connection failure.
    """
    # Request parameters
    args = request.args.to_dict()
    # API name
    service_id = args.get('serviceId')
    webapi_msg = Cwebapi.query.filter(Cwebapi.api == service_id).first()

    # Cache lookup
    cache_time = webapi_msg.cache_time
    cachekey = TokenMaker().generate_token(config.WEBPICACHEKEY, request.url)
    if cache_time:
        cache_data = redis.get(cachekey)
        if cache_data is not None:  # fixed: identity comparison, not != None
            return Response(json.dumps({
                "code": 1,
                "data": demjson.decode(cache_data)
            }, cls=DateEncoder), mimetype='application/json')

    # Declared parameter names and the SQL template
    params_list = webapi_msg.params.split(',') if webapi_msg.params else None
    sqlview = webapi_msg.sql_view
    # Optional per-parameter transform spec (JSON mapping fn name -> value)
    deal_params = webapi_msg.deal_params

    # Substitute [param] placeholders in the SQL with request values.
    if params_list:
        for name in params_list:
            sqlview = sqlview.replace("[%s]" % name, args.get(name))
        # NOTE(review): deal_params handling is assumed to apply only when
        # params_list exists, since it substitutes the same placeholders.
        if deal_params:
            for name in params_list:
                deal_params = deal_params.replace("[%s]" % name, args.get(name))
            deal_params_dict = demjson.decode(deal_params)
            # e.g. k=area_num, v="南海区": run api_util.<k>(v) and splice
            # the transformed value back into the SQL.
            for k, v in deal_params_dict.items():
                deal_params_met = getattr(api_util, k)
                pre_deal_params_met = deal_params_met(v)
                sqlview = sqlview.replace(v, str(pre_deal_params_met))

    g.args = args
    print('sqlview', sqlview)

    # Datasource record (connection details)
    datasource_id = webapi_msg.datasource_id
    datasource_msg = Datasource.query.filter(Datasource.id == datasource_id,
                                             Datasource.flag == 1).first()
    con_type = datasource_msg.type
    dsn = datasource_msg.connect
    user = datasource_msg.account
    password = datasource_msg.passwd

    if con_type == 'mysql':
        # dsn is "host:port/database"
        host, el = dsn.split(':')
        port, database = el.split('/')
        try:
            conn = pymysql.connect(host=host,
                                   port=int(port),
                                   db=database,
                                   user=user,
                                   password=password)
        except Exception:
            return jsonify({'code': -1, 'data': "连接失败"})
        else:
            cur = conn.cursor(cursor=pymysql.cursors.DictCursor)
            cur.execute(sqlview)
            res = cur.fetchmany(200)
    else:
        try:
            conn = cx_Oracle.connect(user=user,
                                     password=password,
                                     dsn=dsn,
                                     encoding="UTF-8")
        except cx_Oracle.DatabaseError:
            # Plain DSN failed: rebuild it as a SID-style makedsn and retry.
            oracle_ip, oracle_exp = dsn.split(':', 1)
            oracle_port, oracle_sid = oracle_exp.split('/', 1)
            dsn = cx_Oracle.makedsn(oracle_ip, int(oracle_port), sid=oracle_sid)
            conn = cx_Oracle.connect(user=user,
                                     password=password,
                                     dsn=dsn,
                                     encoding="UTF-8")
        except Exception:
            return jsonify({'code': -1, 'data': "连接失败"})
        from utils.dbutils import makeDictFactory
        cur = conn.cursor()
        cur.execute(sqlview)
        cur.rowfactory = makeDictFactory(cur)
        res = cur.fetchmany(200)
    cur.close()
    conn.close()

    # Optional post-processing: serviceId may be "name@fn" where fn is an
    # api_util function applied to the result set.
    # fixed: partition() never raises when '@' is absent (split('@', 1)
    # raised ValueError on unpacking for plain service ids).
    _, _, post_fn = service_id.partition('@')
    if post_fn:
        deal_data_met = getattr(api_util, post_fn)
        res = deal_data_met(res)

    # Write-through cache
    if cache_time:
        redis.set(cachekey, demjson.encode(res))
        redis.expire(cachekey, cache_time)
    return Response(json.dumps({
        "code": 1,
        "data": res
    }, cls=DateEncoder), mimetype='application/json')
def sxf_riskeven_by_field():
    """List de-duplicated SXF risk events (eventType == 1), one summary
    dict per unique asset IP, optionally windowed by `interval` days.

    Query args:
        method, target, maxCount: SXF API routing/drill-down parameters.
        fromTime/toTime: unix timestamps; default to 8 and 1 days ago.
        interval: when non-zero, split [fromTime, toTime] into windows of
            this many days and issue one request per window.
        cache_del: when truthy, bypass the redis cache.

    Returns {"code": 1, "data": [event summary, ...]} cached for one day.
    """
    args = request.args.to_dict()
    method = args.get('method')
    target = args.get('target')
    target = target.split(".") if target else target
    from_time = args.get('fromTime')
    from_time = int(from_time) if from_time else int(
        (datetime.datetime.now() - datetime.timedelta(days=8)).timestamp())
    to_time = args.get('toTime')
    to_time = int(to_time) if to_time else int(
        (datetime.datetime.now() - datetime.timedelta(days=1)).timestamp())
    max_count = args.get('maxCount')
    url = config.SXF_BASE_URL % (config.SXF_IP, method)
    interval = int(args.get('interval'))
    token = sxf_token()

    ru = request.url.replace('&cache_del=1', '')
    redis_key = TokenMaker().generate_token(ru, request.method)
    cache_del = args.get('cache_del')
    if not cache_del:
        cache_data = redis.get(redis_key)
        if cache_data is not None:  # fixed: identity comparison, not != None
            return Response(json.dumps({
                "code": 1,
                "data": demjson.decode(cache_data)
            }, cls=DateEncoder), mimetype='application/json')

    resp_data = []
    duplicate_moval = []       # asset IPs already seen (dedupe list)
    calculation_list = []      # events pending flattening into resp_data

    def _harvest(events):
        # Shared between both branches below (was duplicated verbatim):
        # keep first occurrence per IP with eventType == 1, then flatten
        # each kept event into the summary fields.
        while len(events):
            bad_guy = events.pop()
            if bad_guy['ip'] not in duplicate_moval and \
                    bad_guy['eventType'] == 1:
                duplicate_moval.append(bad_guy['ip'])
                calculation_list.append(bad_guy)
        while len(calculation_list):
            cl = calculation_list.pop()
            try:
                resp_data.append({
                    'eventDes': cl['eventDes'],
                    'dealStatus': cl['dealStatus'],
                    'damage': cl['damage'],
                    'principle': cl['principle'],
                    'solution': cl['solution'],
                    'tag': cl['tag'],
                    'hostRisk': cl['hostRisk']
                })
            except Exception:  # fixed: unused `as e` binding dropped
                # Skip events missing any expected field.
                continue

    if interval:
        from_datetime = datetime.datetime.fromtimestamp(from_time)  # ts -> datetime
        to_datetime = datetime.datetime.fromtimestamp(to_time)      # ts -> datetime
        iter_time = ceil((to_datetime - from_datetime).days / interval)
        while iter_time:
            iter_target = copy.deepcopy(target)
            from_time = int(from_datetime.timestamp())
            to_mid_time = int(
                (from_datetime + datetime.timedelta(days=interval)).timestamp())
            params = {
                "token": token,
                "fromActionTime": str(from_time),
                "toActionTime": str(to_mid_time),
                "maxCount": max_count
            }
            resp = requests.get(url, params=params, verify=False).json()
            while iter_target:
                resp = resp.get(iter_target.pop(0))
            _harvest(resp)
            from_datetime = from_datetime + datetime.timedelta(days=interval)
            iter_time -= 1
    else:
        params = {
            "token": token,
            "fromActionTime": str(from_time),
            "toActionTime": str(to_time),
            "maxCount": max_count
        }
        resp = requests.get(url, params=params, verify=False).json()
        while target:
            resp = resp.get(target.pop(0))
        _harvest(resp)

    redis.set(redis_key, demjson.encode(resp_data, strict=False))
    redis.expire(redis_key, 86400)  # cache for 24h
    return Response(json.dumps({
        "code": 1,
        "data": resp_data
    }, cls=DateEncoder), mimetype='application/json')
def sxf_riskeven_detail_list():
    """For each unique risky asset IP, fetch its SXF "detail" top10 and
    collect level-3 attack records; return the top 20 by attackCount,
    ordered by recordTime descending.

    Query args:
        method, target, maxCount: SXF API routing/drill-down parameters.
        fromTime/toTime: unix timestamps; default to 8 and 1 days ago.
        interval: when non-zero, split the range into windows of this many
            days and issue one listing request per window.
        cache_del: when truthy, bypass the redis cache.

    Returns {"code": 1, "data": [attack record, ...]} cached for one day.
    """
    args = request.args.to_dict()
    method = args.get('method')
    target = args.get('target')
    target = target.split(".") if target else target
    from_time = args.get('fromTime')
    from_time = int(from_time) if from_time else int(
        (datetime.datetime.now() - datetime.timedelta(days=8)).timestamp())
    to_time = args.get('toTime')
    to_time = int(to_time) if to_time else int(
        (datetime.datetime.now() - datetime.timedelta(days=1)).timestamp())
    max_count = args.get('maxCount')
    url = config.SXF_BASE_URL % (config.SXF_IP, method)
    interval = int(args.get('interval'))
    token = sxf_token()

    ru = request.url.replace('&cache_del=1', '')
    redis_key = TokenMaker().generate_token(ru, request.method)
    cache_del = args.get('cache_del')
    if not cache_del:
        cache_data = redis.get(redis_key)
        if cache_data is not None:  # fixed: identity comparison, not != None
            return Response(json.dumps({
                "code": 1,
                "data": demjson.decode(cache_data)
            }, cls=DateEncoder), mimetype='application/json')

    resp_data = []
    duplicate_moval = []       # asset IPs already seen (dedupe list)
    calculation_list = []      # events pending the per-asset detail fetch

    def _collect(events):
        # Shared between both branches below (was duplicated verbatim):
        # dedupe by IP, then POST to the "detail" endpoint per event and
        # keep level-3 entries from its data.top10 list.
        while len(events):
            bad_guy = events.pop()
            if bad_guy['ip'] not in duplicate_moval:
                duplicate_moval.append(bad_guy['ip'])
                calculation_list.append(bad_guy)
        while len(calculation_list):
            cl = calculation_list.pop()
            data = {
                "asset_ip": str(cl["ip"]),
                "branch_id": int(cl["branchId"]),
                "event_key": str(cl["eventKey"]),
                "rule_id": int(cl["ruleId"]),
                "data_type": int(cl["type"]),
                "group": int(cl["groupId"]),
                "token": token
            }
            respp = requests.post(
                config.SXF_BASE_URL % (config.SXF_IP, "detail"),
                verify=False, json=data).json()["data"]["top10"]
            while respp:
                bigbadguy = respp.pop()
                if bigbadguy['level'] == 3:
                    try:
                        resp_data.append({
                            "recordTime": bigbadguy["recordTime"],
                            "srcIp": bigbadguy["srcIp"],
                            "dstIp": bigbadguy["dstIp"],
                            "attackType": bigbadguy["attackType"],
                            "level": bigbadguy["level"],
                            "attackCount": bigbadguy["attackCount"]
                        })
                    except Exception:  # fixed: unused `as e` binding dropped
                        # Skip records missing any expected field.
                        continue

    if interval:
        from_datetime = datetime.datetime.fromtimestamp(from_time)  # ts -> datetime
        to_datetime = datetime.datetime.fromtimestamp(to_time)      # ts -> datetime
        iter_time = ceil((to_datetime - from_datetime).days / interval)
        while iter_time:
            iter_target = copy.deepcopy(target)
            from_time = int(from_datetime.timestamp())
            to_mid_time = int(
                (from_datetime + datetime.timedelta(days=interval)).timestamp())
            params = {
                "token": token,
                "fromActionTime": str(from_time),
                "toActionTime": str(to_mid_time),
                "maxCount": max_count
            }
            resp = requests.get(url, params=params, verify=False).json()
            while iter_target:
                resp = resp.get(iter_target.pop(0))
            _collect(resp)
            from_datetime = from_datetime + datetime.timedelta(days=interval)
            iter_time -= 1
    else:
        params = {
            "token": token,
            "fromActionTime": str(from_time),
            "toActionTime": str(to_time),
            "maxCount": max_count
        }
        resp = requests.get(url, params=params, verify=False).json()
        while target:
            resp = resp.get(target.pop(0))
        _collect(resp)

    # Top 20 by attack count, then presented newest-first.
    resp_data = sorted(resp_data, key=lambda k: k['attackCount'], reverse=True)
    resp_data = sorted(resp_data[:20], key=lambda k: k['recordTime'],
                       reverse=True)

    redis.set(redis_key, demjson.encode(resp_data, strict=False))
    redis.expire(redis_key, 86400)  # cache for 24h
    return Response(json.dumps({
        "code": 1,
        "data": resp_data
    }, cls=DateEncoder), mimetype='application/json')
def sxf_get_once():
    """Fetch a single SXF counter, either for the whole [fromTime, toTime]
    range or summed over `interval`-day windows, cached for one day.

    Query args:
        method, target, maxCount: SXF API routing/drill-down parameters.
        fromTime/toTime: unix timestamps; default to 8 and 1 days ago.
        interval: when non-zero, sum one request per interval-day window.
        cache_del: when truthy, bypass the redis cache.

    Returns {"code": 1, "data": <count>}.
    """
    # Request parameters
    args = request.args.to_dict()
    method = args.get('method')
    target = args.get('target')
    target = target.split(".") if target else target
    from_time = args.get('fromTime')
    from_time = int(from_time) if from_time else int(
        (datetime.datetime.now() - datetime.timedelta(days=8)).timestamp())
    to_time = args.get('toTime')
    to_time = int(to_time) if to_time else int(
        (datetime.datetime.now() - datetime.timedelta(days=1)).timestamp())
    max_count = args.get('maxCount')
    url = config.SXF_BASE_URL % (config.SXF_IP, method)
    interval = int(args.get('interval'))
    token = sxf_token()
    count_resp = 0

    ru = request.url.replace('&cache_del=1', '')
    redis_key = TokenMaker().generate_token(ru, request.method)
    cache_del = args.get('cache_del')
    if not cache_del:
        cache_data = redis.get(redis_key)
        if cache_data is not None:  # fixed: identity comparison, not != None
            # fixed: decode the cached value like every sibling endpoint —
            # the raw redis bytes are not JSON-serializable via DateEncoder.
            return Response(json.dumps({
                "code": 1,
                "data": demjson.decode(cache_data)
            }, cls=DateEncoder), mimetype='application/json')

    if interval:
        from_datetime = datetime.datetime.fromtimestamp(from_time)  # ts -> datetime
        to_datetime = datetime.datetime.fromtimestamp(to_time)      # ts -> datetime
        iter_time = ceil((to_datetime - from_datetime).days / interval)
        while iter_time:
            iter_target = copy.deepcopy(target)
            from_time = int(from_datetime.timestamp())
            to_mid_time = int(
                (from_datetime + datetime.timedelta(days=interval)).timestamp())
            # fixed: send timestamps as strings, consistent with the
            # non-interval branch and the other SXF endpoints.
            params = {
                "token": token,
                "fromActionTime": str(from_time),
                "toActionTime": str(to_mid_time),
                "maxCount": max_count
            }
            resp = requests.get(url, params=params, verify=False).json()
            while iter_target:
                resp = resp.get(iter_target.pop(0))
            count_resp += int(resp)
            from_datetime = from_datetime + datetime.timedelta(days=interval)
            iter_time -= 1
    else:
        params = {
            "token": token,
            "fromActionTime": str(from_time),
            "toActionTime": str(to_time),
            "maxCount": max_count
        }
        resp = requests.get(url, params=params, verify=False).json()
        while target:
            resp = resp.get(target.pop(0))
        count_resp = resp

    # fixed: encode before caching so the cache-hit path round-trips.
    redis.set(redis_key, demjson.encode(count_resp, strict=False))
    redis.expire(redis_key, 86400)  # cache for 24h
    return Response(json.dumps({
        "code": 1,
        "data": count_resp
    }, cls=DateEncoder), mimetype='application/json')