def patch_before(sender, request, instance, **kwargs):
    '''Before updating a record
    '''
    logger.info('signals(patch_before)')
    if request.data.get('group'):
        raise Exception(
            'The field (group) is linked automatically by the system aggregation rules '
            'and may not be modified by the user; do not include it in PATCH requests')
def post_before(sender, request, **kwargs):
    '''Before creating a record
    '''
    logger.info('signals(post_before) received a (%s) record' % sender)
    group = request.data.get('group')
    if group:
        raise Exception(
            'The field (group) is linked automatically by the system aggregation rules '
            'and may not be set by the user; do not include it in POST requests')
def post_save(sender, instance, **kwargs):
    '''After a POSTed record has been saved
    '''
    logger.info('signals(post_save)')
    # Push the new data onto the MQ that feeds the frontend WebSocket
    EmergencyMQ.send(mapping_model.get(sender).get('group'))
def post_before(sender, request, **kwargs):
    '''Before creating a record
    '''
    rule = request.data.get('rule')
    logger.info('signals(post_before) received a (%s) record' % sender)
    # Validate the rule definition
    result_valid = ruler.ruler.verify_rule(rule)
    logger.info('signals(post_before) rule validation result (%s)' % result_valid)
    if not result_valid:
        raise Exception('invalid rule')
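# The hook functions above and below (post_before, patch_before, valid_after, post_save,
# patch_save) are fired by the lens api() view further down via utils.<name>.send(...),
# which is the Django custom-signal API. A minimal sketch of what the utils module
# presumably defines and how a handler is connected; the exact senders are assumptions.
#
# utils.py (sketch):
import django.dispatch

post_before = django.dispatch.Signal()   # sent with request=...
patch_before = django.dispatch.Signal()  # sent with request=..., instance=...
valid_after = django.dispatch.Signal()   # sent with request=..., instance=...
patch_save = django.dispatch.Signal()    # sent with request=..., instance=...

# Connecting one of the handlers defined in this module to a specific alarm model, e.g.:
# utils.post_before.connect(post_before, sender=models.EmergencyOmnibus)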
def emergencyovertime():
    '''Insert a test record into the EmergencyOvertime table.

    Called by a celery periodic task so that the whole pipeline can be
    exercised in environments without a kafka feed.
    '''
    logger.info('\nStage (schedule EmergencyOvertime post): dev-environment test task inserting a record')
    data = {
        "is_checkbox": True,
        "hash_id": "123456qwert",
        # "group": 1,
        "storage_timestamp": "1602479851.062",
        "start_time": "10:13:00",
        "end_time": "10:23:00",
        "error_count": 121,
        "system": "76290wq",
        "system_cn": "告警渠道(来源)",
        "qudao": "ABSC12345",
        "qudao_cn": "影像前端系统",
        "server": "NBEA123456",
        "server_cn": "BEAI系统",
        "branch_cn": "机构码翻译",
        "code": "S00130123456",
        "code_cn": "交易码翻译S001303123456",
        "rtcode": "ESB-E-123456",
        "rtcode_cn": "超时未得到服务系统应答",
        "key": "dbapp009-rtyu-20201012123456-123456",
        "route": "路由",
        # "contact": "操作室值班人员"
    }
    # response = requests.post(
    #     'http://127.0.0.1:8000/api/v1/emergency/emergencyovertime/',
    #     headers={
    #         'Content-Type': 'application/json',
    #     },
    #     data=json.dumps(data),
    # )
    # return response.json()
    model_obj = lens._registry.get(models.EmergencyOvertime)
    response = model_obj._api('post', data)
    logger.info('Dev-environment test task stored an alarm record in (%s), result (%s)' % (
        models.EmergencyOvertime,
        response.get('code') or response.get('data')))
    return response
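# A minimal sketch of how the dev-only task above could be driven by celery beat.
# The dotted task path ('emergency.tasks.emergencyovertime') and the 60-second interval
# are assumptions for illustration; the real project may register its tasks differently
# (for example with @shared_task and a different module path).
CELERY_BEAT_SCHEDULE = {
    'dev-insert-emergencyovertime': {
        'task': 'emergency.tasks.emergencyovertime',  # hypothetical dotted path
        'schedule': 60.0,                             # run every minute in dev
    },
}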
def emergencyomnibusrule():
    '''Insert a disabled sample aggregation rule into EmergencyOmnibusRule.'''
    data = {
        "is_enable": False,
        "title": "综合类告警聚合规则01",
        "describe": "字段(system_cn)相同,且字段(occurrence_time)相差在一分钟内。",
        "rule": {
            "operator": "and",
            "value": [
                {
                    "field": "system_cn",
                    "operator": "==",
                    "value": {
                        "operator": "get",
                        "field": "system_cn"
                    }
                },
                {
                    "operator": "and",
                    "value": [
                        {
                            "field": "occurrence_time",
                            "operator": ">=",
                            "value": {
                                "operator": "get",
                                "field": "occurrence_time",
                                "offset": -60
                            }
                        },
                        {
                            "field": "occurrence_time",
                            "operator": "<=",
                            "value": {
                                "operator": "get",
                                "field": "occurrence_time",
                                "offset": 0
                            }
                        }
                    ]
                }
            ]
        }
    }
    model_obj = lens._registry.get(ruler.models.EmergencyOmnibusRule)
    response = model_obj._api('post', data)
    logger.info('Dev-environment test task inserted a record into (%s), result (%s)' % (
        ruler.models.EmergencyOmnibusRule, response.get('data')))
    return response
def emergencyomnibusrule02():
    '''Insert an enabled sample aggregation rule into EmergencyOmnibusRule.'''
    data = {
        "is_enable": True,
        "title": "综合类告警聚合规则02",
        "describe": "对字段(system_cn)相同的告警进行聚合",
        "rule": {
            "operator": "and",
            "value": [
                {
                    "field": "system_cn",
                    "operator": "==",
                    "value": {
                        "operator": "get",
                        "field": "system_cn"
                    }
                }
            ]
        }
    }
    model_obj = lens._registry.get(ruler.models.EmergencyOmnibusRule)
    response = model_obj._api('post', data)
    logger.info('Dev-environment test task inserted a record into (%s), result (%s)' % (
        ruler.models.EmergencyOmnibusRule, response.get('data')))
    return response
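# The rules above are evaluated by ruler.ruler.filter(rule, instance, filter_model=...),
# whose implementation is not shown in this excerpt. The standalone sketch below is only
# an illustration of how the nested rule structure reads: each leaf compares a field on an
# existing group against a value taken ("get") from the incoming alarm, optionally shifted
# by "offset" (which assumes numeric values; the real ruler presumably normalizes times).
# The helper names (_resolve, _match) are hypothetical and are not the ruler API.
import operator as _op

_OPS = {'==': _op.eq, '>=': _op.ge, '<=': _op.le}

def _resolve(value_spec, incoming):
    # "get" pulls the field from the incoming alarm; "offset" shifts numeric values
    if isinstance(value_spec, dict) and value_spec.get('operator') == 'get':
        value = incoming.get(value_spec['field'])
        if 'offset' in value_spec:
            value = float(value) + value_spec['offset']
        return value
    return value_spec

def _match(rule, incoming, existing):
    # "and" nodes recurse; leaves compare the existing record against the resolved value
    if rule.get('operator') == 'and':
        return all(_match(sub, incoming, existing) for sub in rule['value'])
    compare = _OPS[rule['operator']]
    return compare(existing.get(rule['field']), _resolve(rule['value'], incoming))

# Example: under rule 02, an incoming alarm and an existing group with the same
# system_cn would match:
# _match(rule02['rule'], {'system_cn': '系统01'}, {'system_cn': '系统01'})  # -> True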
def api(self, request, pk=None):
    """Generic CRUD API view (for a separated frontend/backend deployment).

    GET:    return all records of the current model, or a single record if pk is given
    POST:   create a record (a pk in the request body is ignored)
    PATCH:  update a record, pk required
    DELETE: delete a record, pk required

    Arguments:
        request {HttpRequest} -- the incoming request
        pk {int / string} -- primary key for single-record operations

    Returns:
        JsonResponse -- ApiResponse serialized as JSON
    """
    if not hasattr(request, 'data'):
        request.data = {}
    code = -1
    msg = None
    data = {}
    # print(
    #     'request',
    #     request.method,
    #     request.content_type,
    #     request.content_params,
    #     # request.body,
    #     request.data,
    # )
    try:
        # Parse the request body into a dict and attach it as request.data
        self.gen_request_data(request, pk)
    except Exception as e:
        msg = 'Exception while parsing the request, see data for details'
        data[e.__class__.__name__] = str(e)
        return JsonResponse(
            utils.ApiResponse(code, msg=msg, data=data).__dict__,
            json_dumps_params={'ensure_ascii': False},
        )
    if request.method == 'POST':
        # from django.db import transaction
        # with transaction.atomic():
        try:
            # Fire the signal-based custom hook
            utils.post_before.send(sender=self.model_class, request=request)
            # lens custom hook
            # self.hook_post_before(request)
            AddModelForm = self.get_model_form_class()
            form = AddModelForm(request.data)
            if form.is_valid():
                # utils.valid_after.send(sender=self.model_class, request=request, old_instance=None, form=form)
                instance = form.save(commit=False)
                utils.valid_after.send(sender=self.model_class, request=request, instance=instance)
                instance.save()
                # Django cannot serialize a single model object, so wrap it in a
                # one-element list (QuerySet-like); serializing a list surrounds the
                # output with '[]', which string[1:-1] strips off again.
                data = json.loads(serializers.serialize('json', [instance])[1:-1])
                code = 1
                msg = 'success'
                # self.hook_post_after(request)
            else:
                data = json.loads(form.errors.as_json())
        except Exception as e:
            msg = 'Exception while handling the %s request, see data for details' % request.method
            data[e.__class__.__name__] = str(e)
    elif request.method in ['GET', 'PATCH', 'DELETE']:
        if request.method == 'GET':
            try:
                data = self.get_model_data(request, pk=pk, pagination=self.is_pagination)
                code = 1
                msg = 'success'
                # self.hook_get_after(request, pk=pk)
            except Exception as e:
                msg = 'Exception while handling the %s request, see data for details' % request.method
                data[e.__class__.__name__] = str(e)
        else:
            if not pk:
                # No pk given, so dispatch to an action method; gen_request_data has
                # already checked that request.data contains an 'action' key
                action_name = request.data.get('action')
                return getattr(self, action_name)(request)
            else:
                # A pk was given, so operate on that single record
                # obj = self.model_class.objects.get(pk=pk)
                obj = self.model_class.objects.filter(pk=pk).first()
                if not obj:
                    msg = 'Table (%s) has no record with primary key (%s)' % (
                        self.model_class._meta.model_name, pk)
                elif request.method == 'PATCH':
                    try:
                        utils.patch_before.send(sender=self.model_class, request=request, instance=obj)
                        # print('lens request', request.data)
                        # obj.update(**request.data)
                        # For a single object, replacing update() with __dict__.update + save()
                        # makes the save hooks fire as expected
                        # obj.__dict__.update(**request.data)
                        # obj.save()
                        AddModelForm = self.get_model_form_class()
                        for k, v in forms.models.model_to_dict(obj).items():
                            if k not in request.data:
                                request.data[k] = v
                        form = AddModelForm(data=request.data, instance=obj)
                        if form.is_valid():
                            form.save(commit=False)
                            utils.valid_after.send(sender=self.model_class, request=request, instance=obj)
                            obj = form.save()
                            utils.patch_save.send(sender=self.model_class, request=request, instance=obj)
                            data = json.loads(serializers.serialize('json', [obj])[1:-1])
                            msg = 'updated'
                            code = 1
                            # self.hook_patch_after(request, pk=pk)
                        else:
                            data = json.loads(form.errors.as_json())
                    except Exception as e:
                        msg = 'Exception while handling the (%s) request, see data for details' % request.method
                        data[e.__class__.__name__] = str(e)
                elif request.method == 'DELETE':
                    logger.info('DELETE %s', request.data)
                    obj.delete()
                    msg = 'deleted'
                    code = 1
                    # self.hook_delete_after(request, pk=pk)
    # print('response.__dict__', data, response.__dict__)
    return JsonResponse(
        utils.ApiResponse(code, msg=msg, data=data).__dict__,
        json_dumps_params={'ensure_ascii': False},
    )
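# A rough client-side usage sketch for the api() view above, assuming the URL routing
# shown in the commented-out example in emergencyovertime()
# ('http://127.0.0.1:8000/api/v1/emergency/emergencyovertime/') and a local dev server.
# The host, the pk-in-path pattern and the payloads are assumptions for illustration.
import json
import requests

BASE = 'http://127.0.0.1:8000/api/v1/emergency/emergencyovertime/'
HEADERS = {'Content-Type': 'application/json'}

# POST: create a record ('group' must be omitted, see post_before)
created = requests.post(BASE, headers=HEADERS, data=json.dumps({"hash_id": "demo"})).json()

# GET: list all records, or fetch one by primary key
listed = requests.get(BASE).json()
one = requests.get(BASE + '1/').json()

# PATCH / DELETE: operate on a single record by primary key
requests.patch(BASE + '1/', headers=HEADERS, data=json.dumps({"route": "new route"}))
requests.delete(BASE + '1/')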
def valid_after(sender, request, instance, **kwargs):
    '''After modelform validation, before saving.

    (This hook does not distinguish request.method by itself; check it manually.)
    '''
    logger.info('signals(valid_after)')
    model_group = mapping_model.get(sender).get('group')
    # Mapping of group model to the field values injected from the alarm metadata
    mapping_fields_func = {
        models.EmergencyOmnibusGroup: {
            'status': 0,
            'storage_datetime': time.strftime(
                '%Y-%m-%d %H:%M:%S',
                time.localtime(float(request.data.get('storage_timestamp')))),
            'level': request.data.get('level', 10),
        },
        models.EmergencyOvertimeGroup: {
            'status': 0,
            'storage_datetime': time.strftime(
                '%Y-%m-%d %H:%M:%S',
                time.localtime(float(request.data.get('storage_timestamp')))),
        },
    }

    def inject_fields():
        '''Build the POST data for the group from the alarm metadata
        '''
        data = {}
        for field in model_group._meta.fields:
            func = mapping_fields_func[model_group].get(field.name)
            # logger.info('lambda_func func --------', field.name, func)
            if func is None:
                # logger.info('lambda_func func if', func, request.data.get(field.name))
                data[field.name] = request.data.get(field.name)
            else:
                # logger.info('lambda_func func else', func)
                data[field.name] = func
        return data

    def create_group():
        '''Create a new aggregation record from the alarm metadata
        '''
        lens_model = lens._registry.get(model_group)
        data = inject_fields()
        response = lens_model._api('post', data)
        # logger.info('create_group data response', data, response)
        if response.get('code') == 1:
            group_pk = response.get('data').get('pk')
            logger.info('Created an aggregation record (%s)(%s)' % (sender, group_pk))
            return model_group.objects.get(pk=group_pk)
        else:
            raise Exception('valid_after hook: failed to create an aggregation record (%s)' % str(sender))

    def get_aggregate_group():
        '''Return the group matched by the aggregation rules, or a newly created group
        if nothing matches.
        '''
        # logger.info('get_aggregate_group')
        model_rule = mapping_model.get(sender).get('rule')
        # Fetch the enabled aggregation rules
        instance_rule_list = model_rule.objects.filter(is_enable=True)
        if instance_rule_list:
            # Merge the rules
            rule = {"operator": "and", "value": []}
            for instance_rule in instance_rule_list:
                rule['value'].extend(instance_rule.rule.get('value', []))
            # logger.info('rule', rule['value'])
            # Match against the merged rule
            result = ruler.ruler.filter(rule, instance, filter_model=model_group)
            if result is not None:
                logger.info('Aggregation rule (%s) matched an existing aggregation record (%s)' % (
                    instance_rule, result.pk))
                return result
        logger.info('Table (%s) has no enabled rules, or no existing aggregation record matched' % (model_rule,))
        return create_group()

    if request.method == 'POST':
        # Link the alarm to a group matched by the rules, or create a new one
        instance.group = get_aggregate_group()
        logger.info('Attached the current alarm to aggregation record (%s)' % instance.group.pk)
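# mapping_model is used by the hooks above (post_save, valid_after) but is not defined in
# this excerpt. From its usage it presumably maps each alarm model to its group model and
# aggregation-rule model, roughly as sketched below; the exact contents, in particular the
# rule model for EmergencyOvertime, are assumptions.
mapping_model = {
    models.EmergencyOmnibus: {
        'group': models.EmergencyOmnibusGroup,
        'rule': ruler.models.EmergencyOmnibusRule,
    },
    models.EmergencyOvertime: {
        'group': models.EmergencyOvertimeGroup,
        # get_aggregate_group() implies a rule model exists here as well, perhaps:
        # 'rule': ruler.models.EmergencyOvertimeRule,
    },
}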
def wrapper(*args, **kwargs):
    time_start = time.time()
    res = func(*args, **kwargs)
    cost_time = time.time() - time_start
    logger.info("(%s) ran for (%s) seconds" % (func.__name__, cost_time))
    return res
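# wrapper() above reads as the inner function of a timing decorator; the enclosing
# decorator is not shown in this excerpt. A minimal sketch of what it presumably looks
# like (the name 'timer' is hypothetical):
import functools
import time

def timer(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        time_start = time.time()
        res = func(*args, **kwargs)
        cost_time = time.time() - time_start
        logger.info("(%s) ran for (%s) seconds" % (func.__name__, cost_time))
        return res
    return wrapper

# Usage:
# @timer
# def emergencyomnibus():
#     ...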
def emergencyomnibus():
    '''Insert a test record into the EmergencyOmnibus table.

    Called by a celery periodic task so that the whole pipeline can be
    exercised in environments without a kafka feed.
    '''
    data = {
        "hash_id": "123456qwert",
        "occurrence_time": datetime.datetime.strftime(datetime.datetime.now(), '%H:%M:%S'),
        "summary": "迎春是小狗!",
        "storage_timestamp": "1602479851.062",
        "is_checkbox": True,
        "mastertid": "告警事件的告警组",
        # "contact": "联系人",
        "contact": '操作室值班人员',
        "is_import": True,
        "operation": 1,
        "swapiden": "HXB_123456qwert",
        "tally": 1,
        "ntlogged": 2,
        "dep": 9999,
        "first_occurrence": "1602479551.062",
        "last_occurrence": "1602479857.022",
        "severity": 1,
        "server_name": "告警来源",
        "bapp_system": "告警机器所属系统",
        # "level": 20,
        "operator": "",
        "jyresult": "",
        "node": random.choice(["127.0.0.1", "127.0.0.2", "127.0.0.3"]),
        # "confirm_msg": "告警事件处理信息(预留字段 不建议存值 留空即可)",
        "confirm_msg": None,
        "system": "00338",
        "system_cn": random.choice(['系统01', '系统02', '系统03']),
        "acknowledged": "",
        "identifier": "",
        # "group": "3"
    }
    model_obj = lens._registry.get(models.EmergencyOmnibus)
    response = model_obj._api('post', data)
    logger.info(
        'Alarm record (%s) finished the whole pipeline (field conversion - validation - '
        'aggregation matching - storage - push to frontend), type (%s), result (%s)' % (
            response.get('data').get('pk'),
            models.EmergencyOmnibus,
            response.get('msg') if int(response.get('code')) else response.get('data')))
    logger.info('-------end--------')
    return response