def test_get_id_error(self):
    """get_id must raise ValueError for an unknown source_type."""
    self.event["source_type"] = 'something_else'
    expected_msg = (
        "source_type should be one of 'connector', 'resource' or "
        "'component' not {}.".format(self.event["source_type"]))
    with self.assertRaisesRegexp(ValueError, expected_msg):
        ContextGraph.get_id(self.event)
def test_get_id_resource(self):
    """For a resource event, get_id returns '<resource>/<component>'."""
    self.event["source_type"] = 'resource'
    result = ContextGraph.get_id(self.event)
    expected = "{0}/{1}".format(
        self.event["resource"], self.event["component"])
    self.assertEquals(result, expected)
def test_event_processing(self):
    """event_processing creates a pbehavior on PBEHAVIOR_CREATE and
    removes it again on PBEHAVIOR_DELETE.

    Fix: the event dict literal declared the "action" key twice with the
    same value; the second entry silently overrode the first. The
    duplicate has been removed.
    """
    event = {
        "event_type": "pbehavior",
        "pbehavior_name": "downtime",
        "start": timegm(datetime.utcnow().timetuple()),
        "end": timegm((datetime.utcnow() + timedelta(days=1)).timetuple()),
        "action": PBEHAVIOR_CREATE,
        "connector": "test_connector",
        "connector_name": "test_connector_name",
        "author": "test_author",
        "component": 'test_component',
        "source_type": "resource",
        "resource": "a_resource"
    }
    # Query matching the pbehavior document event_processing should create.
    query = {
        'name': event['pbehavior_name'],
        'filter': dumps({'_id': ContextGraph.get_id(event)}),
        'tstart': event['start'],
        'tstop': event['end'],
        'connector': event['connector'],
        'connector_name': event['connector_name'],
        'author': event['author']
    }
    event_processing(MockEngine(), event, pbm=self.pbm, logger=Mock())
    pbehavior = list(self.pbm.pb_storage.get_elements(query=query))
    self.assertEqual(len(pbehavior), 1)
    self.assertDictContainsSubset(query, pbehavior[0])
    # Now delete the same pbehavior and check it is gone.
    event.update({'action': PBEHAVIOR_DELETE})
    event_processing(MockEngine(), event, pbm=self.pbm, logger=Mock())
    pbehavior = list(self.pbm.pb_storage.get_elements(query=query))
    self.assertEqual(len(pbehavior), 0)
def test__get_disable_entity(self):
    """Alarms attached to a disabled entity must not be returned by the
    alarm reader.

    Fixes: removed a leftover debug ``print(alarms)`` and the unused
    ``alarm`` binding (make_alarm is called only for its storage side
    effect).
    """
    event = {
        'connector': '03-K64_Firefly',
        'connector_name': 'serenity',
        'component': 'Malcolm_Reynolds',
        'output': 'the big red recall button',
        'timestamp': int(time.time()) - 100,
        "source_type": "component"
    }
    alarm_id = '/strawberry'
    # Create the alarm in storage; the returned object is not needed.
    self.manager.make_alarm(alarm_id, event)
    context_manager = ContextGraph(logger=LoggerMock())
    ent_id = context_manager.get_id(event)
    entity = context_manager.create_entity_dict(ent_id, "inara", "component")
    # Disable the entity so its alarms should be filtered out.
    entity["enabled"] = False
    context_manager._put_entities(entity)
    alarms = self.reader.get(opened=True)
    self.assertEqual(len(alarms["alarms"]), 0)
def test_get_id_connector(self):
    """For a connector event, get_id returns '<connector>/<connector_name>'."""
    self.event["source_type"] = 'connector'
    result = ContextGraph.get_id(self.event)
    expected = "{0}/{1}".format(
        self.event["connector"], self.event["connector_name"])
    self.assertEquals(result, expected)
def get_entity_id():
    """Webservice route: compute the context-graph id from the event
    passed as the JSON request body.

    :returns: JSON payload containing the generated id, or a JSON error
        with HTTP_ERROR status when no JSON body was supplied.
    """
    event = request.json

    if event is None:
        # Typo fixed in the user-facing message: 'givent' -> 'given'.
        return gen_json_error({'description': 'no event given'},
                              HTTP_ERROR)

    return gen_json(ContextGraph.get_id(event))
def test_get_id_component(self):
    """For a component event, get_id returns the component name itself."""
    self.event["source_type"] = 'component'
    expected = self.event["component"]
    self.assertEquals(ContextGraph.get_id(self.event), expected)
def event_processing(engine, event, pbm=_pb_manager, logger=None, **kwargs):
    """Process one pbehavior event.

    Depending on ``event['action']`` this creates, upserts or deletes a
    pbehavior through ``pbm`` and recomputes the watchers on success.
    Events whose ``event_type`` is not ``EVENT_TYPE`` are returned
    untouched.

    :param engine: calling engine; its logger receives debug traces
    :param dict event: the event to process
    :param pbm: pbehavior manager (defaults to the module-level manager)
    :param logger: logger used for per-event diagnostics.
        NOTE(review): defaults to None but is dereferenced without a
        guard below — confirm every caller passes a logger.
    :returns: the event, unmodified
    """
    if event.get('event_type') == EVENT_TYPE:
        entity_id = ContextGraph.get_id(event)
        engine.logger.debug("Start processing event {}".format(event))
        logger.debug("entity_id: {}\naction: {}".format(
            entity_id, event.get('action')))

        # NOTE(review): dict.get() never raises KeyError, so this except
        # branch looks dead — confirm ``event`` is a plain dict.
        try:
            pb_start = event.get('start')
            pb_end = event.get('end')
            pb_connector = event.get('connector')
            pb_name = event.get('pbehavior_name')
            pb_connector_name = event.get('connector_name')
        except KeyError as ex:
            logger.error('missing key in event: {}'.format(ex))
            return event

        # Optional fields.
        pb_rrule = event.get('rrule', None)
        pb_comments = event.get('comments', None)
        pb_author = event.get('author', DEFAULT_AUTHOR)

        try:
            filter_ = {'_id': entity_id}
            pbehavior_id, pb_source = pb_id(event)

            # Create a brand new pbehavior when no external id/source is
            # attached to the event.
            if event.get(
                    'action'
            ) == PBEHAVIOR_CREATE and pbehavior_id is None and pb_source is None:
                result = pbm.create(pb_name, filter_, pb_author, pb_start,
                                    pb_end, connector=pb_connector,
                                    comments=pb_comments,
                                    connector_name=pb_connector_name,
                                    rrule=pb_rrule)
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            # Upsert when the event carries both an id and a source.
            elif event.get(
                    'action'
            ) == PBEHAVIOR_CREATE and pbehavior_id is not None and pb_source is not None:
                pbehavior = PBehaviorModel(pbehavior_id, pb_name, filter_,
                                           pb_start, pb_end, pb_rrule,
                                           pb_author, connector=pb_connector,
                                           connector_name=pb_connector_name,
                                           source=pb_source)
                success, result = pbm.upsert(pbehavior)
                if not success:
                    logger.critical('pbehavior upsert: {}'.format(result))

            # Delete the pbehavior matching every stored field.
            elif event.get('action') == PBEHAVIOR_DELETE:
                result = pbm.delete(
                    _filter={
                        PBehavior.FILTER: dumps(filter_),
                        PBehavior.NAME: pb_name,
                        PBehavior.TSTART: pb_start,
                        PBehavior.TSTOP: pb_end,
                        PBehavior.RRULE: pb_rrule,
                        PBehavior.CONNECTOR: pb_connector,
                        PBehavior.CONNECTOR_NAME: pb_connector_name,
                    })
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            else:
                logger.error(
                    ERROR_MSG.format(event.get('action', 'no_action'), event))
        except ValueError as err:
            logger.error('cannot handle event: {}'.format(err))

    return event
def get_entity_id(event):
    """Return the context-graph entity id computed from the given event."""
    entity_id = ContextGraph.get_id(event)
    return entity_id
class engine(Engine):
    """Event-filter engine: applies configured filter rules to incoming
    events and executes their actions (override, remove, drop, pass,
    route, execjob, snooze, baseline).
    """

    etype = 'event_filter'

    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)
        self.mg_store = MongoStore.get_default()
        self.collection = MongoCollection(
            self.mg_store.get_collection("object"))
        self.name = kargs['name']
        # Counters reported by send_stat_event and reset afterwards.
        self.drop_event_count = 0
        self.pass_event_count = 0
        self.__load_rules()

    def pre_run(self):
        # Load rules once before the worker loop starts.
        self.beat()

    def a_override(self, event, action):
        """Override a field from event or add a new one if it does not
        have one.

        :param dict event: event to modify in place
        :param dict action: must carry 'field' and 'value'; for list
            values an optional 'operation' ('append' or 'override')
        :returns: True when the event was changed, False on a malformed
            or unsupported action
        """
        afield = action.get('field', None)
        avalue = action.get('value', None)

        # This must be a hard check because value can be a boolean or a null
        # integer
        if afield is None or avalue is None:
            self.logger.error(
                "Malformed action ('field' and 'value' required): {}".format(
                    action))
            return False

        # Field absent: simply set it.
        if afield not in event:
            self.logger.debug("Overriding: '{}' -> '{}'".format(
                afield, avalue))
            event[afield] = avalue
            return True

        # afield is in event
        if not isinstance(avalue, list):
            if isinstance(event[afield], list):
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))
                event[afield].append(avalue)
            else:
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue
            return True
        else:
            # operation field is supported only for list values
            op = action.get('operation', 'append')

            if op == 'override':
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue
                return True
            elif op == 'append':
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))
                if isinstance(event[afield], list):
                    event[afield] += avalue
                else:
                    event[afield] = [event[afield]] + avalue
                return True
            else:
                self.logger.error(
                    "Operation '{}' unsupported (action '{}')".format(
                        op, action))
                return False

    def a_remove(self, event, action):
        """Remove an element from a field in event, or the whole field if
        no element is specified.

        :param dict event: event to modify in place
        :param dict action: carries 'key' and optionally 'element';
            'met' selects removal by metric name from a perf-data list
        :returns: True on removal, False on a malformed action
        """
        akey = action.get('key', None)
        aelement = action.get('element', None)
        del_met = action.get('met', 0)

        if akey:
            if aelement:
                if del_met:
                    # Remove the first metric entry whose name matches.
                    for i, met in enumerate(event[akey]):
                        if met['name'] == aelement:
                            del event[akey][i]
                            break
                elif isinstance(event[akey], dict):
                    del event[akey][aelement]
                elif isinstance(event[akey], list):
                    del event[akey][event[akey].index(aelement)]

                self.logger.debug(u" + {}: Removed: '{}' from '{}'".format(
                    event['rk'], aelement, akey))
            else:
                del event[akey]
                self.logger.debug(u" + {}: Removed: '{}'".format(
                    event['rk'], akey))
            return True
        else:
            self.logger.error(
                u"Action malformed (needs 'key' and/or 'element'): {}".format(
                    action))
            return False

    def a_modify(self, event, action, name):
        """Dispatch an 'override' or 'remove' action on the event.

        :param dict event: event to be modified
        :param dict action: action of type 'override' or 'remove'
        :param str name: name of the rule
        :returns: ``None``
        """
        derogated = False
        atype = action.get('type')
        actionMap = {'override': self.a_override, 'remove': self.a_remove}

        if atype in actionMap:
            derogated = actionMap[atype](event, action)
        else:
            self.logger.warning(u"Unknown action '{}'".format(atype))

        # If the event was derogated, fill some informations
        if derogated:
            self.logger.debug(u"Event changed by rule '{}'".format(name))

        return None

    def a_drop(self, event, action, name):
        """Drop the event and count it.

        :param dict event: event to be dropped
        :param dict action: action of type drop (unused)
        :param str name: name of the rule
        :returns: ``DROP``
        """
        self.logger.debug(u"Event dropped by rule '{}'".format(name))
        self.drop_event_count += 1
        return DROP

    def a_pass(self, event, action, name):
        """Pass the event to the next queue and count it.

        :param dict event: event to pass through
        :param dict action: action of type pass (unused)
        :param str name: name of the rule
        :returns: the event
        """
        self.logger.debug(u"Event passed by rule '{}'".format(name))
        self.pass_event_count += 1
        return event

    def a_route(self, event, action, name):
        """Change the AMQP route to which the event will be sent.

        :param dict event: event being processed (unused)
        :param dict action: must carry a 'route' key
        :param str name: name of the rule
        :returns: ``None``
        """
        if "route" in action:
            self.next_amqp_queues = [action["route"]]
            self.logger.debug(u"Event re-routed by rule '{}'".format(name))
        else:
            self.logger.error(
                u"Action malformed (needs 'route'): {}".format(action))
        return None

    def a_exec_job(self, event, action, name):
        """Publish each matching job record to the scheduler with the
        event attached as its context.

        NOTE(review): sleeps one second per job — presumably to pace the
        scheduler; confirm before changing.
        """
        records = self.collection.find({
            'crecord_type': 'job',
            '_id': action['job']
        })
        for record in records:
            job = copy.deepcopy(record)
            job['context'] = event
            try:
                self.work_amqp_publisher.direct_event(job, 'Engine_scheduler')
            except Exception as e:
                self.logger.exception("Unable to send job")
            time.sleep(1)
        return True

    def a_snooze(self, event, action, name):
        """Snooze event checks.

        :param dict event: event to be snoozed
        :param dict action: action (must carry 'duration')
        :param str name: name of the rule
        :returns: True if a snooze has been sent, False otherwise
        :rtype: boolean
        """
        if event.get('event_type') == 'snooze':
            return False

        # Only check events can trigger an auto-snooze
        if event.get('event_type') != 'check':
            return False

        # A check OK cannot trigger an auto-snooze
        if event.get('state') == 0:
            return False

        # Alerts manager caching
        if not hasattr(self, 'am'):
            self.am = Alerts(*Alerts.provide_default_basics())

        # Context manager caching
        if not hasattr(self, 'cm'):
            self.cm = ContextGraph(self.logger)

        entity_id = self.cm.get_id(event)
        current_alarm = self.am.get_current_alarm(entity_id)
        # Only snooze when no alarm is currently open for this entity.
        if current_alarm is None:
            snooze = {
                'connector': event.get('connector', ''),
                'connector_name': event.get('connector_name', ''),
                'source_type': event.get('source_type', ''),
                'component': event.get('component', ''),
                'event_type': 'snooze',
                'duration': action['duration'],
                'author': 'event_filter',
                'output': 'Auto snooze generated by rule "{}"'.format(name),
                'timestamp': int(time.time())
            }
            if event.get('resource', ''):
                snooze['resource'] = event['resource']

            try:
                self.work_amqp_publisher.direct_event(snooze,
                                                      'Engine_event_filter')
            except Exception as e:
                self.logger.exception("Unable to send snooze event")
            return True

        return False

    def a_baseline(self, event, actions, name):
        """Tag the event with baseline configuration and forward it to the
        baseline engine.

        :param event: event to forward
        :param actions: baseline conf in event filter
        :param name: rule name (unused)
        """
        event['baseline_name'] = actions['baseline_name']
        event['check_frequency'] = actions['check_frequency']
        try:
            self.work_amqp_publisher.direct_event(event, 'Engine_baseline')
        except Exception as e:
            self.logger.exception("Unable to send baseline event")

    def apply_actions(self, event, actions):
        """Run every (rule name, action) pair on the event.

        :returns: True when at least one action returned a truthy value
            (i.e. the event should be passed on)
        """
        pass_event = False
        actionMap = {
            'drop': self.a_drop,
            'pass': self.a_pass,
            'override': self.a_modify,
            'remove': self.a_modify,
            'execjob': self.a_exec_job,
            'route': self.a_route,
            'snooze': self.a_snooze,
            'baseline': self.a_baseline
        }

        for name, action in actions:
            if action['type'] in actionMap:
                ret = actionMap[action['type'].lower()](event, action, name)
                if ret:
                    pass_event = True
            else:
                self.logger.warning(u"Unknown action '{}'".format(action))

        return pass_event

    def work(self, event, *xargs, **kwargs):
        """Match the event against every configured rule, collect the
        actions to apply, then apply them or fall back to the default
        action.
        """
        rk = get_routingkey(event)
        default_action = self.configuration.get('default_action', 'pass')

        # list of supported actions
        rules = self.configuration.get('rules', [])
        to_apply = []

        self.logger.debug(u'event {}'.format(event))

        # When list configuration then check black and
        # white lists depending on json configuration
        for filterItem in rules:
            actions = filterItem.get('actions')
            name = filterItem.get('name', 'no_name')

            self.logger.debug(u'rule {}'.format(filterItem))
            self.logger.debug(u'filter is {}'.format(filterItem['mfilter']))

            # Try filter rules on current event
            if filterItem['mfilter'] and check(filterItem['mfilter'], event):
                self.logger.debug(u'Event: {}, filter matches'.format(
                    event.get('rk', event)))

                # Optional pbehavior gating: skip remaining rules when the
                # entity's pbehavior state does not match.
                if 'pbehaviors' in filterItem:
                    pbehaviors = filterItem.get('pbehaviors', {})
                    list_in = pbehaviors.get('in', [])
                    list_out = pbehaviors.get('out', [])

                    if list_in or list_out:
                        pbm = singleton_per_scope(PBehaviorManager)
                        cm = singleton_per_scope(ContextGraph)
                        entity = cm.get_entity(event)
                        entity_id = cm.get_entity_id(entity)
                        result = pbm.check_pbehaviors(entity_id, list_in,
                                                      list_out)
                        if not result:
                            break

                for action in actions:
                    # A drop short-circuits: apply what was collected so
                    # far, then drop.
                    if action['type'].lower() == 'drop':
                        self.apply_actions(event, to_apply)
                        return self.a_drop(event, None, name)
                    to_apply.append((name, action))

                if filterItem.get('break', 0):
                    self.logger.debug(
                        u' + Filter {} broke the next filters processing'.
                        format(filterItem.get('name', 'filter')))
                    break

        if len(to_apply):
            if self.apply_actions(event, to_apply):
                self.logger.debug(u'Event before sent to next engine: %s'
                                  % event)
                event['rk'] = event['_id'] = get_routingkey(event)
                return event

        # No rules matched
        if default_action == 'drop':
            self.logger.debug("Event '%s' dropped by default action" % (rk))
            self.drop_event_count += 1
            return DROP

        self.logger.debug("Event '%s' passed by default action" % (rk))
        self.pass_event_count += 1

        self.logger.debug(u'Event before sent to next engine: %s' % event)
        event['rk'] = event['_id'] = get_routingkey(event)
        return event

    def __load_rules(self):
        """Reload enabled filter rules from storage, ordered by priority,
        and rebuild self.configuration.
        """
        tmp_rules = []
        records = self.collection.find({
            'crecord_type': 'filter',
            'enable': True
        })
        records.sort('priority', 1)

        for record in records:
            record_dump = copy.deepcopy(record)
            self.set_loaded(record_dump)

            try:
                record_dump["mfilter"] = loads(record_dump["mfilter"])
            except Exception:
                self.logger.info(u'Invalid mfilter {}, filter {}'.format(
                    record_dump['mfilter'],
                    record_dump['name'],
                ))

            self.logger.debug(u'Loading record_dump:')
            self.logger.debug(record_dump)
            tmp_rules.append(record_dump)

        self.configuration = {
            'rules': tmp_rules,
            'default_action': self.find_default_action()
        }

    def beat(self, *args, **kargs):
        """Reload the rule configuration so realtime UI changes are
        picked up, then publish the drop/pass statistics.
        """
        self.logger.debug(u'Reload configuration rules')
        self.__load_rules()
        self.logger.debug('Loaded {} rules'.format(
            len(self.configuration['rules'])))
        self.send_stat_event()

    def set_loaded(self, record):
        # Mark run-once rules as consumed the first time they are loaded.
        if 'run_once' in record and not record['run_once']:
            self.collection.update({"_id": record['_id']},
                                   {"$set": {
                                       'run_once': True
                                   }})
            self.logger.info('record {} has been run once'.format(
                record['_id']))

    def send_stat_event(self):
        """Send AMQP Event for drop and pass metrics, then reset the
        counters.
        """
        message_dropped = '{} event dropped since {}'.format(
            self.drop_event_count, self.beat_interval)
        message_passed = '{} event passed since {}'.format(
            self.pass_event_count, self.beat_interval)
        event = forger(connector='Engine',
                       connector_name='engine',
                       event_type='check',
                       source_type='resource',
                       resource=self.amqp_queue + '_data',
                       state=0,
                       state_type=1,
                       output=message_dropped,
                       perf_data_array=[{
                           'metric': 'pass_event',
                           'value': self.pass_event_count,
                           'type': 'GAUGE'
                       }, {
                           'metric': 'drop_event',
                           'value': self.drop_event_count,
                           'type': 'GAUGE'
                       }])

        self.logger.debug(message_dropped)
        self.logger.debug(message_passed)

        try:
            self.beat_amqp_publisher.canopsis_event(event)
        except Exception as e:
            self.logger.exception("Unable to send stat event")

        self.drop_event_count = 0
        self.pass_event_count = 0

    def find_default_action(self):
        """Find the default action stored and returns it, else assume it
        default action is pass.

        NOTE(review): find_one normally returns a single document, yet
        the code indexes ``records[0]`` — confirm MongoCollection's
        find_one semantics before relying on this branch.
        """
        records = self.collection.find_one({'crecord_type': 'defaultrule'})
        if records:
            return records[0]["action"]

        self.logger.debug(
            "No default action found. Assuming default action is pass")
        return 'pass'
def event_processing(engine, event, pbm=_pb_manager, logger=None, **kwargs):
    """Process one pbehavior event (variant that also recomputes
    pbehavior filters for entities newly created by the go che engine).

    :param engine: calling engine; its logger receives debug traces
    :param dict event: the event to process
    :param pbm: pbehavior manager (defaults to the module-level manager)
    :param logger: logger used for per-event diagnostics.
        NOTE(review): defaults to None but is dereferenced without a
        guard below — confirm every caller passes a logger.
    :returns: the event, unmodified
    """
    # This is a hack to make sure that new entities created by the go engine
    # che are immediately added to the pbehaviors. It is required to know
    # immediately if an entity is in maintenance, and to prevent tickets from
    # being declared in this case.
    # The pbehavior engine needs to receive events from che and publish them to
    # axe for this to work.
    if os.environ.get(ENV_RECOMPUTE_ON_NEW_ENTITY) == '1':
        entity_id = event.get('current_entity', {}).get('_id')
        if entity_id and entity_id not in known_entities:
            try:
                pbm.compute_pbehaviors_filters()
            except Exception as ex:
                engine.logger.exception('Processing error {}'.format(str(ex)))
            known_entities.add(entity_id)

    if event.get('event_type') == EVENT_TYPE:
        entity_id = ContextGraph.get_id(event)
        engine.logger.debug("Start processing event {}".format(event))
        logger.debug("entity_id: {}\naction: {}".format(
            entity_id, event.get('action')))

        # NOTE(review): dict.get() never raises KeyError, so this except
        # branch looks dead — confirm ``event`` is a plain dict.
        try:
            pb_start = event.get('start')
            pb_end = event.get('end')
            pb_connector = event.get('connector')
            pb_name = event.get('pbehavior_name')
            pb_connector_name = event.get('connector_name')
        except KeyError as ex:
            logger.error('missing key in event: {}'.format(ex))
            return event

        # Optional fields.
        pb_rrule = event.get('rrule', None)
        pb_comments = event.get('comments', None)
        pb_author = event.get('author', DEFAULT_AUTHOR)

        try:
            filter_ = {'_id': entity_id}
            pbehavior_id, pb_source = pb_id(event)

            # Create a brand new pbehavior when no external id/source is
            # attached to the event.
            if event.get(
                    'action'
            ) == PBEHAVIOR_CREATE and pbehavior_id is None and pb_source is None:
                result = pbm.create(pb_name, filter_, pb_author, pb_start,
                                    pb_end, connector=pb_connector,
                                    comments=pb_comments,
                                    connector_name=pb_connector_name,
                                    rrule=pb_rrule)
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            # Upsert when the event carries both an id and a source.
            elif event.get(
                    'action'
            ) == PBEHAVIOR_CREATE and pbehavior_id is not None and pb_source is not None:
                pbehavior = PBehaviorModel(pbehavior_id, pb_name, filter_,
                                           pb_start, pb_end, pb_rrule,
                                           pb_author, connector=pb_connector,
                                           connector_name=pb_connector_name,
                                           source=pb_source)
                success, result = pbm.upsert(pbehavior)
                if not success:
                    logger.critical('pbehavior upsert: {}'.format(result))

            # Delete the pbehavior matching every stored field.
            elif event.get('action') == PBEHAVIOR_DELETE:
                result = pbm.delete(
                    _filter={
                        PBehavior.FILTER: dumps(filter_),
                        PBehavior.NAME: pb_name,
                        PBehavior.TSTART: pb_start,
                        PBehavior.TSTOP: pb_end,
                        PBehavior.RRULE: pb_rrule,
                        PBehavior.CONNECTOR: pb_connector,
                        PBehavior.CONNECTOR_NAME: pb_connector_name,
                    })
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            else:
                logger.error(
                    ERROR_MSG.format(event.get('action', 'no_action'), event))
        except ValueError as err:
            logger.error('cannot handle event: {}'.format(err))

    # if event.get('event_type') in ['pbhleave', 'pbhenter']:
    #     return DROP
    return event
def test_get_id_error(self):
    """An unsupported source_type must make get_id raise ValueError."""
    self.event["source_type"] = 'something_else'
    message = ("source_type should be one of 'connector', 'resource' or "
               "'component' not {}.".format(self.event["source_type"]))
    with self.assertRaisesRegexp(ValueError, message):
        ContextGraph.get_id(self.event)
def event_processing(engine, event, pbm=_pb_manager, logger=None, **kwargs):
    """Process one pbehavior event.

    Depending on ``event['action']`` this creates, upserts or deletes a
    pbehavior through ``pbm`` and recomputes the watchers on success.
    Events whose ``event_type`` is not ``EVENT_TYPE`` are returned
    untouched.

    :param engine: calling engine; its logger receives debug traces
    :param dict event: the event to process
    :param pbm: pbehavior manager (defaults to the module-level manager)
    :param logger: logger used for per-event diagnostics.
        NOTE(review): defaults to None but is dereferenced without a
        guard below — confirm every caller passes a logger.
    :returns: the event, unmodified
    """
    if event.get('event_type') == EVENT_TYPE:
        entity_id = ContextGraph.get_id(event)
        engine.logger.debug("Start processing event {}".format(event))
        logger.debug("entity_id: {}\naction: {}".format(
            entity_id, event.get('action')))

        # NOTE(review): dict.get() never raises KeyError, so this except
        # branch looks dead — confirm ``event`` is a plain dict.
        try:
            pb_start = event.get('start')
            pb_end = event.get('end')
            pb_connector = event.get('connector')
            pb_name = event.get('pbehavior_name')
            pb_connector_name = event.get('connector_name')
        except KeyError as ex:
            logger.error('missing key in event: {}'.format(ex))
            return event

        # Optional fields.
        pb_rrule = event.get('rrule', None)
        pb_comments = event.get('comments', None)
        pb_author = event.get('author', DEFAULT_AUTHOR)

        try:
            filter_ = {'_id': entity_id}
            pbehavior_id, pb_source = pb_id(event)

            # Create a brand new pbehavior when no external id/source is
            # attached to the event.
            if event.get('action') == PBEHAVIOR_CREATE and pbehavior_id is None and pb_source is None:
                result = pbm.create(
                    pb_name, filter_, pb_author,
                    pb_start, pb_end,
                    connector=pb_connector,
                    comments=pb_comments,
                    connector_name=pb_connector_name,
                    rrule=pb_rrule
                )
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            # Upsert when the event carries both an id and a source.
            elif event.get('action') == PBEHAVIOR_CREATE and pbehavior_id is not None and pb_source is not None:
                pbehavior = PBehaviorModel(
                    pbehavior_id, pb_name, filter_,
                    pb_start, pb_end, pb_rrule, pb_author,
                    connector=pb_connector,
                    connector_name=pb_connector_name,
                    source=pb_source
                )
                success, result = pbm.upsert(pbehavior)
                if not success:
                    logger.critical('pbehavior upsert: {}'.format(result))

            # Delete the pbehavior matching every stored field.
            elif event.get('action') == PBEHAVIOR_DELETE:
                result = pbm.delete(_filter={
                    PBehavior.FILTER: dumps(filter_),
                    PBehavior.NAME: pb_name,
                    PBehavior.TSTART: pb_start,
                    PBehavior.TSTOP: pb_end,
                    PBehavior.RRULE: pb_rrule,
                    PBehavior.CONNECTOR: pb_connector,
                    PBehavior.CONNECTOR_NAME: pb_connector_name,
                })
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            else:
                logger.error(ERROR_MSG.format(event.get('action', 'no_action'),
                                              event))
        except ValueError as err:
            logger.error('cannot handle event: {}'.format(err))

    return event
class engine(Engine):
    """Event-filter engine: matches incoming events against configured
    rules and runs the associated actions (override, remove, drop, pass,
    route, execjob, snooze, baseline).
    """

    etype = 'event_filter'

    def __init__(self, *args, **kargs):
        super(engine, self).__init__(*args, **kargs)
        self.mg_store = MongoStore.get_default()
        self.collection = MongoCollection(self.mg_store.get_collection("object"))
        self.name = kargs['name']
        # Counters published by send_stat_event, reset after each beat.
        self.drop_event_count = 0
        self.pass_event_count = 0
        self.__load_rules()

    def pre_run(self):
        # Load rules once before processing starts.
        self.beat()

    def a_override(self, event, action):
        """Override a field from event or add a new one if it does not
        have one.

        :param dict event: event modified in place
        :param dict action: carries 'field' and 'value'; list values may
            add an 'operation' key ('append' or 'override')
        :returns: True when the event was changed, False otherwise
        """
        afield = action.get('field', None)
        avalue = action.get('value', None)

        # This must be a hard check because value can be a boolean or a null
        # integer
        if afield is None or avalue is None:
            self.logger.error(
                "Malformed action ('field' and 'value' required): {}".format(
                    action
                )
            )
            return False

        # Field absent: simply set it.
        if afield not in event:
            self.logger.debug("Overriding: '{}' -> '{}'".format(
                afield, avalue))
            event[afield] = avalue
            return True

        # afield is in event
        if not isinstance(avalue, list):
            if isinstance(event[afield], list):
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))
                event[afield].append(avalue)
            else:
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue
            return True
        else:
            # operation field is supported only for list values
            op = action.get('operation', 'append')

            if op == 'override':
                self.logger.debug("Overriding: '{}' -> '{}'".format(
                    afield, avalue))
                event[afield] = avalue
                return True
            elif op == 'append':
                self.logger.debug("Appending: '{}' to '{}'".format(
                    avalue, afield))
                if isinstance(event[afield], list):
                    event[afield] += avalue
                else:
                    event[afield] = [event[afield]] + avalue
                return True
            else:
                self.logger.error(
                    "Operation '{}' unsupported (action '{}')".format(
                        op, action
                    )
                )
                return False

    def a_remove(self, event, action):
        """Remove an element from a field in event, or the whole field
        if no element is specified.

        :param dict event: event modified in place
        :param dict action: carries 'key' and optionally 'element';
            'met' selects removal by metric name
        :returns: True on removal, False on a malformed action
        """
        akey = action.get('key', None)
        aelement = action.get('element', None)
        del_met = action.get('met', 0)

        if akey:
            if aelement:
                if del_met:
                    # Remove the first metric entry whose name matches.
                    for i, met in enumerate(event[akey]):
                        if met['name'] == aelement:
                            del event[akey][i]
                            break
                elif isinstance(event[akey], dict):
                    del event[akey][aelement]
                elif isinstance(event[akey], list):
                    del event[akey][event[akey].index(aelement)]

                self.logger.debug(u" + {}: Removed: '{}' from '{}'".format(
                    event['rk'], aelement, akey))
            else:
                del event[akey]
                self.logger.debug(u" + {}: Removed: '{}'".format(
                    event['rk'], akey))
            return True
        else:
            self.logger.error(
                u"Action malformed (needs 'key' and/or 'element'): {}".format(
                    action))
            return False

    def a_modify(self, event, action, name):
        """Dispatch an 'override' or 'remove' action on the event.

        :param event: map of the event to be modified
        :param action: map of type action
        :param name: name of the rule
        :returns: ``None``
        """
        derogated = False
        atype = action.get('type')
        actionMap = {
            'override': self.a_override,
            'remove': self.a_remove
        }

        if atype in actionMap:
            derogated = actionMap[atype](event, action)
        else:
            self.logger.warning(u"Unknown action '{}'".format(atype))

        # If the event was derogated, fill some informations
        if derogated:
            self.logger.debug(u"Event changed by rule '{}'".format(name))

        return None

    def a_drop(self, event, action, name):
        """Drop the event and increment the drop counter.

        :param event: map of the event to be dropped
        :param action: map of type action (unused)
        :param name: name of the rule
        :returns: ``DROP``
        """
        self.logger.debug(u"Event dropped by rule '{}'".format(name))
        self.drop_event_count += 1
        return DROP

    def a_pass(self, event, action, name):
        """Pass the event to the next queue and increment the pass
        counter.

        :param event: map of the event to pass through
        :param action: map of type action (unused)
        :param name: name of the rule
        :returns: the event
        """
        self.logger.debug(u"Event passed by rule '{}'".format(name))
        self.pass_event_count += 1
        return event

    def a_route(self, event, action, name):
        """Change the AMQP route to which the event will be sent.

        :param event: map of the event (unused)
        :param action: must carry a 'route' key
        :param name: name of the rule
        :returns: ``None``
        """
        if "route" in action:
            self.next_amqp_queues = [action["route"]]
            self.logger.debug(u"Event re-routed by rule '{}'".format(name))
        else:
            self.logger.error(
                u"Action malformed (needs 'route'): {}".format(action))
        return None

    def a_exec_job(self, event, action, name):
        """Publish each matching job record to the scheduler with the
        event attached as its context.

        NOTE(review): sleeps one second per job — presumably to pace the
        scheduler; confirm before changing.
        """
        records = self.collection.find(
            {'crecord_type': 'job', '_id': action['job']}
        )
        for record in records:
            job = copy.deepcopy(record)
            job['context'] = event
            try:
                self.work_amqp_publisher.direct_event(job, 'Engine_scheduler')
            except Exception as e:
                self.logger.exception("Unable to send job")
            time.sleep(1)
        return True

    def a_snooze(self, event, action, name):
        """Snooze event checks.

        :param dict event: event to be snoozed
        :param dict action: action (must carry 'duration')
        :param str name: name of the rule
        :returns: True if a snooze has been sent, False otherwise
        :rtype: boolean
        """
        if event.get('event_type') == 'snooze':
            return False

        # Only check events can trigger an auto-snooze
        if event.get('event_type') != 'check':
            return False

        # A check OK cannot trigger an auto-snooze
        if event.get('state') == 0:
            return False

        # Alerts manager caching
        if not hasattr(self, 'am'):
            self.am = Alerts(*Alerts.provide_default_basics())

        # Context manager caching
        if not hasattr(self, 'cm'):
            self.cm = ContextGraph(self.logger)

        entity_id = self.cm.get_id(event)
        current_alarm = self.am.get_current_alarm(entity_id)
        # Only snooze when no alarm is currently open for this entity.
        if current_alarm is None:
            snooze = {
                'connector': event.get('connector', ''),
                'connector_name': event.get('connector_name', ''),
                'source_type': event.get('source_type', ''),
                'component': event.get('component', ''),
                'event_type': 'snooze',
                'duration': action['duration'],
                'author': 'event_filter',
                'output': 'Auto snooze generated by rule "{}"'.format(name),
                'timestamp': int(time.time())
            }
            if event.get('resource', ''):
                snooze['resource'] = event['resource']

            try:
                self.work_amqp_publisher.direct_event(
                    snooze, 'Engine_event_filter')
            except Exception as e:
                self.logger.exception("Unable to send snooze event")
            return True

        return False

    def a_baseline(self, event, actions, name):
        """Tag the event with baseline configuration and forward it to
        the baseline engine.

        :param event: event to forward
        :param actions: baseline conf in event filter
        :param name: rule name (unused)
        """
        event['baseline_name'] = actions['baseline_name']
        event['check_frequency'] = actions['check_frequency']
        try:
            self.work_amqp_publisher.direct_event(
                event, 'Engine_baseline')
        except Exception as e:
            self.logger.exception("Unable to send baseline event")

    def apply_actions(self, event, actions):
        """Run every (rule name, action) pair on the event.

        :returns: True when at least one action returned a truthy value
        """
        pass_event = False
        actionMap = {
            'drop': self.a_drop,
            'pass': self.a_pass,
            'override': self.a_modify,
            'remove': self.a_modify,
            'execjob': self.a_exec_job,
            'route': self.a_route,
            'snooze': self.a_snooze,
            'baseline': self.a_baseline
        }

        for name, action in actions:
            if action['type'] in actionMap:
                ret = actionMap[action['type'].lower()](event, action, name)
                if ret:
                    pass_event = True
            else:
                self.logger.warning(u"Unknown action '{}'".format(action))

        return pass_event

    def work(self, event, *xargs, **kwargs):
        """Match the event against every configured rule, collect the
        actions to apply, then apply them or fall back to the default
        action.
        """
        rk = get_routingkey(event)
        default_action = self.configuration.get('default_action', 'pass')

        # list of supported actions
        rules = self.configuration.get('rules', [])
        to_apply = []

        self.logger.debug(u'event {}'.format(event))

        # When list configuration then check black and
        # white lists depending on json configuration
        for filterItem in rules:
            actions = filterItem.get('actions')
            name = filterItem.get('name', 'no_name')

            self.logger.debug(u'rule {}'.format(filterItem))
            self.logger.debug(u'filter is {}'.format(filterItem['mfilter']))

            # Try filter rules on current event
            if filterItem['mfilter'] and check(filterItem['mfilter'], event):
                self.logger.debug(
                    u'Event: {}, filter matches'.format(event.get('rk', event))
                )

                # Optional pbehavior gating: stop rule processing when the
                # entity's pbehavior state does not match.
                if 'pbehaviors' in filterItem:
                    pbehaviors = filterItem.get('pbehaviors', {})
                    list_in = pbehaviors.get('in', [])
                    list_out = pbehaviors.get('out', [])

                    if list_in or list_out:
                        pbm = singleton_per_scope(PBehaviorManager)
                        cm = singleton_per_scope(ContextGraph)
                        entity = cm.get_entity(event)
                        entity_id = cm.get_entity_id(entity)
                        result = pbm.check_pbehaviors(
                            entity_id, list_in, list_out
                        )
                        if not result:
                            break

                for action in actions:
                    # A drop short-circuits: apply collected actions, drop.
                    if action['type'].lower() == 'drop':
                        self.apply_actions(event, to_apply)
                        return self.a_drop(event, None, name)
                    to_apply.append((name, action))

                if filterItem.get('break', 0):
                    self.logger.debug(
                        u' + Filter {} broke the next filters processing'
                        .format(
                            filterItem.get('name', 'filter')
                        )
                    )
                    break

        if len(to_apply):
            if self.apply_actions(event, to_apply):
                self.logger.debug(
                    u'Event before sent to next engine: %s' % event
                )
                event['rk'] = event['_id'] = get_routingkey(event)
                return event

        # No rules matched
        if default_action == 'drop':
            self.logger.debug("Event '%s' dropped by default action" % (rk))
            self.drop_event_count += 1
            return DROP

        self.logger.debug("Event '%s' passed by default action" % (rk))
        self.pass_event_count += 1

        self.logger.debug(u'Event before sent to next engine: %s' % event)
        event['rk'] = event['_id'] = get_routingkey(event)
        return event

    def __load_rules(self):
        """Reload enabled filter rules from storage, ordered by
        priority, and rebuild self.configuration.
        """
        tmp_rules = []
        records = self.collection.find(
            {'crecord_type': 'filter', 'enable': True})
        records.sort('priority', 1)

        for record in records:
            record_dump = copy.deepcopy(record)
            self.set_loaded(record_dump)

            try:
                record_dump["mfilter"] = loads(record_dump["mfilter"])
            except Exception:
                self.logger.info(u'Invalid mfilter {}, filter {}'.format(
                    record_dump['mfilter'],
                    record_dump['name'],
                ))

            self.logger.debug(u'Loading record_dump:')
            self.logger.debug(record_dump)
            tmp_rules.append(record_dump)

        self.configuration = {
            'rules': tmp_rules,
            'default_action': self.find_default_action()
        }

    def beat(self, *args, **kargs):
        """Reload the rule configuration so realtime UI changes are
        picked up, then publish the drop/pass statistics.
        """
        self.logger.debug(u'Reload configuration rules')
        self.__load_rules()
        self.logger.debug(
            'Loaded {} rules'.format(len(self.configuration['rules']))
        )
        self.send_stat_event()

    def set_loaded(self, record):
        # Mark run-once rules as consumed the first time they are loaded.
        if 'run_once' in record and not record['run_once']:
            self.collection.update({"_id": record['_id']},
                                   {"$set": {'run_once': True}})
            self.logger.info(
                'record {} has been run once'.format(record['_id'])
            )

    def send_stat_event(self):
        """Send AMQP Event for drop and pass metrics, then reset the
        counters.
        """
        message_dropped = '{} event dropped since {}'.format(
            self.drop_event_count, self.beat_interval
        )
        message_passed = '{} event passed since {}'.format(
            self.pass_event_count, self.beat_interval
        )
        event = forger(
            connector='Engine',
            connector_name='engine',
            event_type='check',
            source_type='resource',
            resource=self.amqp_queue + '_data',
            state=0,
            state_type=1,
            output=message_dropped,
            perf_data_array=[
                {'metric': 'pass_event',
                 'value': self.pass_event_count,
                 'type': 'GAUGE'},
                {'metric': 'drop_event',
                 'value': self.drop_event_count,
                 'type': 'GAUGE'}
            ]
        )

        self.logger.debug(message_dropped)
        self.logger.debug(message_passed)

        try:
            self.beat_amqp_publisher.canopsis_event(event)
        except Exception as e:
            self.logger.exception("Unable to send stat event")

        self.drop_event_count = 0
        self.pass_event_count = 0

    def find_default_action(self):
        """Find the default action stored and returns it, else assume it
        default action is pass.

        NOTE(review): find_one normally returns a single document, yet
        the code indexes ``records[0]`` — confirm MongoCollection's
        find_one semantics before relying on this branch.
        """
        records = self.collection.find_one({'crecord_type': 'defaultrule'})
        if records:
            return records[0]["action"]

        self.logger.debug(
            "No default action found. Assuming default action is pass"
        )
        return 'pass'