def test_update_context_case5_re_none(self):
    """Case 5 without a resource: only conn and comp are produced."""
    connector_id = "conn_id"
    connector_name = "conn_name"
    component_id = "comp_id"
    event = create_event(connector_id, connector_name, component_id)
    ids = process.gen_ids(create_event(connector_id, connector_name,
                                       component_id))

    expected_conn = ContextGraph.create_entity_dict(
        ids["conn_id"], connector_name, "connector",
        impact=[ids["comp_id"]])
    expected_comp = ContextGraph.create_entity_dict(
        ids["comp_id"], component_id, "component",
        impact=[], depends=[ids["conn_id"]])

    comp_in_db = ContextGraph.create_entity_dict(
        component_id, component_id, "component", impact=[], depends=[])

    result = process.update_context_case5(ids, [comp_in_db], event)
    result_conn, result_comp, _ = prepare_test_update_context(result)

    self.assertDictEqual(expected_conn, result_conn)
    self.assertDictEqual(expected_comp, result_comp)
def update_context_case2(ids, in_db, event):
    """Case 2 update entities.

    A connector exists in the context but no component nor resource:
    both are created and linked to the stored connector.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param in_db: a list of the rest of entity in the event; in_db[0]
        is the connector already stored in the context.
    :type in_db: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)"""
    LOGGER.debug("Case 2")
    comp = ContextGraph.create_entity_dict(
        ids['comp_id'], event['component'], 'component',
        depends=[ids['re_id']], impact=[])
    re = ContextGraph.create_entity_dict(
        ids['re_id'], event['resource'], 'resource',
        depends=[], impact=[ids['comp_id']])

    # link the stored connector to the freshly created entities
    update_links_conn_res(in_db[0], re)
    update_links_conn_comp(in_db[0], comp)

    return [comp, re, in_db[0]]
def test_get_id_error(self):
    """get_id must raise a ValueError on an unknown source_type."""
    self.event["source_type"] = 'something_else'
    expected_msg = (
        "source_type should be one of 'connector', 'resource' or "
        "'component' not {}.".format(self.event["source_type"]))
    with self.assertRaisesRegexp(ValueError, expected_msg):
        ContextGraph.get_id(self.event)
def setUp(self):
    """Point the process module at a silent logger and a fresh manager."""
    null_logger = Logger.get("", None, output_cls=OutputNull)
    setattr(process, 'LOGGER', null_logger)
    self.conf_file = "etc/context_graph/manager.conf"
    self.category = "CONTEXTGRAPH"
    self.extra_fields = "extra_fields"
    self.authorized_info_keys = "authorized_info_keys"
    self.gctx_man = ContextGraph(null_logger)
    setattr(process, 'context_graph_manager', self.gctx_man)
def a_snooze(self, event, action, name):
    """Snooze event checks.

    :param dict event: event to be snoozed
    :param dict action: action
    :param str name: name of the rule
    :returns: True if a snooze has been sent, False otherwise
    :rtype: boolean
    """
    if event.get('event_type') == 'snooze':
        return False

    # Only check events can trigger an auto-snooze
    if event.get('event_type') != 'check':
        return False

    # A check OK cannot trigger an auto-snooze
    if event.get('state') == 0:
        return False

    # Alerts manager caching
    if not hasattr(self, 'am'):
        self.am = Alerts(*Alerts.provide_default_basics())

    # Context manager caching
    if not hasattr(self, 'cm'):
        self.cm = ContextGraph(self.logger)

    entity_id = self.cm.get_id(event)
    current_alarm = self.am.get_current_alarm(entity_id)
    if current_alarm is None:
        snooze = {
            'connector': event.get('connector', ''),
            'connector_name': event.get('connector_name', ''),
            'source_type': event.get('source_type', ''),
            'component': event.get('component', ''),
            'event_type': 'snooze',
            'duration': action['duration'],
            'author': 'event_filter',
            'output': 'Auto snooze generated by rule "{}"'.format(name),
            'timestamp': int(time.time())
        }

        if event.get('resource', ''):
            snooze['resource'] = event['resource']

        try:
            self.work_amqp_publisher.direct_event(snooze,
                                                  'Engine_event_filter')
        # fix: the bound variable `e` was unused; logger.exception already
        # records the active traceback
        except Exception:
            self.logger.exception("Unable to send snooze event")
        return True

    return False
def __init__(self, *args, **kwargs):
    """Initialize the context manager, the influxdb client and tags."""
    super(engine, self).__init__(*args, **kwargs)
    self.context_manager = ContextGraph(self.logger)
    self.influxdb_client = InfluxDBClient.from_configuration(self.logger)

    conf_path = os.path.join(root_path, self.CONF_PATH)
    section = Configuration.load(conf_path, Ini).get(self.CONF_SECTION, {})
    self.tags = cfg_to_array(section.get('tags', ''))
def test_info_field(self):
    """Entities created from an event carry an empty infos dict."""
    connector_id = "conn_id"
    connector_name = "conn_name"
    component_id = "comp_id"
    resource_id = "re_id"
    event = create_event(connector_id, connector_name, component_id,
                         resource_id)
    ids = process.gen_ids(event)

    expected_conn = ContextGraph.create_entity_dict(
        ids["conn_id"], connector_name, "connector",
        impact=sorted([ids["comp_id"], ids["re_id"]]), infos={})
    expected_comp = ContextGraph.create_entity_dict(
        ids["comp_id"], component_id, "component",
        depends=sorted([ids["conn_id"], ids["re_id"]]), infos={})
    expected_re = ContextGraph.create_entity_dict(
        ids["re_id"], resource_id, "resource",
        impact=[ids["comp_id"]], depends=[ids["conn_id"]], infos={})

    process.update_context((False, False, False), ids, [], event)

    result_re = self.gctx_man.get_entities_by_id(ids["re_id"])[0]
    result_conn = self.gctx_man.get_entities_by_id(ids["conn_id"])[0]
    result_comp = self.gctx_man.get_entities_by_id(ids["comp_id"])[0]

    # get_entities_by_id adds a "links" field whose value depends on the
    # configuration; it is not relevant to this test, so drop it.
    for entity in (result_comp, result_conn, result_re):
        del entity["links"]

    self.assertEqualEntities(expected_re, result_re)
    self.assertEqualEntities(expected_conn, result_conn)
    self.assertEqualEntities(expected_comp, result_comp)
def test_get_id_connector(self):
    """get_id builds "<connector>/<connector_name>" for connector events."""
    self.event["source_type"] = 'connector'
    expected_id = "{0}/{1}".format(self.event["connector"],
                                   self.event["connector_name"])
    result = ContextGraph.get_id(self.event)
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(result, expected_id)
def test_get_id_resource(self):
    """get_id builds "<resource>/<component>" for resource events."""
    self.event["source_type"] = 'resource'
    expected_id = "{0}/{1}".format(self.event["resource"],
                                   self.event["component"])
    result = ContextGraph.get_id(self.event)
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(result, expected_id)
def test_event_processing(self):
    """A create action inserts one pbehavior; a delete action removes it."""
    event = {
        "event_type": "pbehavior",
        "pbehavior_name": "downtime",
        "start": timegm(datetime.utcnow().timetuple()),
        "end": timegm((datetime.utcnow() + timedelta(days=1)).timetuple()),
        # fix: the "action" key appeared twice in this literal with the
        # same value; the duplicate silently overrode the first
        "action": PBEHAVIOR_CREATE,
        "connector": "test_connector",
        "connector_name": "test_connector_name",
        "author": "test_author",
        "component": 'test_component',
        "source_type": "resource",
        "resource": "a_resource",
    }
    query = {
        'name': event['pbehavior_name'],
        'filter': dumps({'_id': ContextGraph.get_id(event)}),
        'tstart': event['start'],
        'tstop': event['end'],
        'connector': event['connector'],
        'connector_name': event['connector_name'],
        'author': event['author']
    }

    event_processing(MockEngine(), event, pbm=self.pbm, logger=Mock())
    pbehavior = list(self.pbm.pb_storage.get_elements(query=query))
    self.assertEqual(len(pbehavior), 1)
    self.assertDictContainsSubset(query, pbehavior[0])

    event.update({'action': PBEHAVIOR_DELETE})
    event_processing(MockEngine(), event, pbm=self.pbm, logger=Mock())
    pbehavior = list(self.pbm.pb_storage.get_elements(query=query))
    self.assertEqual(len(pbehavior), 0)
def create_ent_metric(event):
    """Build one metric entity per entry of the event's perf_data_array.

    :param event: the event to use to create the metric entities
    :type event: a dict
    :return: the list of metric entities
    :rtype: a list of dict"""
    entities = []
    for perf in event["perf_data_array"]:
        metric_id = "/metric/{0}/{1}/{2}/{3}".format(
            event["connector"], event["connector_name"],
            event["component"], perf["metric"])
        entities.append(ContextGraph.create_entity_dict(
            id=metric_id,
            name=perf["metric"],
            etype="metric",
            depends=[],
            impact=[event["resource"]],
            measurements={},
            infos={},
            resource=event["resource"],
            component=event["component"],
            connector=event["connector"],
            connector_name=event["connector_name"]))
    return entities
def create_ent_metric(event):
    """Create the metric entities described by an event's perf data.

    :param event: the source event carrying a "perf_data_array"
    :type event: a dict
    :return: the metric entities
    :rtype: a list of dict"""
    connector = event["connector"]
    connector_name = event["connector_name"]
    component = event["component"]
    resource = event["resource"]

    metrics = []
    for perf in event["perf_data_array"]:
        metric_name = perf["metric"]
        metric_id = "/metric/{0}/{1}/{2}/{3}".format(
            connector, connector_name, component, metric_name)
        metrics.append(ContextGraph.create_entity_dict(
            id=metric_id,
            name=metric_name,
            etype="metric",
            depends=[],
            impact=[resource],
            measurements={},
            infos={},
            resource=resource,
            component=component,
            connector=connector,
            connector_name=connector_name))
    return metrics
def update_context_case3(ids, in_db, event):
    """Case 3 update entities.

    A component and a connector exist in the context but no resource:
    the resource is created and linked to both.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param in_db: a list of the rest of entity in the event.
    :type in_db: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)"""
    LOGGER.debug("Case 3")
    comp = {}
    conn = {}
    for i in in_db:
        if i['type'] == 'connector':
            conn = i
        elif i['type'] == 'component':
            comp = i

    re = ContextGraph.create_entity_dict(ids['re_id'],
                                         event['resource'],
                                         'resource',
                                         depends=[],
                                         impact=[])

    update_links_res_comp(re, comp)
    update_links_conn_res(conn, re)

    return [comp, re, conn]
def update_context_case6(ids, in_db, event):
    """Case 6 update entities.

    A component — and possibly a resource — exist in the context but no
    connector: the connector is created here and linked to them.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param in_db: a list of the rest of entity in the event.
    :type in_db: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)"""
    LOGGER.debug("Update context case 6.")
    resource = None
    # NOTE(review): `component` stays unbound if in_db holds no component
    # entity — confirm callers always provide one for this case.
    for entity in in_db:
        if entity["type"] == "resource":
            resource = entity
        elif entity["type"] == "component":
            component = entity

    connector = ContextGraph.create_entity_dict(ids["conn_id"],
                                                event["connector_name"],
                                                "connector",
                                                impact=[],
                                                depends=[])

    update_links_conn_comp(connector, component)

    if resource is not None:
        update_links_conn_res(connector, resource)
        return [connector, component, resource]

    return [connector, component]
def update_context_case6(ids, in_db, event):
    """Case 6 update entities.

    No connector exists in the context yet; a component (and optionally
    a resource) already do. The connector is created from the event and
    linked to the stored entities.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param in_db: a list of the rest of entity in the event.
    :type in_db: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)"""
    LOGGER.debug("Update context case 6.")
    resource = None
    # NOTE(review): `component` is never initialized; a missing component
    # in in_db would raise NameError below — confirm this cannot happen.
    for entity in in_db:
        if entity["type"] == "resource":
            resource = entity
        elif entity["type"] == "component":
            component = entity

    connector = ContextGraph.create_entity_dict(
        ids["conn_id"], event["connector_name"], "connector",
        impact=[], depends=[])

    update_links_conn_comp(connector, component)

    if resource is not None:
        update_links_conn_res(connector, resource)
        return [connector, component, resource]

    return [connector, component]
def test_update_context_case3(self):
    """Case 3: connector and component in db, the resource is created."""
    conn_id = "conn_id"
    conn_name = "conn_name"
    comp_id = "comp_id"
    re_id = "re_id"
    full_conn_id = "{0}/{1}".format(conn_id, conn_name)
    event = create_event(conn_id, conn_name, comp_id, re_id)
    ids = process.gen_ids(create_event(conn_id, conn_name, comp_id, re_id))

    expected_conn = ContextGraph.create_entity_dict(
        ids["conn_id"], conn_name, "connector",
        impact=sorted([ids["comp_id"], ids["re_id"]]))
    expected_comp = ContextGraph.create_entity_dict(
        ids["comp_id"], comp_id, "component",
        depends=sorted([ids["conn_id"], ids["re_id"]]))
    expected_re = ContextGraph.create_entity_dict(
        ids["re_id"], re_id, "resource",
        impact=[ids["comp_id"]], depends=[ids["conn_id"]])

    conn_in_db = ContextGraph.create_entity_dict(
        full_conn_id, conn_name, "connector",
        impact=[comp_id], depends=[])
    comp_in_db = ContextGraph.create_entity_dict(
        comp_id, comp_id, "component",
        impact=[], depends=[full_conn_id])

    res = process.update_context_case3(ids, [conn_in_db, comp_in_db], event)
    result_conn, result_comp, result_re = prepare_test_update_context(res)

    self.assertDictEqual(expected_conn, result_conn)
    self.assertDictEqual(expected_comp, result_comp)
    self.assertDictEqual(expected_re, result_re)
def test_get_watcher(self):
    """get_watcher returns None while the watcher entity does not exist."""
    self.assertIsNone(self.manager.get_watcher('watcher-one'))
    watcher = ContextGraph.create_entity_dict(
        'watcher-one', 'one', 'watcher')
    self.context_graph_manager.create_entity(watcher)
def __init__(self, *args, **kwargs):
    """Create the context, the per-entity cache and lock, then beat."""
    super(engine, self).__init__(*args, **kwargs)

    # get a context
    self.context = ContextGraph()

    # TODO: sla
    # get a storage for sla macro
    # self.storage = Middleware.get_middleware(
    #     protocol='storage', data_scope='global')
    # self.sla = None

    self.entities_by_entity_ids = {}
    self.lock = Lock()

    self.beat()
def a_snooze(self, event, action, name):
    """Snooze event checks.

    :param dict event: event to be snoozed
    :param dict action: action
    :param str name: name of the rule
    :returns: True if a snooze has been sent, False otherwise
    :rtype: boolean
    """
    # snooze events never snooze themselves
    if event.get('event_type') == 'snooze':
        return False

    # Only check events can trigger an auto-snooze
    if event.get('event_type') != 'check':
        return False

    # A check OK cannot trigger an auto-snooze
    if event.get('state') == 0:
        return False

    # Alerts manager caching
    if not hasattr(self, 'am'):
        self.am = Alerts(*Alerts.provide_default_basics())

    # Context manager caching
    if not hasattr(self, 'cm'):
        self.cm = ContextGraph(self.logger)

    entity_id = self.cm.get_id(event)
    current_alarm = self.am.get_current_alarm(entity_id)
    if current_alarm is None:
        snooze = {
            'connector': event.get('connector', ''),
            'connector_name': event.get('connector_name', ''),
            'source_type': event.get('source_type', ''),
            'component': event.get('component', ''),
            'event_type': 'snooze',
            'duration': action['duration'],
            'author': 'event_filter',
            'output': 'Auto snooze generated by rule "{}"'.format(name),
            'timestamp': int(time.time())
        }

        if event.get('resource', ''):
            snooze['resource'] = event['resource']

        try:
            self.work_amqp_publisher.direct_event(
                snooze, 'Engine_event_filter')
        # fix: drop the unused `as e` binding; logger.exception already
        # logs the active traceback
        except Exception:
            self.logger.exception("Unable to send snooze event")
        return True

    return False
def update_context_case1_re_none(ids, event):
    """Case 1 update entities.

    No component or connector exist in the context and no resource is
    defined in the event: both entities are created from scratch.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)
    """
    LOGGER.debug("Case 1 re none.")
    comp = ContextGraph.create_entity_dict(ids['comp_id'],
                                           event['component'],
                                           'component',
                                           depends=[ids['conn_id']],
                                           impact=[])
    conn = ContextGraph.create_entity_dict(ids['conn_id'],
                                           event['connector_name'],
                                           'connector',
                                           depends=[],
                                           impact=[ids['comp_id']])
    return [comp, conn]
def __init__(self, amqp_pub=None):
    """Initialize storages, managers and the AMQP publisher.

    :param amqp_pub canopsis.common.amqp.AmqpPublisher:
    """
    self.logger = Logger.get('watcher', LOG_PATH)
    self.watcher_storage = Middleware.get_middleware_by_uri(
        'mongodb-default-watcher://')
    self.alert_storage = Middleware.get_middleware_by_uri(
        'mongodb-periodical-alarm://')
    self.sla_storage = Middleware.get_middleware_by_uri(
        'storage-default-sla://')

    self.context_graph = ContextGraph(self.logger)
    self.pbehavior_manager = PBehaviorManager(
        *PBehaviorManager.provide_default_basics())

    # fall back on a default publisher when none is supplied
    if amqp_pub is None:
        self.amqp_pub = AmqpPublisher(get_default_amqp_conn(), self.logger)
    else:
        self.amqp_pub = amqp_pub
def __init__(self, *args, **kwargs):
    """Build the context manager, influxdb client and tag list."""
    super(engine, self).__init__(*args, **kwargs)
    self.context_manager = ContextGraph(self.logger)
    self.influxdb_client = InfluxDBClient.from_configuration(self.logger)

    configuration = Configuration.load(
        os.path.join(root_path, self.CONF_PATH), Ini)
    section = configuration.get(self.CONF_SECTION, {})
    self.tags = cfg_to_array(section.get('tags', ''))
def update_context_case1(ids, event):
    """Case 1 update entities.

    No resource, component or connector exist in the context: every
    entity is created from the event.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param event: the event to process
    :type event: a dict.
    :return: a list of entities (as a dict)
    """
    LOGGER.debug("Case 1.")

    component = ContextGraph.create_entity_dict(
        ids['comp_id'],
        event['component'],
        'component',
        depends=[ids['conn_id'], ids['re_id']],
        impact=[])
    connector = ContextGraph.create_entity_dict(
        ids['conn_id'],
        event['connector_name'],
        'connector',
        depends=[],
        impact=[ids['re_id'], ids['comp_id']])

    result = [component, connector]

    # perf events also carry metrics to contextualize
    if event["event_type"] == "perf":
        result += create_ent_metric(event)

    resource = ContextGraph.create_entity_dict(
        ids['re_id'],
        event['resource'],
        'resource',
        depends=[ids['conn_id']],
        impact=[ids['comp_id']])
    result.append(resource)

    return result
def get_entity_id():
    """Get the generated id from an event.

    The event is read from the request JSON body.

    :return: a JSON response holding the entity id, or a JSON error when
        no event is provided
    """
    event = request.json

    if event is None:
        # fix: typo "givent" -> "given" in the error description
        return gen_json_error({'description': 'no event given'},
                              HTTP_ERROR)

    return gen_json(ContextGraph.get_id(event))
def update_context_case1(ids, event):
    """Case 1 update entities.

    Nothing related to the event exists in the context yet: connector,
    component and resource (plus metrics for perf events) are created.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param event: the event to process
    :type event: a dict.
    :return: a list of entities (as a dict)
    """
    LOGGER.debug("Case 1.")

    entities = [
        ContextGraph.create_entity_dict(
            ids['comp_id'], event['component'], 'component',
            depends=[ids['conn_id'], ids['re_id']], impact=[]),
        ContextGraph.create_entity_dict(
            ids['conn_id'], event['connector_name'], 'connector',
            depends=[], impact=[ids['re_id'], ids['comp_id']]),
    ]

    # perf events carry metric entities as well
    if event["event_type"] == "perf":
        entities += create_ent_metric(event)

    entities.append(ContextGraph.create_entity_dict(
        ids['re_id'], event['resource'], 'resource',
        depends=[ids['conn_id']], impact=[ids['comp_id']]))

    return entities
def update_context_case1_re_none(ids, event):
    """Case 1 update entities.

    No component nor connector exist in the context and the event does
    not define a resource; only those two entities are created.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)
    """
    LOGGER.debug("Case 1 re none.")
    comp = ContextGraph.create_entity_dict(
        ids['comp_id'], event['component'], 'component',
        depends=[ids['conn_id']], impact=[])
    conn = ContextGraph.create_entity_dict(
        ids['conn_id'], event['connector_name'], 'connector',
        depends=[], impact=[ids['comp_id']])
    return [comp, conn]
def setUp(self):
    """Create a ContextGraph manager plugged on the test storages."""
    self.manager = ContextGraph(logger)
    self.entities_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')
    self.organisations_storage = Middleware.get_middleware_by_uri(
        'storage-default-testorganisations://')
    self.users_storage = Middleware.get_middleware_by_uri(
        'storage-default-testusers://')
    self.manager.ent_storage = self.entities_storage

    # blank entity used as a base by the tests
    self.template = {
        '_id': None,
        'type': 'connector',
        'name': 'conn-name1',
        'depends': [],
        'impact': [],
        'measurements': [],
        'links': {},
        'infos': {},
    }
def test__get_disable_entity(self):
    """An alarm on a disabled entity must not be returned by the reader."""
    event = {
        'connector': '03-K64_Firefly',
        'connector_name': 'serenity',
        'component': 'Malcolm_Reynolds',
        'output': 'the big red recall button',
        'timestamp': int(time.time()) - 100,
        "source_type": "component"
    }
    alarm_id = '/strawberry'
    # fix: the returned alarm was bound to an unused variable; make_alarm
    # is called only for its side effect here
    self.manager.make_alarm(alarm_id, event)

    context_manager = ContextGraph(logger=LoggerMock())
    ent_id = context_manager.get_id(event)
    entity = context_manager.create_entity_dict(ent_id, "inara",
                                                "component")
    entity["enabled"] = False
    context_manager._put_entities(entity)

    # fix: removed stray debug print(alarms)
    alarms = self.reader.get(opened=True)
    self.assertEqual(len(alarms["alarms"]), 0)
def setUp(self):
    """Wire a PBehaviorManager and a ContextGraph to test storages."""
    pb_storage = Middleware.get_middleware_by_uri(
        'storage-default-testpbehavior://')
    ent_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')
    test_logger = Logger.get('test_pb', None, output_cls=OutputNull)

    self.pbm = PBehaviorManager(logger=test_logger,
                                pb_storage=pb_storage)
    self.context = ContextGraph(test_logger)
    self.context.ent_storage = ent_storage
    self.pbm.context = self.context
def update_context_case2(ids, in_db, event):
    """Case 2 update entities.

    Only a connector exists in the context; the component and resource
    are created and linked to it.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param in_db: a list of the rest of entity in the event; in_db[0]
        is the connector found in the context.
    :type in_db: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)"""
    LOGGER.debug("Case 2")
    comp = ContextGraph.create_entity_dict(ids['comp_id'],
                                           event['component'],
                                           'component',
                                           depends=[ids['re_id']],
                                           impact=[])
    re = ContextGraph.create_entity_dict(ids['re_id'],
                                         event['resource'],
                                         'resource',
                                         depends=[],
                                         impact=[ids['comp_id']])

    update_links_conn_res(in_db[0], re)
    update_links_conn_comp(in_db[0], comp)

    return [comp, re, in_db[0]]
def test_update_context_case1(self):
    """Case 1: connector, component and resource are all created."""
    conn_id = "conn_id"
    conn_name = "conn_name"
    comp_id = "comp_id"
    re_id = "re_id"
    event = create_event(conn_id, conn_name, comp_id, re_id)
    ids = process.gen_ids(event)

    expected_conn = ContextGraph.create_entity_dict(
        ids["conn_id"], conn_name, "connector",
        impact=sorted([ids["comp_id"], ids["re_id"]]))
    expected_comp = ContextGraph.create_entity_dict(
        ids["comp_id"], comp_id, "component",
        depends=sorted([ids["conn_id"], ids["re_id"]]))
    expected_re = ContextGraph.create_entity_dict(
        ids["re_id"], re_id, "resource",
        impact=[ids["comp_id"]], depends=[ids["conn_id"]])

    res = process.update_context_case1(ids, event)
    result_conn, result_comp, result_re = prepare_test_update_context(res)

    self.assertDictEqual(expected_comp, result_comp)
    self.assertDictEqual(expected_conn, result_conn)
    self.assertDictEqual(expected_re, result_re)
def setUp(self):
    """Assemble a Watcher manager over the test storages."""
    null_logger = Logger.get('', None, output_cls=OutputNull)
    self.manager = Watcher()
    self.context_graph_manager = ContextGraph(null_logger)

    self.alerts_storage = Middleware.get_middleware_by_uri(
        'mongodb-periodical-testalarm://')
    self.watcher_storage = Middleware.get_middleware_by_uri(
        'storage-default-testwatcher://')
    self.entities_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')

    self.context_graph_manager.ent_storage = self.entities_storage
    self.manager.alert_storage = self.alerts_storage
    self.manager.context_graph = self.context_graph_manager
    self.manager.watcher_storage = self.watcher_storage
def setUp(self):
    """Build a PBehaviorManager backed by a mongo test collection."""
    store = MongoStore.get_default()
    raw_collection = store.get_collection("default_testpbehavior")
    pb_collection = MongoCollection(raw_collection)
    ent_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')
    test_logger = Logger.get('test_pb', None, output_cls=OutputNull)
    conf = Configuration.load(PBehaviorManager.CONF_PATH, Ini)

    self.pbm = PBehaviorManager(config=conf,
                                logger=test_logger,
                                pb_collection=pb_collection)
    self.context = ContextGraph(test_logger)
    self.context.ent_storage = ent_storage
    self.pbm.context = self.context
def create_watcher(self, body):
    """Create a watcher entity in the context and link it to entities.

    :param dict body: watcher conf
    :return: True on success, None when the filter is missing or invalid
    """
    watcher_id = body['_id']

    try:
        mfilter = json.loads(body['mfilter'])
    except ValueError:
        self.logger.error('can t decode mfilter')
        return None
    except KeyError:
        self.logger.error('no filter')
        return None

    # entities matched by the watcher filter become its dependencies
    matched = self.context_graph.get_entities(
        query=mfilter,
        projection={'_id': 1})

    self.watcher_storage.put_element(body)

    depend_ids = [doc['_id'] for doc in matched]

    entity = ContextGraph.create_entity_dict(
        id=watcher_id,
        name=body['display_name'],
        etype='watcher',
        impact=[],
        depends=depend_ids)

    # adding the fields specific to the Watcher entities
    entity['mfilter'] = body['mfilter']
    entity['state'] = 0

    try:
        self.context_graph.create_entity(entity)
    except ValueError:
        self.context_graph.update_entity(entity)

    self.compute_state(watcher_id)

    return True  # TODO: return really something
def test_create_component(self):
    """create_entity_dict fills every field; assertions show that the
    measurements key is absent for a component entity."""
    entity_id = "id_1"
    entity_name = "name_1"
    entity_type = "component"
    depends = ["id_2", "id_3", "id_4", "id_5"]
    impacts = ["id_6", "id_7", "id_8", "id_9"]
    measurements = {"tag_1": "data_1", "tag_2": "data_2"}
    infos = {"info_1": "foo_1", "info_2": "bar_2"}

    entity = ContextGraph.create_entity_dict(
        entity_id, entity_name, entity_type, depends, impacts,
        measurements, infos)

    self.assertEqual(entity_id, entity["_id"])
    self.assertEqual(entity_name, entity["name"])
    self.assertEqual(entity_type, entity["type"])
    self.assertEqual(depends, entity["depends"])
    self.assertEqual(impacts, entity["impact"])
    self.assertNotIn("measurements", entity.keys())
    self.assertEqual(infos, entity["infos"])
def setUp(self):
    """Build an Alerts manager wired to test storages and mocks."""
    self.logger = logging.getLogger('alerts')

    self.alerts_storage = Middleware.get_middleware_by_uri(
        'storage-periodical-testalarm://')
    self.config_storage = Middleware.get_middleware_by_uri(
        'storage-default-testconfig://')
    self.config_storage.put_element(
        element={
            '_id': 'test_config',
            'crecord_type': 'statusmanagement',
            'bagot_time': 3600,
            'bagot_freq': 10,
            'stealthy_time': 300,
            'restore_event': True,
            'auto_snooze': False,
            'snooze_default_time': 300,
        },
        _id='test_config')
    self.filter_storage = Middleware.get_middleware_by_uri(
        'storage-default-testalarmfilter://')
    self.context_graph_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://')

    self.cg_manager = ContextGraph(self.logger)
    self.cg_manager.ent_storage = self.context_graph_storage
    self.watcher_manager = Watcher()

    conf = Configuration.load(Alerts.CONF_PATH, Ini)
    status_filter = {'crecord_type': 'statusmanagement'}
    self.config_data = EtherealData(
        collection=MongoCollection(self.config_storage._backend),
        filter_=status_filter)

    self.event_publisher = Mock(spec=StatEventPublisher)

    self.manager = Alerts(config=conf,
                          logger=self.logger,
                          alerts_storage=self.alerts_storage,
                          config_data=self.config_data,
                          filter_storage=self.filter_storage,
                          context_graph=self.cg_manager,
                          watcher=self.watcher_manager,
                          event_publisher=self.event_publisher)
def exports(ws):
    """Register the context API routes on the webservice.

    :param ws: the webservice to attach the routes to
    """
    manager = ContextGraph(ws.logger)

    @ws.application.route(
        '/api/v2/context/<_filter>',
    )
    @ws.application.route(
        '/api/v2/context/',
    )
    def context(_filter=None, ):
        """
        get entities in graph_context with a filter

        payload:
        limit: limit of return size
        start: skip fields
        sort: sort

        :param _filter: mongo filter
        :return: list of object
        """
        # fix: removed the dead `filter = '{}'` assignment, which shadowed
        # the builtin and was never read (the query below is built from
        # _filter directly)
        limit = request.query.limit or 0
        sort = request.query.sort or None
        start = request.query.start or 0

        query = {}
        if _filter is not None:
            try:
                query = loads(_filter)
            except ValueError:
                return gen_json_error({'description': 'can t load filter'},
                                      HTTP_ERROR)

        cursor, count = manager.get_entities(query=query,
                                             limit=int(limit),
                                             start=int(start),
                                             sort=sort,
                                             with_count=True)

        data = []
        for ent in cursor:
            data.append(ent)

        return gen_json(data)
def __init__(self, amqp_pub=None):
    """Create the watcher manager with its storages and publisher.

    :param amqp_pub canopsis.common.amqp.AmqpPublisher:
    """
    self.logger = Logger.get('watcher', LOG_PATH)

    self.watcher_storage = Middleware.get_middleware_by_uri(
        'mongodb-default-watcher://')
    self.alert_storage = Middleware.get_middleware_by_uri(
        'mongodb-periodical-alarm://')
    self.sla_storage = Middleware.get_middleware_by_uri(
        'storage-default-sla://')

    self.context_graph = ContextGraph(self.logger)
    self.pbehavior_manager = PBehaviorManager(
        *PBehaviorManager.provide_default_basics()
    )

    if amqp_pub is not None:
        self.amqp_pub = amqp_pub
    else:
        self.amqp_pub = AmqpPublisher(get_default_amqp_conn(), self.logger)
def setUp(self):
    """Plug a ContextGraph manager on the test storages and prepare a
    blank entity template."""
    self.manager = ContextGraph(logger)

    self.entities_storage = Middleware.get_middleware_by_uri(
        'storage-default-testentities://'
    )
    self.organisations_storage = Middleware.get_middleware_by_uri(
        'storage-default-testorganisations://'
    )
    self.users_storage = Middleware.get_middleware_by_uri(
        'storage-default-testusers://'
    )
    self.manager.ent_storage = self.entities_storage

    self.template = {'_id': None,
                     'type': 'connector',
                     'name': 'conn-name1',
                     'depends': [],
                     'impact': [],
                     'measurements': [],
                     'links': {},
                     'infos': {}}
def update_context_case3(ids, in_db, event):
    """Case 3 update entities.

    A component and a connector already exist in the context; only the
    resource is created and linked to them.

    :param ids: the tuple of ids.
    :type ids: a tuple
    :param in_db: a list of the rest of entity in the event.
    :type in_db: a tuple
    :param event: the event to process.
    :type event: a dict
    :return: a list of entities (as a dict)"""
    LOGGER.debug("Case 3")
    comp = {}
    conn = {}
    for i in in_db:
        if i['type'] == 'connector':
            conn = i
        elif i['type'] == 'component':
            comp = i

    re = ContextGraph.create_entity_dict(
        ids['re_id'], event['resource'], 'resource',
        depends=[], impact=[])

    update_links_res_comp(re, comp)
    update_links_conn_res(conn, re)

    return [comp, re, conn]
def event_processing(engine, event, pbm=_pb_manager, logger=None, **kwargs):
    """Event processing.

    Handle "pbehavior" events: create, upsert or delete a pbehavior
    depending on the event's "action" field.

    :param engine: the engine processing the event
    :param dict event: the event to process
    :param pbm: the pbehavior manager (defaults to the module-level one)
    :param logger: a logger
        NOTE(review): defaults to None but logger.debug/error are called
        unconditionally below — confirm callers always pass one.
    :return: the event, unmodified
    """
    if event.get('event_type') == EVENT_TYPE:
        entity_id = ContextGraph.get_id(event)
        engine.logger.debug("Start processing event {}".format(event))
        logger.debug("entity_id: {}\naction: {}".format(
            entity_id, event.get('action')))

        # NOTE(review): dict.get never raises KeyError, so this except
        # branch looks unreachable — confirm before relying on it.
        try:
            pb_start = event.get('start')
            pb_end = event.get('end')
            pb_connector = event.get('connector')
            pb_name = event.get('pbehavior_name')
            pb_connector_name = event.get('connector_name')
        except KeyError as ex:
            logger.error('missing key in event: {}'.format(ex))
            return event

        # optional fields
        pb_rrule = event.get('rrule', None)
        pb_comments = event.get('comments', None)
        pb_author = event.get('author', DEFAULT_AUTHOR)

        try:
            filter_ = {'_id': entity_id}
            pbehavior_id, pb_source = pb_id(event)

            # create without an explicit id/source: plain create
            if event.get('action') == PBEHAVIOR_CREATE and pbehavior_id is None and pb_source is None:
                result = pbm.create(
                    pb_name, filter_, pb_author,
                    pb_start, pb_end,
                    connector=pb_connector,
                    comments=pb_comments,
                    connector_name=pb_connector_name,
                    rrule=pb_rrule
                )
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            # create with both an id and a source: upsert instead
            elif event.get('action') == PBEHAVIOR_CREATE and pbehavior_id is not None and pb_source is not None:
                pbehavior = PBehaviorModel(
                    pbehavior_id, pb_name, filter_,
                    pb_start, pb_end, pb_rrule, pb_author,
                    connector=pb_connector,
                    connector_name=pb_connector_name,
                    source=pb_source
                )
                success, result = pbm.upsert(pbehavior)
                if not success:
                    logger.critical('pbehavior upsert: {}'.format(result))

            elif event.get('action') == PBEHAVIOR_DELETE:
                # delete matches on every identifying field of the pbehavior
                result = pbm.delete(_filter={
                    PBehavior.FILTER: dumps(filter_),
                    PBehavior.NAME: pb_name,
                    PBehavior.TSTART: pb_start,
                    PBehavior.TSTOP: pb_end,
                    PBehavior.RRULE: pb_rrule,
                    PBehavior.CONNECTOR: pb_connector,
                    PBehavior.CONNECTOR_NAME: pb_connector_name,
                })
                if not result:
                    logger.error(ERROR_MSG.format(event['action'], event))
                else:
                    watcher_manager.compute_watchers()

            else:
                # unknown or mixed action/id combination
                logger.error(ERROR_MSG.format(
                    event.get('action', 'no_action'), event))
        except ValueError as err:
            logger.error('cannot handle event: {}'.format(err))

    return event
def update_context(presence, ids, in_db, event):
    """Update the context.

    Dispatch to the update_context_caseN helper matching the presence
    tuple, enable newly created entities, copy extra fields and event
    infos onto the event's own entity, then store everything.

    :param presence: information about a entity exist in the database
    :param ids: a list of ids.
    :param in_db: the related entities from the context
    :param event: the current event
    """
    # collect the configured extra fields present on the event so they
    # can be copied into the event entity's "infos" below
    extra_infos = {}
    for field in context_graph_manager.extra_fields:
        if field in event.keys():
            extra_infos[field] = event[field]

    event_id = get_event_id(ids, event)

    if "infos" in event:
        event_info = event["infos"]
    else:
        event_info = {}

    to_update = None
    if presence == (False, False, False):
        # Case 1
        to_update = update_context_case1(ids, event)
    elif presence == (False, False, None):
        # Case 1
        to_update = update_context_case1_re_none(ids, event)
    elif presence == (True, False, False):
        # Case 2
        to_update = update_context_case2(ids, in_db, event)
    elif presence == (True, False, None):
        to_update = update_context_case2_re_none(ids, in_db, event)
    elif presence == (True, True, False):
        # Case 3
        to_update = update_context_case3(ids, in_db, event)
    elif presence == (True, True, True):
        # Case 4
        # NOTE(review): nothing to update here, but to_update stays None
        # and the loop below would raise TypeError — confirm this case
        # never reaches this function.
        pass
    elif presence == (False, True, False) or presence == (False, True, None):
        # Case 5
        to_update = update_context_case5(ids, in_db, event)
    elif presence == (False, True, True) or presence == (False, True, None):
        # Case 6
        # NOTE(review): (False, True, None) is already matched by case 5
        # above, so that half of this condition is unreachable.
        to_update = update_context_case6(ids, in_db, event)
    else:
        LOGGER.warning("No case for the given presence : {0} and ids {1}".
                       format(presence, ids))
        raise ValueError("No case for the given ids and data.")

    evt_entity = None
    for entity in to_update:
        # If there is no "enabled_history" field in "infos", we assume
        # the entity was just create
        # NOTE(review): the check is on the entity root (not on "infos")
        # and the key is spelled "enable_history" — confirm intended.
        if "enable_history" not in entity:
            ContextGraph._enable_entity(entity, event["timestamp"])
        if entity["_id"] == event_id:
            evt_entity = entity

    # merge the extra fields then the event infos into the event entity
    for key in extra_infos:
        evt_entity['infos'][key] = extra_infos[key]

    for key in event_info:
        evt_entity["infos"][key] = event_info[key]

    context_graph_manager._put_entities(to_update)
def get_entity_id(event):
    """Get entity id from event.

    Thin wrapper around ContextGraph.get_id.

    :param dict event: the event to compute an entity id from
    :return: the entity id
    """
    return ContextGraph.get_id(event)
class Test(TestCase):
    """Unit tests for the context_graph event-processing helpers
    (``process`` module): link updates, presence detection, id
    generation and the per-case context updates."""

    # Max allowed drift (seconds) between an expected and an actual
    # "enable_history" timestamp.
    GRACE_PERIOD = 3

    def assertEqualEntities(self, expected, result):
        """Assert two entities are equal, ignoring link order and
        tolerating a small "enable_history" timestamp drift."""
        expected["depends"] = sorted(expected["depends"])
        expected["impact"] = sorted(expected["impact"])
        result["depends"] = sorted(result["depends"])
        result["impact"] = sorted(result["impact"])

        # Check the last enable_history timestamps are close enough.
        result_ts = result[u"enable_history"][-1]
        expected_ts = expected[u"enable_history"][-1]

        self.assertTrue(result_ts - expected_ts < self.GRACE_PERIOD)
        # result["infos"].pop("enable_history")
        # expected["infos"].pop("enable_history")
        self.assertDictEqual(expected, result)

    def setUp(self):
        # Silence logging and wire a fresh manager into the process
        # module globals used by the functions under test.
        logger = Logger.get("", None, output_cls=OutputNull)
        setattr(process, 'LOGGER', logger)
        self.conf_file = "etc/context_graph/manager.conf"
        self.category = "CONTEXTGRAPH"
        self.extra_fields = "extra_fields"
        self.authorized_info_keys = "authorized_info_keys"
        self.gctx_man = ContextGraph(logger)
        setattr(process, 'context_graph_manager', self.gctx_man)

    def tearDown(self):
        # The process module keeps a module-level id cache; reset it
        # between tests.
        process.cache.clear()

    def test_check_type(self):
        """check_type accepts matching types and raises otherwise."""
        re_entity = {'_id': 'conn_1', 'type': 'resource'}
        con_entity = {'_id': 'conn_1', 'type': 'connector'}
        comp_entity = {'_id': 'conn_1', 'type': 'component'}

        self.assertTrue(process.check_type(con_entity, 'connector'))
        self.assertTrue(process.check_type(re_entity, 'resource'))
        self.assertTrue(process.check_type(comp_entity, 'component'))

        with self.assertRaises(TypeError):
            process.check_type(con_entity, "not_a_connector")
        with self.assertRaises(TypeError):
            process.check_type(comp_entity, "not_a_component")
        with self.assertRaises(TypeError):
            process.check_type(re_entity, "not_a_resource")

    def test_update_depends_links(self):
        """update_depends_links adds the link once, without duplicates."""
        e_1 = {
            '_id': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': []
        }
        e_2 = {
            '_id': 'conn_1',
            'type': 'connector',
            'impact': [],
            'depends': []
        }
        process.update_depends_links(e_1, e_2)
        self.assertTrue(e_2['_id'] in e_1['depends'])
        # A second call must not duplicate the link.
        process.update_depends_links(e_1, e_2)
        self.assertTrue(e_1['depends'] == [e_2['_id']])

    def test_update_impact_links(self):
        """update_impact_links adds the link once, without duplicates."""
        e_1 = {
            '_id': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': []
        }
        e_2 = {
            '_id': 'conn_1',
            'type': 'connector',
            'impact': [],
            'depends': []
        }
        process.update_impact_links(e_1, e_2)
        self.assertTrue(e_2['_id'] in e_1['impact'])
        # A second call must not duplicate the link.
        process.update_impact_links(e_1, e_2)
        self.assertTrue(e_1['impact'] == [e_2['_id']])

    def test_update_case_1(self):
        pass

    def test_update_case_2(self):
        pass

    def test_update_case_3(self):
        # NOTE(review): the assertions below were commented out before
        # this review; the fixtures are kept as-is for when they are
        # re-enabled.
        entities_t1 = [{'_id': 'comp_1',
                        'type': 'component',
                        'impact': [],
                        'depends': []},
                       {'_id': 'conn_1',
                        'type': 'connector',
                        'impact': [],
                        'depends': []}]
        entities_t2 = [{'_id': 'conn_1', 'type': 'connector'},
                       {'_id': 'comp_1', 'type': 'component'},
                       {'_id': 're_1', 'type': 'resource'}]
        ids = {'re_id': 're_1', 'comp_id': 'comp_1', 'conn_id': 'conn_1'}
        #self.assertEquals(process.update_case3(entities_t1, ids), 0)
        #self.assertEquals(process.update_case3(entities_t2, ids), 1)

    def test_update_case_5(self):
        pass

    def test_determine_presence(self):
        """determine_presence maps every combination of known ids (and
        a None resource id) to the expected presence tuple."""
        cache = set(['comp_1', 're_1', 'conn_1'])
        ids_test1 = {
            'comp_id': 'comp_2', 're_id': 're_2', 'conn_id': 'conn_2'}
        ids_test2 = {
            'conn_id': 'conn_1', 'comp_id': 'comp_2', 're_id': 're_2'}
        ids_test3 = {
            'conn_id': 'conn_1', 'comp_id': 'comp_1', 're_id': 're_2'}
        ids_test4 = {
            'conn_id': 'conn_1', 'comp_id': 'comp_1', 're_id': 're_1'}
        ids_test5 = {
            'comp_id': 'comp_1', 're_id': 're_2', 'conn_id': 'conn_2'}
        ids_test6 = {
            're_id': 're_1', 'comp_id': 'comp_1', 'conn_id': 'conn_2'}

        self.assertEqual(
            process.determine_presence(ids_test1, cache),
            (False, False, False))
        self.assertEqual(
            process.determine_presence(ids_test2, cache),
            (True, False, False))
        self.assertEqual(
            process.determine_presence(ids_test3, cache),
            (True, True, False))
        self.assertEqual(
            process.determine_presence(ids_test4, cache),
            (True, True, True))
        self.assertEqual(
            process.determine_presence(ids_test5, cache),
            (False, True, False))
        self.assertEqual(
            process.determine_presence(ids_test6, cache),
            (False, True, True))

        # Same combinations with no resource id: the third element of
        # the presence tuple must be None.
        ids_test1_none = {
            'comp_id': 'comp_2', 're_id': None, 'conn_id': 'conn_2'}
        ids_test2_none = {
            'conn_id': 'conn_1', 'comp_id': 'comp_2', 're_id': None}
        ids_test3_none = {
            'conn_id': 'conn_1', 'comp_id': 'comp_1', 're_id': None}
        ids_test4_none = {
            'conn_id': 'conn_1', 'comp_id': 'comp_1', 're_id': None}
        ids_test5_none = {
            'comp_id': 'comp_1', 're_id': None, 'conn_id': 'conn_2'}
        ids_test6_none = {
            're_id': None, 'comp_id': 'comp_1', 'conn_id': 'conn_2'}

        self.assertEqual(
            process.determine_presence(ids_test1_none, cache),
            (False, False, None))
        self.assertEqual(
            process.determine_presence(ids_test2_none, cache),
            (True, False, None))
        self.assertEqual(
            process.determine_presence(ids_test3_none, cache),
            (True, True, None))
        self.assertEqual(
            process.determine_presence(ids_test4_none, cache),
            (True, True, None))
        self.assertEqual(
            process.determine_presence(ids_test5_none, cache),
            (False, True, None))
        self.assertEqual(
            process.determine_presence(ids_test6_none, cache),
            (False, True, None))

    def test_add_missing_ids(self):
        """add_missing_ids only caches the ids marked absent, and
        raises KeyError when a required id key is missing."""
        res_id = "re_id"
        comp_id = "comp_id"
        conn_id = "conn_id"

        ids = {"re_id": res_id, "comp_id": comp_id, "conn_id": conn_id}

        # check function behaviour for the connector
        process.add_missing_ids((True, False, False), ids)
        self.assertNotIn(conn_id, process.cache)
        process.cache.clear()

        process.add_missing_ids((False, False, False), ids)
        self.assertIn(conn_id, process.cache)
        process.cache.clear()

        with self.assertRaises(KeyError):
            process.add_missing_ids((False, True, True), {
                "re_id": res_id, "comp_id": comp_id})
        process.cache.clear()

        # check function behaviour for the component
        process.add_missing_ids((False, True, False), ids)
        self.assertNotIn(comp_id, process.cache)
        process.cache.clear()

        process.add_missing_ids((False, False, False), ids)
        self.assertIn(conn_id, process.cache)
        process.cache.clear()

        with self.assertRaises(KeyError):
            process.add_missing_ids((True, False, True), {
                "conn_id": conn_id, "re_id": res_id})
        process.cache.clear()

        # check function behaviour for the resource
        process.add_missing_ids((False, False, True), ids)
        self.assertNotIn(res_id, process.cache)
        process.cache.clear()

        process.add_missing_ids((False, False, False), ids)
        self.assertIn(conn_id, process.cache)
        process.cache.clear()

        with self.assertRaises(KeyError):
            process.add_missing_ids((True, True, False), {
                "conn_id": conn_id, "comp_id": comp_id})
        process.cache.clear()

    def test_gen_ids(self):
        """gen_ids builds conn/re ids from the event fields and leaves
        re_id None when the event has no resource."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"

        event = create_event(conn_id, conn_name, comp_id, re_id)
        event_re_none = create_event(conn_id, conn_name, comp_id, None)

        expected = {"comp_id": comp_id,
                    "conn_id": "{0}/{1}".format(conn_id, conn_name),
                    "re_id": "{0}/{1}".format(re_id, comp_id)}
        expected_re_none = {"comp_id": comp_id,
                            "conn_id": "{0}/{1}".format(conn_id, conn_name),
                            "re_id": None}

        self.assertEqual(process.gen_ids(event), expected)
        self.assertEqual(process.gen_ids(event_re_none), expected_re_none)

    def test_update_context_case1(self):
        """Case 1: connector, component and resource are all created."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"
        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(event)

        impact = sorted([ids["comp_id"], ids["re_id"]])
        expected_conn = ContextGraph.create_entity_dict(ids["conn_id"],
                                                        conn_name,
                                                        "connector",
                                                        impact=impact)

        depends = sorted([ids["conn_id"], ids["re_id"]])
        expected_comp = ContextGraph.create_entity_dict(ids["comp_id"],
                                                        comp_id,
                                                        "component",
                                                        depends=depends)

        impact = [ids["comp_id"]]
        depends = [ids["conn_id"]]
        expected_re = ContextGraph.create_entity_dict(ids["re_id"],
                                                      re_id,
                                                      "resource",
                                                      impact=impact,
                                                      depends=depends)

        res = process.update_context_case1(ids, event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case1_re_none(self):
        """Case 1 without a resource: only connector and component."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        event = create_event(conn_id, conn_name, comp_id)
        ids = process.gen_ids(event)

        expected_conn = ContextGraph.create_entity_dict(
            ids["conn_id"], conn_name, "connector",
            impact=[ids["comp_id"]])
        expected_comp = ContextGraph.create_entity_dict(
            ids["comp_id"], comp_id, "component",
            depends=[ids["conn_id"]])

        res = process.update_context_case1_re_none(ids, event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_conn, result_conn)

    def test_update_context_case2(self):
        """Case 2: the connector already exists in the context."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"
        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(create_event(conn_id, conn_name,
                                           comp_id, re_id))

        expected_conn = ContextGraph.create_entity_dict(
            ids["conn_id"], conn_name, "connector",
            impact=sorted([ids["comp_id"], ids["re_id"]]))
        expected_comp = ContextGraph.create_entity_dict(
            ids["comp_id"], comp_id, "component",
            depends=sorted([ids["conn_id"], ids["re_id"]]))
        expected_re = ContextGraph.create_entity_dict(
            ids["re_id"], re_id, "resource",
            impact=[ids["comp_id"]], depends=[ids["conn_id"]])

        conn = ContextGraph.create_entity_dict(
            "{0}/{1}".format(conn_id, conn_name), conn_name, "connector",
            impact=[], depends=[])

        res = process.update_context_case2(ids, [conn], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case2_re_none(self):
        """Case 2 without a resource."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        event = create_event(conn_id, conn_name, comp_id)
        ids = process.gen_ids(create_event(conn_id, conn_name, comp_id))

        expected_conn = ContextGraph.create_entity_dict(
            ids["conn_id"], conn_name, "connector",
            impact=sorted([ids["comp_id"]]))
        expected_comp = ContextGraph.create_entity_dict(
            ids["comp_id"], comp_id, "component",
            depends=sorted([ids["conn_id"]]))

        conn = ContextGraph.create_entity_dict(
            "{0}/{1}".format(conn_id, conn_name), conn_name, "connector",
            impact=[], depends=[])

        res = process.update_context_case2_re_none(ids, [conn], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)

    def test_update_context_case3(self):
        """Case 3: connector and component already exist."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"
        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(create_event(conn_id, conn_name,
                                           comp_id, re_id))

        expected_conn = ContextGraph.create_entity_dict(
            ids["conn_id"], conn_name, "connector",
            impact=sorted([ids["comp_id"], ids["re_id"]]))
        expected_comp = ContextGraph.create_entity_dict(
            ids["comp_id"], comp_id, "component",
            depends=sorted([ids["conn_id"], ids["re_id"]]))
        expected_re = ContextGraph.create_entity_dict(
            ids["re_id"], re_id, "resource",
            impact=[ids["comp_id"]], depends=[ids["conn_id"]])

        conn = ContextGraph.create_entity_dict(
            "{0}/{1}".format(conn_id, conn_name), conn_name, "connector",
            impact=[comp_id], depends=[])
        comp = ContextGraph.create_entity_dict(
            comp_id, comp_id, "component",
            impact=[], depends=["{0}/{1}".format(conn_id, conn_name)])

        res = process.update_context_case3(ids, [conn, comp], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case6(self):
        """Case 6: component (and possibly resource) exist; only the
        connector is created. Exercised with and without a resource."""
        ids2 = {
            're_id': None,
            'conn_id': 'conn_1',
            'comp_id': 'comp_1'
        }
        ids1 = {
            're_id': 're_1',
            'conn_id': 'conn_1',
            'comp_id': 'comp_1'
        }
        in_db_1 = [
            {
                '_id': 're_1',
                'name': 're_1',
                'type': 'resource',
                'impact': ['comp_1'],
                'depends': []
            },
            {
                '_id': 'comp_1',
                'name': 'comp_1',
                'type': 'component',
                'impact': [],
                'depends': ['re_1']}]
        in_db_2 = [{
            '_id': 'comp_1',
            'name': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': []}]

        event = create_event("conn_1", "conn_1", "comp_1")

        res_1 = process.update_context_case6(ids1, in_db_1, event)
        res_2 = process.update_context_case6(ids2, in_db_2, event)

        # Pick each entity type out of the result lists.
        comp_res_1 = None
        conn_res_1 = None
        re_res_1 = None
        comp_res_2 = None
        conn_res_2 = None
        re_res_2 = None
        for i in res_1:
            if i['type'] == 'component':
                comp_res_1 = i
            if i['type'] == 'resource':
                re_res_1 = i
            if i['type'] == 'connector':
                conn_res_1 = i
        for i in res_2:
            if i['type'] == 'component':
                comp_res_2 = i
            if i['type'] == 'resource':
                re_res_2 = i
            if i['type'] == 'connector':
                conn_res_2 = i

        # Sort every list field so comparisons are order-insensitive.
        for i in comp_res_1:
            if isinstance(comp_res_1[i], list):
                comp_res_1[i] = sorted(comp_res_1[i])
        for i in conn_res_1:
            if isinstance(conn_res_1[i], list):
                conn_res_1[i] = sorted(conn_res_1[i])
        for i in re_res_1:
            if isinstance(re_res_1[i], list):
                re_res_1[i] = sorted(re_res_1[i])
        for i in comp_res_2:
            if isinstance(comp_res_2[i], list):
                comp_res_2[i] = sorted(comp_res_2[i])
        for i in conn_res_2:
            if isinstance(conn_res_2[i], list):
                conn_res_2[i] = sorted(conn_res_2[i])

        expected_comp_res_1 = {
            '_id': 'comp_1',
            'name': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': sorted(['re_1', 'conn_1']),
        }
        expected_re_res_1 = {
            '_id': 're_1',
            'name': 're_1',
            'type': 'resource',
            'impact': ['comp_1'],
            'depends': ['conn_1'],
        }
        expected_conn_res_1 = {
            '_id': 'conn_1',
            'name': 'conn_1',
            'type': 'connector',
            'impact': sorted(['comp_1', 're_1']),
            'depends': [],
            'measurements': {},
            'infos': {}}

        self.assertDictEqual(expected_comp_res_1, comp_res_1)
        self.assertDictEqual(expected_re_res_1, re_res_1)

        # The new connector carries non-deterministic enable fields;
        # check them, then drop them before the dict comparison.
        self.assertTrue(conn_res_1["enabled"])
        self.assertIn("enable_history", conn_res_1)
        self.assertIs(type(conn_res_1["enable_history"][0]), int)
        del conn_res_1["enabled"]
        del conn_res_1["enable_history"]
        self.assertDictEqual(expected_conn_res_1, conn_res_1)

        self.assertDictEqual(comp_res_2, {
            '_id': 'comp_1',
            'name': 'comp_1',
            'type': 'component',
            'impact': [],
            'depends': sorted(['conn_1'])})
        # No resource in the second scenario.
        self.assertEqual(re_res_2, None)

        self.assertTrue(conn_res_2["enabled"])
        self.assertIn("enable_history", conn_res_2)
        self.assertIs(type(conn_res_2["enable_history"][0]), int)
        del conn_res_2["enabled"]
        del conn_res_2["enable_history"]
        self.assertDictEqual(conn_res_2, {'_id': 'conn_1',
                                          'name': 'conn_1',
                                          'type': 'connector',
                                          'impact': sorted(['comp_1']),
                                          'depends': [],
                                          'measurements': {},
                                          'infos': {}})

    def test_update_context_case5(self):
        """Case 5: only the component already exists."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"
        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(create_event(conn_id, conn_name,
                                           comp_id, re_id))

        expected_conn = ContextGraph.create_entity_dict(
            ids["conn_id"], conn_name, "connector",
            impact=sorted([ids["comp_id"], ids["re_id"]]))
        expected_comp = ContextGraph.create_entity_dict(
            ids["comp_id"], comp_id, "component",
            impact=[], depends=sorted([ids["conn_id"], ids["re_id"]]))
        expected_re = ContextGraph.create_entity_dict(
            ids["re_id"], re_id, "resource",
            impact=[ids["comp_id"]], depends=[ids["conn_id"]])

        comp = ContextGraph.create_entity_dict(
            comp_id, comp_id, "component", impact=[], depends=[])

        res = process.update_context_case5(ids, [comp], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)
        self.assertDictEqual(expected_re, result_re)

    def test_update_context_case5_re_none(self):
        """Case 5 without a resource."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        ids = process.gen_ids(create_event(conn_id, conn_name, comp_id))
        event = create_event(conn_id, conn_name, comp_id)

        expected_conn = ContextGraph.create_entity_dict(
            ids["conn_id"], conn_name, "connector",
            impact=[ids["comp_id"]])
        expected_comp = ContextGraph.create_entity_dict(
            ids["comp_id"], comp_id, "component",
            impact=[], depends=[ids["conn_id"]])

        comp = ContextGraph.create_entity_dict(
            comp_id, comp_id, "component", impact=[], depends=[])

        res = process.update_context_case5(ids, [comp], event)
        result_conn, result_comp, result_re = prepare_test_update_context(res)

        self.assertDictEqual(expected_conn, result_conn)
        self.assertDictEqual(expected_comp, result_comp)

    def test_info_field(self):
        """update_context persists entities with an "infos" field built
        from the event."""
        conn_id = "conn_id"
        conn_name = "conn_name"
        comp_id = "comp_id"
        re_id = "re_id"
        event = create_event(conn_id, conn_name, comp_id, re_id)
        ids = process.gen_ids(event)

        expected_conn = ContextGraph.create_entity_dict(
            ids["conn_id"], conn_name, "connector",
            impact=sorted([ids["comp_id"], ids["re_id"]]), infos={})
        expected_comp = ContextGraph.create_entity_dict(
            ids["comp_id"], comp_id, "component",
            depends=sorted([ids["conn_id"], ids["re_id"]]), infos={})
        expected_re = ContextGraph.create_entity_dict(
            ids["re_id"], re_id, "resource",
            impact=[ids["comp_id"]], depends=[ids["conn_id"]], infos={})

        process.update_context((False, False, False), ids, [], event)

        result_re = self.gctx_man.get_entities_by_id(ids["re_id"])[0]
        result_conn = self.gctx_man.get_entities_by_id(ids["conn_id"])[0]
        result_comp = self.gctx_man.get_entities_by_id(ids["comp_id"])[0]

        # A "links" field is added to the entities returned by
        # get_entities_by_id. It is not relevant to this test and its
        # value fluctuates with the configuration, so delete it from
        # the returned entities before comparing.
        del result_comp["links"]
        del result_conn["links"]
        del result_re["links"]

        self.assertEqualEntities(expected_re, result_re)
        self.assertEqualEntities(expected_conn, result_conn)
        self.assertEqualEntities(expected_comp, result_comp)
class BaseTest(TestCase):
    """Common scaffolding for ContextGraph manager tests: wires the
    manager to throw-away test storages and provides entity-comparison
    helpers for subclasses."""

    def setUp(self):
        self.manager = ContextGraph(logger)
        self.entities_storage = Middleware.get_middleware_by_uri(
            'storage-default-testentities://'
        )
        self.organisations_storage = Middleware.get_middleware_by_uri(
            'storage-default-testorganisations://'
        )
        self.users_storage = Middleware.get_middleware_by_uri(
            'storage-default-testusers://'
        )

        self.manager.ent_storage = self.entities_storage

        # Minimal connector entity used as a template by subclasses.
        self.template = {'_id': None,
                         'type': 'connector',
                         'name': 'conn-name1',
                         'depends': [],
                         'impact': [],
                         'measurements': [],
                         'links': {},
                         'infos': {}}

    def tearDown(self):
        # Drop everything written to the test storages during the test.
        self.entities_storage.remove_elements()
        self.organisations_storage.remove_elements()
        self.users_storage.remove_elements()

    def assertEqualEntities(self, entity1, entity2):
        """Assert two entities are equal, ignoring the order of their
        "depends" and "impact" link lists.

        BUGFIX: the original called sorted(...) and discarded the
        returned list, leaving the comparison order-sensitive. Sort
        the lists in place instead (matching the intent of the sibling
        Test.assertEqualEntities helper).
        """
        entity1["depends"].sort()
        entity1["impact"].sort()
        entity2["depends"].sort()
        entity2["impact"].sort()

        self.assertDictEqual(entity1, entity2)

    def _insertion_filter_test(self, function, expected=None):
        """Apply *function* to a fresh resource entity and check that
        the stored result equals *expected* (default: the entity)."""
        infos = {}
        entity = create_entity("id", "a name", "resource", infos=infos)
        if expected is None:
            expected = entity.copy()

        function(entity)

        result = self.manager.get_entities_by_id("id")[0]
        # pop non-predictable fields
        try:
            result.pop("enable_history")
        except KeyError:
            pass

        self.assertEqualEntities(result, expected)

    def _insertion_filter_test_not_allowed_field(self, function,
                                                 expected=None):
        """Like _insertion_filter_test, but the entity carries infos
        keys that are not in the authorized list."""
        infos = {"I am not allowed to be here": [1],
                 "me too": [1]}
        entity = create_entity("id", "a name", "resource", infos=infos)
        if expected is None:
            expected = entity.copy()

        function(entity)

        result = self.manager.get_entities_by_id("id")[0]
        # pop non-predictable fields
        try:
            result.pop("enable_history")
        except KeyError:
            pass

        self.assertEqualEntities(result, expected)
def setUp(self):
    """Build a full alarm-computation fixture: a pbehavior manager, an
    Alerts manager wired to test storages, one "morticia" resource
    entity and one alarm on it raised to state 2."""
    super(ComputeState, self).setUp()
    pbehavior_storage = Middleware.get_middleware_by_uri(
        'storage-default-testpbehavior://'
    )
    filter_storage = Middleware.get_middleware_by_uri(
        'storage-default-testalarmfilter://'
    )
    config_storage = Middleware.get_middleware_by_uri(
        'storage-default-testconfig://'
    )
    # Seed the status-management configuration the Alerts manager reads.
    config_storage.put_element(
        element={
            '_id': 'test_config',
            'crecord_type': 'statusmanagement',
            'bagot_time': 3600,
            'bagot_freq': 10,
            'stealthy_time': 300,
            'restore_event': True,
            'auto_snooze': False,
            'snooze_default_time': 300,
        },
        _id='test_config'
    )
    logger = Logger.get('test_pb', None, output_cls=OutputNull)
    config = Configuration.load(PBehaviorManager.CONF_PATH, Ini)
    self.pbm = PBehaviorManager(config=config,
                                logger=logger,
                                pb_storage=pbehavior_storage)
    # self.context_graph_manager / self.manager / self.alerts_storage
    # come from the parent setUp (see super() call above).
    self.pbm.context = self.context_graph_manager
    self.manager.pbehavior_manager = self.pbm

    conf = Configuration.load(Alerts.CONF_PATH, Ini)
    filter_ = {'crecord_type': 'statusmanagement'}
    config_data = EtherealData(collection=config_storage._backend,
                               filter_=filter_)

    # The stat event publisher is mocked: no events leave the test.
    event_publisher = Mock(spec=StatEventPublisher)

    self.alert_manager = Alerts(config=conf,
                                logger=logger,
                                alerts_storage=self.alerts_storage,
                                config_data=config_data,
                                filter_storage=filter_storage,
                                context_graph=self.context_graph_manager,
                                watcher=self.manager,
                                event_publisher=event_publisher)

    # Creating the entity
    self.type_ = 'resource'
    self.name = 'morticia'
    entity = ContextGraph.create_entity_dict(
        id=self.name,
        etype=self.type_,
        name=self.name
    )
    self.context_graph_manager.create_entity(entity)

    # Creating the corresponding alarm, raised to state 2 and persisted.
    event = {
        'connector': self.type_,
        'connector_name': 'connector_name',
        'component': self.name,
        'output': 'tadaTaDA tic tic',
        'timestamp': 0
    }
    alarm = self.alert_manager.make_alarm(self.name, event)
    self.state = 2
    alarm = self.alert_manager.update_state(alarm, self.state, event)
    new_value = alarm[self.alert_manager.alerts_storage.VALUE]
    self.alert_manager.update_current_alarm(alarm, new_value)