def setUp(self):
    """Per-test fixture: fresh config, DB, graph, processor and evaluator.

    Order matters: options must be registered before the DB and the
    datasources are loaded, and the processor must exist before the
    evaluator and the consistency enforcer are wired to its graph.
    """
    super(TestConsistencyFunctional, self).setUp()
    self.conf_reregister_opts(self.CONSISTENCY_OPTS, 'consistency')
    self.conf_reregister_opts(self.EVALUATOR_OPTS, 'evaluator')
    self.add_db()
    self.load_datasources()

    self.graph = NXGraph("Entity Graph")
    self.processor = Processor(self.graph)
    self.event_queue = queue.Queue()

    def actions_callback(event_type, data):
        """Mock notify method

        Mocks vitrage.messaging.VitrageNotifier.notify(event_type, data)

        :param event_type: is currently always the same and is ignored
        :param data:
        """
        self.event_queue.put(data)

    scenario_repo = ScenarioRepository()
    self.evaluator = ScenarioEvaluator(self.processor.entity_graph,
                                       scenario_repo,
                                       actions_callback)
    self.consistency_enforcer = ConsistencyEnforcer(
        self.processor.entity_graph,
        actions_callback)
def test_delete_placeholder_vertex(self):
    """delete_placeholder_vertex removes placeholders, keeps real vertices."""
    entity_graph = NXGraph("Entity Graph")

    # a placeholder vertex (is_placeholder=True) must be removed
    vertex = self._update_vertex_to_graph(entity_graph,
                                          EntityCategory.RESOURCE,
                                          'INSTANCE', '12345',
                                          False, True, {})
    PUtils.delete_placeholder_vertex(entity_graph, vertex)
    vertex = entity_graph.get_vertex(vertex.vertex_id)
    # idiomatic unittest assertion instead of assertTrue(x is None)
    self.assertIsNone(vertex)

    # a non-placeholder vertex must be left in the graph untouched
    vertex = self._update_vertex_to_graph(entity_graph,
                                          EntityCategory.RESOURCE,
                                          'INSTANCE', '12345',
                                          False, False, {})
    PUtils.delete_placeholder_vertex(entity_graph, vertex)
    vertex = entity_graph.get_vertex(vertex.vertex_id)
    self.assertIsNotNone(vertex)
def _read_db_graph(self):
    """Load the entity graph from the latest DB snapshot and replay events.

    Reads the stored gpickle snapshot into self._entity_graph, applies all
    events persisted after the snapshot's event_id, then marks the graph
    as ready for consumers.
    """
    db = storage.get_connection_from_config(self._conf)
    graph_snapshot = db.graph_snapshots.query()
    NXGraph.read_gpickle(graph_snapshot.graph_snapshot, self._entity_graph)
    GraphPersistency.do_replay_events(db, self._entity_graph,
                                      graph_snapshot.event_id)
    self._entity_graph.ready = True
def _restart_from_stored_graph(self, graph_snapshot):
    """Rebuild the in-memory graph from a DB snapshot after a restart.

    :param graph_snapshot: DB row holding the pickled graph and the id of
        the last event folded into it; later events are replayed on top.
    """
    LOG.info('Main process - loading graph from database snapshot (%sKb)',
             len(graph_snapshot.graph_snapshot) / 1024)
    NXGraph.read_gpickle(graph_snapshot.graph_snapshot, self.graph)
    self.persist.replay_events(self.graph, graph_snapshot.event_id)
    # transformer id cache refers to vertices, so refresh after reload
    self._recreate_transformers_id_cache()
    LOG.info("%s vertices loaded", self.graph.num_vertices())
    self.subscribe_presist_notifier()
def __init__(self, worker_id, conf, task_queues):
    """Set up a graph-clone worker: its own task queue and an empty graph."""
    super(GraphCloneWorkerBase, self).__init__(worker_id, conf)
    self._conf = conf
    self._entity_graph = NXGraph()
    # each worker consumes only the queue matching its id
    my_queue = task_queues[worker_id]
    self._task_queue = my_queue
def test_scenario_3(self):
    """Scenario 3: 'mark_down' on host, matched by two condition subgraphs.

    The second subgraph carries a negated relationship: the 'on' edge
    between the host ssh alarm and the host has vitrage_is_deleted=True
    and negative_condition=True.
    """
    observed_scenarios = self._load_scenarios('valid_actions.yaml')
    expected_scenario = Scenario(
        'valid actions-scenario3',
        '3',
        None,
        [
            ActionSpecs('valid actions-scenario3-action0',
                        'mark_down',
                        {'target': 'host'},
                        {}),
        ],
        [
            NXGraph(vertices=[
                Vertex(
                    'instance',
                    {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.instance',
                        'vitrage_is_deleted': False,
                    }),
                Vertex(
                    'host',
                    {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.host',
                        'vitrage_is_deleted': False,
                    }),
            ],
                edges=[
                    Edge(
                        'host', 'instance', 'contains',
                        {
                            'vitrage_is_deleted': False,
                            'negative_condition': False
                        })
                ]),
            NXGraph(vertices=[
                Vertex(
                    'host_ssh_alarm',
                    {
                        'rawtext': 'host ssh is down',
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'zabbix',
                        'vitrage_is_deleted': False,
                    }),
                Vertex(
                    'host',
                    {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.host',
                        'vitrage_is_deleted': False,
                    }),
            ],
                edges=[
                    Edge(
                        'host_ssh_alarm', 'host', 'on',
                        {
                            'vitrage_is_deleted': True,
                            'negative_condition': True,
                        }),
                ]),
        ],
        TemplateLoaderV3Test.expected_entities,
        TemplateLoaderV3Test.expected_relationships)
    self._assert_scenario_equal(expected_scenario, observed_scenarios[3])
def test_scenario_4(self):
    """Scenario 4: 'mark_down' on host with a single-vertex condition."""
    observed_scenarios = self._load_scenarios('valid_actions.yaml')
    expected_scenario = Scenario(
        'valid actions-scenario4',
        '3',
        None,
        [
            ActionSpecs(
                'valid actions-scenario4-action0',
                'mark_down',
                {'target': 'host'},
                {}),
        ],
        [
            NXGraph(
                vertices=[
                    Vertex('host', {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.host',
                        'vitrage_is_deleted': False,
                    }),
                ]),
        ],
        TemplateLoaderV3Test.expected_entities,
        TemplateLoaderV3Test.expected_relationships)
    self._assert_scenario_equal(
        expected_scenario, observed_scenarios[4])
def setUpClass(cls):
    """One-time fixture: config, DB, datasources, processor and evaluator.

    Options must be registered before the DB is added and the datasources
    loaded; the processor must exist before the evaluator/enforcer are
    wired to its graph.
    """
    super(TestConsistencyFunctional, cls).setUpClass()
    cls.conf = cfg.ConfigOpts()
    cls.conf.register_opts(cls.CONSISTENCY_OPTS, group='consistency')
    cls.conf.register_opts(cls.PROCESSOR_OPTS, group='entity_graph')
    cls.conf.register_opts(cls.EVALUATOR_OPTS, group='evaluator')
    cls.conf.register_opts(cls.DATASOURCES_OPTS, group='datasources')
    cls.add_db(cls.conf)
    cls.load_datasources(cls.conf)

    cls.graph = NXGraph("Entity Graph")
    cls.processor = Processor(cls.conf, cls.graph)
    cls.event_queue = queue.Queue()

    def actions_callback(event_type, data):
        """Mock notify method

        Mocks vitrage.messaging.VitrageNotifier.notify(event_type, data)

        :param event_type: is currently always the same and is ignored
        :param data:
        """
        cls.event_queue.put(data)

    scenario_repo = ScenarioRepository(cls.conf)
    cls.evaluator = ScenarioEvaluator(cls.conf,
                                      cls.processor.entity_graph,
                                      scenario_repo,
                                      actions_callback)
    cls.consistency_enforcer = ConsistencyEnforcer(
        cls.conf,
        cls.processor.entity_graph,
        actions_callback)
def test_scenario_0(self):
    """Scenario 0: four actions triggered by a zabbix ssh alarm on a host."""
    observed_scenarios = self._load_scenarios('valid_actions.yaml')
    expected_scenario = Scenario(
        'valid actions-scenario0',
        '3',
        None,
        [
            ActionSpecs(
                'valid actions-scenario0-action0',
                'set_state',
                {'target': 'host'},
                {'state': 'ERROR'}),
            ActionSpecs(
                'valid actions-scenario0-action1',
                'raise_alarm',
                {'target': 'host'},
                {'severity': 'WARNING',
                 'alarm_name': 'ddd'}),
            ActionSpecs(
                'valid actions-scenario0-action2',
                'mark_down',
                {'target': 'host'},
                {}),
            ActionSpecs(
                'valid actions-scenario0-action3',
                'execute_mistral',
                {'target': 'host'},
                {'input': {'farewell': 'get_attr(host, name) bla bla'},
                 'workflow': 'wf_1234'}),
        ],
        [
            NXGraph(
                vertices=[
                    Vertex('host_ssh_alarm', {
                        'rawtext': 'host ssh is down',
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'zabbix',
                        'vitrage_is_deleted': False,
                    }),
                    Vertex('host', {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.host',
                        'vitrage_is_deleted': False,
                    })
                ],
                edges=[
                    Edge('host_ssh_alarm', 'host', 'on', {
                        'vitrage_is_deleted': False,
                        'negative_condition': False
                    })
                ])
        ],
        TemplateLoaderV3Test.expected_entities,
        TemplateLoaderV3Test.expected_relationships)
    self._assert_scenario_equal(
        expected_scenario, observed_scenarios[0])
def create_graph(self):
    """Build a synthetic entity graph for tests.

    Topology: a cluster root containing zones -> hosts -> instances, with
    zabbix alarms on hosts, ports/volumes/vitrage alarms around instances,
    networks wired to ports round-robin, plus disconnected tripleo
    controllers carrying their own zabbix alarms.
    """
    graph = NXGraph()
    v1 = self._file_to_vertex('openstack-cluster.json')
    graph.add_vertex(v1)
    networks = self._create_n_vertices(graph,
                                       self._num_of_networks,
                                       'neutron.network.json')
    zones = self._create_n_neighbors(graph,
                                     self._num_of_zones_per_cluster,
                                     [v1],
                                     'nova.zone.json',
                                     'contains.json')
    hosts = self._create_n_neighbors(graph,
                                     self._num_of_hosts_per_zone,
                                     zones,
                                     'nova.host.json',
                                     'contains.json')
    self._create_n_neighbors(graph,
                             self._num_of_zabbix_alarms_per_host,
                             hosts,
                             'zabbix.json',
                             'on.json',
                             Direction.IN)
    instances = self._create_n_neighbors(graph,
                                         self._num_of_instances_per_host,
                                         hosts,
                                         'nova.instance.json',
                                         'contains.json')
    ports = self._create_n_neighbors(graph,
                                     self._num_of_ports_per_instance,
                                     instances,
                                     'neutron.port.json',
                                     'attached.json',
                                     direction=Direction.IN)
    self._round_robin_edges(graph, networks, ports, 'contains.json')
    self._create_n_neighbors(graph,
                             self._num_of_volumes_per_instance,
                             instances,
                             'cinder.volume.json',
                             'attached.json',
                             Direction.IN)
    self._create_n_neighbors(graph,
                             self._num_of_vitrage_alarms_per_instance,
                             instances,
                             'vitrage.alarm.json',
                             'on.json',
                             Direction.IN)

    # Also create non connected components:
    tripleo_controller = \
        self._create_n_vertices(graph,
                                self._num_of_tripleo_controllers,
                                'tripleo.controller.json')
    self._create_n_neighbors(graph,
                             self._num_of_zabbix_alarms_per_controller,
                             tripleo_controller,
                             'zabbix.json',
                             'on.json',
                             Direction.IN)
    return graph
def test_scenario_1(self):
    """Scenario 1: causal link between two zabbix alarms on the same host."""
    observed_scenarios = self._load_scenarios('valid_actions.yaml')
    expected_scenario = Scenario(
        'valid actions-scenario1',
        '3',
        None,
        [
            ActionSpecs(
                'valid actions-scenario1-action0',
                'add_causal_relationship',
                {
                    'target': 'host_ssh_alarm',
                    'source': 'host_network_alarm',
                },
                {}),
        ],
        [
            NXGraph(
                vertices=[
                    Vertex('host_ssh_alarm', {
                        'rawtext': 'host ssh is down',
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'zabbix',
                        'vitrage_is_deleted': False,
                    }),
                    Vertex('host_network_alarm', {
                        'rawtext': 'host network interface is down',
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'zabbix',
                        'vitrage_is_deleted': False,
                    }),
                    Vertex('host', {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.host',
                        'vitrage_is_deleted': False,
                    })
                ],
                edges=[
                    Edge('host_ssh_alarm', 'host', 'on', {
                        'vitrage_is_deleted': False,
                        'negative_condition': False
                    }),
                    Edge('host_network_alarm', 'host', 'on', {
                        'vitrage_is_deleted': False,
                        'negative_condition': False
                    })
                ])
        ],
        TemplateLoaderV3Test.expected_entities,
        TemplateLoaderV3Test.expected_relationships)
    self._assert_scenario_equal(
        expected_scenario, observed_scenarios[1])
def __init__(self, conf, initialization_status, e_graph=None, uuid=False):
    """Create a Processor, building a fresh entity graph when none is given.

    :param conf: vitrage configuration
    :param initialization_status: shared init-status object
    :param e_graph: optional pre-built entity graph to operate on
    :param uuid: passed to NXGraph when a new graph is created
    """
    super(Processor, self).__init__()
    self.conf = conf
    self.transformer_manager = TransformerManager(self.conf)
    self.state_manager = DatasourceInfoMapper(self.conf)
    self._initialize_events_actions()
    self.initialization_status = initialization_status
    # fall back to a brand-new graph only when the caller supplied none
    if e_graph is None:
        e_graph = NXGraph("Entity Graph", uuid=uuid)
    self.entity_graph = e_graph
    self._notifier = GraphNotifier(conf)
def create_graph(name, root_id=None):
    """Build and return a graph driver instance.

    NXGraph is currently the only available implementation.

    :param name: name for the new graph
    :type name: str
    :param root_id: optional id of the root vertex
    :rtype: Graph
    """
    graph = NXGraph(name, root_id)
    return graph
def from_clause(cls, clause, extract_var):
    """Build a condition subgraph from a single clause of condition terms.

    :param clause: iterable of terms, each exposing `symbol_name` and a
        `positive` flag
    :param extract_var: callable mapping a symbol name to
        (variable, var_type) where var_type is ENTITY or a relationship
    :return: NXGraph holding the clause's entities and relationships
    """
    condition_g = NXGraph("scenario condition")

    for term in clause:
        variable, var_type = extract_var(term.symbol_name)
        if var_type == ENTITY:
            vertex = variable.copy()
            vertex[VProps.VITRAGE_IS_DELETED] = False
            vertex[VProps.VITRAGE_IS_PLACEHOLDER] = False
            condition_g.add_vertex(vertex)
        else:  # type = relationship
            # prevent overwritten of NEG_CONDITION and
            # VITRAGE_IS_DELETED property when there are both "not A"
            # and "A" in same template
            edge_desc = cls._copy_edge_desc(variable)
            cls._set_edge_relationship_info(edge_desc, term.positive)
            cls._add_edge_relationship(condition_g, edge_desc)

    return condition_g
def test_mark_vertex_as_deleted(self):
    """mark_deleted should flip a vertex's vitrage_is_deleted flag."""
    entity_graph = NXGraph("Entity Graph")

    # add a single placeholder instance vertex
    vertex = self._update_vertex_to_graph(
        entity_graph, 'RESOURCE', 'INSTANCE', '12345', False, True, {})

    # not deleted before marking, deleted afterwards
    self.assertFalse(PUtils.is_deleted(vertex))
    PUtils.mark_deleted(entity_graph, vertex)
    self.assertTrue(PUtils.is_deleted(vertex))
def create_processor_with_graph(self):
    """Build a Processor and feed it the standard mock event stream."""
    conf = cfg.ConfigOpts()
    conf.register_opts(self.PROCESSOR_OPTS, group='entity_graph')
    mock_events = self._create_mock_events()
    entity_graph = NXGraph("Entity Graph")
    initialization = VitrageInit(conf)
    processor = proc.Processor(conf, initialization, entity_graph)

    # replay every mock event so the graph is fully populated
    for mock_event in mock_events:
        processor.process_event(mock_event)

    return processor
def get_rca(self, ctx, root, all_tenants):
    """Return the root-cause-analysis graph for an alarm as json.

    :param ctx: request context holding tenant and is-admin information
    :param root: vitrage_id of the alarm to inspect
    :param all_tenants: when true, skip tenant scoping of the query
    :return: json graph with inspected_index pointing at `root`
    """
    LOG.debug("RcaApis get_rca - root: %s, all_tenants=%s",
              root, all_tenants)

    project_id = ctx.get(TenantProps.TENANT, None)
    is_admin_project = ctx.get(TenantProps.IS_ADMIN, False)

    if all_tenants:
        db_nodes, db_edges = self.db.history_facade.alarm_rca(root)
    else:
        db_nodes, db_edges = self.db.history_facade.alarm_rca(
            root,
            project_id=project_id,
            admin=is_admin_project)

    for n in db_nodes:
        # expose stored timestamps as UTC-aware strings in the payload
        start_timestamp = \
            self.db.history_facade.add_utc_timezone(n.start_timestamp)
        n.payload[HProps.START_TIMESTAMP] = str(start_timestamp)
        # an end_timestamp in the past means the alarm is already inactive
        if n.end_timestamp <= db_time():
            end_timestamp = \
                self.db.history_facade.add_utc_timezone(n.end_timestamp)
            n.payload[HProps.END_TIMESTAMP] = str(end_timestamp)
            # TODO(annarez): implement state change in processor and DB
            n.payload[VProps.STATE] = AProps.INACTIVE_STATE

    vertices = [Vertex(vertex_id=n.vitrage_id, properties=n.payload)
                for n in db_nodes]
    edges = [Edge(source_id=e.source_id,
                  target_id=e.target_id,
                  label=e.label,
                  properties=e.payload) for e in db_edges]

    rca_graph = NXGraph(vertices=vertices, edges=edges)
    json_graph = rca_graph.json_output_graph(
        inspected_index=self._find_rca_index(rca_graph, root))

    return json_graph
def test_scenario_2(self):
    """Scenario 2: raise_alarm + set_state on an instance of an alarmed host."""
    observed_scenarios = self._load_scenarios('valid_actions.yaml')
    expected_scenario = Scenario(
        'valid actions-scenario2',
        '3',
        None,
        [
            ActionSpecs(
                'valid actions-scenario2-action0',
                'raise_alarm',
                {'target': 'instance'},
                {
                    'severity': 'WARNING',
                    'alarm_name': 'instance is down',
                    'causing_alarm':
                        'get_attr(host_ssh_alarm, vitrage_id)',
                }),
            ActionSpecs('valid actions-scenario2-action1',
                        'set_state',
                        {'target': 'instance'},
                        {'state': 'SUBOPTIMAL'}),
        ],
        [
            NXGraph(vertices=[
                Vertex(
                    'host_ssh_alarm',
                    {
                        'rawtext': 'host ssh is down',
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'zabbix',
                        'vitrage_is_deleted': False,
                    }),
                Vertex(
                    'instance',
                    {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.instance',
                        'vitrage_is_deleted': False,
                    }),
                Vertex(
                    'host',
                    {
                        'vitrage_is_placeholder': False,
                        'vitrage_type': 'nova.host',
                        'vitrage_is_deleted': False,
                    }),
            ],
                edges=[
                    Edge(
                        'host_ssh_alarm', 'host', 'on',
                        {
                            'vitrage_is_deleted': False,
                            'negative_condition': False
                        }),
                    Edge(
                        'host', 'instance', 'contains',
                        {
                            'vitrage_is_deleted': False,
                            'negative_condition': False
                        })
                ])
        ],
        TemplateLoaderV3Test.expected_entities,
        TemplateLoaderV3Test.expected_relationships)
    self._assert_scenario_equal(expected_scenario, observed_scenarios[2])
def test_mark_edge_as_deleted(self):
    """mark_deleted should flip an edge's vitrage_is_deleted flag."""
    entity_graph = NXGraph("Entity Graph")

    # two placeholder vertices joined by a 'contains' edge
    vertex1 = self._update_vertex_to_graph(
        entity_graph, 'RESOURCE', 'INSTANCE', '12345', False, True, {})
    vertex2 = self._update_vertex_to_graph(
        entity_graph, 'RESOURCE', 'HOST', '54321', False, True, {})
    edge = self._update_edge_to_graph(entity_graph, vertex1.vertex_id,
                                      vertex2.vertex_id, 'contains')

    # not deleted before marking, deleted afterwards
    self.assertFalse(PUtils.is_deleted(edge))
    PUtils.mark_deleted(entity_graph, edge)
    self.assertTrue(PUtils.is_deleted(edge))
def topology_to_graph(topology):
    """Convert an API topology dictionary into an NXGraph.

    :param topology: dict with 'nodes' (vertex property dicts, each holding
        a 'vitrage_id') and 'links' (edges given as integer indices into
        'nodes' plus a 'relationship_type')
    :return: NXGraph with one vertex per node and one edge per link
    """
    graph = NXGraph()
    nodes = topology['nodes']
    for n in nodes:
        graph.add_vertex(Vertex(n['vitrage_id'], n))

    # iterate the links directly instead of indexing via range(len(...))
    for link in topology['links']:
        s_id = nodes[link['source']]['vitrage_id']
        t_id = nodes[link['target']]['vitrage_id']
        graph.add_edge(Edge(s_id, t_id, link['relationship_type']))
    return graph
def test_find_neighbor_types(self):
    """find_neighbor_types should count each distinct neighbor type once."""
    entity_graph = NXGraph("Entity Graph")
    entities_details = [('RESOURCE', 'HOST', '1', False, True),
                        ('RESOURCE', 'STORAGE', '2', False, True),
                        ('RESOURCE', 'APPLICATION', '3', False, True),
                        ('RESOURCE', 'STORAGE', '4', False, True),
                        ('ALARM', 'INSTANCE_AT_RISK', '5', False, True)]

    # build neighbors; STORAGE appears twice, so only 4 distinct types
    neighbors = [
        (self._update_vertex_to_graph(entity_graph, category, v_type,
                                      v_id, deleted, placeholder, {}),
         None)
        for category, v_type, v_id, deleted, placeholder
        in entities_details]

    types = PUtils.find_neighbor_types(neighbors)
    self.assertEqual(4, len(types))
def _create_graph_from_graph_dictionary(self, api_graph):
    """Rebuild an NXGraph from the API 'graph' dictionary output.

    Vertices are keyed by their position in the 'nodes' list, which matches
    the integer source/target indices used by the 'links' entries.
    """
    self.assertIsNotNone(api_graph)
    graph = NXGraph()

    # enumerate instead of range(len(...)) indexing
    for i, node in enumerate(api_graph['nodes']):
        graph.add_vertex(Vertex(str(i), node))

    # iterate links directly instead of indexing via range(len(...))
    for link in api_graph['links']:
        graph.add_edge(Edge(str(link['source']),
                            str(link['target']),
                            link[EdgeProperties.RELATIONSHIP_TYPE]))

    return graph
def test_find_neighbor_types(self):
    """find_neighbor_types should count each distinct neighbor type once."""
    entity_graph = NXGraph("Entity Graph")
    entities_details = \
        [(EntityCategory.RESOURCE, 'HOST', '1', False, True),
         (EntityCategory.RESOURCE, 'STORAGE', '2', False, True),
         (EntityCategory.RESOURCE, 'APPLICATION', '3', False, True),
         (EntityCategory.RESOURCE, 'STORAGE', '4', False, True),
         (EntityCategory.ALARM, 'INSTANCE_AT_RISK', '5', False, True)]

    # build neighbors; STORAGE appears twice, so only 4 distinct types
    neighbors = [
        (self._update_vertex_to_graph(entity_graph, category, v_type,
                                      v_id, deleted, placeholder, {}),
         None)
        for category, v_type, v_id, deleted, placeholder
        in entities_details]

    types = PUtils.find_neighbor_types(neighbors)
    self.assertThat(types, matchers.HasLength(4))
def setUpClass(cls):
    """One-time fixture: config, datasources, processor and evaluator.

    Unlike the callback-based variant, this wiring hands the raw
    event_queue to the evaluator and the consistency enforcer.
    """
    super(TestConsistencyFunctional, cls).setUpClass()
    cls.conf = cfg.ConfigOpts()
    cls.conf.register_opts(cls.CONSISTENCY_OPTS, group='consistency')
    cls.conf.register_opts(cls.PROCESSOR_OPTS, group='entity_graph')
    cls.conf.register_opts(cls.EVALUATOR_OPTS, group='evaluator')
    cls.conf.register_opts(cls.DATASOURCES_OPTS, group='datasources')
    cls.load_datasources(cls.conf)

    cls.graph = NXGraph("Entity Graph")
    cls.initialization_status = VitrageInit(cls.conf, cls.graph)
    cls.processor = Processor(cls.conf, cls.initialization_status,
                              cls.graph)
    cls.event_queue = queue.Queue()
    scenario_repo = ScenarioRepository(cls.conf)
    cls.evaluator = ScenarioEvaluator(cls.conf,
                                      cls.processor.entity_graph,
                                      scenario_repo,
                                      cls.event_queue)
    cls.consistency_enforcer = ConsistencyEnforcer(
        cls.conf,
        cls.event_queue,
        cls.processor.entity_graph)
def _create_graph_from_tree_dictionary(self,
                                       api_graph,
                                       graph=None,
                                       ancestor=None):
    """Recursively rebuild an NXGraph from the API 'tree' output.

    Each node becomes a vertex keyed by its vitrage_id; every parent/child
    pair gets a 'label' edge.

    NOTE(review): api_graph.copy() is shallow, so the following
    `del api_graph['children']` mutates the caller's dictionary —
    presumably intentional, to keep children out of the vertex
    properties; confirm callers do not reuse the input afterwards.
    """
    children = []
    graph = NXGraph() if not graph else graph

    if 'children' in api_graph:
        children = api_graph.copy()['children']
        del api_graph['children']

    vertex = Vertex(api_graph[VProps.VITRAGE_ID], api_graph)
    graph.add_vertex(vertex)
    if ancestor:
        graph.add_edge(Edge(ancestor[VProps.VITRAGE_ID],
                            vertex[VProps.VITRAGE_ID],
                            'label'))

    for entity in children:
        self._create_graph_from_tree_dictionary(entity, graph, vertex)

    return graph
def _create_entity_graph(cls, name, num_of_alarms_per_host,
                         num_of_alarms_per_vm,
                         num_of_hosts_per_node,
                         num_of_vms_per_host,
                         num_of_tests_per_host):
    """Build a synthetic node/switch/hosts/vms graph with alarms and tests.

    Id counters (host_alarm_id, host_test_id, vm_id, vm_alarm_id) live on
    the class, so repeated calls continue numbering where the previous
    call stopped.

    :raises VitrageError: when the final graph size does not match the
        analytically expected size
    """
    start = time.time()
    g = NXGraph(name)
    g.add_vertex(v_node)
    g.add_vertex(v_switch)
    g.add_edge(e_node_to_switch)

    # Add Hosts
    for host_id in range(num_of_hosts_per_node):
        host_to_add = add_connected_vertex(g,
                                           RESOURCE,
                                           NOVA_HOST_DATASOURCE,
                                           host_id,
                                           ELabel.CONTAINS,
                                           v_node,
                                           True)

        g.add_edge(graph_utils.create_edge(host_to_add.vertex_id,
                                           v_switch.vertex_id, 'USES'))

        # Add Host Alarms
        for j in range(num_of_alarms_per_host):
            add_connected_vertex(g, ALARM, ALARM_ON_HOST,
                                 cls.host_alarm_id, ELabel.ON,
                                 host_to_add, False,
                                 {VProps.RESOURCE_ID: host_id,
                                  VProps.NAME: host_id})
            cls.host_alarm_id += 1

        # Add Host Tests
        for j in range(num_of_tests_per_host):
            add_connected_vertex(g, TEST, TEST_ON_HOST, cls.host_test_id,
                                 ELabel.ON, host_to_add)
            cls.host_test_id += 1

        # Add Host Vms
        for j in range(num_of_vms_per_host):
            vm_to_add = add_connected_vertex(g,
                                             RESOURCE,
                                             NOVA_INSTANCE_DATASOURCE,
                                             cls.vm_id,
                                             ELabel.CONTAINS,
                                             host_to_add,
                                             True)
            cls.vm_id += 1
            cls.vms.append(vm_to_add)

            # Add Instance Alarms
            for k in range(num_of_alarms_per_vm):
                add_connected_vertex(g, ALARM, ALARM_ON_VM,
                                     cls.vm_alarm_id, ELabel.ON,
                                     vm_to_add, False,
                                     {VProps.RESOURCE_ID: cls.vm_id - 1,
                                      VProps.NAME: cls.vm_id - 1})
                cls.vm_alarm_id += 1

    end = time.time()
    LOG.debug('Graph creation took ' + str(end - start) +
              ' seconds, size is: ' + str(len(g)))

    # sanity check: graph size must match the closed-form expectation
    expected_graph_size = \
        2 + num_of_hosts_per_node + num_of_hosts_per_node * \
        num_of_alarms_per_host + num_of_hosts_per_node * \
        num_of_vms_per_host + num_of_hosts_per_node * \
        num_of_vms_per_host * num_of_alarms_per_vm + \
        num_of_tests_per_host * num_of_hosts_per_node
    if not expected_graph_size == len(g):
        raise VitrageError('Init failed, graph size unexpected {0} != {1}'
                           .format(expected_graph_size, len(g)))
    return g
def _create_new_graph(cls, *args, **kwargs):
    """Instantiate an NXGraph, forwarding all arguments.

    NXGraph is imported lazily to avoid a hard import-time dependency.
    """
    from vitrage.graph.driver.networkx_graph import NXGraph
    # bug fix: unpack positional args — the original passed the whole
    # `args` tuple as NXGraph's first (name) argument
    return NXGraph(*args, **kwargs)
class TestConsistencyFunctional(TestFunctionalBase, TestConfiguration):
    """Functional tests for the ConsistencyEnforcer periodic process."""

    # minimal age (seconds) before a vertex may be deleted
    CONSISTENCY_OPTS = [
        cfg.IntOpt('min_time_to_delete',
                   default=1,
                   min=1),
    ]
    EVALUATOR_OPTS = [
        cfg.StrOpt(
            'templates_dir',
            default=utils.get_resources_dir() + '/templates/consistency',
        ),
        cfg.StrOpt(
            'equivalences_dir',
            default='equivalences',
        ),
        cfg.StrOpt(
            'notifier_topic',
            default='vitrage.evaluator',
        ),
    ]

    def setUp(self):
        """Per-test fixture: config, DB, graph, processor and evaluator."""
        super(TestConsistencyFunctional, self).setUp()
        self.conf_reregister_opts(self.CONSISTENCY_OPTS, 'consistency')
        self.conf_reregister_opts(self.EVALUATOR_OPTS, 'evaluator')
        self.add_db()
        self.load_datasources()

        self.graph = NXGraph("Entity Graph")
        self.processor = Processor(self.graph)
        self.event_queue = queue.Queue()

        def actions_callback(event_type, data):
            """Mock notify method

            Mocks vitrage.messaging.VitrageNotifier.notify(event_type, data)

            :param event_type: is currently always the same and is ignored
            :param data:
            """
            self.event_queue.put(data)

        scenario_repo = ScenarioRepository()
        self.evaluator = ScenarioEvaluator(self.processor.entity_graph,
                                           scenario_repo,
                                           actions_callback)
        self.consistency_enforcer = ConsistencyEnforcer(
            self.processor.entity_graph,
            actions_callback)

    def test_periodic_process(self):
        """End-to-end: outdated entities get deleted or marked deleted."""
        # Setup
        consistency_interval = self.conf.datasources.snapshots_interval
        self._periodic_process_setup_stage(consistency_interval)
        self._add_alarms_by_type(consistency_interval=consistency_interval,
                                 alarm_type='prometheus')

        # Action
        time.sleep(2 * consistency_interval + 1)
        self.consistency_enforcer.periodic_process()
        self._process_events()

        # Test Assertions
        instance_vertices = self.processor.entity_graph.get_vertices({
            VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
            VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE
        })
        deleted_instance_vertices = \
            self.processor.entity_graph.get_vertices({
                VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
                VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE,
                VProps.VITRAGE_IS_DELETED: True
            })
        self.assertThat(instance_vertices,
                        matchers.HasLength(self.NUM_INSTANCES - 3))

        # number of resources:
        # number of vertices - 3 (deleted instances)
        # number of nics - 1
        # number of volumes - 1
        # number of prometheus alarms - 1
        self.assertThat(
            self.processor.entity_graph.get_vertices(),
            matchers.HasLength(
                # 3 instances deleted
                self._num_total_expected_vertices() - 3 +
                3 - 1 +  # one nic deleted
                3 - 1 +  # one cinder.volume deleted
                3 - 1)  # one prometheus deleted
        )

        self.assertThat(deleted_instance_vertices, matchers.HasLength(3))

        # one nic was deleted, one marked as deleted, one untouched
        # same for cinder.volume
        self._assert_vertices_status(EntityCategory.RESOURCE, 'nic', 2, 1)
        self._assert_vertices_status(EntityCategory.RESOURCE,
                                     'cinder.volume', 2, 1)

        # one prometheus deleted, other two are untouched
        # prometheus vertices should not be marked as deleted, since the
        # datasource did not ask to delete outdated vertices.
        self._assert_vertices_status(EntityCategory.ALARM, 'prometheus',
                                     2, 0)

    def test_should_delete_vertex(self):
        """Unit-style checks on the enforcer's deletion policy."""
        # should be deleted because the static datasource asks to delete its
        # outdated vertices
        static_vertex = {VProps.VITRAGE_DATASOURCE_NAME: 'static'}
        self.assertTrue(
            self.consistency_enforcer._should_delete_vertex(static_vertex))

        # should be deleted because the cinder datasource asks to delete its
        # outdated vertices
        volume_vertex = {VProps.VITRAGE_DATASOURCE_NAME: 'cinder.volume'}
        self.assertTrue(
            self.consistency_enforcer._should_delete_vertex(volume_vertex))

        # should not be deleted because the prometheus datasource does not
        # ask to delete its outdated vertices
        prometheus_vertex = {VProps.VITRAGE_DATASOURCE_NAME: 'prometheus'}
        self.assertFalse(
            self.consistency_enforcer._should_delete_vertex(
                prometheus_vertex))

        # should be deleted because it is a placeholder
        placeholder_vertex = {
            VProps.VITRAGE_IS_PLACEHOLDER: True,
            VProps.VITRAGE_TYPE: 'prometheus'
        }
        self.assertTrue(
            self.consistency_enforcer._should_delete_vertex(
                placeholder_vertex))

        # should not be deleted because it is an openstack.cluster
        cluster_vertex = {
            VProps.VITRAGE_IS_PLACEHOLDER: True,
            VProps.VITRAGE_TYPE: 'openstack.cluster'
        }
        self.assertFalse(
            self.consistency_enforcer._should_delete_vertex(cluster_vertex))

        vertices = [static_vertex, volume_vertex, prometheus_vertex,
                    placeholder_vertex, cluster_vertex]
        vertices_to_mark_deleted = self.consistency_enforcer.\
            _filter_vertices_to_be_marked_as_deleted(vertices)

        self.assertThat(vertices_to_mark_deleted, matchers.HasLength(3))
        self.assertTrue(static_vertex in vertices_to_mark_deleted)
        self.assertTrue(placeholder_vertex in vertices_to_mark_deleted)
        self.assertTrue(volume_vertex in vertices_to_mark_deleted)
        self.assertFalse(prometheus_vertex in vertices_to_mark_deleted)
        self.assertFalse(cluster_vertex in vertices_to_mark_deleted)

    def _assert_vertices_status(self, category, vitrage_type,
                                num_vertices, num_marked_deleted):
        """Assert total and marked-deleted counts for a vertex type."""
        vertices = \
            self.processor.entity_graph.get_vertices({
                VProps.VITRAGE_CATEGORY: category,
                VProps.VITRAGE_TYPE: vitrage_type,
            })
        self.assertThat(vertices, matchers.HasLength(num_vertices))

        marked_deleted_vertices = \
            self.processor.entity_graph.get_vertices({
                VProps.VITRAGE_CATEGORY: category,
                VProps.VITRAGE_TYPE: vitrage_type,
                VProps.VITRAGE_IS_DELETED: True
            })
        self.assertThat(marked_deleted_vertices,
                        matchers.HasLength(num_marked_deleted))

    def _periodic_process_setup_stage(self, consistency_interval):
        """Populate the graph and stagger vertex timestamps/deletions."""
        self._create_processor_with_graph(processor=self.processor)
        current_time = utcnow()

        # set all vertices to be have timestamp that consistency won't get
        self._update_timestamp(
            self.processor.entity_graph.get_vertices(),
            current_time + timedelta(seconds=1.5 * consistency_interval))

        # check number of instances in graph
        instance_vertices = self.processor.entity_graph.get_vertices({
            VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
            VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE
        })
        self.assertThat(instance_vertices,
                        matchers.HasLength(self.NUM_INSTANCES))

        # set current timestamp of part of the instances
        self._update_timestamp(instance_vertices[0:3], current_time)

        # set part of the instances as deleted
        for i in range(3, 6):
            instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
            self.processor.entity_graph.update_vertex(instance_vertices[i])

        # set part of the instances as deleted
        for i in range(6, 9):
            instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
            instance_vertices[i][VProps.VITRAGE_SAMPLE_TIMESTAMP] = str(
                current_time +
                timedelta(seconds=2 * consistency_interval + 1))
            self.processor.entity_graph.update_vertex(instance_vertices[i])

        self._add_resources_by_type(
            consistency_interval=consistency_interval,
            datasource_name='static',
            resource_type='nic')
        self._add_resources_by_type(
            consistency_interval=consistency_interval,
            datasource_name='cinder.volume',
            resource_type='cinder.volume')

    def _update_timestamp(self, lst, timestamp):
        """Stamp every vertex in lst with the given sample timestamp."""
        for vertex in lst:
            vertex[VProps.VITRAGE_SAMPLE_TIMESTAMP] = str(timestamp)
            self.processor.entity_graph.update_vertex(vertex)

    def _process_events(self):
        """Drain the event queue into the processor, polling up to ~9s."""
        num_retries = 0
        while True:
            if self.event_queue.empty():
                time.sleep(0.3)
            if not self.event_queue.empty():
                time.sleep(1)
                count = 0
                while not self.event_queue.empty():
                    count += 1
                    data = self.event_queue.get()
                    if isinstance(data, list):
                        for event in data:
                            self.processor.process_event(event)
                    else:
                        self.processor.process_event(data)
                return
            num_retries += 1
            if num_retries == 30:
                return

    def _add_resources_by_type(self, consistency_interval, resource_type,
                               datasource_name):
        """Add updated/outdated/deleted resources of the given type."""
        def _create_resource_by_type(v_id, v_type, ds_name, timestamp,
                                     is_deleted=False):
            return self._create_resource(vitrage_id=v_id,
                                         resource_type=v_type,
                                         datasource_name=ds_name,
                                         sample_timestamp=timestamp,
                                         is_deleted=is_deleted)

        self._add_entities_with_different_timestamps(
            consistency_interval=consistency_interval,
            create_func=_create_resource_by_type,
            category=EntityCategory.RESOURCE,
            datasource_name=datasource_name,
            resource_type=resource_type)

    def _add_alarms_by_type(self, consistency_interval, alarm_type):
        """Add updated/outdated/deleted alarms of the given type."""
        def _create_alarm_by_type(v_id, v_type, ds_name, timestamp,
                                  is_deleted=False):
            return self._create_alarm(vitrage_id=v_id,
                                      alarm_type=v_type,
                                      datasource_name=ds_name,
                                      project_id=None,
                                      vitrage_resource_project_id=None,
                                      metadata=None,
                                      vitrage_sample_timestamp=timestamp,
                                      is_deleted=is_deleted)

        self._add_entities_with_different_timestamps(
            consistency_interval=consistency_interval,
            create_func=_create_alarm_by_type,
            category=EntityCategory.ALARM,
            datasource_name=alarm_type,
            resource_type=alarm_type)

    def _add_entities_with_different_timestamps(self,
                                                consistency_interval,
                                                create_func,
                                                category,
                                                datasource_name,
                                                resource_type):
        # add resources to the graph:
        # - updated_resource
        # - outdated_resource with an old timestamp
        # - deleted_resource with an old timestamp and is_deleted==true
        future_timestamp = \
            str(utcnow() + timedelta(seconds=2 * consistency_interval))
        past_timestamp = \
            str(utcnow() - timedelta(seconds=2 * consistency_interval - 1))

        updated_resource = create_func(v_id=resource_type + '1234',
                                       v_type=resource_type,
                                       ds_name=datasource_name,
                                       timestamp=future_timestamp)
        outdated_resource = create_func(v_id=resource_type + '5678',
                                        v_type=resource_type,
                                        ds_name=datasource_name,
                                        timestamp=past_timestamp)
        deleted_resource = create_func(v_id=resource_type + '9999',
                                       v_type=resource_type,
                                       ds_name=datasource_name,
                                       timestamp=past_timestamp,
                                       is_deleted=True)

        self.graph.add_vertex(updated_resource)
        self.graph.add_vertex(outdated_resource)
        self.graph.add_vertex(deleted_resource)

        # get the list of vertices
        resource_vertices = self.processor.entity_graph.get_vertices({
            VProps.VITRAGE_CATEGORY: category,
            VProps.VITRAGE_TYPE: resource_type
        })

        self.assertThat(resource_vertices, matchers.HasLength(3),
                        'Wrong number of vertices of type %s',
                        resource_type)
def load_snapshot(data):
    """Deserialize a stored graph snapshot row, or None when absent."""
    if not data:
        return None
    return NXGraph.read_gpickle(data.graph_snapshot)
def _create_graph(self):
    """Build a multi-tenant test graph.

    Topology: cluster -> zone -> host -> four instances (two per tenant
    project), with an alarm on the host, a deduced alarm on every
    instance, and 'causes' edges from the host alarm to each instance
    alarm.
    """
    graph = NXGraph('Multi tenancy graph')
    self._add_alarm_persistency_subscription(graph)

    # create vertices
    cluster_vertex = create_cluster_placeholder_vertex()
    zone_vertex = self._create_resource('zone_1',
                                        NOVA_ZONE_DATASOURCE)
    host_vertex = self._create_resource('host_1',
                                        NOVA_HOST_DATASOURCE)
    instance_1_vertex = self._create_resource('instance_1',
                                              NOVA_INSTANCE_DATASOURCE,
                                              project_id='project_1')
    instance_2_vertex = self._create_resource('instance_2',
                                              NOVA_INSTANCE_DATASOURCE,
                                              project_id='project_1')
    instance_3_vertex = self._create_resource('instance_3',
                                              NOVA_INSTANCE_DATASOURCE,
                                              project_id='project_2')
    instance_4_vertex = self._create_resource('instance_4',
                                              NOVA_INSTANCE_DATASOURCE,
                                              project_id='project_2')

    alarm_on_host_vertex = self._create_alarm(
        'alarm_on_host',
        'alarm_on_host',
        metadata={VProps.VITRAGE_TYPE: NOVA_HOST_DATASOURCE,
                  VProps.NAME: 'host_1',
                  VProps.RESOURCE_ID: 'host_1',
                  VProps.VITRAGE_OPERATIONAL_SEVERITY:
                      OperationalAlarmSeverity.SEVERE,
                  VProps.VITRAGE_AGGREGATED_SEVERITY:
                      OperationalAlarmSeverity.SEVERE})
    alarm_on_instance_1_vertex = self._create_alarm(
        'alarm_on_instance_1',
        'deduced_alarm',
        project_id='project_1',
        vitrage_resource_project_id='project_1',
        metadata={VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE,
                  VProps.NAME: 'instance_1',
                  VProps.RESOURCE_ID: 'sdg7849ythksjdg',
                  VProps.VITRAGE_OPERATIONAL_SEVERITY:
                      OperationalAlarmSeverity.SEVERE,
                  VProps.VITRAGE_AGGREGATED_SEVERITY:
                      OperationalAlarmSeverity.SEVERE})
    alarm_on_instance_2_vertex = self._create_alarm(
        'alarm_on_instance_2',
        'deduced_alarm',
        vitrage_resource_project_id='project_1',
        metadata={VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE,
                  VProps.NAME: 'instance_2',
                  VProps.RESOURCE_ID: 'nbfhsdugf',
                  VProps.VITRAGE_OPERATIONAL_SEVERITY:
                      OperationalAlarmSeverity.WARNING,
                  VProps.VITRAGE_AGGREGATED_SEVERITY:
                      OperationalAlarmSeverity.WARNING})
    alarm_on_instance_3_vertex = self._create_alarm(
        'alarm_on_instance_3',
        'deduced_alarm',
        project_id='project_2',
        vitrage_resource_project_id='project_2',
        metadata={VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE,
                  VProps.NAME: 'instance_3',
                  VProps.RESOURCE_ID: 'nbffhsdasdugf',
                  VProps.VITRAGE_OPERATIONAL_SEVERITY:
                      OperationalAlarmSeverity.CRITICAL,
                  VProps.VITRAGE_AGGREGATED_SEVERITY:
                      OperationalAlarmSeverity.CRITICAL})
    alarm_on_instance_4_vertex = self._create_alarm(
        'alarm_on_instance_4',
        'deduced_alarm',
        vitrage_resource_project_id='project_2',
        metadata={VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE,
                  VProps.NAME: 'instance_4',
                  VProps.RESOURCE_ID: 'ngsuy76hgd87f',
                  VProps.VITRAGE_OPERATIONAL_SEVERITY:
                      OperationalAlarmSeverity.WARNING,
                  VProps.VITRAGE_AGGREGATED_SEVERITY:
                      OperationalAlarmSeverity.WARNING})

    # create links
    edges = list()
    edges.append(
        graph_utils.create_edge(cluster_vertex.vertex_id,
                                zone_vertex.vertex_id,
                                EdgeLabel.CONTAINS,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(zone_vertex.vertex_id,
                                host_vertex.vertex_id,
                                EdgeLabel.CONTAINS,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(host_vertex.vertex_id,
                                instance_1_vertex.vertex_id,
                                EdgeLabel.CONTAINS,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(host_vertex.vertex_id,
                                instance_2_vertex.vertex_id,
                                EdgeLabel.CONTAINS,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(host_vertex.vertex_id,
                                instance_3_vertex.vertex_id,
                                EdgeLabel.CONTAINS,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(host_vertex.vertex_id,
                                instance_4_vertex.vertex_id,
                                EdgeLabel.CONTAINS,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_host_vertex.vertex_id,
                                host_vertex.vertex_id,
                                EdgeLabel.ON,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_instance_1_vertex.vertex_id,
                                instance_1_vertex.vertex_id,
                                EdgeLabel.ON,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_instance_2_vertex.vertex_id,
                                instance_2_vertex.vertex_id,
                                EdgeLabel.ON,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_instance_3_vertex.vertex_id,
                                instance_3_vertex.vertex_id,
                                EdgeLabel.ON,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_instance_4_vertex.vertex_id,
                                instance_4_vertex.vertex_id,
                                EdgeLabel.ON,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_host_vertex.vertex_id,
                                alarm_on_instance_1_vertex.vertex_id,
                                EdgeLabel.CAUSES,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_host_vertex.vertex_id,
                                alarm_on_instance_2_vertex.vertex_id,
                                EdgeLabel.CAUSES,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_host_vertex.vertex_id,
                                alarm_on_instance_3_vertex.vertex_id,
                                EdgeLabel.CAUSES,
                                update_timestamp=str(utcnow())))
    edges.append(
        graph_utils.create_edge(alarm_on_host_vertex.vertex_id,
                                alarm_on_instance_4_vertex.vertex_id,
                                EdgeLabel.CAUSES,
                                update_timestamp=str(utcnow())))

    # add vertices to graph
    graph.add_vertex(cluster_vertex)
    graph.add_vertex(zone_vertex)
    graph.add_vertex(host_vertex)
    graph.add_vertex(instance_1_vertex)
    graph.add_vertex(instance_2_vertex)
    graph.add_vertex(instance_3_vertex)
    graph.add_vertex(instance_4_vertex)
    graph.add_vertex(alarm_on_host_vertex)
    graph.add_vertex(alarm_on_instance_1_vertex)
    graph.add_vertex(alarm_on_instance_2_vertex)
    graph.add_vertex(alarm_on_instance_3_vertex)
    graph.add_vertex(alarm_on_instance_4_vertex)

    # add links to graph
    for edge in edges:
        graph.add_edge(edge)

    return graph