def _exercise_domain_api(ref_id):
    driver = self.resource_api.driver
    self.assertRaises(exception.DomainNotFound,
                      driver.get_domain,
                      ref_id)
    self.assertRaises(exception.DomainNotFound,
                      driver.get_domain_by_name,
                      resource.NULL_DOMAIN_ID)
    domain_ids = [x['id'] for x in
                  driver.list_domains(driver_hints.Hints())]
    self.assertNotIn(ref_id, domain_ids)
    domains = driver.list_domains_from_ids([ref_id])
    self.assertThat(domains, matchers.HasLength(0))
    self.assertRaises(exception.DomainNotFound,
                      driver.update_domain,
                      ref_id,
                      {})
    self.assertRaises(exception.DomainNotFound,
                      driver.delete_domain,
                      ref_id)
def _test_update_event_transform(self, instance_events):
    for event in instance_events:
        # Test action
        transformer = self.transformers[NOVA_INSTANCE_DATASOURCE]
        wrapper = transformer.transform(event)

        # Test assertions
        self._validate_update_vertex_props(
            transformer, wrapper.vertex, event)

        # Validate the neighbors: only one valid host neighbor
        neighbors = wrapper.neighbors
        self.assertThat(neighbors, matchers.HasLength(1))
        self._validate_host_neighbor(neighbors[0], event)

        event_type = event[DSProps.EVENT_TYPE]
        if event_type == 'compute.instance.delete.end':
            self.assertEqual(GraphAction.DELETE_ENTITY, wrapper.action)
        elif event_type == 'compute.instance.create.start':
            self.assertEqual(GraphAction.CREATE_ENTITY, wrapper.action)
        else:
            self.assertEqual(GraphAction.UPDATE_ENTITY, wrapper.action)
def test_set_and_get_queue_metadata(self):
    # Retrieve random queue
    queue_name = self.queues[data_utils.rand_int_id(
        0, len(self.queues) - 1)]

    # Check the Queue has no metadata
    _, body = self.get_queue_metadata(queue_name)
    self.assertThat(body, matchers.HasLength(0))

    # Create metadata
    key3 = [0, 1, 2, 3, 4]
    key2 = data_utils.rand_name('value')
    req_body1 = dict()
    req_body1[data_utils.rand_name('key3')] = key3
    req_body1[data_utils.rand_name('key2')] = key2
    req_body = dict()
    req_body[data_utils.rand_name('key1')] = req_body1

    # Set Queue Metadata
    _, body = self.set_queue_metadata(queue_name, req_body)
    self.assertEqual('', body)

    # Get Queue Metadata
    _, body = self.get_queue_metadata(queue_name)
    self.assertThat(body, matchers.Equals(req_body))
def test_meta(self):
    class Manager(object):
        __metaclass__ = manager.ManagerMeta

        @manager.periodic_task
        def foo(self):
            return 'foo'

        @manager.periodic_task(spacing=4)
        def bar(self):
            return 'bar'

        @manager.periodic_task(enabled=False)
        def baz(self):
            return 'baz'

    m = Manager()
    self.assertThat(m._periodic_tasks, matchers.HasLength(2))
    self.assertEqual(None, m._periodic_spacing['foo'])
    self.assertEqual(4, m._periodic_spacing['bar'])
    self.assertThat(m._periodic_spacing,
                    matchers.Not(matchers.Contains('baz')))
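# Illustrative sketch only: the `manager` module used by test_meta is not
# shown in this section, so ManagerMetaSketch and periodic_task_sketch below
# are hypothetical names. The sketch demonstrates the general technique the
# test exercises: a metaclass that collects decorated methods into
# _periodic_tasks and records their spacing in _periodic_spacing, skipping
# tasks decorated with enabled=False.
def periodic_task_sketch(*args, **kwargs):
    def decorator(func):
        func._is_periodic = kwargs.get('enabled', True)
        func._periodic_spacing = kwargs.get('spacing')
        return func
    if args and callable(args[0]):
        # Used as a bare decorator: @periodic_task_sketch
        return decorator(args[0])
    # Used with arguments: @periodic_task_sketch(spacing=4)
    return decorator


class ManagerMetaSketch(type):
    def __init__(cls, name, bases, namespace):
        super(ManagerMetaSketch, cls).__init__(name, bases, namespace)
        cls._periodic_tasks = []
        cls._periodic_spacing = {}
        for attr_name, attr in namespace.items():
            if getattr(attr, '_is_periodic', False):
                cls._periodic_tasks.append((attr_name, attr))
                cls._periodic_spacing[attr_name] = attr._periodic_spacing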
def test_load_datasources_value_with_errors(self):
    # setup
    entity_graph_opts = [
        cfg.StrOpt('datasources_values_dir',
                   default=utils.get_resources_dir() +
                   '/datasources_values/erroneous_values'),
    ]
    conf = cfg.ConfigOpts()
    conf.register_opts(entity_graph_opts, group='entity_graph')
    conf.register_opts(self.DATASOURCES_OPTS, group='datasources')
    self._load_datasources(conf)

    # action
    info_mapper = DatasourceInfoMapper(conf)

    # test assertions
    missing_values = 1
    erroneous_values = 1
    num_valid_datasources = len(info_mapper.datasources_value_confs) + \
        missing_values + erroneous_values
    self.assertThat(conf.datasources.types,
                    matchers.HasLength(num_valid_datasources))
def test_drivers(self):
    self.register_fake_conductors()
    expected = sorted([
        {'name': self.d1, 'hosts': [self.h1]},
        {'name': self.d2, 'hosts': [self.h1, self.h2]},
    ], key=lambda d: d['name'])

    data = self.get_json('/drivers')
    self.assertThat(data['drivers'], matchers.HasLength(2))
    drivers = sorted(data['drivers'], key=lambda d: d['name'])
    for i in range(len(expected)):
        d = drivers[i]
        self.assertEqual(expected[i]['name'], d['name'])
        self.assertEqual(sorted(expected[i]['hosts']), sorted(d['hosts']))
        self.validate_link(d['links'][0]['href'])
        self.validate_link(d['links'][1]['href'])
def test_update_transform(self):
    LOG.debug('Cinder Volume transformer test: transform entity event '
              'update')

    # Test setup
    spec_list = mock_sync.simple_volume_generators(volume_num=3,
                                                   instance_num=7,
                                                   update_events=7)
    static_events = mock_sync.generate_random_events_list(spec_list)

    for event in static_events:
        # Test action
        wrapper = self.transformers[CINDER_VOLUME_DATASOURCE].transform(
            event)

        # Test assertions
        vertex = wrapper.vertex
        self._validate_volume_vertex_props(vertex, event)

        neighbors = wrapper.neighbors
        self.assertThat(neighbors, matchers.HasLength(1))
        self._validate_neighbors(neighbors, vertex.vertex_id, event)
def test_list_domains_for_user(self):
    domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.resource_api.create_domain(domain['id'], domain)
    user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
            'domain_id': domain['id'], 'enabled': True}

    test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.resource_api.create_domain(test_domain1['id'], test_domain1)
    test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.resource_api.create_domain(test_domain2['id'], test_domain2)

    user = self.identity_api.create_user(user)
    user_domains = self.assignment_api.list_domains_for_user(user['id'])
    self.assertEqual(0, len(user_domains))

    self.assignment_api.create_grant(user_id=user['id'],
                                     domain_id=test_domain1['id'],
                                     role_id=self.role_member['id'])
    self.assignment_api.create_grant(user_id=user['id'],
                                     domain_id=test_domain2['id'],
                                     role_id=self.role_member['id'])
    user_domains = self.assignment_api.list_domains_for_user(user['id'])
    self.assertThat(user_domains, matchers.HasLength(2))
def test_list_domains_for_user_with_inherited_grants(self):
    """Test that inherited roles on the domain are excluded.

    Test Plan:

    - Create two domains, one user, group and role
    - Domain1 is given an inherited user role, Domain2 an inherited
      group role (for a group of which the user is a member)
    - When listing domains for user, neither domain should be returned

    """
    domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    domain1 = self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    domain2 = self.resource_api.create_domain(domain2['id'], domain2)
    user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
            'domain_id': domain1['id'], 'enabled': True}
    user = self.identity_api.create_user(user)
    group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
    group = self.identity_api.create_group(group)
    self.identity_api.add_user_to_group(user['id'], group['id'])
    role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.role_api.create_role(role['id'], role)

    # Create a grant on each domain, one user grant, one group grant,
    # both inherited.
    self.assignment_api.create_grant(user_id=user['id'],
                                     domain_id=domain1['id'],
                                     role_id=role['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group['id'],
                                     domain_id=domain2['id'],
                                     role_id=role['id'],
                                     inherited_to_projects=True)

    user_domains = self.assignment_api.list_domains_for_user(user['id'])
    # No domains should be returned since both domains have only
    # inherited role assignments.
    self.assertThat(user_domains, matchers.HasLength(0))
def test_multiple_lines_nochange(self):
    global_content = textwrap.dedent("""\
        foo<2;python_version=='2.7'
        foo>1;python_version!='2.7'
        """)
    project_content = textwrap.dedent("""\
        foo<2;python_version=='2.7'
        foo>1;python_version!='2.7'
        """)
    global_reqs = requirement.parse(global_content)
    project_reqs = list(requirement.to_reqs(project_content))
    actions, reqs = update._sync_requirements_file(
        global_reqs, project_reqs, 'f', False, False, False)
    self.assertEqual(
        requirement.Requirements([
            requirement.Requirement(
                'foo', '', '<2', "python_version=='2.7'", ''),
            requirement.Requirement(
                'foo', '', '>1', "python_version!='2.7'", '')]),
        reqs)
    self.assertThat(actions, matchers.HasLength(0))
def test_connect_retries(self):

    def _timeout_error(request, context):
        raise requests.exceptions.Timeout()

    self.stub_url('GET', text=_timeout_error)

    session = client_session.Session()
    retries = 3

    with mock.patch('time.sleep') as m:
        self.assertRaises(exceptions.RequestTimeout,
                          session.get,
                          self.TEST_URL, connect_retries=retries)

        self.assertEqual(retries, m.call_count)
        # 3 retries finishing with 2.0 means 0.5, 1.0 and 2.0
        m.assert_called_with(2.0)

    # we count retries so there will be one initial request + 3 retries
    self.assertThat(self.requests_mock.request_history,
                    matchers.HasLength(retries + 1))
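# Worked example (not part of the test above): with three connect retries and
# the doubling backoff described in the test's comment, the sleeps are
# 0.5, 1.0 and 2.0 seconds, which is why the last recorded call is sleep(2.0).
retry_delays = [0.5 * 2 ** n for n in range(3)]
assert retry_delays == [0.5, 1.0, 2.0]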
def test_single_global_multiple_in_project(self):
    global_content = textwrap.dedent("""\
        foo>1
        """)
    project_content = textwrap.dedent("""\
        foo<2;python_version=='2.7'
        foo>1;python_version!='2.7'
        """)
    global_reqs = requirement.parse(global_content)
    project_reqs = list(requirement.to_reqs(project_content))
    actions, reqs = update._sync_requirements_file(
        global_reqs, project_reqs, 'f', False, False, False)
    self.assertEqual(
        requirement.Requirements(
            [requirement.Requirement('foo', '', '>1', "", '')]),
        reqs)
    self.assertEqual(
        project.StdOut(" foo<2;python_version=='2.7' -> foo>1\n"),
        actions[2])
    self.assertEqual(
        project.StdOut(" foo>1;python_version!='2.7' -> \n"),
        actions[3])
    self.assertThat(actions, matchers.HasLength(4))
def test_var_binds(self):
    oid_with_alarm_objects = \
        common.GENERAL_OID + '.' + \
        common.COMPANY_OID + '.' + common.ALARM_OBJECTS_OID

    var_binds = self.snmp_sender._get_var_binds(common.alarm_data)
    self.assertThat(var_binds, matchers.HasLength(3))
    self.assertIn((oid_with_alarm_objects + '.' + common.NAME_OID,
                   OctetString(common.alarm_data.get(VProps.NAME,
                                                     sender.NA))),
                  var_binds)
    self.assertIn((oid_with_alarm_objects + '.' + common.IS_DELETED_OID,
                   OctetString(common.alarm_data.get(
                       VProps.VITRAGE_IS_DELETED, sender.NA))),
                  var_binds)
    self.assertIn((oid_with_alarm_objects + '.' + common.SEVERITY_OID,
                   OctetString(common.alarm_data.get(
                       VProps.VITRAGE_OPERATIONAL_SEVERITY, sender.NA))),
                  var_binds)
def _validate_instance_neighbor(self, wrapper, alarm_id, instance_id):
    self.assertThat(wrapper.neighbors, matchers.HasLength(1))
    vm_neighbor = wrapper.neighbors[0]

    instance_transformer = self.transformers[NOVA_INSTANCE_DATASOURCE]
    properties = {
        VProps.ID: instance_id,
        VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE,
        VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
        VProps.VITRAGE_SAMPLE_TIMESTAMP:
            wrapper.vertex[VProps.VITRAGE_SAMPLE_TIMESTAMP],
    }
    expected_neighbor = instance_transformer.\
        create_neighbor_placeholder_vertex(**properties)
    self.assertEqual(expected_neighbor, vm_neighbor.vertex)

    # Validate neighbor edge
    self._validate_neighbor_edge(alarm_id, vm_neighbor)
def test_heat_stack_validity(self):
    # Setup
    processor = self._create_processor_with_graph(self.conf)
    self.assertThat(
        processor.entity_graph,
        matchers.HasLength(self._num_total_expected_vertices()))

    spec_list = mock_driver.simple_stack_generators(
        stack_num=1, instance_and_volume_num=1, snapshot_events=1)
    static_events = mock_driver.generate_random_events_list(spec_list)
    heat_stack_event = static_events[0]

    # Action
    processor.process_event(heat_stack_event)

    # Test assertions
    self.assertThat(
        processor.entity_graph,
        matchers.HasLength(self._num_total_expected_vertices() + 3))

    stack_vertices = processor.entity_graph.get_vertices(
        vertex_attr_filter={
            VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
            VProps.VITRAGE_TYPE: HEAT_STACK_DATASOURCE
        })
    self.assertThat(stack_vertices, matchers.HasLength(1))

    instance_vertices = processor.entity_graph.get_vertices(
        vertex_attr_filter={
            VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
            VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE
        })
    self.assertThat(instance_vertices,
                    matchers.HasLength(self.NUM_INSTANCES + 1))

    cinder_vertices = processor.entity_graph.get_vertices(
        vertex_attr_filter={
            VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
            VProps.VITRAGE_TYPE: CINDER_VOLUME_DATASOURCE
        })
    self.assertThat(cinder_vertices, matchers.HasLength(1))

    stack_neighbors = processor.entity_graph.neighbors(
        stack_vertices[0].vertex_id)
    self.assertThat(stack_neighbors, matchers.HasLength(2))
def test_show_webhook(self):
    webhooks = self.vitrage_client.webhook.list()
    self.assertThat(webhooks,
                    matchers.HasLength(self.pre_test_webhook_count),
                    'Amount of webhooks should be '
                    'the same as before the test')

    created_webhook = self.vitrage_client.webhook.add(
        url="https://www.test.com",
        regex_filter=REGEX_PROPS,
        headers=HEADERS_PROPS)

    show_webhook = self.vitrage_client.webhook.show(created_webhook['id'])
    self.assertIsNotNone(show_webhook, 'webhook not listed')
    self.assertEqual(created_webhook[HEADERS],
                     show_webhook[HEADERS],
                     'headers mismatch')
    self.assertEqual(created_webhook[REGEX_FILTER],
                     show_webhook[REGEX_FILTER],
                     'regex mismatch')
    self.assertEqual(created_webhook[URL],
                     show_webhook[URL],
                     'URL mismatch')

    self.vitrage_client.webhook.delete(created_webhook['id'])
def _add_entities_with_different_timestamps(self,
                                            consistency_interval,
                                            create_func,
                                            category,
                                            datasource_name,
                                            resource_type):
    # add resources to the graph:
    # - updated_resource
    # - outdated_resource with an old timestamp
    # - deleted_resource with an old timestamp and is_deleted==true
    future_timestamp = str(datetime.datetime_delta(
        2 * consistency_interval))
    past_timestamp = str(datetime.datetime_delta(
        -2 * consistency_interval + 1))

    updated_resource = create_func(
        v_id=resource_type + '1234', v_type=resource_type,
        ds_name=datasource_name, timestamp=future_timestamp)
    outdated_resource = create_func(
        v_id=resource_type + '5678', v_type=resource_type,
        ds_name=datasource_name, timestamp=past_timestamp)
    deleted_resource = create_func(
        v_id=resource_type + '9999', v_type=resource_type,
        ds_name=datasource_name, timestamp=past_timestamp,
        is_deleted=True)

    self.graph.add_vertex(updated_resource)
    self.graph.add_vertex(outdated_resource)
    self.graph.add_vertex(deleted_resource)

    # get the list of vertices
    resource_vertices = self.processor.entity_graph.get_vertices({
        VProps.VITRAGE_CATEGORY: category,
        VProps.VITRAGE_TYPE: resource_type
    })

    self.assertThat(resource_vertices, matchers.HasLength(3),
                    'Wrong number of vertices of type %s' % resource_type)
def test_list_domains_for_user_with_grants(self):
    # Create two groups each with a role on a different domain, and
    # make user1 a member of both groups.  Both these new domains
    # should now be included, along with any direct user grants.
    domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.resource_api.create_domain(domain['id'], domain)
    user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
            'domain_id': domain['id'], 'enabled': True}
    user = self.identity_api.create_user(user)
    group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
    group1 = self.identity_api.create_group(group1)
    group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
    group2 = self.identity_api.create_group(group2)

    test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.resource_api.create_domain(test_domain1['id'], test_domain1)
    test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.resource_api.create_domain(test_domain2['id'], test_domain2)
    test_domain3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
    self.resource_api.create_domain(test_domain3['id'], test_domain3)

    self.identity_api.add_user_to_group(user['id'], group1['id'])
    self.identity_api.add_user_to_group(user['id'], group2['id'])

    # Create 3 grants, one user grant, the other two as group grants
    self.assignment_api.create_grant(user_id=user['id'],
                                     domain_id=test_domain1['id'],
                                     role_id=self.role_member['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=test_domain2['id'],
                                     role_id=self.role_admin['id'])
    self.assignment_api.create_grant(group_id=group2['id'],
                                     domain_id=test_domain3['id'],
                                     role_id=self.role_admin['id'])
    user_domains = self.assignment_api.list_domains_for_user(user['id'])
    self.assertThat(user_domains, matchers.HasLength(3))
def _periodic_process_setup_stage(self, consistency_interval):
    self._create_processor_with_graph(self.conf, processor=self.processor)
    current_time = utcnow()

    # set all vertices to have a timestamp that the consistency process
    # will not consider outdated
    self._update_timestamp(
        self.processor.entity_graph.get_vertices(),
        current_time + timedelta(seconds=1.5 * consistency_interval))

    # check number of instances in graph
    instance_vertices = self.processor.entity_graph.get_vertices({
        VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
        VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE
    })
    self.assertThat(instance_vertices,
                    matchers.HasLength(self.NUM_INSTANCES))

    # set current timestamp on part of the instances
    self._update_timestamp(instance_vertices[0:3], current_time)

    # mark part of the instances as deleted
    for i in range(3, 6):
        instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
        self.processor.entity_graph.update_vertex(instance_vertices[i])

    # mark another part of the instances as deleted, with a newer
    # sample timestamp
    for i in range(6, 9):
        instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
        instance_vertices[i][VProps.VITRAGE_SAMPLE_TIMESTAMP] = str(
            current_time + timedelta(seconds=2 * consistency_interval + 1))
        self.processor.entity_graph.update_vertex(instance_vertices[i])

    self._add_static_resources(consistency_interval)
    self._add_cinder_volume_resources(consistency_interval)
def test_list_credentials_filtered_by_type_and_user_id(self):
    """Call ``GET /credentials?user_id={user_id}&type={type}``."""
    user1_id = uuid.uuid4().hex
    user2_id = uuid.uuid4().hex

    # Creating credentials for two different users
    credential_user1_ec2 = unit.new_credential_ref(user_id=user1_id,
                                                   type=CRED_TYPE_EC2)
    credential_user1_cert = unit.new_credential_ref(user_id=user1_id)
    credential_user2_cert = unit.new_credential_ref(user_id=user2_id)

    self.credential_api.create_credential(credential_user1_ec2['id'],
                                          credential_user1_ec2)
    self.credential_api.create_credential(credential_user1_cert['id'],
                                          credential_user1_cert)
    self.credential_api.create_credential(credential_user2_cert['id'],
                                          credential_user2_cert)

    r = self.get('/credentials?user_id=%s&type=ec2' % user1_id)
    self.assertValidCredentialListResponse(r, ref=credential_user1_ec2)
    self.assertThat(r.result['credentials'], matchers.HasLength(1))
    cred = r.result['credentials'][0]
    self.assertEqual(CRED_TYPE_EC2, cred['type'])
    self.assertEqual(user1_id, cred['user_id'])
def test_zabbix_alarm_transform(self):
    LOG.debug('Zabbix alarm transformer test: transform entity event')

    # Test setup
    spec_list = mock_sync.simple_zabbix_alarm_generators(host_num=4,
                                                         events_num=10)
    zabbix_alarms = mock_sync.generate_sequential_events_list(spec_list)

    for alarm in zabbix_alarms:
        # Test action
        self.enrich_event(alarm, format_timestamp=False)
        wrapper = ZabbixTransformer(self.transformers, self.conf)\
            .transform(alarm)
        self._validate_vertex(wrapper.vertex, alarm)

        neighbors = wrapper.neighbors
        self.assertThat(neighbors, matchers.HasLength(1))
        neighbor = neighbors[0]

        # Right now only a host is supported as a resource
        if neighbor.vertex[VProps.VITRAGE_TYPE] == NOVA_HOST_DATASOURCE:
            self._validate_host_neighbor(neighbors[0], alarm)

        self._validate_action(alarm, wrapper)
def _check_opt_names(self, opt_list, expected_opt_names):
    opt_names = [o.name for (g, l) in opt_list for o in l]
    self.assertThat(opt_names, matchers.HasLength(len(expected_opt_names)))
    for opt in opt_names:
        self.assertIn(opt, expected_opt_names)
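# Illustration with hypothetical data (not from the module under test) of the
# (group, [opts]) pair structure that _check_opt_names flattens: the nested
# comprehension collects every option name across all groups.
from oslo_config import cfg

sample_opt_list = [('DEFAULT', [cfg.StrOpt('host')]),
                   ('database', [cfg.StrOpt('connection'),
                                 cfg.IntOpt('max_retries')])]
flattened_names = [o.name for (g, l) in sample_opt_list for o in l]
assert flattened_names == ['host', 'connection', 'max_retries']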
def test_get_all_functionality(self):
    # Step 1 - Services with status OK should not be returned

    # Test setup scenario
    zabbix_driver = MockZabbixDriver(self.conf)

    alarm_data1 = self._extract_alarm_data()
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2')
    alarm_data3 = self._extract_alarm_data(z_resource_name='compute-2',
                                           triggerid='2')

    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    # Test action
    alarms = zabbix_driver._get_all_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, IsEmpty())

    # Step 2 - one raised alarm

    # Test setup
    alarm_data1 = self._extract_alarm_data(value='1')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    # Test action
    alarms = zabbix_driver._get_all_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(1))
    self._assert_contains(alarm_data1, alarms)

    # Step 3 - two raised alarms

    # Test setup
    alarm_data1 = self._extract_alarm_data(value='1', priority='4')
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           value='1')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    expected_alarm1 = alarm_data1
    expected_alarm2 = copy.copy(alarm_data2)
    expected_alarm2[ZProps.RESOURCE_NAME] = 'host2'

    # Test action
    alarms = zabbix_driver._get_all_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(2))
    self._assert_contains(expected_alarm1, alarms)
    self._assert_contains(expected_alarm2, alarms)

    # Step 4 - Check inactive alarms. The get all function should return
    # an inactive alarm (an alarm whose status has changed to OK)

    # Test setup
    alarm_data1 = self._extract_alarm_data()
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    expected_alarm1 = alarm_data1
    expected_alarm2 = copy.copy(alarm_data2)
    expected_alarm2[ZProps.RESOURCE_NAME] = 'host2'

    # Test action
    alarms = zabbix_driver._get_all_alarms()

    # Test assertions
    # The alarms of alarm_data1/2 should be returned although their
    # status is OK, because they were not OK earlier
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(2))
    self._assert_contains(expected_alarm1, alarms)
    self._assert_contains(expected_alarm2, alarms)

    # Step 5 - get all when all alarms are inactive and their status
    # has not changed

    # Test action
    alarms = zabbix_driver._get_all_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'alarms is None')
    self.assertThat(alarms, IsEmpty())
def test_get_changes_and_get_all(self):
    # Step 1 - get changes

    # Step setup
    zabbix_driver = MockZabbixDriver(self.conf)

    alarm_data1 = self._extract_alarm_data(priority='2', value='1')
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           priority='2')
    alarm_data3 = self._extract_alarm_data(z_resource_name='compute-2',
                                           triggerid='2')

    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    # Step action
    alarms = zabbix_driver._get_changed_alarms()

    # Step assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(1))
    self._assert_contains(alarm_data1, alarms)

    # Step 2 - get changes when no change occurred (returns nothing)

    # Step action
    alarms = zabbix_driver._get_changed_alarms()

    # Step assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, IsEmpty())

    # Step 3 - get all

    # Step action
    alarms = zabbix_driver._get_all_alarms()

    # Step assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(1))
    self._assert_contains(alarm_data1, alarms)

    # Step 4 - get all for the second time
    # (when no change has occurred it returns the same)

    # Step action
    alarms = zabbix_driver._get_all_alarms()

    # Step assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(1))
    self._assert_contains(alarm_data1, alarms)

    # Step 5 - calling get changes right after get all (returns nothing)

    # Step setup
    alarm_data1 = self._extract_alarm_data(priority='4', value='1')
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           priority='1', value='1')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    expected_alarm1 = alarm_data1
    expected_alarm2 = copy.copy(alarm_data2)
    expected_alarm2[ZProps.RESOURCE_NAME] = 'host2'

    # Step action
    get_all_alarms = zabbix_driver._get_all_alarms()
    changed_alarms = zabbix_driver._get_changed_alarms()

    # Step assertions
    self.assertIsNotNone(get_all_alarms, 'No alarms returned')
    self.assertThat(get_all_alarms, matchers.HasLength(2))
    self._assert_contains(expected_alarm1, get_all_alarms)
    self._assert_contains(expected_alarm2, get_all_alarms)

    self.assertIsNotNone(changed_alarms, 'No alarms returned')
    self.assertThat(changed_alarms, IsEmpty())

    # Step 6 - get changes

    # Step setup
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           priority='4', value='1')
    alarm_data3 = self._extract_alarm_data(z_resource_name='compute-2',
                                           triggerid='2', priority='4',
                                           value='1')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    expected_alarm1 = copy.copy(alarm_data2)
    expected_alarm1[ZProps.RESOURCE_NAME] = 'host2'
    expected_alarm2 = copy.copy(expected_alarm1)
    expected_alarm2[ZProps.TRIGGER_ID] = '2'

    # Step action
    alarms = zabbix_driver._get_changed_alarms()

    # Step assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(2))
    self._assert_contains(expected_alarm1, alarms)
    self._assert_contains(expected_alarm2, alarms)
def test_get_changes_functionality(self):
    # Step 1 - get changes when all alarms are OK

    # Test setup
    zabbix_driver = MockZabbixDriver(self.conf)

    alarm_data1 = self._extract_alarm_data(priority='2')
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           priority='2')
    alarm_data3 = self._extract_alarm_data(z_resource_name='compute-2',
                                           description='Uptime',
                                           priority='3')

    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    # Test action
    alarms = zabbix_driver._get_changed_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, IsEmpty())

    # Step 2 - get changes when an alarm is raised

    # Test setup
    alarm_data1 = self._extract_alarm_data(priority='2', value='1')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    # Test action
    alarms = zabbix_driver._get_changed_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(1))
    self._assert_contains(alarm_data1, alarms)

    # Step 3 - get changes when the priority of an inactive alarm is
    # changed

    # Test setup
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           priority='3')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    # Test action
    alarms = zabbix_driver._get_changed_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, IsEmpty())

    # Step 4 - get changes when:
    # 1. alarm1 - priority of active alarm is changed (should be returned)
    # 2. alarm2 - raised alarm (should be returned)
    # 3. alarm3 - priority of inactive alarm is changed (should not
    #    be returned)

    # Test setup
    alarm_data1 = self._extract_alarm_data(priority='4', value='1')
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           priority='1', value='1')
    alarm_data3 = self._extract_alarm_data(z_resource_name='compute-2',
                                           triggerid='22222',
                                           priority='1')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    expected_alarm1 = alarm_data1
    expected_alarm2 = copy.copy(alarm_data2)
    expected_alarm2[ZProps.RESOURCE_NAME] = 'host2'

    # Test action
    alarms = zabbix_driver._get_changed_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(2))
    self._assert_contains(expected_alarm1, alarms)
    self._assert_contains(expected_alarm2, alarms)

    # Step 5 - get changes when all active alarms are changed to inactive

    # Test setup
    alarm_data1 = self._extract_alarm_data(priority='4')
    alarm_data2 = self._extract_alarm_data(z_resource_name='compute-2',
                                           priority='1')
    zabbix_driver.set_alarm_datas([alarm_data1, alarm_data2, alarm_data3])

    expected_alarm1 = alarm_data1
    expected_alarm2 = copy.copy(alarm_data2)
    expected_alarm2[ZProps.RESOURCE_NAME] = 'host2'

    # Test action
    alarms = zabbix_driver._get_changed_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'No alarms returned')
    self.assertThat(alarms, matchers.HasLength(2))
    self._assert_contains(expected_alarm1, alarms)
    self._assert_contains(expected_alarm2, alarms)

    # Step 6 - get changes when no change occurred

    # Action
    alarms = zabbix_driver._get_changed_alarms()

    # Test assertions
    self.assertIsNotNone(alarms, 'alarms is None')
    self.assertThat(alarms, IsEmpty())
def test_cms_hash_token_sha256(self):
    """Can also hash with sha256."""
    token = self.examples.SIGNED_TOKEN_SCOPED
    token_id = cms.cms_hash_token(token, mode='sha256')
    # sha256 hash is 64 chars.
    self.assertThat(token_id, matchers.HasLength(64))
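# Supporting arithmetic for the assertion above: a SHA-256 digest is 32 bytes,
# so its hex encoding is always 64 characters. This snippet is illustrative
# only and independent of cms.cms_hash_token.
import hashlib

assert len(hashlib.sha256(b'example token data').hexdigest()) == 64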
def test_no_log_warn(self):
    self.assertThat(
        list(checks.no_log_warn('LOG.warning("bl")')),
        IsEmpty())
    self.assertThat(
        list(checks.no_log_warn('LOG.warn("foo")')),
        matchers.HasLength(1))
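# Minimal sketch (an assumption, not the project's actual checks module) of
# the kind of hacking check exercised above: it yields one offense for the
# deprecated LOG.warn() spelling and nothing for LOG.warning().
import re

_LOG_WARN_RE = re.compile(r"LOG\.warn\(")


def no_log_warn_sketch(logical_line):
    if _LOG_WARN_RE.search(logical_line):
        yield (0, "Use LOG.warning() instead of the deprecated LOG.warn()")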
def _test_collectd_alarm(self, resource_type, resource_name, host_name):
    # Setup
    processor = self._create_processor_with_graph(self.conf)
    self.assertThat(
        processor.entity_graph,
        matchers.HasLength(self._num_total_expected_vertices()))

    time1 = time.time()
    severity1 = 'WARNING'
    link_down_message = 'link state of "qvo818dd156-be" is "DOWN"'
    collectd_event = self._create_collectd_event(time1,
                                                 resource_type,
                                                 resource_name,
                                                 host_name,
                                                 severity1,
                                                 link_down_message)

    # Action
    processor.process_event(collectd_event)

    # Test assertions
    self.assertThat(
        processor.entity_graph,
        matchers.HasLength(self._num_total_expected_vertices() + 1))

    collectd_vertices = processor.entity_graph.get_vertices(
        vertex_attr_filter={
            VProps.VITRAGE_CATEGORY: EntityCategory.ALARM,
            VProps.VITRAGE_TYPE: COLLECTD_DATASOURCE
        })
    self.assertThat(collectd_vertices, matchers.HasLength(1))

    collectd_vertex1 = collectd_vertices[0]
    self._assert_collectd_vertex_equals(collectd_vertex1,
                                        time1,
                                        resource_type,
                                        resource_name,
                                        severity1)

    collectd_neighbors = processor.entity_graph.neighbors(
        collectd_vertices[0].vertex_id)
    self._assert_collectd_neighbor_equals(collectd_neighbors,
                                          resource_type,
                                          resource_name)

    # Action 2 - update the existing alarm
    time2 = time.time()
    severity2 = 'ERROR'
    collectd_event = self._create_collectd_event(time2,
                                                 resource_type,
                                                 resource_name,
                                                 host_name,
                                                 severity2,
                                                 link_down_message)
    processor.process_event(collectd_event)

    # Test assertions - the collectd alarm vertex should be the same
    self.assertThat(
        processor.entity_graph,
        matchers.HasLength(self._num_total_expected_vertices() + 1))

    collectd_vertices = processor.entity_graph.get_vertices(
        vertex_attr_filter={
            VProps.VITRAGE_CATEGORY: EntityCategory.ALARM,
            VProps.VITRAGE_TYPE: COLLECTD_DATASOURCE
        })
    self.assertThat(collectd_vertices, matchers.HasLength(1))

    collectd_vertex2 = collectd_vertices[0]
    self.assertEqual(collectd_vertex1[VProps.VITRAGE_ID],
                     collectd_vertex2[VProps.VITRAGE_ID])

    # Action 3 - clear the alarm
    time3 = time.time()
    severity3 = 'OK'
    link_up_message = 'link state of "qvo818dd156-be" is "UP"'
    collectd_event = self._create_collectd_event(time3,
                                                 resource_type,
                                                 resource_name,
                                                 host_name,
                                                 severity3,
                                                 link_up_message)
    processor.process_event(collectd_event)

    # Test assertions - the collectd alarm vertex should be removed
    collectd_vertices = processor.entity_graph.get_vertices(
        vertex_attr_filter={
            VProps.VITRAGE_CATEGORY: EntityCategory.ALARM,
            VProps.VITRAGE_TYPE: COLLECTD_DATASOURCE
        })
    self._assert_no_vertex(collectd_vertices)
def test_drivers_no_active_conductor(self):
    data = self.get_json('/drivers')
    self.assertThat(data['drivers'], matchers.HasLength(0))
    self.assertEqual([], data['drivers'])
def _validate_policygroup(self, topology, pg_name=None, vport_num=1):
    if topology.normal_port is not None or self.is_dhcp_agent_present():
        expected_pgs = 2  # Expecting software + hardware
    else:
        expected_pgs = 1  # Expecting only hardware
    if not Topology.has_unified_pg_for_all_support():
        if self.is_dhcp_agent_present():
            expected_pgs += 1  # Extra PG for dhcp agent

    # Repeated check in case of agent
    for attempt in range(Topology.nbr_retries_for_test_robustness):
        if len(topology.get_vsd_policygroups(True)) == expected_pgs:
            break
        else:
            LOG.error("Unexpected amount of PGs found, "
                      "expected {} found {} "
                      "(attempt {})".format(
                          expected_pgs,
                          len(topology.vsd_policygroups),
                          attempt + 1))
            time.sleep(1)

    self.assertThat(topology.get_vsd_policygroups(True),
                    matchers.HasLength(expected_pgs),
                    message="Unexpected amount of PGs found")
    for pg in topology.vsd_policygroups:
        if pg['type'] == 'HARDWARE':
            vsd_policygroup = pg
            break
    else:
        self.fail("Could not find HARDWARE policy group.")
    self.assertThat(vsd_policygroup['type'], matchers.Equals('HARDWARE'))

    if Topology.has_unified_pg_for_all_support():
        if pg_name:
            self.assertEqual(pg_name, vsd_policygroup['name'])
            self.assertEqual(pg_name, vsd_policygroup['description'])
            self.assertEqual(
                "hw:" + (ExternalId(
                    constants.NUAGE_PLCY_GRP_ALLOW_ALL).at_cms_id()),
                vsd_policygroup['externalID'])
        else:
            self.assertEqual(topology.security_group['id'] + "_HARDWARE",
                             vsd_policygroup['name'])
            self.assertEqual(topology.security_group['name'],
                             vsd_policygroup['description'])
            self.assertEqual(
                "hw:" + (ExternalId(
                    topology.security_group['id']).at_cms_id()),
                vsd_policygroup['externalID'])

        vsd_pg_vports = self.vsd_client.get_vport(constants.POLICYGROUP,
                                                  vsd_policygroup['ID'])
        self.assertThat(vsd_pg_vports, matchers.HasLength(vport_num),
                        message="Expected to find exactly {} "
                                "vport in PG".format(vport_num))
        for vsd_pg_vport in vsd_pg_vports:
            if vsd_pg_vport['ID'] == topology.vsd_baremetal_vport['ID']:
                break
        else:
            self.fail("Vport should be part of HARDWARE PG")
    else:
        if pg_name:
            self.assertThat(vsd_policygroup['name'],
                            matchers.Contains(pg_name))

        vsd_pg_vports = self.vsd_client.get_vport(constants.POLICYGROUP,
                                                  vsd_policygroup['ID'])
        self.assertThat(vsd_pg_vports, matchers.HasLength(1),
                        message="Expected to find exactly 1 vport in PG")
        self.assertThat(vsd_pg_vports[0]['ID'],
                        matchers.Equals(
                            topology.vsd_baremetal_vport['ID']),
                        message="Vport should be part of HARDWARE PG")
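# Side note in code form: the for/else blocks in _validate_policygroup rely on
# Python's loop semantics, where the else branch runs only when the loop
# completes without hitting break. A tiny self-contained illustration:
for item in ['SOFTWARE', 'HARDWARE']:
    if item == 'HARDWARE':
        found = item
        break
else:
    raise AssertionError("no HARDWARE entry found")
assert found == 'HARDWARE'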