def test_action_get_all_project_safe(self):
    """project_safe controls cross-project visibility in action_get_all."""
    parser.simple_parse(shared.sample_action)
    _create_action(self.ctx)
    other_ctx = utils.dummy_context(project='another-project')

    # A different project sees nothing while project_safe is on ...
    safe_result = db_api.action_get_all(other_ctx, project_safe=True)
    self.assertEqual(0, len(safe_result))

    # ... and sees the action once it is off.
    unsafe_result = db_api.action_get_all(other_ctx, project_safe=False)
    self.assertEqual(1, len(unsafe_result))
def test_action_get_project_safe(self):
    """project_safe controls cross-project visibility in action_get."""
    parser.simple_parse(shared.sample_action)
    action = _create_action(self.ctx)
    other_ctx = utils.dummy_context(project='another-project')

    # Hidden from another project when project_safe is on, visible otherwise.
    self.assertIsNone(
        db_api.action_get(other_ctx, action.id, project_safe=True))
    self.assertIsNotNone(
        db_api.action_get(other_ctx, action.id, project_safe=False))
def test_action_get_with_admin_context(self):
    # Purpose: exercise action_get from an admin context that belongs to a
    # different project than the action's owner.
    parser.simple_parse(shared.sample_action)
    action = _create_action(self.ctx)
    new_ctx = utils.dummy_context(project='another-project', is_admin=True)
    # NOTE(review): even with is_admin=True, the project_safe=True lookup is
    # expected to return None here — presumably admin status does not bypass
    # project scoping in this version of the DB API; confirm against db_api.
    retobj = db_api.action_get(new_ctx, action.id, project_safe=True)
    self.assertIsNone(retobj)
    # Without project scoping the action is visible.
    retobj = db_api.action_get(new_ctx, action.id, project_safe=False)
    self.assertIsNotNone(retobj)
def test_load(self):
    """Trigger.load works both from a trigger id and from a DB object."""
    now = timeutils.utcnow()
    spec = parser.simple_parse(sample_trigger)
    db_obj = self._create_db_trigger('FAKE_ID', created_time=now,
                                     updated_time=now)
    self.assertIsNotNone(db_obj)

    loaded = base.Trigger.load(self.ctx, trigger_id=db_obj.id)
    self.assertIsInstance(loaded, base.Trigger)
    self.assertEqual('FAKE_ID', loaded.id)
    self.assertEqual('test-trigger', loaded.name)
    self.assertEqual('FakeTriggerType', loaded.type_name)
    self.assertEqual('blah blah', loaded.desc)
    self.assertEqual(base.INSUFFICIENT_DATA, loaded.state)
    self.assertTrue(loaded.enabled)
    self.assertEqual(base.S_CRITICAL, loaded.severity)
    self.assertEqual({'alarm_actions': ['http://url1']}, loaded.links)
    self.assertEqual(now, loaded.created_time)
    self.assertEqual(now, loaded.updated_time)
    self.assertIsNone(loaded.deleted_time)
    self.assertEqual(spec, loaded.spec)
    self.assertEqual(spec['type'], loaded.spec_data['type'])
    self.assertEqual(str(spec['version']), loaded.spec_data['version'])
    self.assertEqual(spec['rule'], loaded.spec_data['rule'])

    # Loading via the DB trigger object yields the same trigger.
    loaded = base.Trigger.load(self.ctx, db_trigger=db_obj)
    self.assertIsInstance(loaded, base.Trigger)
    self.assertEqual('FAKE_ID', loaded.id)
def _create_action(context, action_json=shared.sample_action, **kwargs):
    """Parse the action sample, stamp ownership from context, and persist."""
    values = parser.simple_parse(action_json)
    values.update(user=context.user_id,
                  project=context.project_id,
                  domain=context.domain_id)
    # Caller-supplied overrides win over the defaults above.
    values.update(kwargs)
    return db_api.action_create(context, values)
def test_trigger_list_with_sort_keys(self):
    """trigger_list honors single and compound sort keys."""
    spec = parser.simple_parse(trigger_spec)
    tb = self.eng.trigger_create(self.ctx, 'TB', spec, severity='low')
    ta = self.eng.trigger_create(self.ctx, 'TA', spec, severity='low')
    tc = self.eng.trigger_create(self.ctx, 'TC', spec, severity='high')

    # Default ordering is by created_time.
    result = self.eng.trigger_list(self.ctx)
    self.assertEqual(tb['id'], result[0]['id'])
    self.assertEqual(ta['id'], result[1]['id'])

    # Sorting by name.
    result = self.eng.trigger_list(self.ctx, sort_keys=['name'])
    self.assertEqual(ta['id'], result[0]['id'])
    self.assertEqual(tb['id'], result[1]['id'])

    # Sorting by severity.
    result = self.eng.trigger_list(self.ctx, sort_keys=['severity'])
    self.assertEqual(tc['id'], result[0]['id'])

    # Sorting by severity, then name.
    result = self.eng.trigger_list(self.ctx,
                                   sort_keys=['severity', 'name'])
    self.assertEqual(tc['id'], result[0]['id'])
    self.assertEqual(ta['id'], result[1]['id'])
    self.assertEqual(tb['id'], result[2]['id'])

    # Unknown sort keys are silently ignored.
    result = self.eng.trigger_list(self.ctx, sort_keys=['duang'])
    self.assertIsNotNone(result)
def create_profile(context, profile=sample_profile, **kwargs):
    """Parse a profile spec, stamp ownership from context, and persist it."""
    values = parser.simple_parse(profile)
    values.update(user=context.user,
                  project=context.project,
                  domain=context.domain)
    # Caller-supplied overrides win over the defaults above.
    values.update(kwargs)
    return db_api.profile_create(context, values)
def test_trigger_list_with_limit_marker(self):
    """trigger_list pages correctly with limit, marker and both."""
    spec = parser.simple_parse(trigger_spec)
    first = self.eng.trigger_create(self.ctx, 't-1', spec)
    second = self.eng.trigger_create(self.ctx, 't-2', spec)

    # With two triggers, limit caps the page size.
    for limit, expected in ((0, 0), (1, 1), (2, 2), (3, 2)):
        result = self.eng.trigger_list(self.ctx, limit=limit)
        self.assertEqual(expected, len(result))

    # Marker skips everything up to and including the given id.
    self.assertEqual(
        1, len(self.eng.trigger_list(self.ctx, marker=first['id'])))
    self.assertEqual(
        0, len(self.eng.trigger_list(self.ctx, marker=second['id'])))

    # With a third trigger, limit and marker combine.
    self.eng.trigger_create(self.ctx, 't-3', spec)
    self.assertEqual(
        1, len(self.eng.trigger_list(self.ctx, limit=1,
                                     marker=first['id'])))
    self.assertEqual(
        2, len(self.eng.trigger_list(self.ctx, limit=2,
                                     marker=first['id'])))
def test_profile_create(self):
    """A created profile carries the parsed name, type and spec."""
    expected = parser.simple_parse(shared.sample_profile)
    profile = shared.create_profile(self.ctx)
    self.assertIsNotNone(profile.id)
    for attr in ('name', 'type', 'spec'):
        self.assertEqual(expected[attr], getattr(profile, attr))
def test_to_dict(self):
    """to_dict serializes every trigger field, defaulting the timestamps."""
    spec = parser.simple_parse(sample_trigger)
    trigger = base.Trigger('t1', spec, id='FAKE_ID', desc='DESC',
                           user=self.ctx.user,
                           project=self.ctx.project,
                           domain=self.ctx.domain)
    expected = {
        'id': 'FAKE_ID',
        'name': 't1',
        'type': 'FakeTriggerType',
        'desc': 'DESC',
        'state': base.INSUFFICIENT_DATA,
        'enabled': True,
        'severity': base.S_LOW,
        'links': {},
        'spec': spec,
        'user': self.ctx.user,
        'project': self.ctx.project,
        'domain': self.ctx.domain,
        'created_time': None,
        'updated_time': None,
        'deleted_time': None,
    }
    self.assertEqual(expected, trigger.to_dict())
def test_trigger_list_with_sort_dir(self):
    """trigger_list honors sort direction and rejects unknown values."""
    spec = parser.simple_parse(trigger_spec)
    tb = self.eng.trigger_create(self.ctx, 'TB', spec, severity='low')
    ta = self.eng.trigger_create(self.ctx, 'TA', spec, severity='low')
    tc = self.eng.trigger_create(self.ctx, 'TC', spec, severity='high')

    # Default: created_time, ascending.
    result = self.eng.trigger_list(self.ctx)
    self.assertEqual(tb['id'], result[0]['id'])
    self.assertEqual(ta['id'], result[1]['id'])

    # created_time, descending.
    result = self.eng.trigger_list(self.ctx, sort_dir='desc')
    self.assertEqual(tc['id'], result[0]['id'])
    self.assertEqual(ta['id'], result[1]['id'])

    # Name, descending.
    result = self.eng.trigger_list(self.ctx, sort_keys=['name'],
                                   sort_dir='desc')
    self.assertEqual(tc['id'], result[0]['id'])
    self.assertEqual(tb['id'], result[1]['id'])

    # A bogus direction raises ValueError with a descriptive message.
    ex = self.assertRaises(ValueError, self.eng.trigger_list,
                           self.ctx, sort_dir='Bogus')
    self.assertEqual(
        "Unknown sort direction, must be one of: "
        "asc-nullsfirst, asc-nullslast, desc-nullsfirst, "
        "desc-nullslast", six.text_type(ex))
def test_trigger_create_type_not_found(self):
    """Creating a trigger of an unregistered type fails."""
    spec = parser.simple_parse(trigger_spec)
    spec['type'] = 'Bogus'

    ex = self.assertRaises(rpc.ExpectedException,
                           self.eng.trigger_create,
                           self.ctx, 't-1', spec)
    self.assertEqual(exception.TriggerTypeNotFound, ex.exc_info[0])
def setUp(self):
    """Register two dummy profile versions and prepare a test context."""
    super(TestProfileBase, self).setUp()
    self.ctx = utils.dummy_context(project='profile_test_project')
    env = environment.global_env()
    for name in ('os.dummy-1.0', 'os.dummy-1.1'):
        env.register_profile(name, DummyProfile)
    self.spec = parser.simple_parse(sample_profile)
def _create_action(context, action_json=shared.sample_action, **kwargs):
    """Parse the action sample, stamp ownership from context, and persist."""
    values = parser.simple_parse(action_json)
    values.update(user=context.user,
                  project=context.project,
                  domain=context.domain)
    # Caller-supplied overrides win over the defaults above.
    values.update(kwargs)
    return db_api.action_create(context, values)
def test_trigger_list_with_sort_dir(self):
    """Sort direction is honored; unknown directions raise ValueError."""
    spec = parser.simple_parse(trigger_spec)
    tb = self.eng.trigger_create(self.ctx, 'TB', spec, severity='low')
    ta = self.eng.trigger_create(self.ctx, 'TA', spec, severity='low')
    tc = self.eng.trigger_create(self.ctx, 'TC', spec, severity='high')

    # Default ordering: created_time ascending.
    result = self.eng.trigger_list(self.ctx)
    self.assertEqual(tb['id'], result[0]['id'])
    self.assertEqual(ta['id'], result[1]['id'])

    # created_time descending.
    result = self.eng.trigger_list(self.ctx, sort_dir='desc')
    self.assertEqual(tc['id'], result[0]['id'])
    self.assertEqual(ta['id'], result[1]['id'])

    # Name descending.
    result = self.eng.trigger_list(self.ctx, sort_keys=['name'],
                                   sort_dir='desc')
    self.assertEqual(tc['id'], result[0]['id'])
    self.assertEqual(tb['id'], result[1]['id'])

    # Invalid direction is rejected.
    ex = self.assertRaises(ValueError, self.eng.trigger_list,
                           self.ctx, sort_dir='Bogus')
    self.assertEqual("Unknown sort direction, must be one of: "
                     "asc-nullsfirst, asc-nullslast, desc-nullsfirst, "
                     "desc-nullslast", six.text_type(ex))
def test_trigger_create_invalid_spec(self):
    """An unexpected key in the spec fails validation."""
    spec = parser.simple_parse(trigger_spec)
    spec['KEY3'] = 'value3'

    ex = self.assertRaises(rpc.ExpectedException,
                           self.eng.trigger_create,
                           self.ctx, 't-1', spec)
    self.assertEqual(exception.SpecValidationFailed, ex.exc_info[0])
def setUp(self):
    """Register two dummy policy versions and prepare a test context."""
    super(TestPolicyBase, self).setUp()
    self.ctx = utils.dummy_context()
    env = environment.global_env()
    for name in ('senlin.policy.dummy-1.0', 'senlin.policy.dummy-1.1'):
        env.register_policy(name, DummyPolicy)
    self.spec = parser.simple_parse(sample_policy)
def test_profile_create(self):
    """A created profile carries name, type, spec and permission."""
    expected = parser.simple_parse(shared.sample_profile)
    profile = shared.create_profile(self.ctx)
    self.assertIsNotNone(profile.id)
    for attr in ("name", "type", "spec", "permission"):
        self.assertEqual(expected[attr], getattr(profile, attr))
def test_validate_illegal_tc_timezone(self):
    """An unknown timezone in a time constraint fails validation."""
    spec = parser.simple_parse(threshold_alarm)
    spec["time_constraints"][0]["timezone"] = "Moon/Back"
    obj = alarm.Alarm("A1", spec)

    ex = self.assertRaises(exc.InvalidSpec, obj.validate)
    msg = ("Invalid timezone value specified for property 'timezone' "
           "(Moon/Back): 'Moon/Back'")
    self.assertEqual(msg, six.text_type(ex))
def test_trigger_create_failed_validation(self):
    """A validation failure surfaces as InvalidSpec from trigger_create."""
    spec = parser.simple_parse(trigger_spec)
    # Force validate() to blow up so the engine's error path runs.
    self.patchobject(fakes.TestTrigger, 'validate',
                     side_effect=exception.InvalidSpec(message='BOOM'))

    ex = self.assertRaises(rpc.ExpectedException,
                           self.eng.trigger_create,
                           self.ctx, 't1', spec)
    self.assertEqual(exception.InvalidSpec, ex.exc_info[0])
def test_combination_alarm(self):
    # Purpose: parsing a combination alarm spec populates namespace and rule.
    spec = parser.simple_parse(combination_alarm)
    a = alarm.CombinationAlarm('A1', spec)
    self.assertIsNotNone(a.rule)
    self.assertEqual('combination', a.namespace)
    self.assertEqual('and', a.rule['operator'])
    self.assertIn('alarm_001', a.rule['alarm_ids'])
    # NOTE(review): this repeats the 'alarm_001' check above — presumably it
    # was meant to assert a second id (e.g. 'alarm_002'); confirm against the
    # combination_alarm sample spec before changing.
    self.assertIn('alarm_001', a.rule['alarm_ids'])
def test_combination_alarm(self):
    # Purpose: parsing a combination alarm spec populates namespace and rule.
    spec = parser.simple_parse(combination_alarm)
    a = alarm.CombinationAlarm("A1", spec)
    self.assertIsNotNone(a.rule)
    self.assertEqual("combination", a.namespace)
    self.assertEqual("and", a.rule["operator"])
    self.assertIn("alarm_001", a.rule["alarm_ids"])
    # NOTE(review): this repeats the "alarm_001" check above — presumably it
    # was meant to assert a second id (e.g. "alarm_002"); confirm against the
    # combination_alarm sample spec before changing.
    self.assertIn("alarm_001", a.rule["alarm_ids"])
def test_validate_illegal_tc_timezone(self):
    """An unknown timezone in a time constraint fails validation."""
    spec = parser.simple_parse(threshold_alarm)
    spec['time_constraints'][0]['timezone'] = 'Moon/Back'
    obj = alarm.Alarm('A1', spec)

    ex = self.assertRaises(exc.InvalidSpec, obj.validate)
    msg = ("Invalid timezone value specified for property 'timezone' "
           "(Moon/Back): 'Moon/Back'")
    self.assertEqual(msg, six.text_type(ex))
def test_validate_illegal_tc_start(self):
    """A malformed cron expression in 'start' fails validation."""
    spec = parser.simple_parse(threshold_alarm)
    spec['time_constraints'][0]['start'] = 'XYZ'
    obj = alarm.Alarm('A1', spec)

    ex = self.assertRaises(exc.InvalidSpec, obj.validate)
    msg = ("Invalid cron expression specified for property 'start' "
           "(XYZ): Exactly 5 or 6 columns has to be specified for "
           "iteratorexpression.")
    self.assertEqual(msg, six.text_type(ex))
def test_action_get_by_name_duplicated(self):
    """Looking up a duplicated action name raises MultipleChoices."""
    values = parser.simple_parse(shared.sample_action)
    first = _create_action(self.ctx)
    second = _create_action(self.ctx)

    self.assertIsNotNone(first)
    self.assertIsNotNone(second)
    self.assertNotEqual(first.id, second.id)

    self.assertRaises(exception.MultipleChoices,
                      db_api.action_get_by_name,
                      self.ctx, values['name'])
def test_trigger_delete(self):
    """A deleted trigger can no longer be retrieved."""
    spec = parser.simple_parse(trigger_spec)
    trigger = self.eng.trigger_create(self.ctx, 'T1', spec)
    trigger_id = trigger['id']

    self.assertIsNone(self.eng.trigger_delete(self.ctx, trigger_id))

    ex = self.assertRaises(rpc.ExpectedException,
                           self.eng.trigger_get, self.ctx, trigger_id)
    self.assertEqual(exception.TriggerNotFound, ex.exc_info[0])
def test_store_for_create(self):
    """Storing a new trigger sets creation time but no update/delete time."""
    spec = parser.simple_parse(sample_trigger)
    trigger = base.Trigger('t1', spec)
    stored_id = trigger.store(self.ctx)

    loaded = base.Trigger.load(self.ctx, stored_id)
    self.assertIsNotNone(loaded)
    self.assertIsNotNone(loaded.id)
    self.assertIsNotNone(loaded.created_time)
    self.assertIsNone(loaded.updated_time)
    self.assertIsNone(loaded.deleted_time)
def test_trigger_get(self):
    """Triggers resolve by full id, short id and name; bogus lookups fail."""
    spec = parser.simple_parse(trigger_spec)
    created = self.eng.trigger_create(self.ctx, 't-1', spec)

    for identity in (created['id'], created['id'][:6], 't-1'):
        found = self.eng.trigger_get(self.ctx, identity)
        self.assertIsInstance(found, dict)
        self.assertEqual(created['id'], found['id'])

    ex = self.assertRaises(rpc.ExpectedException,
                           self.eng.trigger_get, self.ctx, 'Bogus')
    self.assertEqual(exception.TriggerNotFound, ex.exc_info[0])
def test_trigger_list(self):
    """trigger_list returns every created trigger."""
    spec = parser.simple_parse(trigger_spec)
    first = self.eng.trigger_create(self.ctx, 't-1', spec)
    second = self.eng.trigger_create(self.ctx, 't-2', spec)

    result = self.eng.trigger_list(self.ctx)
    self.assertIsInstance(result, list)
    names = [item['name'] for item in result]
    ids = [item['id'] for item in result]
    for trigger in (first, second):
        self.assertIn(trigger['name'], names)
        self.assertIn(trigger['id'], ids)
def test_create_with_failure(self, mock_senlindriver):
    """A driver-side creation failure is reported, not raised."""
    cc = mock.Mock()
    sd = mock.Mock()
    sd.telemetry.return_value = cc
    mock_senlindriver.return_value = sd
    cc.alarm_create.side_effect = exc.ResourceCreationFailure(rtype="Alarm")

    obj = alarm.ThresholdAlarm("A1", parser.simple_parse(threshold_alarm))
    res, reason = obj.create(self.ctx)
    self.assertFalse(res)
    self.assertEqual("Failed in creating Alarm.", reason)
def test_trigger_create_with_parameters(self):
    """Optional creation parameters are stored on the trigger."""
    spec = parser.simple_parse(trigger_spec)
    result = self.eng.trigger_create(self.ctx, 't-1', spec,
                                     description='DESC', enabled=False,
                                     state='OK', severity='high')
    self.assertEqual(spec, result['spec'])
    self.assertEqual('DESC', result['desc'])
    self.assertFalse(result['enabled'])
    self.assertEqual('OK', result['state'])
    self.assertEqual('high', result['severity'])
def test_create(self, mock_senlindriver):
    # Purpose: alarm.create() forwards the parsed spec plus the extra action
    # URLs to the telemetry driver's alarm_create call.
    cc = mock.Mock()
    sd = mock.Mock()
    sd.telemetry.return_value = cc
    mock_senlindriver.return_value = sd
    spec = parser.simple_parse(threshold_alarm)
    # Extra parameters passed through to create().
    params = {
        'alarm_actions': ['http://url1'],
        'ok_actions': ['http://url2'],
        'insufficient_data_actions': ['http://url3']
    }
    a = alarm.ThresholdAlarm('A1', spec)
    res, alarm_dict = a.create(self.ctx, **params)
    self.assertTrue(res)
    # The telemetry driver is built from the caller's context.
    sd.telemetry.assert_called_once_with(self.ctx.to_dict())
    # Expected keyword arguments: fields from the parsed spec merged with
    # the action URLs supplied above.
    values = {
        'name': 'A1',
        'description': '',
        'type': 'threshold',
        'state': 'insufficient_data',
        'severity': 'low',
        'enabled': True,
        'alarm_actions': ['http://url1'],
        'ok_actions': ['http://url2'],
        'insufficient_data_actions': ['http://url3'],
        'time_constraints': [{
            'name': None,
            'description': None,
            'start': '10 * * * * *',
            'duration': 10,
            'timezone': '',
        }],
        'repeat_actions': True,
        'threshold_rule': {
            'meter_name': 'cpu_util',
            'evaluation_periods': 2,
            'period': 120,
            'statistic': 'avg',
            'threshold': 15,
            'query': [{
                'field': 'resource_metadata.cluster',
                'value': 'cluster1',
                'op': '=='}],
            'comparison_operator': 'lt',
        }
    }
    cc.alarm_create.assert_called_once_with(**values)
    # A successful create records the backend alarm id.
    self.assertIsNotNone(a.id)
def test_agg_metric_alarm(self):
    """Spec maps onto the gnocchi aggregation-by-metrics threshold rule."""
    spec = parser.simple_parse(agg_metric_alarm)
    obj = alarm.AggregateByMetricsAlarm("A1", spec)
    self.assertIsNotNone(obj.rule)
    self.assertEqual("gnocchi_aggregation_by_metrics_threshold",
                     obj.namespace)
    for metric in ("disk.io.read.bytes", "disk.io.write.bytes"):
        self.assertIn(metric, obj.rule["metrics"])
    expectations = {
        "comparison_operator": "lt",
        "threshold": 16384,
        "granularity": 62,
        "evaluation_periods": 2,
        "aggregation_method": "avg",
    }
    for key, value in expectations.items():
        self.assertEqual(value, obj.rule[key])
def test_create_with_failure(self, mock_senlindriver):
    """A driver-side creation failure is reported, not raised."""
    cc = mock.Mock()
    sd = mock.Mock()
    sd.telemetry.return_value = cc
    mock_senlindriver.return_value = sd
    cc.alarm_create.side_effect = exc.ResourceCreationFailure(
        rtype='Alarm')

    obj = alarm.ThresholdAlarm('A1', parser.simple_parse(threshold_alarm))
    res, reason = obj.create(self.ctx)
    self.assertFalse(res)
    self.assertEqual('Failed in creating Alarm.', reason)
def test_resource_alarm(self):
    """Spec maps onto the gnocchi resource threshold rule."""
    spec = parser.simple_parse(resource_alarm)
    obj = alarm.ResourceAlarm('A1', spec)
    self.assertIsNotNone(obj.rule)
    self.assertEqual('gnocchi_resources_threshold', obj.namespace)
    expectations = {
        'metric': 'cpu_util',
        'comparison_operator': 'gt',
        'threshold': 75,
        'granularity': 61,
        'evaluation_periods': 3,
        'aggregation_method': 'avg',
        'resource_type': 'instance',
        'resource_id': '001-002-0003',
    }
    for key, value in expectations.items():
        self.assertEqual(value, obj.rule[key])
def test_resource_alarm(self):
    """Spec maps onto the gnocchi resource threshold rule."""
    spec = parser.simple_parse(resource_alarm)
    obj = alarm.ResourceAlarm("A1", spec)
    self.assertIsNotNone(obj.rule)
    self.assertEqual("gnocchi_resources_threshold", obj.namespace)
    expectations = {
        "metric": "cpu_util",
        "comparison_operator": "gt",
        "threshold": 75,
        "granularity": 61,
        "evaluation_periods": 3,
        "aggregation_method": "avg",
        "resource_type": "instance",
        "resource_id": "001-002-0003",
    }
    for key, value in expectations.items():
        self.assertEqual(value, obj.rule[key])
def test_agg_metric_alarm(self):
    """Spec maps onto the gnocchi aggregation-by-metrics threshold rule."""
    spec = parser.simple_parse(agg_metric_alarm)
    obj = alarm.AggregateByMetricsAlarm('A1', spec)
    self.assertIsNotNone(obj.rule)
    self.assertEqual('gnocchi_aggregation_by_metrics_threshold',
                     obj.namespace)
    for metric in ('disk.io.read.bytes', 'disk.io.write.bytes'):
        self.assertIn(metric, obj.rule['metrics'])
    expectations = {
        'comparison_operator': 'lt',
        'threshold': 16384,
        'granularity': 62,
        'evaluation_periods': 2,
        'aggregation_method': 'avg',
    }
    for key, value in expectations.items():
        self.assertEqual(value, obj.rule[key])
def test_action_create(self):
    """action_create persists all parsed fields."""
    values = parser.simple_parse(shared.sample_action)
    action = db_api.action_create(self.ctx, values)
    self.assertIsNotNone(action)
    for field in ('name', 'target', 'action', 'cause', 'timeout',
                  'status', 'status_reason'):
        self.assertEqual(values[field], getattr(action, field))
    self.assertEqual(10, action.inputs['max_size'])
    self.assertIsNone(action.outputs)
def test_threshold_alarm(self):
    """Spec maps onto the ceilometer threshold rule."""
    spec = parser.simple_parse(threshold_alarm)
    obj = alarm.ThresholdAlarm("A1", spec)
    self.assertIsNotNone(obj.rule)
    self.assertEqual("threshold", obj.namespace)
    expectations = {
        "meter_name": "cpu_util",
        "comparison_operator": "lt",
        "threshold": 15,
        "period": 120,
        "evaluation_periods": 2,
        "statistic": "avg",
    }
    for key, value in expectations.items():
        self.assertEqual(value, obj.rule[key])
    query = [{"field": "resource_metadata.cluster",
              "op": "==",
              "value": "cluster1"}]
    self.assertEqual(query, obj.rule["query"])
def test_agg_resource_alarm(self):
    """Spec maps onto the gnocchi aggregation-by-resources threshold rule."""
    spec = parser.simple_parse(agg_resource_alarm)
    obj = alarm.AggregateByResourcesAlarm("A1", spec)
    self.assertIsNotNone(obj.rule)
    self.assertEqual("gnocchi_aggregation_by_resources_threshold",
                     obj.namespace)
    expectations = {
        "metric": "network.read.packets",
        "comparison_operator": "lt",
        "threshold": 1024,
        "granularity": 65,
        "evaluation_periods": 5,
        "aggregation_method": "avg",
        "resource_type": "instance",
        "query": "project_id==1234",
    }
    for key, value in expectations.items():
        self.assertEqual(value, obj.rule[key])
def test_trigger_list_show_deleted(self):
    """Deleted triggers only appear when show_deleted is requested."""
    spec = parser.simple_parse(trigger_spec)
    created = self.eng.trigger_create(self.ctx, 't-1', spec)

    result = self.eng.trigger_list(self.ctx)
    self.assertEqual(1, len(result))
    self.assertEqual(created['id'], result[0]['id'])

    # After deletion the trigger vanishes from the default listing ...
    self.eng.trigger_delete(self.ctx, created['id'])
    self.assertEqual(0, len(self.eng.trigger_list(self.ctx)))

    # ... but is still visible with show_deleted=True.
    result = self.eng.trigger_list(self.ctx, show_deleted=True)
    self.assertEqual(1, len(result))
    self.assertEqual(created['id'], result[0]['id'])
def test_action_get_by_name(self):
    """Actions are retrievable by name with all fields intact."""
    values = parser.simple_parse(shared.sample_action)
    _create_action(self.ctx)

    found = db_api.action_get_by_name(self.ctx, values['name'])
    self.assertIsNotNone(found)
    for field in ('name', 'target', 'action', 'cause', 'timeout',
                  'status', 'status_reason'):
        self.assertEqual(values[field], getattr(found, field))
    self.assertEqual(10, found.inputs['max_size'])
    self.assertIsNone(found.outputs)
def test_agg_resource_alarm(self):
    """Spec maps onto the gnocchi aggregation-by-resources threshold rule."""
    spec = parser.simple_parse(agg_resource_alarm)
    obj = alarm.AggregateByResourcesAlarm('A1', spec)
    self.assertIsNotNone(obj.rule)
    self.assertEqual('gnocchi_aggregation_by_resources_threshold',
                     obj.namespace)
    expectations = {
        'metric': 'network.read.packets',
        'comparison_operator': 'lt',
        'threshold': 1024,
        'granularity': 65,
        'evaluation_periods': 5,
        'aggregation_method': 'avg',
        'resource_type': 'instance',
        'query': 'project_id==1234',
    }
    for key, value in expectations.items():
        self.assertEqual(value, obj.rule[key])
def test_trigger_list_with_filters(self):
    """trigger_list filters by name and by severity."""
    spec = parser.simple_parse(trigger_spec)
    for name, severity in (('TB', 'low'), ('TA', 'low'), ('TC', 'high')):
        self.eng.trigger_create(self.ctx, name, spec, severity=severity)

    # Exact-name match returns exactly one trigger.
    result = self.eng.trigger_list(self.ctx, filters={'name': 'TB'})
    self.assertEqual(1, len(result))
    self.assertEqual('TB', result[0]['name'])

    # A name that doesn't exist matches nothing.
    result = self.eng.trigger_list(self.ctx, filters={'name': 'TD'})
    self.assertEqual(0, len(result))

    # Severity filtering matches both low-severity triggers.
    result = self.eng.trigger_list(self.ctx,
                                   filters={'severity': 'low'})
    self.assertEqual(2, len(result))