def prepare_alarms(self):
    """Populate self.alarms with one 'or' and one 'and' combination alarm."""

    def _combination(name, description, alarm_ids, combine_op):
        # Shared fixture defaults; each alarm gets a fresh random id.
        return models.Alarm(
            name=name,
            description=description,
            type='combination',
            enabled=True,
            user_id='foobar',
            project_id='snafu',
            alarm_id=str(uuid.uuid4()),
            state='insufficient data',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            rule=dict(alarm_ids=alarm_ids, operator=combine_op),
            severity='critical')

    self.alarms = [
        _combination('or-alarm', 'the or alarm',
                     ['9cfc3e51-2ff1-4b1d-ac01-c1bd4c6d0d1e',
                      '1d441595-d069-4e05-95ab-8693ba6a8302'],
                     'or'),
        _combination('and-alarm', 'the and alarm',
                     ['b82734f4-9d06-48f3-8a86-fa59a0c99dc8',
                      '15a700e5-2fe8-4b3d-8c55-9e92831f6a2b'],
                     'and'),
    ]
def get_alarms(self, name=None, user=None, state=None, meter=None,
               project=None, enabled=None, alarm_id=None, alarm_type=None,
               severity=None, exclude=None):
    """Yield matching alarms, most recent timestamp first.

    :raises aodh.NotImplementedError: filtering by meter is unsupported.
    """
    if meter:
        raise aodh.NotImplementedError('Filter by meter not implemented')

    scan_filter = hbase_utils.make_query(
        alarm_id=alarm_id, name=name, enabled=enabled, user_id=user,
        project_id=project, state=state, type=alarm_type,
        severity=severity, exclude=exclude)

    with self.conn_pool.connection() as conn:
        table = conn.table(self.ALARM_TABLE)
        # Materialize and deserialize every matching row, then sort
        # newest-first before yielding.
        matched = [hbase_utils.deserialize_entry(data)
                   for _key, data in table.scan(filter=scan_filter)]
        matched.sort(key=operator.itemgetter('timestamp'), reverse=True)
        for entry in matched:
            yield models.Alarm(**entry)
def test_update_llu(self):
    """Updating a freshly created alarm must not add a second row."""
    llu = alarm_models.Alarm(alarm_id='llu',
                             enabled=True,
                             type=ALARM_TYPE,
                             name='llu',
                             description='llu',
                             timestamp=constants.MIN_DATETIME,
                             user_id='bla',
                             project_id='ffo',
                             state="insufficient data",
                             state_reason="insufficient data",
                             state_timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             alarm_actions=[],
                             insufficient_data_actions=[],
                             repeat_actions=False,
                             time_constraints=[],
                             rule=dict(comparison_operator='lt',
                                       threshold=34,
                                       aggregation_method='max',
                                       evaluation_periods=1,
                                       granularity=60,
                                       metrics=METRIC_IDS))
    updated = self.alarm_conn.create_alarm(llu)
    updated.state = alarm_models.Alarm.ALARM_OK
    updated.description = ':)'
    self.alarm_conn.update_alarm(updated)

    # Renamed from 'all', which shadowed the builtin of the same name.
    stored_alarms = list(self.alarm_conn.get_alarms())
    self.assertEqual(1, len(stored_alarms))
def post(self, data):
    """Create a new alarm.

    :param data: an alarm within the request body.
    """
    rbac.enforce('create_alarm', pecan.request.headers,
                 pecan.request.enforcer)

    conn = pecan.request.alarm_storage_conn
    now = timeutils.utcnow()

    # Server generates the id; any client-supplied id is ignored.
    data.alarm_id = str(uuid.uuid4())
    user_limit, project_limit = rbac.get_limited_to(pecan.request.headers,
                                                    pecan.request.enforcer)

    def _set_ownership(aspect, owner_limitation, header):
        # Resolve the effective owner for one aspect ('user'/'project'):
        # an RBAC-limited caller may not claim a different owner; an
        # unrestricted caller may set an explicit owner; otherwise the
        # owner defaults to the caller's own id from the header.
        attr = '%s_id' % aspect
        requested_owner = getattr(data, attr)
        explicit_owner = requested_owner != wtypes.Unset
        caller = pecan.request.headers.get(header)
        if (owner_limitation and explicit_owner
                and requested_owner != caller):
            raise base.ProjectNotAuthorized(requested_owner, aspect)

        # Precedence note: this parses as
        # (owner_limitation or requested_owner) if explicit_owner else caller
        # because 'or' binds tighter than the conditional expression.
        actual_owner = (owner_limitation or
                        requested_owner if explicit_owner else caller)
        setattr(data, attr, actual_owner)

    _set_ownership('user', user_limit, 'X-User-Id')
    _set_ownership('project', project_limit, 'X-Project-Id')

    # Check if there's room for one more alarm
    if is_over_quota(conn, data.project_id, data.user_id):
        raise OverQuota(data)

    data.timestamp = now
    data.state_timestamp = now

    # Give the rule-type plugin a chance to validate/augment the alarm.
    ALARMS_RULES[data.type].plugin.create_hook(data)

    change = data.as_dict(models.Alarm)

    data.update_actions()

    # make sure alarms are unique by name per project.
    alarms = list(conn.get_alarms(name=data.name,
                                  project=data.project_id))
    if alarms:
        raise base.ClientSideError(
            _("Alarm with name='%s' exists") % data.name,
            status_code=409)

    try:
        alarm_in = models.Alarm(**change)
    except Exception:
        LOG.exception(_("Error while posting alarm: %s") % change)
        raise base.ClientSideError(_("Alarm incorrect"))

    alarm = conn.create_alarm(alarm_in)
    # Audit-trail entry plus Location header for the created resource.
    self._record_creation(conn, change, alarm.alarm_id, now)
    v2_utils.set_resp_location_hdr("/v2/alarms/" + alarm.alarm_id)
    return Alarm.from_db_model(alarm)
def put(self, data):
    """Modify this alarm.

    :param data: an alarm within the request body.
    """
    rbac.enforce('change_alarm', pecan.request.headers,
                 pecan.request.enforcer)

    # Ensure alarm exists
    alarm_in = self._alarm()

    now = timeutils.utcnow()

    data.alarm_id = self._id

    user, project = rbac.get_limited_to(pecan.request.headers,
                                        pecan.request.enforcer)
    # RBAC-limited callers are pinned to their own user/project; fields
    # left unset in the request fall back to the stored alarm's values.
    if user:
        data.user_id = user
    elif data.user_id == wtypes.Unset:
        data.user_id = alarm_in.user_id
    if project:
        data.project_id = project
    elif data.project_id == wtypes.Unset:
        data.project_id = alarm_in.project_id

    data.timestamp = now
    # Only bump state_timestamp when the state actually changed.
    if alarm_in.state != data.state:
        data.state_timestamp = now
    else:
        data.state_timestamp = alarm_in.state_timestamp

    # make sure alarms are unique by name per project.
    if alarm_in.name != data.name:
        alarms = list(self.conn.get_alarms(name=data.name,
                                           project=data.project_id))
        if alarms:
            raise base.ClientSideError(
                _("Alarm with name=%s exists") % data.name,
                status_code=409)

    # Let the rule-type plugin validate/augment the updated alarm.
    ALARMS_RULES[data.type].plugin.update_hook(data)

    old_data = Alarm.from_db_model(alarm_in)
    old_alarm = old_data.as_dict(models.Alarm)
    data.update_actions(old_data)
    updated_alarm = data.as_dict(models.Alarm)
    try:
        alarm_in = models.Alarm(**updated_alarm)
    except Exception:
        LOG.exception(_("Error while putting alarm: %s") % updated_alarm)
        raise base.ClientSideError(_("Alarm incorrect"))

    alarm = self.conn.update_alarm(alarm_in)

    # Record only fields that actually changed (excluding bookkeeping
    # timestamps) for the alarm-history audit trail.
    change = dict((k, v) for k, v in updated_alarm.items()
                  if v != old_alarm[k] and k not in
                  ['timestamp', 'state_timestamp'])
    self._record_change(change, now, on_behalf_of=alarm.project_id)
    return Alarm.from_db_model(alarm)
def setUp(self):
    """Seed a 3 (states) x 2 (dates) x 2 (owners) grid of alarms.

    The grid lets query tests filter along each dimension independently.
    """
    super(TestQueryAlarmsController, self).setUp()
    self.alarm_url = '/query/alarms'

    for state in ['ok', 'alarm', 'insufficient data']:
        for date in [datetime.datetime(2013, 1, 1),
                     datetime.datetime(2013, 2, 2)]:
            # 'num' rather than 'id' to avoid shadowing the builtin.
            for num in [1, 2]:
                alarm_id = "-".join([state, date.isoformat(), str(num)])
                project_id = "project-id%d" % num
                alarm = models.Alarm(name=alarm_id,
                                     type=RULE_TYPE,
                                     enabled=True,
                                     alarm_id=alarm_id,
                                     description='a',
                                     state=state,
                                     state_reason="state_reason",
                                     state_timestamp=date,
                                     timestamp=date,
                                     ok_actions=[],
                                     insufficient_data_actions=[],
                                     alarm_actions=[],
                                     repeat_actions=True,
                                     user_id="user-id%d" % num,
                                     project_id=project_id,
                                     time_constraints=[],
                                     rule=dict(comparison_operator='gt',
                                               threshold=2.0,
                                               aggregation_method='mean',
                                               evaluation_periods=60,
                                               granularity=1,
                                               metrics=[]),
                                     severity='critical')
                self.alarm_conn.create_alarm(alarm)
def update_alarm(self, alarm):
    """Update alarm.

    :param alarm: the alarm to persist (models.Alarm), matched by alarm_id.
    :return: the stored alarm re-read from the database as models.Alarm.
    """
    data = alarm.as_dict()

    # upsert=True: create the document if this alarm id does not exist yet.
    self.db.alarm.update({'alarm_id': alarm.alarm_id},
                         {'$set': data},
                         upsert=True)

    # find_one instead of find(...)[0]: no cursor indexing needed.  The
    # document is guaranteed to exist because of the upsert above.
    stored_alarm = self.db.alarm.find_one({'alarm_id': alarm.alarm_id})
    # '_id' is Mongo's internal key, not a field of models.Alarm.
    del stored_alarm['_id']
    self._ensure_encapsulated_rule_format(stored_alarm)
    self._ensure_time_constraints(stored_alarm)
    return models.Alarm(**stored_alarm)
def update_alarm(self, alarm):
    """Update an alarm.

    (The previous docstring said "Create an alarm", but this method
    overwrites the row keyed by the alarm's existing id.)

    :param alarm: The alarm to update. It is Alarm object, so we need
      to call as_dict().
    """
    _id = alarm.alarm_id
    alarm_to_store = hbase_utils.serialize_entry(alarm.as_dict())
    with self.conn_pool.connection() as conn:
        alarm_table = conn.table(self.ALARM_TABLE)
        # HBase 'put' is an unconditional write: it creates the row if
        # missing, so this effectively behaves as an upsert.
        alarm_table.put(_id, alarm_to_store)
        # Read the row back so the caller gets exactly what was stored.
        stored_alarm = hbase_utils.deserialize_entry(alarm_table.row(_id))
    return models.Alarm(**stored_alarm)
def put(self, data): """Modify this alarm. :param data: an alarm within the request body. """ # Ensure alarm exists alarm_in = self._enforce_rbac('change_alarm') now = timeutils.utcnow() data.alarm_id = self._id user, project = rbac.get_limited_to(pecan.request.headers, pecan.request.enforcer) if user: data.user_id = user elif data.user_id == wtypes.Unset: data.user_id = alarm_in.user_id if project: data.project_id = project elif data.project_id == wtypes.Unset: data.project_id = alarm_in.project_id data.timestamp = now if alarm_in.state != data.state: data.state_timestamp = now data.state_reason = ALARM_REASON_MANUAL else: data.state_timestamp = alarm_in.state_timestamp data.state_reason = alarm_in.state_reason ALARMS_RULES[data.type].plugin.update_hook(data) old_data = Alarm.from_db_model(alarm_in) old_alarm = old_data.as_dict(models.Alarm) data.update_actions(old_data) updated_alarm = data.as_dict(models.Alarm) try: alarm_in = models.Alarm(**updated_alarm) except Exception: LOG.exception("Error while putting alarm: %s", updated_alarm) raise base.ClientSideError(_("Alarm incorrect")) alarm = pecan.request.storage.update_alarm(alarm_in) change = dict( (k, v) for k, v in updated_alarm.items() if v != old_alarm[k] and k not in ['timestamp', 'state_timestamp']) self._record_change(change, now, on_behalf_of=alarm.project_id) return Alarm.from_db_model(alarm)
def _retrieve_alarms(self, query_filter, orderby, limit):
    """Yield models.Alarm objects matching the filter, sorted by orderby."""
    find_kwargs = {'sort': orderby}
    if limit is not None:
        find_kwargs['limit'] = limit
    for document in self.db.alarm.find(query_filter, **find_kwargs):
        entry = dict(document)
        # '_id' is Mongo's internal key, not a field of models.Alarm.
        del entry['_id']
        self._ensure_encapsulated_rule_format(entry)
        self._ensure_time_constraints(entry)
        yield models.Alarm(**entry)
def update_alarm(self, alarm, upsert=False):
    """Update an alarm, optionally creating it if missing.

    (The previous docstring said "Create an alarm"; this is an update
    with optional upsert semantics.)

    :param alarm: The alarm to update. It is Alarm object, so we need
      to call as_dict().
    :param upsert: when False, raise AlarmNotFound if no row with this
      alarm id exists; when True, write unconditionally.
    :return: the stored alarm re-read from HBase as models.Alarm.
    """
    _id = alarm.alarm_id
    alarm_to_store = hbase_utils.serialize_entry(alarm.as_dict())
    with self.conn_pool.connection() as conn:
        alarm_table = conn.table(self.ALARM_TABLE)
        if not upsert:
            q = hbase_utils.make_query(alarm_id=alarm.alarm_id)
            # Only existence matters: probe the scan for a first row
            # instead of materializing the whole result list.
            if next(iter(alarm_table.scan(filter=q)), None) is None:
                raise storage.AlarmNotFound(alarm.alarm_id)
        alarm_table.put(_id, alarm_to_store)
        # Read the row back so the caller gets exactly what was stored.
        stored_alarm = hbase_utils.deserialize_entry(alarm_table.row(_id))
    return models.Alarm(**stored_alarm)
def _row_to_alarm_model(row):
    """Translate one storage row into an alarm_api_models.Alarm."""
    fields = dict(
        alarm_id=row.alarm_id,
        enabled=row.enabled,
        type=row.type,
        name=row.name,
        description=row.description,
        timestamp=row.timestamp,
        user_id=row.user_id,
        project_id=row.project_id,
        state=row.state,
        state_timestamp=row.state_timestamp,
        ok_actions=row.ok_actions,
        alarm_actions=row.alarm_actions,
        insufficient_data_actions=row.insufficient_data_actions,
        rule=row.rule,
        time_constraints=row.time_constraints,
        repeat_actions=row.repeat_actions,
        severity=row.severity)
    return alarm_api_models.Alarm(**fields)
def prepare_alarms(self):
    """Create one composite alarm whose rule ORs two threshold sub-rules."""
    composite = models.Alarm(
        name='composite-GRT-OR-GRT',
        description='composite alarm converted',
        type='composite',
        enabled=True,
        user_id='fake_user',
        project_id='fake_project',
        state='insufficient data',
        state_timestamp=constants.MIN_DATETIME,
        timestamp=constants.MIN_DATETIME,
        insufficient_data_actions=['log://'],
        ok_actions=['log://'],
        alarm_actions=['log://'],
        repeat_actions=False,
        alarm_id=uuidutils.generate_uuid(),
        time_constraints=[],
        rule={"or": [self.sub_rule1, self.sub_rule2]},
        severity='critical')
    self.alarms = [composite]
def _alarm(**kwargs):
    """Build an event-type models.Alarm fixture, overridable via kwargs."""
    alarm_id = kwargs.get('id') or uuidutils.generate_uuid()
    event_rule = dict(event_type=kwargs.get('event_type', '*'),
                      query=kwargs.get('query', []))
    return models.Alarm(
        name=kwargs.get('name', alarm_id),
        type='event',
        enabled=True,
        alarm_id=alarm_id,
        description='desc',
        state=kwargs.get('state', 'insufficient data'),
        severity='critical',
        state_timestamp=constants.MIN_DATETIME,
        timestamp=constants.MIN_DATETIME,
        ok_actions=[],
        insufficient_data_actions=[],
        alarm_actions=[],
        repeat_actions=kwargs.get('repeat', False),
        user_id='user',
        project_id=kwargs.get('project', ''),
        time_constraints=[],
        rule=event_rule)
def setUp(self):
    """Seed a 3 (states) x 2 (dates) x 2 (owners) grid of threshold alarms.

    The grid lets query tests filter along each dimension independently.
    """
    super(TestQueryAlarmsController, self).setUp()
    self.alarm_url = '/query/alarms'

    for state in ['ok', 'alarm', 'insufficient data']:
        for date in [datetime.datetime(2013, 1, 1),
                     datetime.datetime(2013, 2, 2)]:
            # 'num' rather than 'id' to avoid shadowing the builtin.
            for num in [1, 2]:
                alarm_id = "-".join([state, date.isoformat(), str(num)])
                project_id = "project-id%d" % num
                alarm = models.Alarm(name=alarm_id,
                                     type='threshold',
                                     enabled=True,
                                     alarm_id=alarm_id,
                                     description='a',
                                     state=state,
                                     state_timestamp=date,
                                     timestamp=date,
                                     ok_actions=[],
                                     insufficient_data_actions=[],
                                     alarm_actions=[],
                                     repeat_actions=True,
                                     user_id="user-id%d" % num,
                                     project_id=project_id,
                                     time_constraints=[],
                                     rule=dict(comparison_operator='gt',
                                               threshold=2.0,
                                               statistic='avg',
                                               evaluation_periods=60,
                                               period=1,
                                               meter_name='meter.test',
                                               query=[{'field': 'project_id',
                                                       'op': 'eq',
                                                       'value': project_id}]),
                                     severity='critical')
                self.alarm_conn.create_alarm(alarm)
def test_evaluate_octavia_error(self, mock_session, mock_octavia):
    """A non-2xx Octavia reply drives the alarm to UNKNOWN.

    The response body must be surfaced as the alarm's state reason.
    """

    class Response(object):
        def __init__(self, status_code, content):
            self.status_code = status_code
            self.content = content

    alarm_kwargs = dict(
        name='lb_member_alarm',
        description='lb_member_alarm',
        type=loadbalancer.ALARM_TYPE,
        enabled=True,
        user_id=uuidutils.generate_uuid(),
        project_id=uuidutils.generate_uuid(dashed=False),
        alarm_id=uuidutils.generate_uuid(),
        state='insufficient data',
        state_reason='insufficient data',
        state_timestamp=constants.MIN_DATETIME,
        timestamp=constants.MIN_DATETIME,
        insufficient_data_actions=[],
        ok_actions=[],
        alarm_actions=[],
        repeat_actions=False,
        time_constraints=[],
        severity='low',
        rule=dict(
            pool_id=uuidutils.generate_uuid(),
            stack_id=uuidutils.generate_uuid(),
            autoscaling_group_id=uuidutils.generate_uuid(),
        ))
    alarm = models.Alarm(**alarm_kwargs)

    octavia_client = mock.MagicMock()
    mock_octavia.return_value = octavia_client
    error_body = 'Pool NotFound'
    octavia_client.member_list.return_value = Response(404, error_body)

    self.evaluator.evaluate(alarm)

    self.assertEqual(evaluator.UNKNOWN, alarm.state)
    self.assertEqual(error_body, alarm.state_reason)
def test_evaluate(self, mock_session, mock_octavia):
    """A member in ERROR operating status should trip the alarm."""
    alarm_kwargs = dict(
        name='lb_member_alarm',
        description='lb_member_alarm',
        type=loadbalancer.ALARM_TYPE,
        enabled=True,
        user_id=uuidutils.generate_uuid(),
        project_id=uuidutils.generate_uuid(dashed=False),
        alarm_id=uuidutils.generate_uuid(),
        state='insufficient data',
        state_reason='insufficient data',
        state_timestamp=constants.MIN_DATETIME,
        timestamp=constants.MIN_DATETIME,
        insufficient_data_actions=[],
        ok_actions=[],
        alarm_actions=[],
        repeat_actions=False,
        time_constraints=[],
        severity='low',
        rule=dict(
            pool_id=uuidutils.generate_uuid(),
            stack_id=uuidutils.generate_uuid(),
            autoscaling_group_id=uuidutils.generate_uuid(),
        ))
    alarm = models.Alarm(**alarm_kwargs)

    octavia_client = mock.MagicMock()
    mock_octavia.return_value = octavia_client

    # Member created a day ago so it is old enough to be evaluated.
    created_at = timeutils.utcnow() - datetime.timedelta(days=1)
    member = {
        'created_at': created_at.isoformat(),
        'admin_state_up': True,
        'operating_status': 'ERROR',
    }
    octavia_client.member_list.return_value = {'members': [member]}

    self.evaluator.evaluate(alarm)

    self.assertEqual(evaluator.ALARM, alarm.state)
def add_some_alarms(self):
    """Store three metric alarms (two enabled, one disabled) for tests."""

    def _make(alarm_id, enabled, name, description, minute,
              time_constraints, rule):
        # Shared fixture defaults; per-alarm values come in as arguments.
        return alarm_models.Alarm(
            alarm_id=alarm_id,
            enabled=enabled,
            type=ALARM_TYPE,
            name=name,
            description=description,
            timestamp=datetime.datetime(2015, 7, 2, 10, minute),
            user_id='me',
            project_id='and-da-boys',
            state="insufficient data",
            state_reason="insufficient data",
            state_timestamp=constants.MIN_DATETIME,
            ok_actions=[],
            alarm_actions=['http://nowhere/alarms'],
            insufficient_data_actions=[],
            repeat_actions=False,
            time_constraints=time_constraints,
            rule=rule,
            severity='low')

    alarms = [
        _make('r3d', True, 'red-alert', 'my red-alert', 25,
              [dict(name='testcons', start='0 11 * * *', duration=300)],
              dict(comparison_operator='eq', threshold=36,
                   aggregation_method='count', evaluation_periods=1,
                   granularity=60, metrics=METRIC_IDS)),
        _make('0r4ng3', True, 'orange-alert', 'a orange', 40,
              [],
              dict(comparison_operator='gt', threshold=75,
                   aggregation_method='avg', evaluation_periods=1,
                   granularity=60, metrics=METRIC_IDS)),
        _make('y3ll0w', False, 'yellow-alert', 'yellow', 10,
              [],
              dict(comparison_operator='lt', threshold=10,
                   aggregation_method='min', evaluation_periods=1,
                   granularity=60, metrics=METRIC_IDS)),
    ]
    for a in alarms:
        self.alarm_conn.create_alarm(a)
def prepare_alarms(self):
    """Create composite alarms covering nested, 'or', 'and' and mixed rules."""

    def _composite(name, description, rule):
        # Shared fixture defaults; only name/description/rule vary.
        return models.Alarm(
            name=name,
            description=description,
            type='composite',
            enabled=True,
            user_id='fake_user',
            project_id='fake_project',
            alarm_id=uuidutils.generate_uuid(),
            state='insufficient data',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            rule=rule,
            severity='critical')

    self.alarms = [
        _composite('alarm_threshold_nest',
                   'alarm with sub rules nested combined',
                   {"or": [self.sub_rule1,
                           {"and": [self.sub_rule2, self.sub_rule3]}]}),
        _composite('alarm_threshold_or',
                   'alarm on one of sub rules triggered',
                   {"or": [self.sub_rule1, self.sub_rule2,
                           self.sub_rule3]}),
        _composite('alarm_threshold_and',
                   'alarm on all the sub rules triggered',
                   {"and": [self.sub_rule1, self.sub_rule2,
                            self.sub_rule3]}),
        _composite('alarm_multi_type_rules',
                   'alarm with threshold and gnocchi rules',
                   {"and": [self.sub_rule2, self.sub_rule3,
                            {'or': [self.sub_rule1, self.sub_rule4,
                                    self.sub_rule5, self.sub_rule6]}]}),
    ]
def prepare_alarms(self):
    """Create two threshold alarms with meter/resource-scoped queries."""

    def _threshold(name, rule):
        # Shared fixture defaults; the name doubles as the description.
        return models.Alarm(
            name=name,
            description=name,
            type='threshold',
            enabled=True,
            user_id='foobar',
            project_id='snafu',
            alarm_id=uuidutils.generate_uuid(),
            state='insufficient data',
            state_timestamp=constants.MIN_DATETIME,
            state_reason='Not evaluated',
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            rule=rule,
            severity='critical')

    self.alarms = [
        _threshold('instance_running_hot',
                   dict(comparison_operator='gt',
                        threshold=80.0,
                        evaluation_periods=5,
                        statistic='avg',
                        period=60,
                        meter_name='cpu_util',
                        query=[{'field': 'meter', 'op': 'eq',
                                'value': 'cpu_util'},
                               {'field': 'resource_id', 'op': 'eq',
                                'value': 'my_instance'}])),
        _threshold('group_running_idle',
                   dict(comparison_operator='le',
                        threshold=10.0,
                        evaluation_periods=4,
                        statistic='max',
                        period=300,
                        meter_name='cpu_util',
                        query=[{'field': 'meter', 'op': 'eq',
                                'value': 'cpu_util'},
                               {'field': 'metadata.user_metadata.AS',
                                'op': 'eq', 'value': 'my_group'}])),
    ]
def add_some_alarms(self):
    """Store three threshold alarms (two enabled, one disabled) for tests."""

    def _make(alarm_id, enabled, name, description, minute,
              time_constraints, rule):
        # Shared fixture defaults; per-alarm values come in as arguments.
        # Note: no severity is set, matching the original fixtures.
        return alarm_models.Alarm(
            alarm_id=alarm_id,
            enabled=enabled,
            type='threshold',
            name=name,
            description=description,
            timestamp=datetime.datetime(2015, 7, 2, 10, minute),
            user_id='me',
            project_id='and-da-boys',
            state="insufficient data",
            state_timestamp=constants.MIN_DATETIME,
            ok_actions=[],
            alarm_actions=['http://nowhere/alarms'],
            insufficient_data_actions=[],
            repeat_actions=False,
            time_constraints=time_constraints,
            rule=rule)

    alarms = [
        _make('r3d', True, 'red-alert', 'my red-alert', 25,
              [dict(name='testcons', start='0 11 * * *', duration=300)],
              dict(comparison_operator='eq', threshold=36,
                   statistic='count', evaluation_periods=1, period=60,
                   meter_name='test.one',
                   query=[{'field': 'key', 'op': 'eq',
                           'value': 'value', 'type': 'string'}])),
        _make('0r4ng3', True, 'orange-alert', 'a orange', 40,
              [],
              dict(comparison_operator='gt', threshold=75,
                   statistic='avg', evaluation_periods=1, period=60,
                   meter_name='test.forty',
                   query=[{'field': 'key2', 'op': 'eq',
                           'value': 'value2', 'type': 'string'}])),
        _make('y3ll0w', False, 'yellow-alert', 'yellow', 10,
              [],
              dict(comparison_operator='lt', threshold=10,
                   statistic='min', evaluation_periods=1, period=60,
                   meter_name='test.five',
                   query=[{'field': 'key2', 'op': 'eq',
                           'value': 'value2', 'type': 'string'},
                          {'field': 'user_metadata.key3', 'op': 'eq',
                           'value': 'value3', 'type': 'string'}])),
    ]
    for a in alarms:
        self.alarm_conn.create_alarm(a)
def setUp(self):
    """Patch the gnocchi client and build one alarm per gnocchi rule type."""
    self.client = self.useFixture(
        fixtures.MockPatch(
            'aodh.evaluator.gnocchi.client')).mock.Client.return_value

    def _gnocchi_alarm(name, description, alarm_type, rule, **extra):
        # Shared fixture defaults; 'extra' lets a caller add fields such
        # as severity (only the first alarm sets it, as before).
        kwargs = dict(
            name=name,
            description=description,
            type=alarm_type,
            enabled=True,
            user_id='foobar',
            project_id='snafu',
            alarm_id=uuidutils.generate_uuid(),
            state='insufficient data',
            state_reason='insufficient data',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            rule=rule)
        kwargs.update(extra)
        return models.Alarm(**kwargs)

    self.prepared_alarms = [
        _gnocchi_alarm(
            'instance_running_hot', 'instance_running_hot',
            'gnocchi_resources_threshold',
            dict(comparison_operator='gt', threshold=80.0,
                 evaluation_periods=5, aggregation_method='mean',
                 granularity=60, metric='cpu_util',
                 resource_type='instance', resource_id='my_instance'),
            severity='low'),
        _gnocchi_alarm(
            'group_running_idle', 'group_running_idle',
            'gnocchi_aggregation_by_metrics_threshold',
            dict(comparison_operator='le', threshold=10.0,
                 evaluation_periods=4, aggregation_method='max',
                 granularity=300,
                 metrics=['0bb1604d-1193-4c0a-b4b8-74b170e35e83',
                          '9ddc209f-42f8-41e1-b8f1-8804f59c4053'])),
        _gnocchi_alarm(
            'instance_not_running', 'instance_running_hot',
            'gnocchi_aggregation_by_resources_threshold',
            dict(comparison_operator='gt', threshold=80.0,
                 evaluation_periods=6, aggregation_method='mean',
                 granularity=50, metric='cpu_util',
                 resource_type='instance',
                 query='{"=": {"server_group": '
                       '"my_autoscaling_group"}}')),
    ]
    super(TestGnocchiEvaluatorBase, self).setUp()
def prepare_alarms(self):
    """Create one alarm for each of the three gnocchi threshold rule types."""

    def _gnocchi_alarm(name, description, alarm_type, rule):
        # Shared fixture defaults; only name/description/type/rule vary.
        return models.Alarm(
            name=name,
            description=description,
            type=alarm_type,
            enabled=True,
            user_id='foobar',
            project_id='snafu',
            alarm_id=str(uuid.uuid4()),
            state='insufficient data',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            rule=rule)

    self.alarms = [
        _gnocchi_alarm(
            'instance_running_hot', 'instance_running_hot',
            'gnocchi_resources_threshold',
            dict(comparison_operator='gt', threshold=80.0,
                 evaluation_periods=5, aggregation_method='mean',
                 granularity=60, metric='cpu_util',
                 resource_type='instance', resource_id='my_instance')),
        _gnocchi_alarm(
            'group_running_idle', 'group_running_idle',
            'gnocchi_aggregation_by_metrics_threshold',
            dict(comparison_operator='le', threshold=10.0,
                 evaluation_periods=4, aggregation_method='max',
                 granularity=300,
                 metrics=['0bb1604d-1193-4c0a-b4b8-74b170e35e83',
                          '9ddc209f-42f8-41e1-b8f1-8804f59c4053'])),
        _gnocchi_alarm(
            'instance_not_running', 'instance_running_hot',
            'gnocchi_aggregation_by_resources_threshold',
            dict(comparison_operator='gt', threshold=80.0,
                 evaluation_periods=6, aggregation_method='mean',
                 granularity=50, metric='cpu_util',
                 resource_type='instance',
                 query='{"=": {"server_group": '
                       '"my_autoscaling_group"}}')),
    ]
def conversion():
    """CLI entry point: convert 'combination' alarms to 'composite' ones.

    For every combination alarm (optionally narrowed to a single id via
    the CLI arguments) a new composite alarm is created with an
    equivalent rule; alarms that cannot be converted are skipped with a
    warning.  With --delete-combination-alarm the originals are removed
    afterwards.
    """
    args = get_parser().parse_args()
    conf = service.prepare_service([])
    conn = storage.get_connection_from_config(conf)
    combination_alarms = list(conn.get_alarms(
        alarm_type='combination',
        alarm_id=args.alarm_id or None))
    count = 0
    for alarm in combination_alarms:
        new_name = 'From-combination: %s' % alarm.alarm_id
        # Idempotency guard: skip alarms already converted in a prior run
        # (detected by the deterministic converted-alarm name).
        n_alarm = list(conn.get_alarms(name=new_name,
                                       alarm_type='composite'))
        if n_alarm:
            LOG.warning(_LW('Alarm %(alarm)s has been already converted as '
                            'composite alarm: %(n_alarm_id)s, skipped.'),
                        {'alarm': alarm.alarm_id,
                         'n_alarm_id': n_alarm[0].alarm_id})
            continue
        try:
            composite_rule = _generate_composite_rule(conn, alarm)
        except DependentAlarmNotFound as e:
            LOG.warning(_LW('The dependent alarm %(dep_alarm)s of alarm %'
                            '(com_alarm)s not found, skipped.'),
                        {'com_alarm': e.com_alarm_id,
                         'dep_alarm': e.dependent_alarm_id})
            continue
        except UnsupportedSubAlarmType as e:
            LOG.warning(_LW('Alarm conversion from combination to composite '
                            'only support combination alarms depending '
                            'threshold alarms, the type of alarm %(alarm)s '
                            'is: %(type)s, skipped.'),
                        {'alarm': e.sub_alarm_id,
                         'type': e.sub_alarm_type})
            continue
        # Clone the original alarm, then overwrite the fields that make
        # it a distinct composite alarm.
        new_alarm = models.Alarm(**alarm.as_dict())
        new_alarm.alarm_id = uuidutils.generate_uuid()
        new_alarm.name = new_name
        new_alarm.type = 'composite'
        new_alarm.description = ('composite alarm converted from combination '
                                 'alarm: %s' % alarm.alarm_id)
        new_alarm.rule = composite_rule
        # NOTE(review): naive local time via datetime.now(); confirm
        # whether utcnow() was intended, as used elsewhere for timestamps.
        new_alarm.timestamp = datetime.datetime.now()
        conn.create_alarm(new_alarm)
        LOG.info(_LI('End Converting combination alarm %(s_alarm)s to '
                     'composite alarm %(d_alarm)s'),
                 {'s_alarm': alarm.alarm_id,
                  'd_alarm': new_alarm.alarm_id})
        count += 1
    if args.delete_combination_alarm:
        for alarm in combination_alarms:
            LOG.info(_LI('Deleting the combination alarm %s...'),
                     alarm.alarm_id)
            conn.delete_alarm(alarm.alarm_id)
    LOG.info(_LI('%s combination alarms have been converted to composite '
                 'alarms.'), count)