def get_one(self, message_id):
    """Return a single event with the given message id.

    :param message_id: Message ID of the Event to be returned
    :raises base.EntityNotFound: if no event matches message_id.
    """
    rbac.enforce("events:show", pecan.request)
    # Scope the lookup by the caller's RBAC context: trait filters plus
    # the admin-project marker built from the request headers.
    filters = _build_rbac_query_filters()
    t_filter = filters['t_filter']
    admin_proj = filters['admin_proj']
    event_filter = storage.EventFilter(traits_filter=t_filter,
                                       admin_proj=admin_proj,
                                       message_id=message_id)
    events = [event for event
              in pecan.request.event_storage_conn.get_events(event_filter)]
    if not events:
        raise base.EntityNotFound(_("Event"), message_id)
    if len(events) > 1:
        # message_id should be unique in storage; log the anomaly and
        # fall through to returning the first match.
        LOG.error(_("More than one event with "
                    "id %s returned from storage driver") % message_id)
    event = events[0]
    return Event(message_id=event.message_id,
                 event_type=event.event_type,
                 generated=event.generated,
                 traits=event.traits,
                 raw=event.raw)
def get_state(self):
    """Return the current state of this alarm."""
    rbac.enforce('get_alarm_state', pecan.request)
    return self._alarm().state
def post(self, body):
    """Define query for retrieving Sample data.

    :param body: Query rules for the samples to be returned.
    """
    rbac.enforce('query_sample', pecan.request)
    # Translate public API field names to the storage model's columns.
    name_map = {
        "resource": "resource_id",
        "meter": "counter_name",
        "type": "counter_type",
        "unit": "counter_unit",
        "volume": "counter_volume"
    }
    validated = ValidatedComplexQuery(body, storage.models.Sample,
                                      name_map, metadata_allowed=True)
    validated.validate(visibility_field="project_id")
    conn = pecan.request.storage_conn
    db_samples = conn.query_samples(validated.filter_expr,
                                    validated.orderby, validated.limit)
    return [samples.Sample.from_db_model(s) for s in db_samples]
def get_one(self, message_id):
    """Return a single event with the given message id.

    :param message_id: Message ID of the Event to be returned
    :raises base.EntityNotFound: if no event matches message_id.
    """
    rbac.enforce("events:show", pecan.request)
    # Apply RBAC scoping derived from the request headers: trait
    # filters and, for admins, the project visibility marker.
    filters = _build_rbac_query_filters()
    t_filter = filters['t_filter']
    admin_proj = filters['admin_proj']
    event_filter = storage.EventFilter(traits_filter=t_filter,
                                       admin_proj=admin_proj,
                                       message_id=message_id)
    events = [
        event for event in pecan.request.event_storage_conn.get_events(
            event_filter)
    ]
    if not events:
        raise base.EntityNotFound(_("Event"), message_id)
    if len(events) > 1:
        # A message id is expected to be unique; log the anomaly and
        # return the first match anyway.
        LOG.error(
            _("More than one event with "
              "id %s returned from storage driver") % message_id)
    event = events[0]
    return Event(message_id=event.message_id,
                 event_type=event.event_type,
                 generated=event.generated,
                 traits=event.traits,
                 raw=event.raw)
def put(self, data):
    """Modify this alarm.

    :param data: an alarm within the request body.
    :raises base.ClientSideError: on duplicate name (409) or an alarm
        body the storage model rejects.
    """
    rbac.enforce('change_alarm', pecan.request)
    # Ensure alarm exists
    alarm_in = self._alarm()
    now = timeutils.utcnow()
    data.alarm_id = self._id
    # Non-admin callers may be pinned to their own user/project; unset
    # fields fall back to the stored alarm's ownership.
    user, project = rbac.get_limited_to(pecan.request.headers)
    if user:
        data.user_id = user
    elif data.user_id == wtypes.Unset:
        data.user_id = alarm_in.user_id
    if project:
        data.project_id = project
    elif data.project_id == wtypes.Unset:
        data.project_id = alarm_in.project_id
    data.timestamp = now
    # Only bump state_timestamp when the state actually transitions.
    if alarm_in.state != data.state:
        data.state_timestamp = now
    else:
        data.state_timestamp = alarm_in.state_timestamp
    # make sure alarms are unique by name per project.
    if alarm_in.name != data.name:
        alarms = list(self.conn.get_alarms(name=data.name,
                                           project=data.project_id))
        if alarms:
            raise base.ClientSideError(
                _("Alarm with name=%s exists") % data.name,
                status_code=409)
    # Let the type-specific plugin massage/validate the rule.
    ALARMS_RULES[data.type].plugin.update_hook(data)
    old_data = Alarm.from_db_model(alarm_in)
    old_alarm = old_data.as_dict(alarm_models.Alarm)
    data.update_actions(old_data)
    updated_alarm = data.as_dict(alarm_models.Alarm)
    try:
        alarm_in = alarm_models.Alarm(**updated_alarm)
    except Exception:
        LOG.exception(_("Error while putting alarm: %s") % updated_alarm)
        raise base.ClientSideError(_("Alarm incorrect"))
    alarm = self.conn.update_alarm(alarm_in)
    # Record only the fields that actually changed, ignoring the
    # bookkeeping timestamps.
    change = dict((k, v) for k, v in updated_alarm.items()
                  if v != old_alarm[k] and k not in
                  ['timestamp', 'state_timestamp'])
    self._record_change(change, now, on_behalf_of=alarm.project_id)
    return Alarm.from_db_model(alarm)
def post(self, data):
    """Create a new alarm.

    :param data: an alarm within the request body.
    :raises base.ClientSideError: on duplicate name (409) or an alarm
        body the storage model rejects.
    """
    rbac.enforce('create_alarm', pecan.request)
    conn = pecan.request.alarm_storage_conn
    now = timeutils.utcnow()
    data.alarm_id = str(uuid.uuid4())
    user_limit, project_limit = rbac.get_limited_to(pecan.request.headers)

    def _set_ownership(aspect, owner_limitation, header):
        # Resolve the user_id/project_id for the new alarm: an
        # explicitly requested owner must match the caller when RBAC
        # limits apply; otherwise default to the caller's header value.
        attr = '%s_id' % aspect
        requested_owner = getattr(data, attr)
        explicit_owner = requested_owner != wtypes.Unset
        caller = pecan.request.headers.get(header)
        if (owner_limitation and explicit_owner
                and requested_owner != caller):
            raise base.ProjectNotAuthorized(requested_owner, aspect)
        actual_owner = (owner_limitation or requested_owner
                        if explicit_owner else caller)
        setattr(data, attr, actual_owner)

    _set_ownership('user', user_limit, 'X-User-Id')
    _set_ownership('project', project_limit, 'X-Project-Id')
    # Check if there's room for one more alarm
    if is_over_quota(conn, data.project_id, data.user_id):
        raise OverQuota(data)
    data.timestamp = now
    data.state_timestamp = now
    # Let the type-specific plugin massage/validate the rule.
    ALARMS_RULES[data.type].plugin.create_hook(data)
    data.update_actions()
    change = data.as_dict(alarm_models.Alarm)
    # make sure alarms are unique by name per project.
    alarms = list(conn.get_alarms(name=data.name,
                                  project=data.project_id))
    if alarms:
        raise base.ClientSideError(
            _("Alarm with name='%s' exists") % data.name,
            status_code=409)
    try:
        alarm_in = alarm_models.Alarm(**change)
    except Exception:
        LOG.exception(_("Error while posting alarm: %s") % change)
        raise base.ClientSideError(_("Alarm incorrect"))
    alarm = conn.create_alarm(alarm_in)
    self._record_creation(conn, change, alarm.alarm_id, now)
    return Alarm.from_db_model(alarm)
def put(self, data):
    """Modify this alarm.

    :param data: an alarm within the request body.
    :raises base.ClientSideError: on duplicate name (409) or an alarm
        body the storage model rejects.
    """
    rbac.enforce('change_alarm', pecan.request)
    # Ensure alarm exists
    alarm_in = self._alarm()
    now = timeutils.utcnow()
    data.alarm_id = self._id
    # Non-admin callers may be pinned to their own user/project; unset
    # fields fall back to the stored alarm's ownership.
    user, project = rbac.get_limited_to(pecan.request.headers)
    if user:
        data.user_id = user
    elif data.user_id == wtypes.Unset:
        data.user_id = alarm_in.user_id
    if project:
        data.project_id = project
    elif data.project_id == wtypes.Unset:
        data.project_id = alarm_in.project_id
    data.timestamp = now
    # Only bump state_timestamp when the state actually transitions.
    if alarm_in.state != data.state:
        data.state_timestamp = now
    else:
        data.state_timestamp = alarm_in.state_timestamp
    alarm_in.severity = data.severity
    # make sure alarms are unique by name per project.
    if alarm_in.name != data.name:
        alarms = list(
            self.conn.get_alarms(name=data.name,
                                 project=data.project_id))
        if alarms:
            raise base.ClientSideError(_("Alarm with name=%s exists") %
                                       data.name, status_code=409)
    # Let the type-specific plugin massage/validate the rule.
    ALARMS_RULES[data.type].plugin.update_hook(data)
    old_alarm = Alarm.from_db_model(alarm_in).as_dict(alarm_models.Alarm)
    updated_alarm = data.as_dict(alarm_models.Alarm)
    try:
        alarm_in = alarm_models.Alarm(**updated_alarm)
    except Exception:
        LOG.exception(_("Error while putting alarm: %s") % updated_alarm)
        raise base.ClientSideError(_("Alarm incorrect"))
    alarm = self.conn.update_alarm(alarm_in)
    # Record only the fields that actually changed, ignoring the
    # bookkeeping timestamps.
    change = dict(
        (k, v) for k, v in updated_alarm.items()
        if v != old_alarm[k] and k not in ['timestamp', 'state_timestamp'])
    self._record_change(change, now, on_behalf_of=alarm.project_id)
    return Alarm.from_db_model(alarm)
def delete(self):
    """Delete this alarm."""
    rbac.enforce("delete_alarm", pecan.request)
    # Resolve the alarm first so an unknown id 404s before any removal.
    doomed = self._alarm()
    self.conn.delete_alarm(doomed.alarm_id)
    snapshot = Alarm.from_db_model(doomed).as_dict(alarm_models.Alarm)
    self._record_change(snapshot, timeutils.utcnow(),
                        type=alarm_models.AlarmChange.DELETION)
def delete(self):
    """Delete this alarm."""
    rbac.enforce('delete_alarm', pecan.request)
    # Look the alarm up first so deleting an unknown id raises 404.
    db_alarm = self._alarm()
    self.conn.delete_alarm(db_alarm.alarm_id)
    Alarm.from_db_model(db_alarm).delete_actions()
def post(self, body):
    """Define query for retrieving Alarm data.

    :param body: Query rules for the alarms to be returned.
    """
    rbac.enforce("query_alarm", pecan.request)
    validated = ValidatedComplexQuery(body, alarm_models.Alarm)
    validated.validate(visibility_field="project_id")
    conn = pecan.request.alarm_storage_conn
    matches = conn.query_alarms(validated.filter_expr,
                                validated.orderby,
                                validated.limit)
    return [alarms.Alarm.from_db_model(a) for a in matches]
def delete(self):
    """Delete this alarm."""
    rbac.enforce('delete_alarm', pecan.request)
    target = self._alarm()  # 404s before anything is removed
    self.conn.delete_alarm(target.alarm_id)
    # Persist a DELETION entry in the alarm's change history.
    deleted_state = Alarm.from_db_model(target).as_dict(alarm_models.Alarm)
    self._record_change(deleted_state,
                        timeutils.utcnow(),
                        type=alarm_models.AlarmChange.DELETION)
def get_all(self, q=None):
    """Return all alarms, based on the query provided.

    :param q: Filter rules for the alarms to be returned.
    """
    rbac.enforce("get_alarms", pecan.request)
    conn = pecan.request.alarm_storage_conn
    # Timestamp is not supported field for Simple Alarm queries
    kwargs = v2_utils.query_to_kwargs(q or [],
                                      conn.get_alarms,
                                      allow_timestamps=False)
    return [Alarm.from_db_model(m) for m in conn.get_alarms(**kwargs)]
def get_one(self, resource_id):
    """Retrieve details about one resource.

    :param resource_id: The UUID of the resource.
    """
    rbac.enforce("get_resource", pecan.request)
    # Non-admin callers are scoped to their own project's resources.
    project = rbac.get_limited_to_project(pecan.request.headers)
    matches = list(pecan.request.storage_conn.get_resources(
        resource=resource_id, project=project))
    if not matches:
        raise base.EntityNotFound(_("Resource"), resource_id)
    links = self._resource_links(resource_id)
    return Resource.from_db_and_links(matches[0], links)
def get_all(self, q=None):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    """
    rbac.enforce("get_meters", pecan.request)
    conn = pecan.request.storage_conn
    # Timestamp field is not supported for Meter queries
    kwargs = v2_utils.query_to_kwargs(q or [],
                                      conn.get_meters,
                                      allow_timestamps=False)
    return [Meter.from_db_model(m) for m in conn.get_meters(**kwargs)]
def get_all(self, q=None, meter=None, groupby=None, period=None,
            aggregate=None):
    """Retrieve all statistics for all meters

    :param q: Filter rules for the statistics to be returned.
    :param meter: Meter names to compute statistics for.
    :param groupby: Fields for group by aggregation.
    :param period: Length in seconds of each statistics period.
    :param aggregate: The selectable aggregation functions to be applied.
    """
    rbac.enforce('compute_statistics', pecan.request)
    q = q or []
    meter = meter or []
    groupby = groupby or []
    aggregate = aggregate or []
    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))
    g = meters._validate_groupby_fields(groupby)

    # TODO: break out the meter names and invoke multiple calls
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)

    aggregate = utils.uniq(aggregate, ['func', 'param'])
    # Find the original timestamp in the query to use for clamping
    # the duration returned in the statistics.
    start = end = None
    for i in q:
        if i.field == 'timestamp' and i.op in ('lt', 'le'):
            end = timeutils.parse_isotime(i.value).replace(tzinfo=None)
        elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
            start = timeutils.parse_isotime(i.value).replace(tzinfo=None)

    ret = []
    kwargs['meter'] = meter
    f = storage.SampleFilter(**kwargs)
    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            f, period, g, aggregate)
        # snake_case local (was dbStats) per PEP 8.
        db_stats = [
            ScopedStatistics(start_timestamp=start,
                             end_timestamp=end,
                             **c.as_dict()) for c in computed
        ]
        ret += db_stats
    except OverflowError:
        # Best-effort: log with lazy %-args (avoids eager formatting)
        # and return whatever was computed so far.
        LOG.exception("Problem processing meters %s", meter)
    return ret
def statistics(self, q=None, groupby=None, period=None, aggregate=None):
    """Computes the statistics of the samples in the time range given.

    :param q: Filter rules for the data to be returned.
    :param groupby: Fields for group by aggregation
    :param period: Returned result will be an array of statistics for a
                   period long of that number of seconds.
    :param aggregate: The selectable aggregation functions to be applied.
    :raises base.ClientSideError: if period is negative or overflows.
    """
    rbac.enforce('compute_statistics', pecan.request)
    q = q or []
    groupby = groupby or []
    aggregate = aggregate or []
    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    # Restrict statistics to this controller's meter.
    kwargs['meter'] = self.meter_name
    f = storage.SampleFilter(**kwargs)
    g = _validate_groupby_fields(groupby)
    aggregate = utils.uniq(aggregate, ['func', 'param'])
    # Find the original timestamp in the query to use for clamping
    # the duration returned in the statistics.
    start = end = None
    for i in q:
        if i.field == 'timestamp' and i.op in ('lt', 'le'):
            end = timeutils.parse_isotime(i.value).replace(
                tzinfo=None)
        elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
            start = timeutils.parse_isotime(i.value).replace(
                tzinfo=None)
    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            f, period, g, aggregate)
        LOG.debug(_('computed value coming from %r'),
                  pecan.request.storage_conn)
        return [Statistics(start_timestamp=start,
                           end_timestamp=end,
                           **c.as_dict())
                for c in computed]
    except OverflowError as e:
        # Driver arithmetic overflowed (e.g. absurdly large period).
        params = dict(period=period, err=e)
        raise base.ClientSideError(
            _("Invalid period %(period)s: %(err)s") % params)
def statistics(self, q=None, groupby=None, period=None, aggregate=None):
    """Computes the statistics of the samples in the time range given.

    :param q: Filter rules for the data to be returned.
    :param groupby: Fields for group by aggregation
    :param period: Returned result will be an array of statistics for a
                   period long of that number of seconds.
    :param aggregate: The selectable aggregation functions to be applied.
    :raises base.ClientSideError: if period is negative or overflows.
    """
    rbac.enforce('compute_statistics', pecan.request)
    q = q or []
    groupby = groupby or []
    aggregate = aggregate or []
    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    # Restrict statistics to this controller's meter.
    kwargs['meter'] = self.meter_name
    f = storage.SampleFilter(**kwargs)
    g = _validate_groupby_fields(groupby)
    aggregate = utils.uniq(aggregate, ['func', 'param'])
    # Find the original timestamp in the query to use for clamping
    # the duration returned in the statistics.
    start = end = None
    for i in q:
        if i.field == 'timestamp' and i.op in ('lt', 'le'):
            end = timeutils.parse_isotime(i.value).replace(tzinfo=None)
        elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
            start = timeutils.parse_isotime(i.value).replace(tzinfo=None)
    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            f, period, g, aggregate)
        LOG.debug(_('computed value coming from %r'),
                  pecan.request.storage_conn)
        return [
            Statistics(start_timestamp=start,
                       end_timestamp=end,
                       **c.as_dict()) for c in computed
        ]
    except OverflowError as e:
        # Driver arithmetic overflowed (e.g. absurdly large period).
        params = dict(period=period, err=e)
        raise base.ClientSideError(
            _("Invalid period %(period)s: %(err)s") % params)
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce("get_samples", pecan.request)
    q = q or []
    limit = utils.enforce_limit(limit)
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Materialize as a list: on Python 3 map() returns a one-shot lazy
    # iterator, unlike the lists returned by the sibling endpoints.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f, limit=limit)]
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce("get_samples", pecan.request)
    capped = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(q or [],
                                      storage.SampleFilter.__init__)
    # Pin the filter to this controller's meter.
    kwargs["meter"] = self.meter_name
    sample_filter = storage.SampleFilter(**kwargs)
    rows = pecan.request.storage_conn.get_samples(sample_filter,
                                                  limit=capped)
    return [OldSample.from_db_model(e) for e in rows]
def post(self, body):
    """Define query for retrieving Alarm data.

    :param body: Query rules for the alarms to be returned.
    """
    rbac.enforce('query_alarm', pecan.request)
    complex_query = ValidatedComplexQuery(body, alarm_models.Alarm)
    complex_query.validate(visibility_field="project_id")
    storage_conn = pecan.request.alarm_storage_conn
    return [alarms.Alarm.from_db_model(hit)
            for hit in storage_conn.query_alarms(complex_query.filter_expr,
                                                 complex_query.orderby,
                                                 complex_query.limit)]
def get_one(self, resource_id):
    """Retrieve details about one resource.

    :param resource_id: The UUID of the resource.
    """
    rbac.enforce('get_resource', pecan.request)
    # Restrict visibility to the caller's project when not admin.
    scoped_project = rbac.get_limited_to_project(pecan.request.headers)
    found = list(pecan.request.storage_conn.get_resources(
        resource=resource_id, project=scoped_project))
    if not found:
        raise base.EntityNotFound(_('Resource'), resource_id)
    return Resource.from_db_and_links(found[0],
                                      self._resource_links(resource_id))
def get_all(self, q=None):
    """Return all alarms, based on the query provided.

    :param q: Filter rules for the alarms to be returned.
    """
    rbac.enforce('get_alarms', pecan.request)
    storage_conn = pecan.request.alarm_storage_conn
    # Timestamp is not supported field for Simple Alarm queries
    query_kwargs = v2_utils.query_to_kwargs(q or [],
                                            storage_conn.get_alarms,
                                            allow_timestamps=False)
    return [Alarm.from_db_model(m)
            for m in storage_conn.get_alarms(**query_kwargs)]
def get_one(self, sample_id):
    """Return a sample.

    :param sample_id: the id of the sample.
    """
    rbac.enforce("get_sample", pecan.request)
    # A sample is addressed by its unique message_id.
    sample_filter = storage.SampleFilter(message_id=sample_id)
    found = list(pecan.request.storage_conn.get_samples(sample_filter))
    if not found:
        raise base.EntityNotFound(_("Sample"), sample_id)
    return Sample.from_db_model(found[0])
def get_one(self, sample_id):
    """Return a sample.

    :param sample_id: the id of the sample.
    """
    rbac.enforce('get_sample', pecan.request)
    # Look the sample up by its unique message_id.
    matches = list(pecan.request.storage_conn.get_samples(
        storage.SampleFilter(message_id=sample_id)))
    if not matches:
        raise base.EntityNotFound(_('Sample'), sample_id)
    return Sample.from_db_model(matches[0])
def post(self, body):
    """Define query for retrieving AlarmChange data.

    :param body: Query rules for the alarm history to be returned.
    """
    rbac.enforce('query_alarm_history', pecan.request)
    history_query = ValidatedComplexQuery(body, alarm_models.AlarmChange)
    # History visibility is scoped by whose behalf the change was on.
    history_query.validate(visibility_field="on_behalf_of")
    conn = pecan.request.alarm_storage_conn
    changes = conn.query_alarm_history(history_query.filter_expr,
                                       history_query.orderby,
                                       history_query.limit)
    return [alarms.AlarmChange.from_db_model(c) for c in changes]
def history(self, q=None):
    """Assembles the alarm history requested.

    :param q: Filter rules for the changes to be described.
    """
    rbac.enforce("alarm_history", pecan.request)
    # allow history to be returned for deleted alarms, but scope changes
    # returned to those carried out on behalf of the auth'd tenant, to
    # avoid inappropriate cross-tenant visibility of alarm history
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    conn = pecan.request.alarm_storage_conn
    kwargs = v2_utils.query_to_kwargs(q or [], conn.get_alarm_changes,
                                      ["on_behalf_of", "alarm_id"])
    changes = conn.get_alarm_changes(self._id, auth_project, **kwargs)
    return [AlarmChange.from_db_model(ac) for ac in changes]
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce('get_samples', pecan.request)
    q = q or []
    limit = utils.enforce_limit(limit)
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Materialize as a list: on Python 3 map() returns a one-shot lazy
    # iterator, unlike the lists returned by the sibling endpoints.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f, limit=limit)]
def get_all(self, q=None, meter_links=1):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param meter_links: option to include related meter links
    """
    rbac.enforce("get_resources", pecan.request)
    conn = pecan.request.storage_conn
    kwargs = utils.query_to_kwargs(q or [], conn.get_resources)
    result = []
    for r in conn.get_resources(**kwargs):
        links = self._resource_links(r.resource_id, meter_links)
        result.append(Resource.from_db_and_links(r, links))
    return result
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce("get_samples", pecan.request)
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    kwargs = v2_utils.query_to_kwargs(q or [],
                                      storage.SampleFilter.__init__)
    kwargs["meter"] = self.meter_name  # restrict to this meter
    sample_filter = storage.SampleFilter(**kwargs)
    return [OldSample.from_db_model(row)
            for row in pecan.request.storage_conn.get_samples(
                sample_filter, limit=limit)]
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce("get_samples", pecan.request)
    q = q or []
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Materialize as a list: on Python 3 map() returns a one-shot lazy
    # iterator, unlike the lists returned by the sibling endpoints.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f, limit=limit)]
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce('get_samples', pecan.request)
    q = q or []
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Materialize as a list: on Python 3 map() returns a one-shot lazy
    # iterator, unlike the lists returned by the sibling endpoints.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f, limit=limit)]
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce('get_samples', pecan.request)
    enforced_limit = v2_utils.enforce_limit(limit)
    filter_kwargs = v2_utils.query_to_kwargs(q or [],
                                             storage.SampleFilter.__init__)
    # Restrict results to this controller's meter.
    filter_kwargs['meter'] = self.meter_name
    sample_filter = storage.SampleFilter(**filter_kwargs)
    return [OldSample.from_db_model(e)
            for e in pecan.request.storage_conn.get_samples(
                sample_filter, limit=enforced_limit)]
def get_one(self, resource_id):
    """Retrieve details about one resource.

    :param resource_id: The UUID of the resource.
    """
    rbac.enforce("get_resource", pecan.request)
    # In case we have special character in resource id, for example, swift
    # can generate samples with resource id like
    # 29f809d9-88bb-4c40-b1ba-a77a1fcf8ceb/glance
    resource_id = urllib.parse.unquote(resource_id)
    project = rbac.get_limited_to_project(pecan.request.headers)
    found = list(pecan.request.storage_conn.get_resources(
        resource=resource_id, project=project))
    if not found:
        raise base.EntityNotFound(_("Resource"), resource_id)
    return Resource.from_db_and_links(found[0],
                                      self._resource_links(resource_id))
def get_all(self, q=None):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    """
    rbac.enforce('get_meters', pecan.request)
    storage_conn = pecan.request.storage_conn
    # Timestamp field is not supported for Meter queries
    query_kwargs = v2_utils.query_to_kwargs(q or [],
                                            storage_conn.get_meters,
                                            allow_timestamps=False)
    return [Meter.from_db_model(m)
            for m in storage_conn.get_meters(**query_kwargs)]
def get_one(self, resource_id):
    """Retrieve details about one resource.

    :param resource_id: The UUID of the resource.
    """
    rbac.enforce('get_resource', pecan.request)
    # In case we have special character in resource id, for example, swift
    # can generate samples with resource id like
    # 29f809d9-88bb-4c40-b1ba-a77a1fcf8ceb/glance
    resource_id = urllib.parse.unquote(resource_id)
    scoped_project = rbac.get_limited_to_project(pecan.request.headers)
    matches = list(pecan.request.storage_conn.get_resources(
        resource=resource_id, project=scoped_project))
    if not matches:
        raise base.EntityNotFound(_('Resource'), resource_id)
    links = self._resource_links(resource_id)
    return Resource.from_db_and_links(matches[0], links)
def get_all(self, q=None, meter_links=1):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param meter_links: option to include related meter links
    """
    rbac.enforce('get_resources', pecan.request)
    storage_conn = pecan.request.storage_conn
    query_kwargs = utils.query_to_kwargs(q or [],
                                         storage_conn.get_resources)
    out = []
    for r in storage_conn.get_resources(**query_kwargs):
        out.append(Resource.from_db_and_links(
            r, self._resource_links(r.resource_id, meter_links)))
    return out
def get_all(self, q=None, limit=None):
    """Return all events matching the query filters.

    :param q: Filter arguments for which Events to return
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce("events:index", pecan.request)
    limit = v2_utils.enforce_limit(limit)
    event_filter = _event_query_to_event_filter(q or [])
    db_events = pecan.request.event_storage_conn.get_events(event_filter,
                                                            limit)
    return [Event(message_id=ev.message_id,
                  event_type=ev.event_type,
                  generated=ev.generated,
                  traits=ev.traits,
                  raw=ev.raw)
            for ev in db_events]
def get_all(self, q=None, limit=None):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    :param limit: limit of the number of meter types to return.
    """
    rbac.enforce('get_meters', pecan.request)
    # Default is None instead of the original mutable default q=[]:
    # mutable defaults are shared across calls (classic Python pitfall).
    # Behavior is unchanged because of the q or [] normalization below.
    q = q or []
    # Timestamp field is not supported for Meter queries
    lim = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(
        q, pecan.request.storage_conn.get_meter_types,
        ['limit'], allow_timestamps=False)
    return [MeterType.from_db_model(m)
            for m in pecan.request.storage_conn.get_meter_types(limit=lim,
                                                                **kwargs)]
def get_all(self, q=None, limit=None, meter_links=1):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param limit: Maximum number of resources to return.
    :param meter_links: option to include related meter links
    """
    rbac.enforce("get_resources", pecan.request)
    limit = utils.enforce_limit(limit)
    kwargs = utils.query_to_kwargs(q or [],
                                   pecan.request.storage_conn.get_resources)
    # 'limit' must be given as a request parameter, not a query field.
    if "limit" in kwargs:
        raise base.ClientSideError(_("Limit is not a valid field for queries, "
                                     "use 'limit' parameter."))
    out = []
    for r in pecan.request.storage_conn.get_resources(limit=limit, **kwargs):
        out.append(Resource.from_db_and_links(
            r, self._resource_links(r.resource_id, meter_links)))
    return out
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce('get_samples', pecan.request)
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    sf_kwargs = v2_utils.query_to_kwargs(q or [],
                                         storage.SampleFilter.__init__)
    # Restrict results to this controller's meter.
    sf_kwargs['meter'] = self.meter_name
    sample_filter = storage.SampleFilter(**sf_kwargs)
    rows = pecan.request.storage_conn.get_samples(sample_filter,
                                                  limit=limit)
    return [OldSample.from_db_model(row) for row in rows]
def history(self, q=None):
    """Assembles the alarm history requested.

    :param q: Filter rules for the changes to be described.
    """
    rbac.enforce('alarm_history', pecan.request)
    # allow history to be returned for deleted alarms, but scope changes
    # returned to those carried out on behalf of the auth'd tenant, to
    # avoid inappropriate cross-tenant visibility of alarm history
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    storage_conn = pecan.request.alarm_storage_conn
    query_kwargs = v2_utils.query_to_kwargs(
        q or [], storage_conn.get_alarm_changes,
        ['on_behalf_of', 'alarm_id'])
    changes = storage_conn.get_alarm_changes(self._id, auth_project,
                                             **query_kwargs)
    return [AlarmChange.from_db_model(ac) for ac in changes]
def get_all(self, q=None, limit=None, unique=''):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    :param limit: Maximum number of meters to return.
    :param unique: flag to indicate unique meters to be returned.
    """
    rbac.enforce('get_meters', pecan.request)
    conn = pecan.request.storage_conn
    # Timestamp field is not supported for Meter queries
    limit = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(q or [], conn.get_meters,
                                      ['limit'], allow_timestamps=False)
    want_unique = strutils.bool_from_string(unique)
    return [Meter.from_db_model(m)
            for m in conn.get_meters(limit=limit,
                                     unique=want_unique,
                                     **kwargs)]
def put_state(self, state):
    """Set the state of this alarm.

    :param state: an alarm state within the request body.
    """
    rbac.enforce("change_alarm_state", pecan.request)
    # note(sileht): body are not validated by wsme
    # Workaround for https://bugs.launchpad.net/wsme/+bug/1227229
    if state not in state_kind:
        raise base.ClientSideError(_("state invalid"))
    now = timeutils.utcnow()
    target = self._alarm()
    target.state = state
    target.state_timestamp = now
    updated = self.conn.update_alarm(target)
    # Record the transition in the alarm's change history.
    self._record_change({"state": updated.state},
                        now,
                        on_behalf_of=updated.project_id,
                        type=alarm_models.AlarmChange.STATE_TRANSITION)
    return updated.state
def get_all(self, q=None, limit=None, unique=""):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    :param limit: Maximum number of meters to return.
    :param unique: flag to indicate unique meters to be returned.
    """
    rbac.enforce("get_meters", pecan.request)
    storage_conn = pecan.request.storage_conn
    # Timestamp field is not supported for Meter queries
    limit = v2_utils.enforce_limit(limit)
    query_kwargs = v2_utils.query_to_kwargs(q or [],
                                            storage_conn.get_meters,
                                            ["limit"],
                                            allow_timestamps=False)
    db_meters = storage_conn.get_meters(
        limit=limit,
        unique=strutils.bool_from_string(unique),
        **query_kwargs)
    return [Meter.from_db_model(m) for m in db_meters]
def put_state(self, state):
    """Set the state of this alarm.

    :param state: an alarm state within the request body.
    """
    rbac.enforce('change_alarm_state', pecan.request)
    # note(sileht): body are not validated by wsme
    # Workaround for https://bugs.launchpad.net/wsme/+bug/1227229
    if state not in state_kind:
        raise base.ClientSideError(_("state invalid"))
    now = timeutils.utcnow()
    db_alarm = self._alarm()
    db_alarm.state = state
    db_alarm.state_timestamp = now
    db_alarm = self.conn.update_alarm(db_alarm)
    # Persist the transition in the alarm's change history.
    self._record_change({'state': db_alarm.state},
                        now,
                        on_behalf_of=db_alarm.project_id,
                        type=alarm_models.AlarmChange.STATE_TRANSITION)
    return db_alarm.state
def get_all(self, q=None, limit=None):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    :param limit: Maximum number of meters to return.
    """
    rbac.enforce('get_meters', pecan.request)
    limit = v2_utils.enforce_limit(limit)
    # Timestamp field is not supported for Meter queries
    kwargs = v2_utils.query_to_kwargs(
        q or [], pecan.request.storage_conn.get_meters,
        allow_timestamps=False)
    # 'limit' must be given as a request parameter, not a query field.
    if 'limit' in kwargs:
        raise base.ClientSideError(_(
            "Limit is not a valid field for queries, "
            "use 'limit' parameter."))
    return [Meter.from_db_model(m)
            for m in pecan.request.storage_conn.get_meters(limit=limit,
                                                           **kwargs)]
def get_all(self, q=None, limit=None, meter_links=0):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param limit: Maximum number of resources to return.
    :param meter_links: option to include related meter links.
    """
    rbac.enforce('get_resources', pecan.request)
    q = q or []
    r_ids = []
    if len(q) == 1:
        # Improve query time for meter-based stats reports from
        # Horizon. Get resource info for specified resource ids in one
        # call as opposed to one by one.
        # q is a list of Query objects. Convert the first and
        # only item to dictionary to retrieve the list of resource ids.
        d = q[0].as_dict()
        if d['field'] == 'resource_ids':
            # The value is a string-encoded Python list literal.
            r_ids = ast.literal_eval(d['value'])
    if r_ids:
        # Batched fast path: fetch all requested resources in one call.
        # NOTE(review): 'limit' is intentionally not applied here.
        resources = [
            Resource.from_db_and_links(
                r, self._resource_links(r.resource_id, meter_links))
            for r in pecan.request.storage_conn.get_resources_batch(r_ids)
        ]
        return resources
    else:
        # Generic path: translate the query into storage kwargs,
        # excluding 'limit' which is passed explicitly below.
        limit = utils.enforce_limit(limit)
        kwargs = utils.query_to_kwargs(
            q, pecan.request.storage_conn.get_resources, ['limit'])
        resources = [
            Resource.from_db_and_links(
                r, self._resource_links(r.resource_id, meter_links))
            for r in pecan.request.storage_conn.get_resources(limit=limit,
                                                              **kwargs)
        ]
        return resources
def post(self, body):
    """Define query for retrieving Sample data.

    :param body: Query rules for the samples to be returned.
    """
    rbac.enforce("query_sample", pecan.request)
    # Map the public query field names onto the Sample storage model.
    mapping = {
        "resource": "resource_id",
        "meter": "counter_name",
        "type": "counter_type",
        "unit": "counter_unit",
        "volume": "counter_volume",
    }
    validated = ValidatedComplexQuery(body, storage.models.Sample,
                                      mapping, metadata_allowed=True)
    validated.validate(visibility_field="project_id")
    conn = pecan.request.storage_conn
    hits = conn.query_samples(validated.filter_expr, validated.orderby,
                              validated.limit)
    return [samples.Sample.from_db_model(s) for s in hits]
def post(self, direct='', samples=None):
    """Post a list of new Samples to Telemetry.

    Validates each sample against this controller's meter, fills in
    defaults from the request headers, then either records the samples
    directly in storage or publishes them via the notifier.

    :param direct: a flag indicates whether the samples will be posted
                   directly to storage or not.
    :param samples: a list of samples within the request body.
    :raises base.ClientSideError: if the request body has no samples.
    :raises wsme.exc.InvalidInput: on any per-sample validation failure.
    :returns: the input samples, with message_id filled in on each.
    """
    rbac.enforce('create_samples', pecan.request)

    direct = strutils.bool_from_string(direct)
    if not samples:
        msg = _('Samples should be included in request body')
        raise base.ClientSideError(msg)

    now = timeutils.utcnow()
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    def_source = pecan.request.cfg.sample_source
    def_project_id = pecan.request.headers.get('X-Project-Id')
    def_user_id = pecan.request.headers.get('X-User-Id')

    published_samples = []
    for s in samples:
        if self.meter_name != s.counter_name:
            raise wsme.exc.InvalidInput('counter_name', s.counter_name,
                                        'should be %s' % self.meter_name)

        if s.message_id:
            raise wsme.exc.InvalidInput('message_id', s.message_id,
                                        'The message_id must not be set')

        if s.counter_type not in sample.TYPES:
            raise wsme.exc.InvalidInput('counter_type', s.counter_type,
                                        'The counter type must be: ' +
                                        ', '.join(sample.TYPES))

        # Fill in defaults for anything the caller left unset.
        s.user_id = (s.user_id or def_user_id)
        s.project_id = (s.project_id or def_project_id)
        s.source = '%s:%s' % (s.project_id, (s.source or def_source))
        s.timestamp = (s.timestamp or now)

        if auth_project and auth_project != s.project_id:
            # non admin user trying to cross post to another project_id
            auth_msg = 'can not post samples to other projects'
            raise wsme.exc.InvalidInput('project_id', s.project_id,
                                        auth_msg)

        published_sample = sample.Sample(
            name=s.counter_name,
            type=s.counter_type,
            unit=s.counter_unit,
            volume=s.counter_volume,
            user_id=s.user_id,
            project_id=s.project_id,
            resource_id=s.resource_id,
            timestamp=s.timestamp.isoformat(),
            resource_metadata=utils.restore_nesting(s.resource_metadata,
                                                    separator='.'),
            source=s.source)
        s.message_id = published_sample.id

        # Read the secret from the per-request configuration
        # (pecan.request.cfg) instead of the global cfg.CONF, matching
        # how sample_source is read above and how the sibling handler in
        # this file obtains the secret.
        sample_dict = publisher_utils.meter_message_from_counter(
            published_sample, pecan.request.cfg.publisher.telemetry_secret)

        if direct:
            ts = timeutils.parse_isotime(sample_dict['timestamp'])
            sample_dict['timestamp'] = timeutils.normalize_time(ts)
            pecan.request.storage_conn.record_metering_data(sample_dict)
        else:
            published_samples.append(sample_dict)

    if not direct:
        pecan.request.notifier.sample(
            {'user': def_user_id,
             'tenant': def_project_id,
             'is_admin': True},
            'telemetry.api',
            {'samples': published_samples})

    return samples
def post(self, direct='', samples=None):
    """Post a list of new Samples to Telemetry.

    Validates each sample against this controller's meter, fills in
    defaults from the request headers, then either records the samples
    directly in storage or publishes them on the notifier bus.

    :param direct: a flag indicates whether the samples will be posted
                   directly to storage or not.
    :param samples: a list of samples within the request body.
    :raises base.ClientSideError: if the request body has no samples.
    :raises wsme.exc.InvalidInput: on any per-sample validation failure.
    :returns: the input samples, with message_id filled in on each.
    """
    rbac.enforce('create_samples', pecan.request)

    direct = strutils.bool_from_string(direct)
    if not samples:
        msg = _('Samples should be included in request body')
        raise base.ClientSideError(msg)

    now = timeutils.utcnow()
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    def_source = pecan.request.cfg.sample_source
    def_project_id = pecan.request.headers.get('X-Project-Id')
    def_user_id = pecan.request.headers.get('X-User-Id')

    published_samples = []
    for s in samples:
        if self.meter_name != s.counter_name:
            raise wsme.exc.InvalidInput('counter_name', s.counter_name,
                                        'should be %s' % self.meter_name)

        if s.message_id:
            raise wsme.exc.InvalidInput('message_id', s.message_id,
                                        'The message_id must not be set')

        if s.counter_type not in sample.TYPES:
            raise wsme.exc.InvalidInput('counter_type', s.counter_type,
                                        'The counter type must be: ' +
                                        ', '.join(sample.TYPES))

        # Fill in defaults for anything the caller left unset.
        s.user_id = (s.user_id or def_user_id)
        s.project_id = (s.project_id or def_project_id)
        s.source = '%s:%s' % (s.project_id, (s.source or def_source))
        s.timestamp = (s.timestamp or now)

        if auth_project and auth_project != s.project_id:
            # non admin user trying to cross post to another project_id
            auth_msg = 'can not post samples to other projects'
            raise wsme.exc.InvalidInput('project_id', s.project_id,
                                        auth_msg)

        published_sample = sample.Sample(
            name=s.counter_name,
            type=s.counter_type,
            unit=s.counter_unit,
            volume=s.counter_volume,
            user_id=s.user_id,
            project_id=s.project_id,
            resource_id=s.resource_id,
            timestamp=s.timestamp.isoformat(),
            resource_metadata=utils.restore_nesting(s.resource_metadata,
                                                    separator='.'),
            source=s.source)
        s.message_id = published_sample.id

        # Read the secret from the per-request configuration
        # (pecan.request.cfg) instead of the global cfg.CONF, matching
        # how sample_source is read above and how the sibling handler in
        # this file obtains the secret.
        sample_dict = publisher_utils.meter_message_from_counter(
            published_sample, pecan.request.cfg.publisher.telemetry_secret)

        if direct:
            ts = timeutils.parse_isotime(sample_dict['timestamp'])
            sample_dict['timestamp'] = timeutils.normalize_time(ts)
            pecan.request.storage_conn.record_metering_data(sample_dict)
        else:
            published_samples.append(sample_dict)

    if not direct:
        ctxt = context.RequestContext(user=def_user_id,
                                      tenant=def_project_id,
                                      is_admin=True)
        notifier = pecan.request.notifier
        notifier.info(ctxt.to_dict(), 'telemetry.api', published_samples)

    return samples
def post(self, direct="", samples=None):
    """Post a list of new Samples to Telemetry.

    :param direct: a flag indicates whether the samples will be posted
                   directly to storage or not.
    :param samples: a list of samples within the request body.
    """
    rbac.enforce('create_samples', pecan.request)

    direct = strutils.bool_from_string(direct)
    if not samples:
        raise base.ClientSideError(
            _('Samples should be included in request body'))

    now = timeutils.utcnow()
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    def_source = pecan.request.cfg.sample_source
    def_project_id = pecan.request.headers.get('X-Project-Id')
    def_user_id = pecan.request.headers.get('X-User-Id')

    to_publish = []
    for smp in samples:
        # Every sample must target this controller's meter.
        if self.meter_name != smp.counter_name:
            raise wsme.exc.InvalidInput('counter_name', smp.counter_name,
                                        'should be %s' % self.meter_name)
        if smp.message_id:
            raise wsme.exc.InvalidInput('message_id', smp.message_id,
                                        'The message_id must not be set')
        if smp.counter_type not in sample.TYPES:
            raise wsme.exc.InvalidInput(
                'counter_type', smp.counter_type,
                'The counter type must be: ' + ', '.join(sample.TYPES))

        # Fall back to request-derived defaults for unset fields.
        smp.user_id = smp.user_id or def_user_id
        smp.project_id = smp.project_id or def_project_id
        smp.source = '%s:%s' % (smp.project_id, smp.source or def_source)
        smp.timestamp = smp.timestamp or now

        if auth_project and auth_project != smp.project_id:
            # non admin user trying to cross post to another project_id
            raise wsme.exc.InvalidInput(
                'project_id', smp.project_id,
                'can not post samples to other projects')

        published = sample.Sample(
            name=smp.counter_name,
            type=smp.counter_type,
            unit=smp.counter_unit,
            volume=smp.counter_volume,
            user_id=smp.user_id,
            project_id=smp.project_id,
            resource_id=smp.resource_id,
            timestamp=smp.timestamp.isoformat(),
            resource_metadata=utils.restore_nesting(smp.resource_metadata,
                                                    separator='.'),
            source=smp.source)
        smp.message_id = published.id

        sample_dict = publisher_utils.meter_message_from_counter(
            published, pecan.request.cfg.publisher.telemetry_secret)

        if direct:
            ts = timeutils.parse_isotime(sample_dict['timestamp'])
            sample_dict['timestamp'] = timeutils.normalize_time(ts)
            pecan.request.storage_conn.record_metering_data(sample_dict)
        else:
            to_publish.append(sample_dict)

    if not direct:
        pecan.request.notifier.sample(
            {'user': def_user_id,
             'tenant': def_project_id,
             'is_admin': True},
            'telemetry.api',
            {'samples': to_publish})

    return samples
def post(self, samples):
    """Post a list of new Samples to Telemetry.

    :param samples: a list of samples within the request body.
    """
    rbac.enforce('create_samples', pecan.request)

    now = timeutils.utcnow()
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    def_source = pecan.request.cfg.sample_source
    def_project_id = pecan.request.headers.get('X-Project-Id')
    def_user_id = pecan.request.headers.get('X-User-Id')

    outgoing = []
    for item in samples:
        # Every sample must target this controller's meter.
        if self.meter_name != item.counter_name:
            raise wsme.exc.InvalidInput('counter_name', item.counter_name,
                                        'should be %s' % self.meter_name)
        if item.message_id:
            raise wsme.exc.InvalidInput('message_id', item.message_id,
                                        'The message_id must not be set')
        if item.counter_type not in sample.TYPES:
            raise wsme.exc.InvalidInput(
                'counter_type', item.counter_type,
                'The counter type must be: ' + ', '.join(sample.TYPES))

        # Fall back to request-derived defaults for unset fields.
        item.user_id = item.user_id or def_user_id
        item.project_id = item.project_id or def_project_id
        item.source = '%s:%s' % (item.project_id,
                                 item.source or def_source)
        item.timestamp = item.timestamp or now

        if auth_project and auth_project != item.project_id:
            # non admin user trying to cross post to another project_id
            raise wsme.exc.InvalidInput(
                'project_id', item.project_id,
                'can not post samples to other projects')

        new_sample = sample.Sample(
            name=item.counter_name,
            type=item.counter_type,
            unit=item.counter_unit,
            volume=item.counter_volume,
            user_id=item.user_id,
            project_id=item.project_id,
            resource_id=item.resource_id,
            timestamp=item.timestamp.isoformat(),
            resource_metadata=utils.restore_nesting(
                item.resource_metadata, separator='.'),
            source=item.source)
        outgoing.append(new_sample)
        item.message_id = new_sample.id

    with pecan.request.pipeline_manager.publisher(
            context.get_admin_context()) as publish:
        publish(outgoing)

    return samples
def post(self, samples):
    """Post a list of new Samples to Telemetry.

    :param samples: a list of samples within the request body.
    """
    rbac.enforce('create_samples', pecan.request)

    now = timeutils.utcnow()
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    def_source = pecan.request.cfg.sample_source
    def_project_id = pecan.request.headers.get('X-Project-Id')
    def_user_id = pecan.request.headers.get('X-User-Id')

    outgoing = []
    for item in samples:
        # Reject meters that no configured pipeline would handle.
        pipelines = pecan.request.pipeline_manager.pipelines
        if not any(p.support_meter(item.counter_name) for p in pipelines):
            message = _("The metric %s is not supported by metering "
                        "pipeline configuration.") % item.counter_name
            raise base.ClientSideError(message, status_code=409)

        # Every sample must target this controller's meter.
        if self.meter_name != item.counter_name:
            raise wsme.exc.InvalidInput('counter_name', item.counter_name,
                                        'should be %s' % self.meter_name)
        if item.message_id:
            raise wsme.exc.InvalidInput('message_id', item.message_id,
                                        'The message_id must not be set')
        if item.counter_type not in sample.TYPES:
            raise wsme.exc.InvalidInput(
                'counter_type', item.counter_type,
                'The counter type must be: ' + ', '.join(sample.TYPES))

        # Fall back to request-derived defaults for unset fields.
        item.user_id = item.user_id or def_user_id
        item.project_id = item.project_id or def_project_id
        item.source = '%s:%s' % (item.project_id,
                                 item.source or def_source)
        item.timestamp = item.timestamp or now

        if auth_project and auth_project != item.project_id:
            # non admin user trying to cross post to another project_id
            raise wsme.exc.InvalidInput(
                'project_id', item.project_id,
                'can not post samples to other projects')

        new_sample = sample.Sample(
            name=item.counter_name,
            type=item.counter_type,
            unit=item.counter_unit,
            volume=item.counter_volume,
            user_id=item.user_id,
            project_id=item.project_id,
            resource_id=item.resource_id,
            timestamp=item.timestamp.isoformat(),
            resource_metadata=utils.restore_nesting(
                item.resource_metadata, separator='.'),
            source=item.source)
        outgoing.append(new_sample)
        item.message_id = new_sample.id

    with pecan.request.pipeline_manager.publisher(
            context.get_admin_context()) as publish:
        publish(outgoing)

    return samples
def get(self):
    """Return this alarm."""
    rbac.enforce('get_alarm', pecan.request)
    db_alarm = self._alarm()
    return Alarm.from_db_model(db_alarm)