def test_sample_filter_self_always_excluded(self):
    """The implicit 'self' parameter must never leak into the kwargs."""
    query_list = [v2_base.Query(field='user_id', op='eq', value='20')]
    with mock.patch('pecan.request') as request:
        request.headers.return_value = {'X-ProjectId': 'foobar'}
        translated = utils.query_to_kwargs(query_list,
                                           storage.SampleFilter.__init__)
        self.assertNotIn('self', translated)
def test_sample_filter_single(self):
    """A lone user_id query maps to exactly one 'user' kwarg."""
    query = [v2_base.Query(field='user_id', op='eq', value='uid')]
    translated = utils.query_to_kwargs(query, storage.SampleFilter.__init__)
    self.assertIn('user', translated)
    self.assertEqual(1, len(translated))
    self.assertEqual('uid', translated['user'])
def test_sample_filter_self_always_excluded(self):
    """The implicit 'self' parameter must never leak into the kwargs."""
    queries = [v2_base.Query(field='user_id', op='eq', value='20')]
    with mock.patch('pecan.request') as request:
        request.headers.return_value = {'X-ProjectId': 'foobar'}
        kwargs = utils.query_to_kwargs(queries,
                                       storage.SampleFilter.__init__)
        # assertNotIn gives a clearer failure message than
        # assertFalse('self' in kwargs) and matches the sibling test.
        self.assertNotIn('self', kwargs)
def test_sample_filter_meta(self):
    """metadata.* and resource_metadata.* both land in the metaquery."""
    filters = [
        v2_base.Query(field="metadata.size", op="eq", value="20"),
        v2_base.Query(field="resource_metadata.id", op="eq",
                      value="meta_id"),
    ]
    kwargs = utils.query_to_kwargs(filters, storage.SampleFilter.__init__)
    self.assertEqual(1, len(kwargs))
    metaquery = kwargs["metaquery"]
    self.assertEqual(2, len(metaquery))
    self.assertEqual(20, metaquery["metadata.size"])
    self.assertEqual("meta_id", metaquery["metadata.id"])
def test_sample_filter_translation(self):
    """Long field names (user_id, ...) translate to their short forms."""
    fields = ["user_id", "project_id", "resource_id"]
    queries = []
    for field in fields:
        queries.append(v2_base.Query(field=field, op="eq",
                                     value="fake_%s" % field,
                                     type="string"))
    with mock.patch("pecan.request") as request:
        request.headers.return_value = {"X-ProjectId": "foobar"}
        kwargs = utils.query_to_kwargs(queries,
                                       storage.SampleFilter.__init__)
        for short in ("user", "project", "resource"):
            self.assertEqual("fake_%s_id" % short, kwargs.get(short))
def test_sample_filter_translation(self):
    """Long field names (user_id, ...) translate to their short forms."""
    long_fields = ['user_id', 'project_id', 'resource_id']
    query_list = [v2_base.Query(field=name, op='eq',
                                value='fake_%s' % name, type='string')
                  for name in long_fields]
    with mock.patch('pecan.request') as request:
        request.headers.return_value = {'X-ProjectId': 'foobar'}
        translated = utils.query_to_kwargs(query_list,
                                           storage.SampleFilter.__init__)
        for short_name in ['user', 'project', 'resource']:
            self.assertEqual('fake_%s_id' % short_name,
                             translated.get(short_name))
def test_sample_filter_meta(self):
    """Both metadata prefixes collapse into one metaquery dict."""
    metadata_queries = [
        v2_base.Query(field='metadata.size', op='eq', value='20'),
        v2_base.Query(field='resource_metadata.id', op='eq',
                      value='meta_id'),
    ]
    result = utils.query_to_kwargs(metadata_queries,
                                   storage.SampleFilter.__init__)
    self.assertEqual(1, len(result))
    self.assertEqual(2, len(result['metaquery']))
    self.assertEqual(20, result['metaquery']['metadata.size'])
    self.assertEqual('meta_id', result['metaquery']['metadata.id'])
def get_all(self, q=None):
    """Return all alarms, based on the query provided.

    :param q: Filter rules for the alarms to be returned.
    """
    rbac.enforce("get_alarms", pecan.request)
    conn = pecan.request.alarm_storage_conn
    # Timestamp is not a supported field for simple alarm queries.
    kwargs = v2_utils.query_to_kwargs(q or [], conn.get_alarms,
                                      allow_timestamps=False)
    return [Alarm.from_db_model(m) for m in conn.get_alarms(**kwargs)]
def test_sample_filter_meta(self):
    """Metadata queries are gathered under the single 'metaquery' key."""
    q = [
        v2_base.Query(field='metadata.size', op='eq', value='20'),
        v2_base.Query(field='resource_metadata.id', op='eq',
                      value='meta_id'),
    ]
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    self.assertEqual(1, len(kwargs))
    meta = kwargs['metaquery']
    self.assertEqual(2, len(meta))
    self.assertEqual(20, meta['metadata.size'])
    self.assertEqual('meta_id', meta['metadata.id'])
def test_sample_filter_timestamp(self):
    """lt/gt timestamp queries become start/end bounds plus their ops."""
    window_start = timeutils.utcnow()
    window_end = window_start + datetime.timedelta(minutes=5)
    filters = [
        v2_base.Query(field="timestamp", op="lt", value=str(window_end)),
        v2_base.Query(field="timestamp", op="gt", value=str(window_start)),
    ]
    kwargs = utils.query_to_kwargs(filters, storage.SampleFilter.__init__)
    self.assertEqual(4, len(kwargs))
    self.assertTimestampEqual(kwargs["start_timestamp"], window_start)
    self.assertTimestampEqual(kwargs["end_timestamp"], window_end)
    self.assertEqual("gt", kwargs["start_timestamp_op"])
    self.assertEqual("lt", kwargs["end_timestamp_op"])
def test_sample_filter_timestamp(self):
    """Timestamp bounds and their comparison ops are both carried over."""
    lower = timeutils.utcnow()
    upper = lower + datetime.timedelta(minutes=5)
    q = [v2_base.Query(field='timestamp', op='lt', value=str(upper)),
         v2_base.Query(field='timestamp', op='gt', value=str(lower))]
    result = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    self.assertEqual(4, len(result))
    self.assertTimestampEqual(result['start_timestamp'], lower)
    self.assertTimestampEqual(result['end_timestamp'], upper)
    self.assertEqual('gt', result['start_timestamp_op'])
    self.assertEqual('lt', result['end_timestamp_op'])
def test_sample_filter_translation(self):
    """Each *_id field is renamed to the short kwarg the filter expects."""
    queries = [v2_base.Query(field='%s_id' % name, op='eq',
                             value='fake_%s_id' % name, type='string')
               for name in ('user', 'project', 'resource')]
    with mock.patch('pecan.request') as request:
        request.headers.return_value = {'X-ProjectId': 'foobar'}
        kwargs = utils.query_to_kwargs(queries,
                                       storage.SampleFilter.__init__)
        for o in ['user', 'project', 'resource']:
            self.assertEqual('fake_%s_id' % o, kwargs.get(o))
def get_all(self, q=None):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    """
    rbac.enforce("get_meters", pecan.request)
    # The timestamp field is not supported for meter queries.
    kwargs = v2_utils.query_to_kwargs(
        q or [], pecan.request.storage_conn.get_meters,
        allow_timestamps=False)
    db_meters = pecan.request.storage_conn.get_meters(**kwargs)
    return [Meter.from_db_model(m) for m in db_meters]
def get_all(self, q=None, meter=None, groupby=None, period=None,
            aggregate=None):
    """Retrieve all statistics for all meters.

    :param q: Filter rules for the statistics to be returned.
    :param meter: Meter names to compute statistics for.
    :param groupby: Fields for group by aggregation.
    :param period: Length, in seconds, of each statistics bucket.
    :param aggregate: The selectable aggregation functions to be applied.
    """
    rbac.enforce('compute_statistics', pecan.request)
    q = q or []
    meter = meter or []
    groupby = groupby or []
    aggregate = aggregate or []
    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))
    g = meters._validate_groupby_fields(groupby)
    # TODO: break out the meter names and invoke multiple calls
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    aggregate = utils.uniq(aggregate, ['func', 'param'])
    # Find the original timestamp in the query to use for clamping
    # the duration returned in the statistics.
    start = end = None
    for i in q:
        if i.field == 'timestamp' and i.op in ('lt', 'le'):
            end = timeutils.parse_isotime(i.value).replace(tzinfo=None)
        elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
            start = timeutils.parse_isotime(i.value).replace(tzinfo=None)
    ret = []
    kwargs['meter'] = meter
    f = storage.SampleFilter(**kwargs)
    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            f, period, g, aggregate)
        db_stats = [ScopedStatistics(start_timestamp=start,
                                     end_timestamp=end,
                                     **c.as_dict())
                    for c in computed]
        ret += db_stats
    except OverflowError:
        # Lazy %-style logging args: formatting is deferred to the
        # logging framework instead of being done eagerly with "%".
        LOG.exception("Problem processing meters %s", meter)
    return ret
def statistics(self, q=None, groupby=None, period=None, aggregate=None):
    """Computes the statistics of the samples in the time range given.

    :param q: Filter rules for the data to be returned.
    :param groupby: Fields for group by aggregation
    :param period: Returned result will be an array of statistics for a
                   period long of that number of seconds.
    :param aggregate: The selectable aggregation functions to be applied.
    """
    rbac.enforce('compute_statistics', pecan.request)
    q = q or []
    groupby = groupby or []
    aggregate = aggregate or []
    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    kwargs['meter'] = self.meter_name
    sample_filter = storage.SampleFilter(**kwargs)
    groupby_fields = _validate_groupby_fields(groupby)
    aggregate = utils.uniq(aggregate, ['func', 'param'])
    # Find the original timestamp in the query to use for clamping
    # the duration returned in the statistics.
    start = end = None
    for query in q:
        if query.field != 'timestamp':
            continue
        if query.op in ('lt', 'le'):
            end = timeutils.parse_isotime(query.value).replace(tzinfo=None)
        elif query.op in ('gt', 'ge'):
            start = timeutils.parse_isotime(query.value).replace(
                tzinfo=None)
    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            sample_filter, period, groupby_fields, aggregate)
        LOG.debug(_('computed value coming from %r'),
                  pecan.request.storage_conn)
        return [Statistics(start_timestamp=start, end_timestamp=end,
                           **c.as_dict())
                for c in computed]
    except OverflowError as e:
        params = dict(period=period, err=e)
        raise base.ClientSideError(
            _("Invalid period %(period)s: %(err)s") % params)
def statistics(self, q=None, groupby=None, period=None, aggregate=None):
    """Computes the statistics of the samples in the time range given.

    :param q: Filter rules for the data to be returned.
    :param groupby: Fields for group by aggregation
    :param period: Returned result will be an array of statistics for a
                   period long of that number of seconds.
    :param aggregate: The selectable aggregation functions to be applied.
    """
    rbac.enforce('compute_statistics', pecan.request)
    q = q or []
    groupby = groupby or []
    aggregate = aggregate or []
    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))
    filter_kwargs = v2_utils.query_to_kwargs(q,
                                             storage.SampleFilter.__init__)
    filter_kwargs['meter'] = self.meter_name
    sfilter = storage.SampleFilter(**filter_kwargs)
    validated_groupby = _validate_groupby_fields(groupby)
    aggregate = utils.uniq(aggregate, ['func', 'param'])
    # Find the original timestamp in the query to use for clamping
    # the duration returned in the statistics.
    start = end = None
    for item in q:
        if item.field == 'timestamp' and item.op in ('lt', 'le'):
            end = timeutils.parse_isotime(item.value).replace(tzinfo=None)
        elif item.field == 'timestamp' and item.op in ('gt', 'ge'):
            start = timeutils.parse_isotime(item.value).replace(
                tzinfo=None)
    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            sfilter, period, validated_groupby, aggregate)
        LOG.debug(_('computed value coming from %r'),
                  pecan.request.storage_conn)
        stats = []
        for c in computed:
            stats.append(Statistics(start_timestamp=start,
                                    end_timestamp=end,
                                    **c.as_dict()))
        return stats
    except OverflowError as e:
        params = dict(period=period, err=e)
        raise base.ClientSideError(
            _("Invalid period %(period)s: %(err)s") % params)
def test_sample_filter_timestamp(self):
    """Two timestamp queries yield four kwargs: bounds plus operators."""
    begin = timeutils.utcnow()
    finish = begin + datetime.timedelta(minutes=5)
    queries = [
        v2_base.Query(field='timestamp', op='lt', value=str(finish)),
        v2_base.Query(field='timestamp', op='gt', value=str(begin)),
    ]
    kwargs = utils.query_to_kwargs(queries, storage.SampleFilter.__init__)
    self.assertEqual(4, len(kwargs))
    self.assertTimestampEqual(kwargs['start_timestamp'], begin)
    self.assertTimestampEqual(kwargs['end_timestamp'], finish)
    self.assertEqual('gt', kwargs['start_timestamp_op'])
    self.assertEqual('lt', kwargs['end_timestamp_op'])
def test_sample_filter_multi(self):
    """Every supported field is translated and carried through."""
    q = [v2_base.Query(field='user_id', op='eq', value='uid'),
         v2_base.Query(field='project_id', op='eq', value='pid'),
         v2_base.Query(field='resource_id', op='eq', value='rid'),
         v2_base.Query(field='source', op='eq', value='source_name'),
         v2_base.Query(field='meter', op='eq', value='meter_name')]
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    self.assertEqual(5, len(kwargs))
    expected = {'user': 'uid', 'project': 'pid', 'resource': 'rid',
                'source': 'source_name', 'meter': 'meter_name'}
    for key, value in expected.items():
        self.assertEqual(value, kwargs[key])
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce("get_samples", pecan.request)
    q = q or []
    limit = utils.enforce_limit(limit)
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Return a concrete list rather than a lazy ``map`` object: under
    # Python 3 ``map`` is a one-shot iterator, and sibling endpoints in
    # this file already return lists.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f,
                                                            limit=limit)]
def get_all(self, q=None):
    """Return all alarms, based on the query provided.

    :param q: Filter rules for the alarms to be returned.
    """
    rbac.enforce('get_alarms', pecan.request)
    q = q or []
    # Timestamp is not a supported field for simple alarm queries.
    kwargs = v2_utils.query_to_kwargs(
        q, pecan.request.alarm_storage_conn.get_alarms,
        allow_timestamps=False)
    matches = pecan.request.alarm_storage_conn.get_alarms(**kwargs)
    return [Alarm.from_db_model(m) for m in matches]
def test_sample_filter_multi(self):
    """All five supported fields survive the translation together."""
    query_list = [
        v2_base.Query(field="user_id", op="eq", value="uid"),
        v2_base.Query(field="project_id", op="eq", value="pid"),
        v2_base.Query(field="resource_id", op="eq", value="rid"),
        v2_base.Query(field="source", op="eq", value="source_name"),
        v2_base.Query(field="meter", op="eq", value="meter_name"),
    ]
    translated = utils.query_to_kwargs(query_list,
                                       storage.SampleFilter.__init__)
    self.assertEqual(5, len(translated))
    self.assertEqual("uid", translated["user"])
    self.assertEqual("pid", translated["project"])
    self.assertEqual("rid", translated["resource"])
    self.assertEqual("source_name", translated["source"])
    self.assertEqual("meter_name", translated["meter"])
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce("get_samples", pecan.request)
    limit = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(q or [],
                                      storage.SampleFilter.__init__)
    kwargs["meter"] = self.meter_name
    sample_filter = storage.SampleFilter(**kwargs)
    samples = pecan.request.storage_conn.get_samples(sample_filter,
                                                     limit=limit)
    return [OldSample.from_db_model(e) for e in samples]
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce('get_samples', pecan.request)
    q = q or []
    limit = utils.enforce_limit(limit)
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Materialize the result instead of returning a lazy ``map``
    # object (a one-shot iterator on Python 3); other sample
    # endpoints in this file return lists.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f,
                                                            limit=limit)]
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce("get_samples", pecan.request)
    q = q or []
    # Reject a negative limit up front with a client-side error.
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Materialize the result: a Python 3 ``map`` object is a one-shot
    # iterator and can serialize as empty if consumed more than once.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f,
                                                            limit=limit)]
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce("get_samples", pecan.request)
    q = q or []
    # A negative limit is a client error.
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    kwargs["meter"] = self.meter_name
    sample_filter = storage.SampleFilter(**kwargs)
    return [OldSample.from_db_model(row)
            for row in pecan.request.storage_conn.get_samples(
                sample_filter, limit=limit)]
def history(self, q=None):
    """Assembles the alarm history requested.

    :param q: Filter rules for the changes to be described.
    """
    rbac.enforce("alarm_history", pecan.request)
    # Allow history to be returned for deleted alarms, but scope changes
    # returned to those carried out on behalf of the auth'd tenant, to
    # avoid inappropriate cross-tenant visibility of alarm history.
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    conn = pecan.request.alarm_storage_conn
    kwargs = v2_utils.query_to_kwargs(q or [], conn.get_alarm_changes,
                                      ["on_behalf_of", "alarm_id"])
    changes = conn.get_alarm_changes(self._id, auth_project, **kwargs)
    return [AlarmChange.from_db_model(ac) for ac in changes]
def get_all(self, q=None, meter_links=1):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param meter_links: option to include related meter links
    """
    rbac.enforce("get_resources", pecan.request)
    kwargs = utils.query_to_kwargs(
        q or [], pecan.request.storage_conn.get_resources)
    db_resources = pecan.request.storage_conn.get_resources(**kwargs)
    return [
        Resource.from_db_and_links(
            r, self._resource_links(r.resource_id, meter_links))
        for r in db_resources
    ]
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce('get_samples', pecan.request)
    q = q or []
    limit = v2_utils.enforce_limit(limit)
    filter_kwargs = v2_utils.query_to_kwargs(q,
                                             storage.SampleFilter.__init__)
    filter_kwargs['meter'] = self.meter_name
    sfilter = storage.SampleFilter(**filter_kwargs)
    results = []
    for e in pecan.request.storage_conn.get_samples(sfilter, limit=limit):
        results.append(OldSample.from_db_model(e))
    return results
def get_all(self, q=None, limit=None):
    """Return all known samples, based on the data recorded so far.

    :param q: Filter rules for the samples to be returned.
    :param limit: Maximum number of samples to be returned.
    """
    rbac.enforce('get_samples', pecan.request)
    q = q or []
    # A negative limit is a client error.
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    f = storage.SampleFilter(**kwargs)
    # Return a concrete list rather than a lazy ``map`` object, which
    # on Python 3 is a one-shot iterator; sibling endpoints return
    # lists.
    return [Sample.from_db_model(s)
            for s in pecan.request.storage_conn.get_samples(f,
                                                            limit=limit)]
def get_all(self, q=None):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    """
    rbac.enforce('get_meters', pecan.request)
    q = q or []
    # Timestamp field is not supported for Meter queries.
    kwargs = v2_utils.query_to_kwargs(
        q, pecan.request.storage_conn.get_meters, allow_timestamps=False)
    found = pecan.request.storage_conn.get_meters(**kwargs)
    return [Meter.from_db_model(m) for m in found]
def get_all(self, q=None, limit=None):
    """Return all known meter types, based on the data recorded so far.

    :param q: Filter rules for the meter types to be returned.
    :param limit: limit of the number of meter types to return.
    """
    rbac.enforce('get_meters', pecan.request)
    # ``q`` previously defaulted to a mutable ``[]`` shared across
    # calls; ``None`` plus the ``or`` fallback below is equivalent
    # for callers and safe.
    q = q or []
    # Timestamp field is not supported for Meter queries
    lim = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(
        q, pecan.request.storage_conn.get_meter_types,
        ['limit'], allow_timestamps=False)
    return [MeterType.from_db_model(m)
            for m in pecan.request.storage_conn.get_meter_types(
                limit=lim, **kwargs)]
def get_all(self, q=None, meter_links=1):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param meter_links: option to include related meter links
    """
    rbac.enforce('get_resources', pecan.request)
    q = q or []
    kwargs = utils.query_to_kwargs(
        q, pecan.request.storage_conn.get_resources)
    results = []
    for r in pecan.request.storage_conn.get_resources(**kwargs):
        links = self._resource_links(r.resource_id, meter_links)
        results.append(Resource.from_db_and_links(r, links))
    return results
def get_all(self, q=None, limit=None, meter_links=1):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param limit: Maximum number of resources to return.
    :param meter_links: option to include related meter links
    """
    rbac.enforce("get_resources", pecan.request)
    q = q or []
    limit = utils.enforce_limit(limit)
    kwargs = utils.query_to_kwargs(q,
                                   pecan.request.storage_conn.get_resources)
    # 'limit' must come from the dedicated parameter, not the query.
    if "limit" in kwargs:
        raise base.ClientSideError(_("Limit is not a valid field for "
                                     "queries, use 'limit' parameter."))
    fetched = pecan.request.storage_conn.get_resources(limit=limit,
                                                       **kwargs)
    return [
        Resource.from_db_and_links(
            r, self._resource_links(r.resource_id, meter_links))
        for r in fetched
    ]
def get_all(self, q=None, limit=None):
    """Return samples for the meter.

    :param q: Filter rules for the data to be returned.
    :param limit: Maximum number of samples to return.
    """
    rbac.enforce('get_samples', pecan.request)
    q = q or []
    # A negative limit is a client error.
    if limit and limit < 0:
        raise base.ClientSideError(_("Limit must be positive"))
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    kwargs['meter'] = self.meter_name
    sample_filter = storage.SampleFilter(**kwargs)
    db_samples = pecan.request.storage_conn.get_samples(sample_filter,
                                                        limit=limit)
    return [OldSample.from_db_model(e) for e in db_samples]
def history(self, q=None):
    """Assembles the alarm history requested.

    :param q: Filter rules for the changes to be described.
    """
    rbac.enforce('alarm_history', pecan.request)
    q = q or []
    # Allow history to be returned for deleted alarms, but scope changes
    # returned to those carried out on behalf of the auth'd tenant, to
    # avoid inappropriate cross-tenant visibility of alarm history.
    auth_project = rbac.get_limited_to_project(pecan.request.headers)
    conn = pecan.request.alarm_storage_conn
    kwargs = v2_utils.query_to_kwargs(
        q, conn.get_alarm_changes, ['on_behalf_of', 'alarm_id'])
    history_records = conn.get_alarm_changes(self._id, auth_project,
                                             **kwargs)
    return [AlarmChange.from_db_model(ac) for ac in history_records]
def get_all(self, q=None, limit=None, unique=''):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    :param limit: Maximum number of meters to return.
    :param unique: flag to indicate unique meters to be returned.
    """
    rbac.enforce('get_meters', pecan.request)
    q = q or []
    # Timestamp field is not supported for Meter queries.
    limit = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(
        q, pecan.request.storage_conn.get_meters, ['limit'],
        allow_timestamps=False)
    unique_flag = strutils.bool_from_string(unique)
    db_meters = pecan.request.storage_conn.get_meters(
        limit=limit, unique=unique_flag, **kwargs)
    return [Meter.from_db_model(m) for m in db_meters]
def get_all(self, q=None, limit=None, unique=""):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    :param limit: Maximum number of meters to return.
    :param unique: flag to indicate unique meters to be returned.
    """
    rbac.enforce("get_meters", pecan.request)
    # Timestamp field is not supported for Meter queries.
    limit = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(q or [],
                                      pecan.request.storage_conn.get_meters,
                                      ["limit"],
                                      allow_timestamps=False)
    meters = pecan.request.storage_conn.get_meters(
        limit=limit,
        unique=strutils.bool_from_string(unique),
        **kwargs)
    return [Meter.from_db_model(m) for m in meters]
def get_all(self, q=None, limit=None):
    """Return all known meters, based on the data recorded so far.

    :param q: Filter rules for the meters to be returned.
    :param limit: Maximum number of meters to return.
    """
    rbac.enforce('get_meters', pecan.request)
    q = q or []
    # Timestamp field is not supported for Meter queries.
    limit = v2_utils.enforce_limit(limit)
    kwargs = v2_utils.query_to_kwargs(
        q, pecan.request.storage_conn.get_meters, allow_timestamps=False)
    # 'limit' must arrive via the dedicated parameter, not the query.
    if 'limit' in kwargs:
        raise base.ClientSideError(_(
            "Limit is not a valid field for queries, "
            "use 'limit' parameter."))
    found = pecan.request.storage_conn.get_meters(limit=limit, **kwargs)
    return [Meter.from_db_model(m) for m in found]
def get_all(self, q=None, limit=None, meter_links=0):
    """Retrieve definitions of all of the resources.

    :param q: Filter rules for the resources to be returned.
    :param limit: Maximum number of resources to return.
    :param meter_links: option to include related meter links.
    """
    rbac.enforce('get_resources', pecan.request)
    q = q or []
    r_ids = []
    if len(q) == 1:
        # Improve query time for meter-based stats reports from
        # Horizon. Get resource info for specified resource ids in one
        # call as opposed to one by one.
        # q is a list of Query objects. Convert the first and
        # only item to dictionary to retrieve the list of resource ids.
        d = q[0].as_dict()
        if d['field'] == 'resource_ids':
            # NOTE(review): literal_eval only parses Python literals
            # (safe, unlike eval), but the value comes from the client
            # and a malformed literal raises ValueError/SyntaxError
            # here — presumably surfaced as a server error; verify.
            r_ids = ast.literal_eval(d['value'])
    if r_ids:
        # Batched path: fetch all requested resources in one call.
        resources = [
            Resource.from_db_and_links(
                r, self._resource_links(r.resource_id, meter_links))
            for r in pecan.request.storage_conn.get_resources_batch(r_ids)
        ]
        return resources
    else:
        # Generic path: translate the query into storage kwargs,
        # with 'limit' excluded (it is passed separately below).
        limit = utils.enforce_limit(limit)
        kwargs = utils.query_to_kwargs(
            q, pecan.request.storage_conn.get_resources, ['limit'])
        resources = [
            Resource.from_db_and_links(
                r, self._resource_links(r.resource_id, meter_links))
            for r in pecan.request.storage_conn.get_resources(limit=limit,
                                                              **kwargs)
        ]
        return resources
def test_sample_filter_multi(self):
    """Multiple distinct fields all survive the kwargs translation."""
    filters = [v2_base.Query(field='user_id', op='eq', value='uid'),
               v2_base.Query(field='project_id', op='eq', value='pid'),
               v2_base.Query(field='resource_id', op='eq', value='rid'),
               v2_base.Query(field='source', op='eq', value='source_name'),
               v2_base.Query(field='meter', op='eq', value='meter_name')]
    result = utils.query_to_kwargs(filters, storage.SampleFilter.__init__)
    self.assertEqual(5, len(result))
    self.assertEqual('uid', result['user'])
    self.assertEqual('pid', result['project'])
    self.assertEqual('rid', result['resource'])
    self.assertEqual('source_name', result['source'])
    self.assertEqual('meter_name', result['meter'])
def vm(self, vm_uuid=None, startTime=None, endTime=None, timeType=None):
    """Return utilisation statistics for one virtual machine.

    Queries, per sampling period: memory total ("memory"), CPU
    utilisation ("cpu_util"), memory usage ("memory.usage"), disk
    read/write byte rates and network incoming/outgoing byte rates,
    and folds them into a single result object.

    :param vm_uuid: UUID of the virtual machine (required).
    :param startTime: start of the reporting window.
    :param endTime: end of the reporting window.
    :param timeType: granularity, one of {hour, day, month, year}.
    :return: a Capability object populated with the metric series.
    :raises wsme.exc.MissingArgument: if vm_uuid is not supplied.
    """
    if vm_uuid:
        # Validate/normalise the window and derive the sampling period
        # from the requested granularity.
        startTime, endTime, timeType = verify_params(
            startTime, endTime, timeType)
        period = get_period_by_timeType(timeType)
        q = get_q_from_condition(vm_uuid, startTime, endTime)
        kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
        # Meter names are queried one at a time; each populates a
        # series on the result object.
        dataType = ["cpu_util", "memory", "memory.usage",
                    "disk.read.bytes.rate", "disk.write.bytes.rate",
                    "network.incoming.bytes.rate",
                    "network.outgoing.bytes.rate"]
        result = Capability(startTime, endTime, timeType)
        for data in dataType:
            kwargs['meter'] = data
            f = storage.SampleFilter(**kwargs)
            try:
                computed = pecan.request.storage_conn.get_meter_statistics(
                    f, period)
            except Exception:
                # NOTE(review): broad catch silently skips a meter on
                # any storage error — consider logging the failure.
                continue
            if (len(computed) != 0):
                result = set_value_to_result(result, data, timeType,
                                             startTime, computed)
        # Derive memory utilisation (%) while guarding against a zero
        # memory total for a given bucket.
        result.memory_util = [
            (result.memory_usage[i] / result.memory_total[i]) * 100
            if result.memory_total[i] != 0 else 0
            for i in range(len(result.memory_total))
        ]
        # Scale byte rates down by 1024 — presumably bytes → KiB for
        # presentation; TODO confirm the expected unit.
        result.disk_read_rate = [item / 1024
                                 for item in result.disk_read_rate]
        result.disk_write_rate = [item / 1024
                                  for item in result.disk_write_rate]
        result.network_read_rate = [item / 1024
                                    for item in result.network_read_rate]
        result.network_write_rate = [item / 1024
                                     for item in result.network_write_rate]
    else:
        # vm_uuid is mandatory; reject the request as a missing
        # argument.
        raise wsme.exc.MissingArgument(argname="missingArg",
                                       msg='must pass vm_uuid')
    return result
def test_sample_filter_single(self):
    """A single user_id query yields exactly one translated kwarg."""
    query_list = [v2_base.Query(field="user_id", op="eq", value="uid")]
    result = utils.query_to_kwargs(query_list,
                                   storage.SampleFilter.__init__)
    self.assertIn("user", result)
    self.assertEqual(1, len(result))
    self.assertEqual("uid", result["user"])
def test_sample_filter_self_always_excluded(self):
    """The implicit 'self' parameter must never leak into the kwargs."""
    queries = [v2_base.Query(field="user_id", op="eq", value="20")]
    with mock.patch("pecan.request") as request:
        request.headers.return_value = {"X-ProjectId": "foobar"}
        kwargs = utils.query_to_kwargs(queries,
                                       storage.SampleFilter.__init__)
        # assertNotIn reports the offending container on failure,
        # unlike assertFalse("self" in kwargs).
        self.assertNotIn("self", kwargs)