def test_uniq(self):
    """utils.uniq keeps one driver per distinct combination of fields.

    Widening the key set can only increase the number of survivors:
    one key field collapses all three drivers, two keeps two, and
    three (all fields) keeps every driver.
    """
    class DriverA(object):
        source = 'class_A'
        func = 'func_A'
        param = 'param_A'

    class DriverB(object):
        source = 'class_A'
        func = 'func_A'
        param = 'param_B'

    class DriverC(object):
        source = 'class_A'
        func = 'func_C'
        param = 'param_C'

    drivers = [DriverA(), DriverB(), DriverC()]

    cases = [(['source'], 1),
             (['source', 'func'], 2),
             (['source', 'func', 'param'], 3)]
    for key_fields, expected_count in cases:
        deduped = utils.uniq(drivers, key_fields)
        self.assertEqual(len(deduped), expected_count)
def get_all(self, q=None, meter=None, groupby=None, period=None,
            aggregate=None):
    """Retrieve all statistics for all meters.

    :param q: Filter rules for the statistics to be returned.
    :param meter: Meter names to compute statistics for.
    :param groupby: Fields for group-by aggregation.
    :param period: Returned result will be an array of statistics
                   for a period long of that number of seconds.
    :param aggregate: The selectable aggregation functions to be applied.
    """
    rbac.enforce('compute_statistics', pecan.request)

    q = q or []
    meter = meter or []
    groupby = groupby or []
    aggregate = aggregate or []

    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))

    g = meters._validate_groupby_fields(groupby)

    # TODO: break out the meter names and invoke multiple calls
    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)

    # Drop duplicate aggregate specs (same func/param pair).
    aggregate = utils.uniq(aggregate, ['func', 'param'])

    # Find the original timestamp in the query to use for clamping
    # the duration returned in the statistics.
    start = end = None
    for i in q:
        if i.field == 'timestamp' and i.op in ('lt', 'le'):
            end = timeutils.parse_isotime(i.value).replace(tzinfo=None)
        elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
            start = timeutils.parse_isotime(i.value).replace(tzinfo=None)

    ret = []

    kwargs['meter'] = meter
    f = storage.SampleFilter(**kwargs)
    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            f, period, g, aggregate)
        dbStats = [ScopedStatistics(start_timestamp=start,
                                    end_timestamp=end,
                                    **c.as_dict())
                   for c in computed]
        ret += dbStats
    except OverflowError:
        # Best-effort: log and return whatever was computed so far.
        # Lazy %-style args so formatting only happens when emitted.
        LOG.exception("Problem processing meters %s", meter)
    return ret
def statistics(self, q=None, groupby=None, period=None, aggregate=None):
    """Computes the statistics of the samples in the time range given.

    :param q: Filter rules for the data to be returned.
    :param groupby: Fields for group-by aggregation.
    :param period: Returned result will be an array of statistics
                   for a period long of that number of seconds.
    :param aggregate: The selectable aggregation functions to be applied.
    """
    rbac.enforce('compute_statistics', pecan.request)

    q = q or []
    groupby = groupby or []
    aggregate = aggregate or []

    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))

    kwargs = v2_utils.query_to_kwargs(q, storage.SampleFilter.__init__)
    kwargs['meter'] = self.meter_name
    sample_filter = storage.SampleFilter(**kwargs)
    group_fields = _validate_groupby_fields(groupby)
    # Collapse duplicate aggregate specs (same func/param pair).
    aggregate = utils.uniq(aggregate, ['func', 'param'])

    # Recover any explicit time bounds from the query so the duration
    # reported in the statistics can be clamped to them.
    start = end = None
    for constraint in q:
        if constraint.field == 'timestamp' and constraint.op in ('lt',
                                                                 'le'):
            end = timeutils.parse_isotime(constraint.value).replace(
                tzinfo=None)
        elif constraint.field == 'timestamp' and constraint.op in ('gt',
                                                                   'ge'):
            start = timeutils.parse_isotime(constraint.value).replace(
                tzinfo=None)

    try:
        computed = pecan.request.storage_conn.get_meter_statistics(
            sample_filter, period, group_fields, aggregate)
        LOG.debug(_('computed value coming from %r'),
                  pecan.request.storage_conn)
        return [Statistics(start_timestamp=start,
                           end_timestamp=end,
                           **c.as_dict())
                for c in computed]
    except OverflowError as e:
        params = dict(period=period, err=e)
        raise base.ClientSideError(
            _("Invalid period %(period)s: %(err)s") % params)
def statistics(self, q=None, groupby=None, period=None, aggregate=None):
    """Computes the statistics of the samples in the time range given.

    :param q: Filter rules for the data to be returned.
    :param groupby: Fields for group-by aggregation.
    :param period: Returned result will be an array of statistics
                   for a period long of that number of seconds.
    :param aggregate: The selectable aggregation functions to be applied.
    """
    rbac.enforce('compute_statistics', pecan.request)

    q = q or []
    groupby = groupby or []
    aggregate = aggregate or []
    if period and period < 0:
        raise base.ClientSideError(_("Period must be positive."))

    filter_kwargs = v2_utils.query_to_kwargs(
        q, storage.SampleFilter.__init__)
    filter_kwargs['meter'] = self.meter_name
    flt = storage.SampleFilter(**filter_kwargs)
    grp = _validate_groupby_fields(groupby)
    # De-duplicate aggregate specs sharing the same func/param pair.
    aggregate = utils.uniq(aggregate, ['func', 'param'])

    # Pull any explicit timestamp bounds out of the query; they clamp
    # the duration returned in the statistics.
    start = end = None
    upper_ops = ('lt', 'le')
    lower_ops = ('gt', 'ge')
    for item in q:
        if item.field != 'timestamp':
            continue
        if item.op in upper_ops:
            end = timeutils.parse_isotime(item.value).replace(tzinfo=None)
        elif item.op in lower_ops:
            start = timeutils.parse_isotime(item.value).replace(
                tzinfo=None)

    try:
        stats = pecan.request.storage_conn.get_meter_statistics(
            flt, period, grp, aggregate)
        LOG.debug(_('computed value coming from %r'),
                  pecan.request.storage_conn)
        results = []
        for entry in stats:
            results.append(Statistics(start_timestamp=start,
                                      end_timestamp=end,
                                      **entry.as_dict()))
        return results
    except OverflowError as e:
        raise base.ClientSideError(
            _("Invalid period %(period)s: %(err)s")
            % dict(period=period, err=e))