def get_interval(sample_filter, period, groupby, aggregate):
     # Stub nested inside a test case in the original source: 'self', 'start'
     # and 'end' come from the enclosing test method's scope.
     self.assertIsNotNone(sample_filter.start_timestamp)
     self.assertIsNotNone(sample_filter.end_timestamp)
     if (sample_filter.start_timestamp > end
             or sample_filter.end_timestamp < start):
         return []
     duration_start = max(sample_filter.start_timestamp, start)
     duration_end = min(sample_filter.end_timestamp, end)
     duration = timeutils.delta_seconds(duration_start, duration_end)
     return [
         models.Statistics(
             unit='',
             min=0,
             max=0,
             avg=0,
             sum=0,
             count=0,
             period=None,
             period_start=None,
             period_end=None,
             duration=duration,
             duration_start=duration_start,
             duration_end=duration_end,
             groupby=None,
         )
     ]
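In the stub above, duration is computed with timeutils.delta_seconds while 'start' and 'end' come from the enclosing test. A minimal sketch, assuming oslo's timeutils semantics (delta_seconds(a, b) returns b - a as a float number of seconds), shows the same computation with plain datetime arithmetic:

import datetime

# Hypothetical window standing in for the enclosing test's 'start' and 'end'.
start = datetime.datetime(2015, 1, 1, 0, 0, 0)
end = datetime.datetime(2015, 1, 1, 0, 10, 0)

# Equivalent to timeutils.delta_seconds(start, end) under that assumption.
duration = (end - start).total_seconds()
assert duration == 600.0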
Example #2
    def get_meter_statistics(self, sample_filter, period=None):
        """Return an iterable of models.Statistics instance containing meter
        statistics described by the query parameters.

        The filter must have a meter value set.

        """
        q = make_query_from_filter(sample_filter)

        if period:
            map_stats = self.MAP_STATS_PERIOD % \
                (period,
                 int(sample_filter.start.strftime('%s'))
                 if sample_filter.start else 0)
        else:
            map_stats = self.MAP_STATS

        results = self.db.meter.map_reduce(
            map_stats,
            self.REDUCE_STATS,
            {'inline': 1},
            finalize=self.FINALIZE_STATS,
            query=q,
        )

        return sorted(
            (models.Statistics(**(r['value'])) for r in results['results']),
            key=operator.attrgetter('period_start'))
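In the example above, int(sample_filter.start.strftime('%s')) converts the start timestamp to epoch seconds, but the '%s' directive is platform-specific and interprets the datetime in local time; calendar.timegm, used in a later example, is the portable UTC-based equivalent. A minimal sketch of the portable conversion:

import calendar
import datetime

# Hypothetical, timezone-naive UTC timestamp.
start = datetime.datetime(2014, 1, 1, 12, 0, 0)

# Interpret the struct_time as UTC and convert to epoch seconds.
epoch = calendar.timegm(start.utctimetuple())
assert epoch == 1388577600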
Example #3
 def test_without_end_timestamp(self):
     statistics = [
         models.Statistics(
             unit=None,
             count=0,
             min=None,
             max=None,
             avg=None,
             duration=None,
             duration_start=self.late1,
             duration_end=self.late2,
             sum=0,
             period=None,
             period_start=None,
             period_end=None,
             groupby=None,
         )
     ]
     with mock.patch.object(type(self.conn),
                            'get_meter_statistics',
                            return_value=statistics):
         data = self.get_json('/meters/instance:m1.tiny/statistics',
                              q=[{
                                  'field': 'timestamp',
                                  'op': 'ge',
                                  'value': self.late1.isoformat()
                              }, {
                                  'field': 'resource_id',
                                  'value': 'resource-id'
                              }, {
                                  'field': 'search_offset',
                                  'value': 10
                              }])
     self._assert_times_match(data[0]['duration_start'], self.late1)
     self._assert_times_match(data[0]['duration_end'], self.late2)
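The _assert_times_match helper is not shown in this example. A minimal sketch of what such a comparison between the ISO 8601 string returned by the API and the expected datetime might look like (the import path and exact comparison are assumptions):

from oslo_utils import timeutils

def _assert_times_match(self, actual_iso, expected_dt):
    # The API returns timestamps as ISO 8601 strings; parse them and strip
    # the timezone before comparing with the naive expected datetime.
    actual = timeutils.parse_isotime(actual_iso).replace(tzinfo=None)
    self.assertEqual(expected_dt, actual)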
Example #4
 def get_interval(event_filter, period, groupby, aggregate):
     # 'start' and 'end' come from the enclosing test method's scope.
     assert event_filter.start
     assert event_filter.end
     if (event_filter.start > end or event_filter.end < start):
         return []
     duration_start = max(event_filter.start, start)
     duration_end = min(event_filter.end, end)
     duration = timeutils.delta_seconds(duration_start, duration_end)
     return [
         models.Statistics(
             unit='',
             min=0,
             max=0,
             avg=0,
             sum=0,
             count=0,
             period=None,
             period_start=None,
             period_end=None,
             duration=duration,
             duration_start=duration_start,
             duration_end=duration_end,
             groupby=None,
         )
     ]
Example #5
    def _stats_result_to_model(self, result, groupby, aggregate, period,
                               first_timestamp):
        if period is None:
            period = 0
        first_timestamp = pymongo_utils.from_unix_timestamp(first_timestamp)
        stats_args = self._stats_result_aggregates(result, aggregate)

        stats_args['unit'] = result['unit']
        stats_args['duration'] = (result["last_timestamp"] -
                                  result["first_timestamp"]).total_seconds()
        stats_args['duration_start'] = result['first_timestamp']
        stats_args['duration_end'] = result['last_timestamp']
        stats_args['period'] = period
        start = result.get("period_start", 0) * period

        stats_args['period_start'] = (first_timestamp +
                                      datetime.timedelta(seconds=start))
        stats_args['period_end'] = (first_timestamp +
                                    datetime.timedelta(seconds=start + period)
                                    if period else result['last_timestamp'])

        stats_args['groupby'] = (dict(
            (g, result['_id'].get(g.replace(".", "/")))
            for g in groupby) if groupby else None)
        return models.Statistics(**stats_args)
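_stats_result_aggregates is referenced above but not shown. A minimal sketch of the kind of extraction it performs, assuming the result document carries the standard statistic keys (the exact key layout is an assumption):

def _stats_result_aggregates(result, aggregate):
    stats_args = {}
    # The five standard statistics every Statistics model expects.
    for attr in ('count', 'min', 'max', 'sum', 'avg'):
        if attr in result:
            stats_args[attr] = result[attr]
    # Selectable aggregates, if requested, are passed through under an
    # 'aggregate' mapping keyed by function name (assumption).
    if aggregate:
        stats_args['aggregate'] = dict(
            (a.func, result.get(a.func)) for a in aggregate)
    return stats_args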
Example #6
def point_to_stat(point, tags, period, aggregate, unit='%'):
    """Convert the InfluxDB point to the Statistics object"""

    kwargs = {}
    if not point['last'] or not point['first']:
        return
    if not aggregate:
        for func in DEFAULT_AGGREGATES:
            kwargs[BACK_FUNC_TRANSITIONS.get(func, func)] = point.get(func)
    else:
        kwargs['aggregate'] = {}
        for description in aggregate:
            func = AGGREGATE_FUNC_TRANSITIONS.get(description.func,
                                                  description.func)
            kwargs['aggregate'][description.func] = point.get(func)
            if func in DEFAULT_AGGREGATES:
                kwargs[description.func] = point.get(func)

    tags = tags or {}
    tags = dict((key.replace("metadata", "resource_metadata"), value)
                for key, value in tags.items())
    kwargs["groupby"] = tags
    kwargs["duration_start"] = utils.sanitize_timestamp(point["first"])
    kwargs["duration_end"] = utils.sanitize_timestamp(point["last"])
    kwargs["duration"] = (kwargs["duration_end"] -
                          kwargs["duration_start"]).total_seconds()
    kwargs["period"] = period or 0
    kwargs["period_start"] = utils.sanitize_timestamp(point["time"])
    kwargs["period_end"] = (utils.sanitize_timestamp(point["time"]) +
                            datetime.timedelta(seconds=period)
                            if period else kwargs['duration_end'])
    kwargs["unit"] = unit
    return models.Statistics(**kwargs)
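DEFAULT_AGGREGATES, BACK_FUNC_TRANSITIONS and AGGREGATE_FUNC_TRANSITIONS are module-level tables that are not shown above. A plausible sketch of their contents (assumptions, based on InfluxDB calling its average function "mean"):

# Statistic fields filled in when no explicit aggregate is requested.
DEFAULT_AGGREGATES = ['count', 'min', 'max', 'sum', 'mean']

# InfluxDB function name -> Statistics attribute name.
BACK_FUNC_TRANSITIONS = {'mean': 'avg'}

# Ceilometer aggregate name -> InfluxDB function name.
AGGREGATE_FUNC_TRANSITIONS = {'avg': 'mean'}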
Example #7
    def get_meter_statistics(self, sample_filter, period=None):
        """Return an iterable of models.Statistics instance containing meter
        statistics described by the query parameters.

        The filter must have a meter value set.

        """
        q = make_query_from_filter(sample_filter)

        if period:
            if sample_filter.start:
                period_start = sample_filter.start
            else:
                period_start = self.db.meter.find(limit=1,
                                                  sort=[('timestamp',
                                                         pymongo.ASCENDING)
                                                        ])[0]['timestamp']
            period_start = int(calendar.timegm(period_start.utctimetuple()))
            map_stats = self.MAP_STATS_PERIOD % (period, period_start)
        else:
            map_stats = self.MAP_STATS

        results = self.db.meter.map_reduce(
            map_stats,
            self.REDUCE_STATS,
            {'inline': 1},
            finalize=self.FINALIZE_STATS,
            query=q,
        )

        return sorted(
            (models.Statistics(**(r['value'])) for r in results['results']),
            key=operator.attrgetter('period_start'))
Example #8
 def get_meter_statistics(event_filter):
     return models.Statistics(
         unit='',
         min=0, max=0, avg=0, sum=0, count=0,
         period=None,
         period_start=None,
         period_end=None,
         duration=end - start,
         duration_start=start,
         duration_end=end)
Example #9
    def get_meter_statistics(self, sample_filter, period=None, groupby=None):
        """Return an iterable of models.Statistics instance containing meter
        statistics described by the query parameters.

        The filter must have a meter value set.

        """
        #FIXME(sileht): since testscenarios is used
        # all API functional and DB tests have been enabled
        # get_meter_statistics will not return the expected data in some tests
        # Some other tests return "IndexError: list index out of range"
        # on the line: rslt = results['result'][0]
        # complete trace: http://paste.openstack.org/show/45016/
        # And because I have no db2 installation to test,
        # I have disabled this method until it is fixed
        raise NotImplementedError("Statistics not implemented")

        if groupby:
            raise NotImplementedError("Group by not implemented.")

        q = make_query_from_filter(sample_filter)

        if period:
            raise NotImplementedError('Statistics for period not implemented.')

        results = self.db.meter.aggregate([
            {
                '$match': q
            },
            {
                '$group': self.GROUP
            },
            {
                '$project': self.PROJECT
            },
        ])

        # Since there is no period grouping, there should be only one set in
        # the results
        rslt = results['result'][0]

        duration = rslt['duration_end'] - rslt['duration_start']
        if hasattr(duration, 'total_seconds'):
            rslt['duration'] = duration.total_seconds()
        else:
            rslt['duration'] = duration.days * 86400 + duration.seconds

        rslt['period_start'] = rslt['duration_start']
        rslt['period_end'] = rslt['duration_end']
        # Period is not supported, set it to zero
        rslt['period'] = 0
        rslt['groupby'] = None

        return [models.Statistics(**(rslt))]
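self.GROUP and self.PROJECT are the aggregation pipeline stages used above. A minimal sketch of what such stages could look like for samples stored with counter_volume and timestamp fields (field names and structure are assumptions; the operators are standard MongoDB $group/$project):

GROUP = {'_id': '$counter_name',
         'unit': {'$min': '$counter_unit'},
         'min': {'$min': '$counter_volume'},
         'max': {'$max': '$counter_volume'},
         'avg': {'$avg': '$counter_volume'},
         'sum': {'$sum': '$counter_volume'},
         'count': {'$sum': 1},
         'duration_start': {'$min': '$timestamp'},
         'duration_end': {'$max': '$timestamp'}}

PROJECT = {'_id': 0,
           'unit': 1, 'min': 1, 'max': 1, 'avg': 1, 'sum': 1, 'count': 1,
           'duration_start': 1, 'duration_end': 1}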
Example #10
 def _stats_result_to_model(result, groupby, aggregate):
     stats_args = Connection._stats_result_aggregates(result, aggregate)
     stats_args['unit'] = result['unit']
     stats_args['duration'] = result['duration']
     stats_args['duration_start'] = result['duration_start']
     stats_args['duration_end'] = result['duration_end']
     stats_args['period'] = result['period']
     stats_args['period_start'] = result['period_start']
     stats_args['period_end'] = result['period_end']
     stats_args['groupby'] = (dict(
         (g, result['groupby'][g]) for g in groupby) if groupby else None)
     return models.Statistics(**stats_args)
Example #11
 def _patch_get_stats(self, start, end):
     statistics = models.Statistics(unit='',
                                    min=0, max=0, avg=0, sum=0, count=0,
                                    period=None,
                                    period_start=None,
                                    period_end=None,
                                    duration=end - start,
                                    duration_start=start,
                                    duration_end=end,
                                    groupby=None)
     return mock.patch.object(self.conn, 'get_meter_statistics',
                              return_value=statistics)
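The object returned above is a patcher, so it can be used as a context manager inside a test. A minimal usage sketch (the test name and meter URL are hypothetical; get_json comes from the API test base class, as in the earlier examples):

import datetime

def test_statistics_duration(self):
    start = datetime.datetime(2014, 1, 1)
    end = datetime.datetime(2014, 1, 2)
    with self._patch_get_stats(start, end):
        data = self.get_json('/meters/instance/statistics')
    # data[0] now reflects the stubbed Statistics object above.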
Example #12
    def get_meter_statistics(self, sample_filter, period=None, groupby=None):
        """Return an iterable of models.Statistics instance containing meter
        statistics described by the query parameters.

        The filter must have a meter value set.

        """
        if (groupby and set(groupby) -
                set(['user_id', 'project_id', 'resource_id', 'source'])):
            raise NotImplementedError("Unable to group by these fields")

        q = make_query_from_filter(sample_filter)

        if period:
            if sample_filter.start:
                period_start = sample_filter.start
            else:
                period_start = self.db.meter.find(limit=1,
                                                  sort=[('timestamp',
                                                         pymongo.ASCENDING)
                                                        ])[0]['timestamp']
            period_start = int(calendar.timegm(period_start.utctimetuple()))
            params_period = {
                'period': period,
                'period_first': period_start,
                'groupby_fields': json.dumps(groupby)
            }
            if groupby:
                map_stats = self.MAP_STATS_PERIOD_GROUPBY % params_period
            else:
                map_stats = self.MAP_STATS_PERIOD % params_period
        else:
            if groupby:
                params_groupby = {'groupby_fields': json.dumps(groupby)}
                map_stats = self.MAP_STATS_GROUPBY % params_groupby
            else:
                map_stats = self.MAP_STATS

        results = self.db.meter.map_reduce(
            map_stats,
            self.REDUCE_STATS,
            {'inline': 1},
            finalize=self.FINALIZE_STATS,
            query=q,
        )

        # FIXME(terriyu) Fix get_meter_statistics() so we don't use sorted()
        # to return the results
        return sorted(
            (models.Statistics(**(r['value'])) for r in results['results']),
            key=operator.attrgetter('period_start'))
Example #13
 def _stats_result_to_model(result, period, period_start, period_end,
                            groupby, aggregate):
     stats_args = Connection._stats_result_aggregates(result, aggregate)
     stats_args['unit'] = result.unit
     duration = (timeutils.delta_seconds(result.tsmin, result.tsmax)
                 if result.tsmin is not None and result.tsmax is not None
                 else None)
     stats_args['duration'] = duration
     stats_args['duration_start'] = result.tsmin
     stats_args['duration_end'] = result.tsmax
     stats_args['period'] = period
     stats_args['period_start'] = period_start
     stats_args['period_end'] = period_end
     stats_args['groupby'] = (dict(
         (g, getattr(result, g)) for g in groupby) if groupby else None)
     return api_models.Statistics(**stats_args)
Example #14
 def get_interval(ignore_self, event_filter, period):
     return [
         models.Statistics(
             count=0,
             min=None,
             max=None,
             avg=None,
             duration=None,
             duration_start=self.late1,
             duration_end=self.late2,
             sum=0,
             period=None,
             period_start=None,
             period_end=None,
         )
     ]
Example #15
 def _stats_result_to_model(result, period, period_start, period_end):
     duration = (timeutils.delta_seconds(result.tsmin, result.tsmax)
                 if result.tsmin is not None and result.tsmax is not None
                 else None)
     return api_models.Statistics(
         count=int(result.count),
         min=result.min,
         max=result.max,
         avg=result.avg,
         sum=result.sum,
         duration_start=result.tsmin,
         duration_end=result.tsmax,
         duration=duration,
         period=period,
         period_start=period_start,
         period_end=period_end,
     )
Example #16
 def get_interval(ignore_self, event_filter, period):
     return [
         models.Statistics(
             unit=None,
             count=0,
             min=None,
             max=None,
             avg=None,
             duration=None,
             duration_start=self.early1,
             duration_end=self.early2,
             sum=0,
             period=None,
             period_start=None,
             period_end=None,
         )
     ]
Example #17
 def _stats_result_to_model(result, period, period_start, period_end,
                            groupby):
     duration = (timeutils.delta_seconds(result.tsmin, result.tsmax)
                 if result.tsmin is not None and result.tsmax is not None
                 else None)
     return api_models.Statistics(
         unit=result.unit,
         count=int(result.count),
         min=result.min,
         max=result.max,
         avg=result.avg,
         sum=result.sum,
         duration_start=result.tsmin,
         duration_end=result.tsmax,
         duration=duration,
         period=period,
         period_start=period_start,
         period_end=period_end,
         groupby=(dict((g, getattr(result, g))
                       for g in groupby) if groupby else None))
Example #18
    def get_meter_statistics(self, sample_filter, period=None):
        """Return an iterable of models.Statistics instance containing meter
        statistics described by the query parameters.

        The filter must have a meter value set.

        """
        q = make_query_from_filter(sample_filter)

        if period:
            raise NotImplementedError('Statistics for period not implemented.')

        results = self.db.meter.aggregate([
            {
                '$match': q
            },
            {
                '$group': self.GROUP
            },
            {
                '$project': self.PROJECT
            },
        ])

        # Since there is no period grouping, there should be only one set in
        # the results
        rslt = results['result'][0]

        duration = rslt['duration_end'] - rslt['duration_start']
        if hasattr(duration, 'total_seconds'):
            rslt['duration'] = duration.total_seconds()
        else:
            rslt['duration'] = duration.days * 86400 + duration.seconds

        rslt['period_start'] = rslt['duration_start']
        rslt['period_end'] = rslt['duration_end']
        # Period is not supported, set it to zero
        rslt['period'] = 0
        rslt['groupby'] = None

        return [models.Statistics(**(rslt))]
Example #19
 def get_interval(ignore_self, event_filter, period):
     assert event_filter.start
     assert event_filter.end
     if (event_filter.start > end or event_filter.end < start):
         return []
     duration_start = max(event_filter.start, start)
     duration_end = min(event_filter.end, end)
     duration = timeutils.delta_seconds(duration_start, duration_end)
     return [
         models.Statistics(
             min=0,
             max=0,
             avg=0,
             sum=0,
             count=0,
             period=None,
             period_start=None,
             period_end=None,
             duration=duration,
             duration_start=duration_start,
             duration_end=duration_end,
         )
     ]
Example #20
    def get_meter_statistics(self,
                             sample_filter,
                             period=None,
                             groupby=None,
                             aggregate=None):
        """Return an iterable of models.Statistics instances.

        Items contain meter statistics described by the query
        parameters. The filter must have a meter value set.

        .. note::

          Due to HBase limitations the aggregations are implemented
          in the driver itself, therefore this method will be quite slow
          because of all the Thrift traffic it is going to create.
        """
        if groupby:
            raise ceilometer.NotImplementedError("Group by not implemented.")

        if aggregate:
            raise ceilometer.NotImplementedError(
                'Selectable aggregates not implemented')

        with self.conn_pool.connection() as conn:
            meter_table = conn.table(self.METER_TABLE)
            q, start, stop, columns = (
                hbase_utils.make_sample_query_from_filter(sample_filter))
            # These fields are used when calculating statistics
            columns.extend(
                ['f:timestamp', 'f:counter_volume', 'f:counter_unit'])
            meters = map(
                hbase_utils.deserialize_entry,
                list(meter for (ignored, meter) in meter_table.scan(
                    filter=q, row_start=start, row_stop=stop,
                    columns=columns)))

        if sample_filter.start:
            start_time = sample_filter.start
        elif meters:
            start_time = meters[-1][0]['timestamp']
        else:
            start_time = None

        if sample_filter.end:
            end_time = sample_filter.end
        elif meters:
            end_time = meters[0][0]['timestamp']
        else:
            end_time = None

        results = []

        if not period:
            period = 0
            period_start = start_time
            period_end = end_time

        # As our HBase meters are stored as newest-first, we need to iterate
        # in the reverse order
        for meter in meters[::-1]:
            ts = meter[0]['timestamp']
            if period:
                offset = int(
                    timeutils.delta_seconds(start_time, ts) / period) * period
                period_start = start_time + datetime.timedelta(0, offset)

            if not results or not results[-1].period_start == period_start:
                if period:
                    period_end = period_start + datetime.timedelta(0, period)
                results.append(
                    models.Statistics(unit='',
                                      count=0,
                                      min=0,
                                      max=0,
                                      avg=0,
                                      sum=0,
                                      period=period,
                                      period_start=period_start,
                                      period_end=period_end,
                                      duration=None,
                                      duration_start=None,
                                      duration_end=None,
                                      groupby=None))
            self._update_meter_stats(results[-1], meter[0])
        return results
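_update_meter_stats folds one sample into the running Statistics object for its period bucket. A minimal sketch of the accumulation, assuming the deserialized sample carries counter_volume, counter_unit and timestamp keys (matching the columns requested above):

def _update_meter_stats(self, stat, meter):
    volume = meter['counter_volume']
    ts = meter['timestamp']
    stat.unit = meter['counter_unit']
    stat.min = volume if stat.count == 0 else min(stat.min, volume)
    stat.max = volume if stat.count == 0 else max(stat.max, volume)
    stat.sum += volume
    stat.count += 1
    stat.avg = stat.sum / stat.count
    # Track the window actually covered by samples in this bucket.
    if stat.duration_start is None or ts < stat.duration_start:
        stat.duration_start = ts
    if stat.duration_end is None or ts > stat.duration_end:
        stat.duration_end = ts
    stat.duration = (stat.duration_end - stat.duration_start).total_seconds()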
Example #21
    def get_meter_statistics(self,
                             sample_filter,
                             period=None,
                             groupby=None,
                             aggregate=None):
        """Return an iterable of models.Statistics instance.

        Items contain meter statistics described by the query
        parameters. The filter must have a meter value set.
        """
        if (groupby and set(groupby) -
                set(['user_id', 'project_id', 'resource_id', 'source'])):
            raise ceilometer.NotImplementedError(
                "Unable to group by these fields")

        if aggregate:
            raise ceilometer.NotImplementedError(
                'Selectable aggregates not implemented')

        q = pymongo_utils.make_query_from_filter(sample_filter)

        if period:
            if sample_filter.start:
                period_start = sample_filter.start
            else:
                period_start = self.db.meter.find(limit=1,
                                                  sort=[('timestamp',
                                                         pymongo.ASCENDING)
                                                        ])[0]['timestamp']

        if groupby:
            sort_keys = ['counter_name'] + groupby + ['timestamp']
        else:
            sort_keys = ['counter_name', 'timestamp']

        sort_instructions = self._build_sort_instructions(sort_keys=sort_keys,
                                                          sort_dir='asc')
        meters = self.db.meter.find(q, sort=sort_instructions)

        def _group_key(meter):
            # the method to define a key for groupby call
            key = {}
            for y in sort_keys:
                if y == 'timestamp' and period:
                    key[y] = (
                        timeutils.delta_seconds(period_start, meter[y]) //
                        period)
                elif y != 'timestamp':
                    key[y] = meter[y]
            return key

        def _to_offset(periods):
            return {
                'days': (periods * period) // self.SECONDS_IN_A_DAY,
                'seconds': (periods * period) % self.SECONDS_IN_A_DAY
            }

        for key, grouped_meters in itertools.groupby(meters, key=_group_key):
            stat = models.Statistics(unit=None,
                                     min=sys.maxint,
                                     max=-sys.maxint,
                                     avg=0,
                                     sum=0,
                                     count=0,
                                     period=0,
                                     period_start=0,
                                     period_end=0,
                                     duration=0,
                                     duration_start=0,
                                     duration_end=0,
                                     groupby=None)

            for meter in grouped_meters:
                stat.unit = meter.get('counter_unit', '')
                m_volume = meter.get('counter_volume')
                if stat.min > m_volume:
                    stat.min = m_volume
                if stat.max < m_volume:
                    stat.max = m_volume
                stat.sum += m_volume
                stat.count += 1
                if stat.duration_start == 0:
                    stat.duration_start = meter['timestamp']
                stat.duration_end = meter['timestamp']
                if groupby and not stat.groupby:
                    stat.groupby = {}
                    for group_key in groupby:
                        stat.groupby[group_key] = meter[group_key]

            stat.duration = timeutils.delta_seconds(stat.duration_start,
                                                    stat.duration_end)
            stat.avg = stat.sum / stat.count
            if period:
                stat.period = period
                periods = key.get('timestamp')
                stat.period_start = (
                    period_start + datetime.timedelta(**(_to_offset(periods))))
                stat.period_end = (
                    period_start +
                    datetime.timedelta(**(_to_offset(periods + 1))))
            else:
                stat.period_start = stat.duration_start
                stat.period_end = stat.duration_end
            yield stat
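Because this implementation is a generator, callers iterate or materialize the results. A minimal usage sketch, assuming conn is an instance of this driver and that SampleFilter lives in ceilometer.storage (both are assumptions here):

from ceilometer.storage import SampleFilter

sample_filter = SampleFilter(meter='cpu_util')
for stat in conn.get_meter_statistics(sample_filter, period=3600):
    print(stat.period_start, stat.avg, stat.count)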
Example #22
    def get_meter_statistics(self, sample_filter, period=None, groupby=None):
        """Return an iterable of models.Statistics instances containing meter
        statistics described by the query parameters.

        The filter must have a meter value set.

        .. note::

           Due to HBase limitations the aggregations are implemented
           in the driver itself, therefore this method will be quite slow
           because of all the Thrift traffic it is going to create.

        """
        if groupby:
            raise NotImplementedError("Group by not implemented.")

        meter_table = self.conn.table(self.METER_TABLE)

        q, start, stop = make_query_from_filter(sample_filter)

        meters = list(meter for (ignored, meter) in meter_table.scan(
            filter=q, row_start=start, row_stop=stop))

        if sample_filter.start:
            start_time = sample_filter.start
        elif meters:
            start_time = timeutils.parse_strtime(meters[-1]['f:timestamp'])
        else:
            start_time = None

        if sample_filter.end:
            end_time = sample_filter.end
        elif meters:
            end_time = timeutils.parse_strtime(meters[0]['f:timestamp'])
        else:
            end_time = None

        results = []

        if not period:
            period = 0
            period_start = start_time
            period_end = end_time

        # As our HBase meters are stored as newest-first, we need to iterate
        # in the reverse order
        for meter in meters[::-1]:
            ts = timeutils.parse_strtime(meter['f:timestamp'])
            if period:
                offset = int(
                    timeutils.delta_seconds(start_time, ts) / period) * period
                period_start = start_time + datetime.timedelta(0, offset)

            if not len(results) or not results[-1].period_start == \
                    period_start:
                if period:
                    period_end = period_start + datetime.timedelta(0, period)
                results.append(
                    models.Statistics(unit='',
                                      count=0,
                                      min=0,
                                      max=0,
                                      avg=0,
                                      sum=0,
                                      period=period,
                                      period_start=period_start,
                                      period_end=period_end,
                                      duration=None,
                                      duration_start=None,
                                      duration_end=None,
                                      groupby=None))
            self._update_meter_stats(results[-1], meter)
        return results
Example #23
    def get_meter_statistics(self,
                             filter,
                             period=None,
                             groupby=None,
                             aggregate=None):
        """Return a dictionary containing meter statistics.

        Meter statistics is described by the query parameters.
        The filter must have a meter value set.

        { 'min':
          'max':
          'avg':
          'sum':
          'count':
          'period':
          'period_start':
          'period_end':
          'duration':
          'duration_start':
          'duration_end':
          }
        """
        if filter:
            if not filter.meter:
                raise ceilometer.NotImplementedError('Query without meter '
                                                     'not implemented')
        else:
            raise ceilometer.NotImplementedError('Query without filter '
                                                 'not implemented')

        if groupby:
            raise ceilometer.NotImplementedError('Groupby not implemented')

        if filter.metaquery:
            raise ceilometer.NotImplementedError('Metaquery not implemented')

        if filter.message_id:
            raise ceilometer.NotImplementedError('Message_id query '
                                                 'not implemented')

        if filter.start_timestamp_op and filter.start_timestamp_op != 'ge':
            raise ceilometer.NotImplementedError(
                ('Start time op %s '
                 'not implemented') % filter.start_timestamp_op)

        if filter.end_timestamp_op and filter.end_timestamp_op != 'le':
            raise ceilometer.NotImplementedError(
                ('End time op %s '
                 'not implemented') % filter.end_timestamp_op)

        if not filter.start_timestamp:
            filter.start_timestamp = timeutils.isotime(
                datetime.datetime(1970, 1, 1))

        # TODO(monasca): Add this as a config parameter
        allowed_stats = ['avg', 'min', 'max', 'sum', 'count']
        if aggregate:
            not_allowed_stats = [
                a.func for a in aggregate if a.func not in allowed_stats
            ]
            if not_allowed_stats:
                raise ceilometer.NotImplementedError(
                    ('Aggregate function(s) '
                     '%s not implemented') % not_allowed_stats)

            statistics = [a.func for a in aggregate if a.func in allowed_stats]
        else:
            statistics = allowed_stats

        dims_filter = dict(user_id=filter.user,
                           project_id=filter.project,
                           source=filter.source,
                           resource_id=filter.resource)
        dims_filter = {k: v for k, v in dims_filter.items() if v is not None}

        period = period if period \
            else cfg.CONF.monasca.default_stats_period

        _search_args = dict(name=filter.meter,
                            dimensions=dims_filter,
                            start_time=filter.start_timestamp,
                            end_time=filter.end_timestamp,
                            period=period,
                            statistics=','.join(statistics),
                            merge_metrics=True)

        _search_args = {k: v for k, v in _search_args.items() if v is not None}

        stats_list = self.mc.statistics_list(**_search_args)
        for stats in stats_list:
            for s in stats['statistics']:
                stats_dict = self._convert_to_dict(s, stats['columns'])
                ts_start = timeutils.parse_isotime(stats_dict['timestamp'])
                ts_end = ts_start + datetime.timedelta(0, period)
                del stats_dict['timestamp']
                if 'count' in stats_dict:
                    stats_dict['count'] = int(stats_dict['count'])
                yield api_models.Statistics(
                    unit=stats['dimensions'].get('unit'),
                    period=period,
                    period_start=ts_start,
                    period_end=ts_end,
                    duration=period,
                    duration_start=ts_start,
                    duration_end=ts_end,
                    groupby={u'': u''},
                    **stats_dict)
Example #24
    def get_meter_statistics(self, filter, period=None, groupby=None,
                             aggregate=None):
        """Return a dictionary containing meter statistics.

        Meter statistics is described by the query parameters.
        The filter must have a meter value set.

        { 'min':
          'max':
          'avg':
          'sum':
          'count':
          'period':
          'period_start':
          'period_end':
          'duration':
          'duration_start':
          'duration_end':
          }
        """
        if filter:
            if not filter.meter:
                raise ceilometer.NotImplementedError('Query without meter '
                                                     'not implemented')
        else:
            raise ceilometer.NotImplementedError('Query without filter '
                                                 'not implemented')

        allowed_groupby = ['user_id', 'project_id', 'resource_id', 'source']

        if groupby:
            if len(groupby) > 1:
                raise ceilometer.NotImplementedError('Only one groupby '
                                                     'supported')

            groupby = groupby[0]
            if groupby not in allowed_groupby:
                raise ceilometer.NotImplementedError('Groupby %s not'
                                                     ' implemented' % groupby)

        if filter.metaquery:
            raise ceilometer.NotImplementedError('Metaquery not implemented')

        if filter.message_id:
            raise ceilometer.NotImplementedError('Message_id query '
                                                 'not implemented')

        if filter.start_timestamp_op and filter.start_timestamp_op != 'ge':
            raise ceilometer.NotImplementedError(('Start time op %s '
                                                  'not implemented') %
                                                 filter.start_timestamp_op)

        if filter.end_timestamp_op and filter.end_timestamp_op != 'le':
            raise ceilometer.NotImplementedError(('End time op %s '
                                                  'not implemented') %
                                                 filter.end_timestamp_op)
        if not filter.start_timestamp:
            filter.start_timestamp = timeutils.isotime(
                datetime.datetime(1970, 1, 1))
        else:
            filter.start_timestamp = timeutils.isotime(filter.start_timestamp)

        if filter.end_timestamp:
            filter.end_timestamp = timeutils.isotime(filter.end_timestamp)

        # TODO(monasca): Add this as a config parameter
        allowed_stats = ['avg', 'min', 'max', 'sum', 'count']
        if aggregate:
            not_allowed_stats = [a.func for a in aggregate
                                 if a.func not in allowed_stats]
            if not_allowed_stats:
                raise ceilometer.NotImplementedError(('Aggregate function(s) '
                                                      '%s not implemented') %
                                                     not_allowed_stats)

            statistics = [a.func for a in aggregate
                          if a.func in allowed_stats]
        else:
            statistics = allowed_stats

        dims_filter = dict(user_id=filter.user,
                           project_id=filter.project,
                           source=filter.source,
                           resource_id=filter.resource
                           )
        dims_filter = {k: v for k, v in dims_filter.items() if v is not None}

        period = period if period \
            else cfg.CONF.monasca.default_stats_period

        if groupby:
            _metric_args = dict(name=filter.meter,
                                dimensions=dims_filter)
            group_stats_list = []

            for metric in self.mc.metrics_list(**_metric_args):
                _search_args = dict(
                    name=metric['name'],
                    dimensions=metric['dimensions'],
                    start_time=filter.start_timestamp,
                    end_time=filter.end_timestamp,
                    period=period,
                    statistics=','.join(statistics),
                    merge_metrics=False)

                _search_args = {k: v for k, v in _search_args.items()
                                if v is not None}
                stats_list = self.mc.statistics_list(**_search_args)
                group_stats_list.extend(stats_list)

            group_stats_dict = {}

            for stats in group_stats_list:
                groupby_val = stats['dimensions'].get(groupby)
                stats_list = group_stats_dict.get(groupby_val)
                if stats_list:
                    stats_list.append(stats)
                else:
                    group_stats_dict[groupby_val] = [stats]

            def get_max(items):
                return max(items)

            def get_min(items):
                return min(items)

            def get_avg(items):
                return sum(items)/len(items)

            def get_sum(items):
                return sum(items)

            def get_count(items):
                count = 0
                for item in items:
                    count = count + item
                return count

            for group_key, stats_group in group_stats_dict.iteritems():
                max_list = []
                min_list = []
                avg_list = []
                sum_list = []
                count_list = []
                ts_list = []
                group_statistics = {}
                for stats in stats_group:
                    for s in stats['statistics']:
                        stats_dict = self._convert_to_dict(s, stats['columns'])

                        if 'max' in stats['columns']:
                            max_list.append(stats_dict['max'])
                        if 'min' in stats['columns']:
                            min_list.append(stats_dict['min'])
                        if 'avg' in stats['columns']:
                            avg_list.append(stats_dict['avg'])
                        if 'sum' in stats['columns']:
                            sum_list.append(stats_dict['sum'])
                        if 'count' in stats['columns']:
                            count_list.append(stats_dict['count'])

                        ts_list.append(stats_dict['timestamp'])

                        group_statistics['unit'] = (stats['dimensions'].
                                                    get('unit'))

                if len(max_list):
                    group_statistics['max'] = get_max(max_list)
                if len(min_list):
                    group_statistics['min'] = get_min(min_list)
                if len(avg_list):
                    group_statistics['avg'] = get_avg(avg_list)
                if len(sum_list):
                    group_statistics['sum'] = get_sum(sum_list)
                if len(count_list):
                    group_statistics['count'] = get_count(count_list)

                group_statistics['end_timestamp'] = get_max(ts_list)
                group_statistics['timestamp'] = get_min(ts_list)

                ts_start = timeutils.parse_isotime(
                    group_statistics['timestamp']).replace(tzinfo=None)

                ts_end = timeutils.parse_isotime(
                    group_statistics['end_timestamp']).replace(tzinfo=None)

                del group_statistics['end_timestamp']

                if 'count' in group_statistics:
                    group_statistics['count'] = int(group_statistics['count'])
                unit = group_statistics['unit']
                del group_statistics['unit']
                if aggregate:
                    group_statistics['aggregate'] = {}
                    for a in aggregate:
                        key = '%s%s' % (a.func, '/%s' % a.param if a.param
                                        else '')
                        group_statistics['aggregate'][key] = (
                            group_statistics.get(key))
                yield api_models.Statistics(
                    unit=unit,
                    period=period,
                    period_start=ts_start,
                    period_end=ts_end,
                    duration=period,
                    duration_start=ts_start,
                    duration_end=ts_end,
                    groupby={groupby: group_key},
                    **group_statistics
                )
        else:
            _search_args = dict(
                name=filter.meter,
                dimensions=dims_filter,
                start_time=filter.start_timestamp,
                end_time=filter.end_timestamp,
                period=period,
                statistics=','.join(statistics),
                merge_metrics=True)

            _search_args = {k: v for k, v in _search_args.items()
                            if v is not None}
            stats_list = self.mc.statistics_list(**_search_args)
            for stats in stats_list:
                for s in stats['statistics']:
                    stats_dict = self._convert_to_dict(s, stats['columns'])
                    ts_start = timeutils.parse_isotime(
                        stats_dict['timestamp']).replace(tzinfo=None)
                    ts_end = (ts_start + datetime.timedelta(
                        0, period)).replace(tzinfo=None)
                    del stats_dict['timestamp']
                    if 'count' in stats_dict:
                        stats_dict['count'] = int(stats_dict['count'])

                    if aggregate:
                        stats_dict['aggregate'] = {}
                        for a in aggregate:
                            key = '%s%s' % (a.func, '/%s' % a.param if a.param
                                            else '')
                            stats_dict['aggregate'][key] = stats_dict.get(key)

                    yield api_models.Statistics(
                        unit=stats['dimensions'].get('unit'),
                        period=period,
                        period_start=ts_start,
                        period_end=ts_end,
                        duration=period,
                        duration_start=ts_start,
                        duration_end=ts_end,
                        groupby={u'': u''},
                        **stats_dict
                    )
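_convert_to_dict pairs one statistics row returned by the Monasca API with its column names. A minimal sketch of what it presumably does (the implementation shown is an assumption):

def _convert_to_dict(stats_row, columns):
    # e.g. columns == ['timestamp', 'avg', 'count'] and
    #      stats_row == ['2015-04-14T17:52:00Z', 2.5, 4]
    return dict(zip(columns, stats_row))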