示例#1
0
    def test_storage_can_handle_large_values(self):
        """Counter volumes near the 64-bit range survive a storage round-trip."""
        for meter_name, expected_volume in (
                ('dummyBigCounter', 3372036854775807),
                ('dummySmallCounter', -3372036854775807)):
            sample_filter = storage.SampleFilter(meter=meter_name, )
            samples = list(self.conn.get_samples(sample_filter))
            self.assertEqual(samples[0].counter_volume, expected_volume)
示例#2
0
    def test_get_samples_timestamp_filter_exclusive_range(self):
        """Exclusive bounds (gt/lt) are shifted by 1 ms and sent as ge/le."""
        with mock.patch("ceilometer.monasca_client.Client") as client_mock:
            conn = impl_monasca.Connection("127.0.0.1:8080")

            client_mock().metrics_list.return_value = (
                TestGetSamples.dummy_metrics_mocked_return_value)
            measurements_mock = client_mock().measurements_list
            measurements_mock.return_value = (
                TestGetSamples.dummy_get_samples_mocked_return_value)

            start = datetime.datetime(2015, 3, 20)
            end = datetime.datetime(2015, 4, 1, 12, 00, 00)

            sample_filter = storage.SampleFilter(
                meter='specific meter',
                start_timestamp=timeutils.isotime(start),
                start_timestamp_op='gt',
                end_timestamp=timeutils.isotime(end),
                end_timestamp_op='lt')

            list(conn.get_samples(sample_filter))

            self.assertEqual(True, measurements_mock.called)
            self.assertEqual(1, measurements_mock.call_count)
            # The exclusive range must arrive at the client widened by one
            # millisecond on each side, with inclusive operators.
            expected_kwargs = dict(dimensions=dict(datasource='ceilometer'),
                                   name='specific meter',
                                   start_time='2015-03-20T00:00:00.001000Z',
                                   end_time='2015-04-01T11:59:59.999000Z',
                                   start_timestamp_op='ge',
                                   end_timestamp_op='le',
                                   group_by='*')
            self.assertEqual(expected_kwargs,
                             measurements_mock.call_args_list[0][1])
示例#3
0
    def test_stats_list(self):
        """Raw statistics rows become per-period Statistics with aggregates."""
        with mock.patch("ceilometer.monasca_client.Client") as client_mock:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            stats_mock = client_mock().statistics_list
            stats_mock.return_value = [{
                'statistics': [['2014-10-24T12:12:12Z', 0.008],
                               ['2014-10-24T12:52:12Z', 0.018]],
                'dimensions': {
                    'unit': 'gb'
                },
                'columns': ['timestamp', 'min']
            }]

            sample_filter = storage.SampleFilter()
            sample_filter.meter = "image"
            min_aggregate = Aggregate()
            min_aggregate.func = 'min'
            sample_filter.start_timestamp = timeutils.parse_isotime(
                '2014-10-24T12:12:42').replace(tzinfo=None)

            stats = list(conn.get_meter_statistics(sample_filter,
                                                   aggregate=[min_aggregate],
                                                   period=30))

            self.assertEqual(2, len(stats))
            self.assertEqual('gb', stats[0].unit)
            self.assertEqual('gb', stats[1].unit)
            self.assertEqual(0.008, stats[0].min)
            self.assertEqual(0.018, stats[1].min)
            self.assertEqual(30, stats[0].period)
            self.assertEqual('2014-10-24T12:12:42',
                             stats[0].period_end.isoformat())
            self.assertEqual('2014-10-24T12:52:42',
                             stats[1].period_end.isoformat())
            self.assertIsNotNone(stats[0].as_dict().get('aggregate'))
            self.assertEqual({u'min': 0.008}, stats[0].as_dict()['aggregate'])
示例#4
0
def _list_samples(meter, project=None, resource=None, source=None, user=None):
    """Return a list of raw samples.

    Note: the API talks about "events"; these are equivalent to samples,
    but we still need to return the samples within the "events" dict
    to maintain API compatibility.
    """
    timestamps = _get_query_timestamps(flask.request.args)
    sample_filter = storage.SampleFilter(
        user=user,
        project=project,
        source=source,
        meter=meter,
        resource=resource,
        start=timestamps['start_timestamp'],
        end=timestamps['end_timestamp'],
        metaquery=_get_metaquery(flask.request.args),
    )
    samples = flask.request.storage_conn.get_samples(sample_filter)

    payload = flask.jsonify(events=[sample.as_dict() for sample in samples])
    if not request_wants_html():
        return payload
    # HTML clients get the rendered template wrapping the same payload.
    return flask.templating.render_template('list_event.html',
                                            user=user,
                                            project=project,
                                            source=source,
                                            meter=meter,
                                            resource=resource,
                                            events=payload)
    def test_stats_list(self, mock_mdf):
        """Statistics rows are expanded into tz-aware per-period objects."""
        with mock.patch("ceilometer.monasca_client.Client") as client_mock:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            stats_mock = client_mock().statistics_list
            stats_mock.return_value = [{
                'statistics': [['2014-10-24T12:12:12Z', 0.008],
                               ['2014-10-24T12:52:12Z', 0.018]],
                'dimensions': {
                    'unit': 'gb'
                },
                'columns': ['timestamp', 'min']
            }]

            sample_filter = storage.SampleFilter()
            sample_filter.meter = "image"

            stats = list(conn.get_meter_statistics(sample_filter, period=30))

            self.assertEqual(2, len(stats))
            self.assertEqual('gb', stats[0].unit)
            self.assertEqual('gb', stats[1].unit)
            self.assertEqual(0.008, stats[0].min)
            self.assertEqual(0.018, stats[1].min)
            self.assertEqual(30, stats[0].period)
            # Period ends are timezone-aware here (note the +00:00 offset).
            self.assertEqual('2014-10-24T12:12:42+00:00',
                             stats[0].period_end.isoformat())
            self.assertEqual('2014-10-24T12:52:42+00:00',
                             stats[1].period_end.isoformat())
示例#6
0
    def test_not_implemented_params(self, mock_mdf):
        """Every unsupported statistics query option raises NotImplementedError."""
        with mock.patch("ceilometer.monasca_client.Client"):
            conn = impl_monasca.Connection("127.0.0.1:8080")

            def assert_stats_raises(message, sample_filter, **kwargs):
                # The lambda defers evaluation so assertRaisesWithMessage
                # can observe the exception.
                self.assertRaisesWithMessage(
                    message,
                    ceilometer.NotImplementedError,
                    lambda: list(conn.get_meter_statistics(sample_filter,
                                                           **kwargs)))

            assert_stats_raises("Query without filter not implemented", None)

            sf = storage.SampleFilter()
            assert_stats_raises("Query without meter not implemented", sf)

            sf.meter = "image"
            assert_stats_raises("Groupby message_id not implemented", sf,
                                groupby=['message_id'])

            sf.metaquery = "metaquery"
            assert_stats_raises("Metaquery not implemented", sf)

            sf.metaquery = None
            sf.start_timestamp_op = 'le'
            assert_stats_raises("Start time op le not implemented", sf)

            sf.start_timestamp_op = None
            sf.end_timestamp_op = 'ge'
            assert_stats_raises("End time op ge not implemented", sf)

            sf.end_timestamp_op = None
            sf.message_id = "message_id"
            assert_stats_raises("Message_id query not implemented", sf)

            sf.message_id = None
            assert_stats_raises(
                "Aggregate function(s) ['stddev'] not implemented",
                sf,
                aggregate=[self.Aggregate(func='stddev', param='test')])
示例#7
0
    def get_meters(self,
                   user=None,
                   project=None,
                   resource=None,
                   source=None,
                   metaquery=None,
                   limit=None):
        """Return an iterable of api_models.Meter instances

        :param user: Optional ID for user that owns the resource.
        :param project: Optional ID for project that owns the resource.
        :param resource: Optional ID of the resource.
        :param source: Optional source filter.
        :param metaquery: Optional dict with metadata to match on.
        :param limit: Maximum number of results to return.
        """
        # A limit of exactly zero means "no results"; bail out before
        # touching the database.
        if limit == 0:
            return
        s_filter = storage.SampleFilter(user=user,
                                        project=project,
                                        source=source,
                                        metaquery=metaquery,
                                        resource=resource)

        session = self._engine_facade.get_session()

        # Select the newest sample per (meter, resource) pair first; the
        # sample filter is applied afterwards so it only ever sees the
        # latest record of each pair.
        latest_ids = session.query(func.max(models.Sample.id).label('id'))
        latest_ids = latest_ids.join(
            models.Resource,
            models.Resource.internal_id == models.Sample.resource_id)
        latest_ids = latest_ids.group_by(models.Sample.meter_id,
                                         models.Resource.resource_id)
        if resource:
            latest_ids = latest_ids.filter(
                models.Resource.resource_id == resource)
        latest_ids = latest_ids.subquery()

        # Join back to fetch the meter/resource details for those samples.
        query_sample = session.query(
            models.Sample.meter_id, models.Meter.name, models.Meter.type,
            models.Meter.unit, models.Resource.resource_id,
            models.Resource.project_id,
            models.Resource.source_id, models.Resource.user_id)
        query_sample = query_sample.join(
            latest_ids, latest_ids.c.id == models.Sample.id)
        query_sample = query_sample.join(
            models.Meter, models.Meter.id == models.Sample.meter_id)
        query_sample = query_sample.join(
            models.Resource,
            models.Resource.internal_id == models.Sample.resource_id)
        query_sample = make_query_from_filter(session,
                                              query_sample,
                                              s_filter,
                                              require_meter=False)

        if limit:
            query_sample = query_sample.limit(limit)
        for record in query_sample.all():
            yield api_models.Meter(name=record.name,
                                   type=record.type,
                                   unit=record.unit,
                                   resource_id=record.resource_id,
                                   project_id=record.project_id,
                                   source=record.source_id,
                                   user_id=record.user_id)
示例#8
0
    def test_get_resources_limit(self, mdf_mock):
        """get_resources honors the ``limit`` argument.

        Fix: ``ml_mock.return_value`` was assigned twice; the first
        assignment (the metrics payload) was dead code, immediately
        overwritten by the measurements payload. Keep the one that matters.
        """
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")

            mnl_mock = mock_client().metrics_list
            mnl_mock.return_value = [{'name': 'metric1',
                                      'dimensions': {'resource_id': 'abcd'}},
                                     {'name': 'metric2',
                                      'dimensions': {'resource_id': 'abcd'}}
                                     ]

            dummy_get_resources_mocked_return_value = (
                [{u'dimensions': {u'resource_id': u'abcd'},
                  u'measurements': [[u'2015-04-14T17:52:31Z', 1.0, {}],
                                    [u'2015-04-15T17:52:31Z', 2.0, {}],
                                    [u'2015-04-16T17:52:31Z', 3.0, {}]],
                  u'id': u'2015-04-14T18:42:31Z',
                  u'columns': [u'timestamp', u'value', u'value_meta'],
                  u'name': u'image'}])

            ml_mock = mock_client().measurements_list
            ml_mock.return_value = (
                dummy_get_resources_mocked_return_value)

            sample_filter = storage.SampleFilter(
                meter='specific meter', end_timestamp='2015-04-20T00:00:00Z')
            resources = list(conn.get_resources(sample_filter, limit=2))
            self.assertEqual(2, len(resources))
            self.assertEqual(True, ml_mock.called)
            # One measurements_list call per metric returned above.
            self.assertEqual(2, ml_mock.call_count)
    def test_stats_list_called_with(self, mock_mdf):
        """statistics_list receives the filter fields mapped to dimensions.

        Fix: the user filter had been mangled to ``"******"`` which can
        never produce the ``'user_id': 'user_id'`` dimension asserted
        below; restore the value the assertion expects.
        """
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            sl_mock = mock_client().statistics_list

            sf = storage.SampleFilter()
            sf.meter = "image"
            sf.project = "project_id"
            sf.user = "user_id"
            sf.resource = "resource_id"
            sf.source = "source_id"
            aggregate = [self.Aggregate(func="min", param="some")]
            list(conn.get_meter_statistics(sf, period=10, aggregate=aggregate))

            self.assertEqual(True, sl_mock.called)
            self.assertEqual(
                {
                    'merge_metrics': True,
                    'dimensions': {
                        'source': 'source_id',
                        'project_id': 'project_id',
                        'user_id': 'user_id',
                        'resource_id': 'resource_id'
                    },
                    'start_time': '1970-01-01T00:00:00Z',
                    'period': 10,
                    'statistics': 'min',
                    'name': 'image'
                }, sl_mock.call_args[1])
示例#10
0
 def test_by_user_period_start_end(self):
     """Statistics for one user over a bounded window with an 1800s period."""
     sample_filter = storage.SampleFilter(
         user='******',
         meter='volume.size',
         start='2012-09-25T10:28:00',
         end='2012-09-25T11:28:00',
     )
     stats = list(self.conn.get_meter_statistics(sample_filter, period=1800))
     self.assertEqual(len(stats), 1)
     period_stat = stats[0]
     self.assertEqual(period_stat.period_start,
                      datetime.datetime(2012, 9, 25, 10, 28))
     self.assertEqual(period_stat.count, 1)
     # A single sample of value 8: every aggregate collapses to 8.
     for aggregate_value in (period_stat.avg, period_stat.min,
                             period_stat.max, period_stat.sum):
         self.assertEqual(aggregate_value, 8)
     self.assertEqual(period_stat.period, 1800)
     self.assertEqual(period_stat.period_end,
                      period_stat.period_start
                      + datetime.timedelta(seconds=1800))
     # Only one sample, so the observed duration is a single instant.
     self.assertEqual(period_stat.duration, 0)
     self.assertEqual(period_stat.duration_start,
                      datetime.datetime(2012, 9, 25, 10, 30))
     self.assertEqual(period_stat.duration_end,
                      datetime.datetime(2012, 9, 25, 10, 30))
示例#11
0
 def test_no_period_in_query(self):
     """When no period is requested, the backend reports period == 0."""
     f = storage.SampleFilter(
         user='******',
         meter='volume.size',
     )
     results = list(self.conn.get_meter_statistics(f))[0]
     # Use a unittest assertion instead of a bare ``assert``: it survives
     # python -O and reports both values on failure.
     self.assertEqual(0, results.period)
示例#12
0
 def test_get_samples_by_resource(self):
     """Filtering by user and resource returns the sample matching msg1."""
     f = storage.SampleFilter(user='******', resource='resource-id')
     results = list(self.conn.get_samples(f))
     # unittest assertions instead of bare ``assert``: they survive
     # python -O and produce useful failure diagnostics.
     self.assertTrue(results)
     meter = results[0]
     self.assertIsNotNone(meter)
     self.assertEqual(self.msg1, meter.as_dict())
示例#13
0
def show_resources(db, args):
    """Print a per-user report of resources and aggregated meter values.

    NOTE: Python 2 only -- uses ``print`` statements and
    ``dict.iteritems()``.

    :param db: storage connection exposing get_users(), get_resources()
        and get_statistics().
    :param args: optional list of user IDs; when empty, all known users
        are reported (sorted).
    """
    if args:
        users = args
    else:
        users = sorted(db.get_users())
    for u in users:
        print u
        for resource in db.get_resources(user=u):
            print '  %(resource_id)s %(timestamp)s' % resource
            # Resource metadata, one "key : value" line per entry.
            for k, v in sorted(resource['metadata'].iteritems()):
                print '      %-10s : %s' % (k, v)
            for meter in resource['meter']:
                totals = db.get_statistics(
                    storage.SampleFilter(
                        user=u,
                        meter=meter['counter_name'],
                        resource=resource['resource_id'],
                    ))
                # FIXME(dhellmann): Need a way to tell whether to use
                # max() or sum() by meter name without hard-coding.
                if meter['counter_name'] in ['cpu', 'disk']:
                    value = totals[0]['max']
                else:
                    value = totals[0]['sum']
                print '    %s (%s): %s' % \
                    (meter['counter_name'], meter['counter_type'],
                     value)
示例#14
0
 def test_get_samples_in_default_order(self):
     """Samples come back newest-first when no explicit order is given."""
     f = storage.SampleFilter()
     prev_timestamp = None
     for sample in self.conn.get_samples(f):
         if prev_timestamp is not None:
             # assertGreaterEqual reports both timestamps on failure,
             # unlike assertTrue on an opaque boolean expression.
             self.assertGreaterEqual(prev_timestamp, sample.timestamp)
         prev_timestamp = sample.timestamp
示例#15
0
    def test_get_samples_by_both_times(self):
        """Samples honor every inclusive/exclusive combination of bounds."""
        start_ts = datetime.datetime(2012, 7, 2, 10, 42)
        end_ts = datetime.datetime(2012, 7, 2, 10, 43)
        f = storage.SampleFilter(
            start=start_ts,
            end=end_ts,
        )

        # unittest assertions instead of bare ``assert``: they survive
        # python -O and produce useful failure diagnostics.

        # Default ops: start inclusive, end exclusive.
        results = list(self.conn.get_samples(f))
        self.assertEqual(1, len(results))
        self.assertEqual(start_ts, results[0].timestamp)

        # Both exclusive: nothing matches.
        f.start_timestamp_op = 'gt'
        f.end_timestamp_op = 'lt'
        results = list(self.conn.get_samples(f))
        self.assertEqual(0, len(results))

        f.start_timestamp_op = 'ge'
        f.end_timestamp_op = 'lt'
        results = list(self.conn.get_samples(f))
        self.assertEqual(1, len(results))
        self.assertEqual(start_ts, results[0].timestamp)

        f.start_timestamp_op = 'gt'
        f.end_timestamp_op = 'le'
        results = list(self.conn.get_samples(f))
        self.assertEqual(1, len(results))
        self.assertEqual(end_ts, results[0].timestamp)

        # Both inclusive: both boundary samples, newest first.
        f.start_timestamp_op = 'ge'
        f.end_timestamp_op = 'le'
        results = list(self.conn.get_samples(f))
        self.assertEqual(2, len(results))
        self.assertEqual(end_ts, results[0].timestamp)
        self.assertEqual(start_ts, results[1].timestamp)
示例#16
0
def compute_duration_by_resource(resource, meter):
    """Return the earliest timestamp, last timestamp,
    and duration for the resource and meter.

    :param resource: The ID of the resource.
    :param meter: The name of the meter.
    :param start_timestamp: ISO-formatted string of the
        earliest timestamp to return.
    :param end_timestamp: ISO-formatted string of the
        latest timestamp to return.
    :param search_offset: Number of minutes before
        and after start and end timestamps to query.
    """
    timestamps = _get_query_timestamps(flask.request.args)
    start_timestamp = timestamps['start_timestamp']
    end_timestamp = timestamps['end_timestamp']

    # Fetch duration statistics over the offset-widened query window.
    sample_filter = storage.SampleFilter(
        meter=meter,
        project=acl.get_limited_to_project(flask.request.headers),
        resource=resource,
        start=timestamps['query_start'],
        end=timestamps['query_end'],
    )
    stats = flask.request.storage_conn.get_meter_statistics(sample_filter)
    min_ts = stats.duration_start
    max_ts = stats.duration_end

    # Clamp the reported interval back to the caller's original range,
    # excluding the search offset.
    LOG.debug('start_timestamp %s, end_timestamp %s, min_ts %s, max_ts %s',
              start_timestamp, end_timestamp, min_ts, max_ts)
    if start_timestamp and min_ts and min_ts < start_timestamp:
        min_ts = start_timestamp
        LOG.debug('clamping min timestamp to range')
    if end_timestamp and max_ts and max_ts > end_timestamp:
        max_ts = end_timestamp
        LOG.debug('clamping max timestamp to range')

    # min > max after clamping means every sample fell outside the
    # requested range; report None as a sentinel for "nothing usable".
    if min_ts and max_ts and min_ts <= max_ts:
        duration = timeutils.delta_seconds(min_ts, max_ts)
    else:
        min_ts = max_ts = duration = None

    return flask.jsonify(
        start_timestamp=min_ts,
        end_timestamp=max_ts,
        duration=duration,
    )
示例#17
0
def make_unit_query(sample_filter):
    """Make query for the collecting unit for the meter."""
    # Only the meter name matters here; drop every other filter field.
    meter_only_filter = storage.SampleFilter(meter=sample_filter.meter)
    simple_query = make_simple_filter_query(meter_only_filter)
    return construct_query("unit", simple_query, limit=1)
示例#18
0
    def statistics(self, q=None, period=None):
        """Computes the statistics of the samples in the time range given.

        :param q: Filter rules for the data to be returned.
        :param period: Returned result will be an array of statistics for a
                       period long of that number of seconds.
        """
        # Fix: the default was a mutable ``[]`` shared across every call
        # (classic mutable-default-argument bug); use None and normalize.
        q = q or []
        if period and period < 0:
            error = _("Period must be positive.")
            pecan.response.translatable_error = error
            raise wsme.exc.ClientSideError(error)

        kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__)
        kwargs['meter'] = self._id
        f = storage.SampleFilter(**kwargs)
        computed = pecan.request.storage_conn.get_meter_statistics(f, period)
        LOG.debug('computed value coming from %r', pecan.request.storage_conn)
        # Find the original timestamp in the query to use for clamping
        # the duration returned in the statistics.
        start = end = None
        for i in q:
            if i.field == 'timestamp' and i.op in ('lt', 'le'):
                end = timeutils.parse_isotime(i.value).replace(tzinfo=None)
            elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
                start = timeutils.parse_isotime(i.value).replace(tzinfo=None)

        return [
            Statistics(start_timestamp=start, end_timestamp=end, **c.as_dict())
            for c in computed
        ]
示例#19
0
    def clear_expired_metering_data(self, ttl):
        """Clear expired data from the backend storage system according to the
        time-to-live.

        :param ttl: Number of seconds to keep records for.

        """
        # mongodb >= 2.2 expires documents natively; older versions need a
        # manual sweep of everything older than the TTL cutoff.
        if not self._is_natively_ttl_supported():
            cutoff = timeutils.utcnow() - datetime.timedelta(seconds=ttl)
            expired_filter = storage.SampleFilter(end=cutoff)
            query = make_query_from_filter(expired_filter, require_meter=False)
            self.db.meter.remove(query)

        # Collect the resource/user/project ids still referenced by the
        # remaining samples...
        survivors = self.db.meter.group(
            key={},
            condition={},
            reduce=self.REDUCE_GROUP_CLEAN,
            initial={
                'resources': [],
                'users': [],
                'projects': [],
            }
        )[0]

        # ...then drop every secondary record no longer referenced.
        for collection, key in ((self.db.user, 'users'),
                                (self.db.project, 'projects'),
                                (self.db.resource, 'resources')):
            collection.remove({'_id': {'$nin': survivors[key]}})
示例#20
0
    def get_resources(self, user=None, project=None, source=None,
                      start_timestamp=None, start_timestamp_op=None,
                      end_timestamp=None, end_timestamp_op=None,
                      metaquery=None, resource=None, pagination=None):
        """Return an iterable of models.Resource instances

        :param user: Optional ID for user that owns the resource.
        :param project: Optional ID for project that owns the resource.
        :param source: Optional source filter.
        :param start_timestamp: Optional modified timestamp start range.
        :param start_timestamp_op: Optional start time operator, like ge, gt.
        :param end_timestamp: Optional modified timestamp end range.
        :param end_timestamp_op: Optional end time operator, like lt, le.
        :param metaquery: Optional dict with metadata to match on.
        :param resource: Optional resource filter.
        :param pagination: Optional pagination query.
        """
        if pagination:
            raise NotImplementedError('Pagination not implemented')

        # Fix: the default was a mutable ``{}`` shared across calls;
        # normalize a None default to an empty dict so downstream
        # behavior is unchanged.
        metaquery = metaquery or {}

        sample_filter = storage.SampleFilter(
            user=user, project=project,
            start=start_timestamp, start_timestamp_op=start_timestamp_op,
            end=end_timestamp, end_timestamp_op=end_timestamp_op,
            resource=resource, source=source, metaquery=metaquery)
        q, start_row, stop_row = make_sample_query_from_filter(
            sample_filter, require_meter=False)
        with self.conn_pool.connection() as conn:
            meter_table = conn.table(self.METER_TABLE)
            # Lazy %-args: the message is only formatted if DEBUG is on.
            LOG.debug(_("Query Meter table: %s"), q)
            meters = meter_table.scan(filter=q, row_start=start_row,
                                      row_stop=stop_row)
            d_meters = [deserialize_entry(m) for i, m in meters]

            # itertools.groupby only groups *adjacent* records, so sort on
            # resource_id first to make each group contiguous.
            meters = sorted(d_meters, key=_resource_id_from_record_tuple)
            for resource_id, r_meters in itertools.groupby(
                    meters, key=_resource_id_from_record_tuple):
                # Keep deserialized entry (data[0]), sources (data[1]) and
                # metadata (data[3]), ordered by timestamp.
                meter_rows = [(data[0], data[1], data[3]) for data in sorted(
                    r_meters, key=_timestamp_from_record_tuple)]
                latest_data = meter_rows[-1]
                min_ts = meter_rows[0][0]['timestamp']
                max_ts = latest_data[0]['timestamp']
                yield models.Resource(
                    resource_id=resource_id,
                    first_sample_timestamp=min_ts,
                    last_sample_timestamp=max_ts,
                    project_id=latest_data[0]['project_id'],
                    source=latest_data[1][0],
                    user_id=latest_data[0]['user_id'],
                    metadata=latest_data[2],
                )
    def test_get_samples_results(self, mdf_mock):
        """Each field of a monasca measurement maps onto the sample model."""
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            measurement = {
                u'dimensions': {
                    'source': 'some source',
                    'project_id': 'some project ID',
                    'resource_id': 'some resource ID',
                    'type': 'some type',
                    'unit': 'some unit'
                },
                u'measurements': [[u'2015-04-01T02:03:04Z', 1.0, {}],
                                  [u'2015-04-11T22:33:44Z', 2.0, {}]],
                u'id':
                u'2015-04-14T18:42:31Z',
                u'columns': [u'timestamp', u'value', u'value_meta'],
                u'name':
                u'image'
            }
            ml_mock = mock_client().measurements_list
            ml_mock.return_value = [measurement]

            sample_filter = storage.SampleFilter(
                meter='specific meter', start_timestamp='2015-03-20T00:00:00Z')
            results = list(conn.get_samples(sample_filter))
            self.assertEqual(True, ml_mock.called)

            sample = results[0]
            dimensions = measurement['dimensions']
            first_timestamp = dateutil.parser.parse(
                measurement['measurements'][0][0])
            self.assertEqual(sample.counter_name, measurement['name'])
            self.assertEqual(sample.counter_type, dimensions['type'])
            self.assertEqual(sample.counter_unit, dimensions['unit'])
            self.assertEqual(sample.counter_volume,
                             measurement['measurements'][0][1])
            self.assertEqual(sample.message_id, measurement['id'])
            self.assertEqual(sample.message_signature, '')
            self.assertEqual(sample.project_id, dimensions['project_id'])
            # recorded_at and timestamp both come from the measurement's
            # first timestamp column.
            self.assertEqual(sample.recorded_at, first_timestamp)
            self.assertEqual(sample.resource_id, dimensions['resource_id'])
            self.assertEqual(sample.resource_metadata, {})
            self.assertEqual(sample.source, dimensions['source'])
            self.assertEqual(sample.timestamp, first_timestamp)
            self.assertEqual(sample.user_id, None)

            self.assertEqual(1, ml_mock.call_count)
示例#22
0
 def test_get_samples_by_start_time(self):
     """A start bound (inclusive by default) selects the boundary sample."""
     f = storage.SampleFilter(
         user='******',
         start=datetime.datetime(2012, 7, 2, 10, 41),
     )
     results = list(self.conn.get_samples(f))
     # unittest assertions instead of bare ``assert``: they survive
     # python -O and report both values on failure.
     self.assertEqual(1, len(results))
     self.assertEqual(datetime.datetime(2012, 7, 2, 10, 41),
                      results[0].timestamp)
示例#23
0
 def test_get_samples_by_project(self):
     """Every sample for the project belongs to the known message set."""
     f = storage.SampleFilter(project='project-id')
     results = list(self.conn.get_samples(f))
     # unittest assertions instead of bare ``assert``: they survive
     # python -O, and assertIn names the offending sample on failure.
     self.assertTrue(results)
     for meter in results:
         self.assertIn(meter.as_dict(),
                       [self.msg0, self.msg1, self.msg2, self.msg3])
示例#24
0
    def test_get_samples_not_implemented_params(self):
        """A message_id filter is rejected with NotImplementedError."""
        with mock.patch("ceilometer.monasca_client.Client"):
            conn = impl_monasca.Connection("127.0.0.1:8080")

            sample_filter = storage.SampleFilter(
                meter='specific meter', message_id='specific message')
            # The lambda defers the generator consumption so assertRaises
            # can catch the error raised during iteration.
            self.assertRaises(
                ceilometer.NotImplementedError,
                lambda: list(conn.get_samples(sample_filter)))
示例#25
0
 def test_get_samples_by_both_times(self):
     """An inclusive start and exclusive end select a single sample."""
     f = storage.SampleFilter(
         start=datetime.datetime(2012, 7, 2, 10, 42),
         end=datetime.datetime(2012, 7, 2, 10, 43),
     )
     results = list(self.conn.get_samples(f))
     # unittest assertions instead of bare ``assert``: they survive
     # python -O and report both values on failure.
     self.assertEqual(1, len(results))
     self.assertEqual(datetime.datetime(2012, 7, 2, 10, 42),
                      results[0].timestamp)
示例#26
0
    def get_meters(self,
                   user=None,
                   project=None,
                   resource=None,
                   source=None,
                   metaquery=None,
                   pagination=None):
        """Return an iterable of api_models.Meter instances

        :param user: Optional ID for user that owns the resource.
        :param project: Optional ID for project that owns the resource.
        :param resource: Optional ID of the resource.
        :param source: Optional source filter.
        :param metaquery: Optional dict with metadata to match on.
        :param pagination: Optional pagination query.
        """
        if pagination:
            raise NotImplementedError('Pagination not implemented')

        sample_filter = storage.SampleFilter(user=user,
                                             project=project,
                                             source=source,
                                             metaquery=metaquery,
                                             resource=resource)

        session = self._engine_facade.get_session()

        # Collapse the sample table to one row per (meter_id, resource_id);
        # max(id) picks the latest stored sample for each pair.
        latest_ids = (session.query(func.max(models.Sample.id).label('id'))
                      .group_by(models.Sample.meter_id,
                                models.Sample.resource_id)
                      .subquery())

        # SELECT sample.* FROM sample INNER JOIN
        #  (SELECT max(sample.id) AS id FROM sample
        #   GROUP BY sample.resource_id, sample.meter_id) AS anon_2
        # ON sample.id = anon_2.id
        query = (session.query(models.MeterSample)
                 .join(latest_ids, models.MeterSample.id == latest_ids.c.id))
        query = make_query_from_filter(session,
                                       query,
                                       sample_filter,
                                       require_meter=False)

        for sample in query.all():
            yield api_models.Meter(name=sample.counter_name,
                                   type=sample.counter_type,
                                   unit=sample.counter_unit,
                                   resource_id=sample.resource_id,
                                   project_id=sample.project_id,
                                   source=sample.source_id,
                                   user_id=sample.user_id)
示例#27
0
    def test_stats_list_with_groupby(self):
        """Statistics grouped by project_id yield one stats object per group."""
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            sl_mock = mock_client().statistics_list
            # Two measurement groups, one per project dimension.
            sl_mock.return_value = [{
                'statistics': [['2014-10-24T12:12:12Z', 0.008, 1.3, 3, 0.34],
                               ['2014-10-24T12:20:12Z', 0.078, 1.25, 2, 0.21],
                               ['2014-10-24T12:52:12Z', 0.018, 0.9, 4, 0.14]],
                'dimensions': {
                    'project_id': '1234',
                    'unit': 'gb'
                },
                'columns': ['timestamp', 'min', 'max', 'count', 'avg']
            }, {
                'statistics': [['2014-10-24T12:14:12Z', 0.45, 2.5, 2, 2.1],
                               ['2014-10-24T12:20:12Z', 0.58, 3.2, 3, 3.4],
                               ['2014-10-24T13:52:42Z', 1.67, 3.5, 1, 5.3]],
                'dimensions': {
                    'project_id': '5678',
                    'unit': 'gb'
                },
                'columns': ['timestamp', 'min', 'max', 'count', 'avg']
            }]

            sample_filter = storage.SampleFilter()
            sample_filter.meter = "image"
            sample_filter.start_timestamp = timeutils.parse_isotime(
                '2014-10-24T12:12:42').replace(tzinfo=None)
            stats = list(conn.get_meter_statistics(sample_filter,
                                                   period=30,
                                                   groupby=['project_id']))

            self.assertEqual(2, len(stats))

            for stat in stats:
                self.assertIsNotNone(stat.groupby)
                project_id = stat.groupby.get('project_id')
                self.assertIn(project_id, ['1234', '5678'])
                if project_id == '1234':
                    self.assertEqual(0.008, stat.min)
                    self.assertEqual(1.3, stat.max)
                    self.assertEqual(0.23, stat.avg)
                    self.assertEqual(9, stat.count)
                    self.assertEqual(30, stat.period)
                    self.assertEqual('2014-10-24T12:12:12',
                                     stat.period_start.isoformat())
                elif project_id == '5678':
                    self.assertEqual(0.45, stat.min)
                    self.assertEqual(3.5, stat.max)
                    self.assertEqual(3.6, stat.avg)
                    self.assertEqual(6, stat.count)
                    self.assertEqual(30, stat.period)
                    self.assertEqual('2014-10-24T13:52:42',
                                     stat.period_end.isoformat())
示例#28
0
def show_raw(db, args):
    """Print every sample in *db*, grouped by user and then by resource."""
    line_format = '    %(timestamp)s %(counter_name)10s %(counter_volume)s'
    for user in sorted(db.get_users()):
        print(user)
        for resource in db.get_resources(user=user):
            print('  ', resource['resource_id'])
            sample_query = storage.SampleFilter(
                user=user,
                resource=resource['resource_id'],
            )
            for sample in db.get_samples(sample_query):
                print(line_format % sample)
示例#29
0
 def test_by_user_period(self):
     """Requesting statistics with a period must raise NotImplementedError."""
     stats_filter = storage.SampleFilter(
         user='******',
         meter='volume.size',
         start='2012-09-25T10:28:00',
     )
     raised_not_implemented = False
     try:
         self.conn.get_meter_statistics(stats_filter, period=7200)
     except NotImplementedError:
         raised_not_implemented = True
     self.assertTrue(raised_not_implemented)
示例#30
0
 def test_get_samples_by_metaquery(self):
     """Metaquery results match known messages, unless unsupported."""
     metaquery_filter = storage.SampleFilter(
         metaquery={'metadata.display_name': 'test-server'})
     raised_not_implemented = False
     try:
         samples = list(self.conn.get_samples(metaquery_filter))
         assert samples
         for sample in samples:
             assert sample.as_dict() in self.msgs
     except NotImplementedError:
         # Backends without metaquery support may legitimately decline.
         raised_not_implemented = True
         self.assertTrue(raised_not_implemented)