def get_samples(self, event_filter):
    """Return an iterable of api_models.Samples matching *event_filter*."""
    query = make_query_from_filter(self.session.query(Meter), event_filter,
                                   require_meter=False)
    for row in query.all():
        # The database-generated primary key is an implementation detail
        # of the driver and is deliberately not copied into the API model.
        # Meter.sources contains exactly one source in the current
        # implementation, so collapse the collection into the single
        # 'source' value the caller expects.
        yield api_models.Sample(
            source=row.sources[0].id,
            counter_name=row.counter_name,
            counter_type=row.counter_type,
            counter_unit=row.counter_unit,
            counter_volume=row.counter_volume,
            user_id=row.user_id,
            project_id=row.project_id,
            resource_id=row.resource_id,
            timestamp=row.timestamp,
            resource_metadata=row.resource_metadata,
            message_id=row.message_id,
            message_signature=row.message_signature,
        )
def _retrieve_samples(self, query, orderby, limit):
    """Yield models.Sample objects for documents matching *query*."""
    find_kwargs = {'sort': orderby}
    if limit is not None:
        find_kwargs['limit'] = limit
    for doc in self.db.meter.find(query, **find_kwargs):
        # The ObjectId assigned by the database on insert is an
        # implementation detail that must not leak outside the driver.
        del doc['_id']
        # Samples written before units existed get an empty unit.
        doc['counter_unit'] = doc.get('counter_unit', '')
        # Compatibility with MongoDB 3.+: normalize the volume to float.
        doc['counter_volume'] = float(doc.get('counter_volume'))
        # Older datapoints predate the recorded_at field; default to None.
        doc['recorded_at'] = doc.get('recorded_at')
        # Metadata keys that originally started with '$' were quoted on
        # the way into Mongo; unquote them before handing the doc back.
        if doc.get('resource_metadata'):
            doc['resource_metadata'] = pymongo_utils.unquote_keys(
                doc.get('resource_metadata'))
        yield models.Sample(**doc)
def get_samples(self, sample_filter, limit=None):
    """Return an iterable of model.Sample instances.

    :param sample_filter: Filter.
    :param limit: Maximum number of results to return.
    """
    # A zero limit can never produce results.
    if limit == 0:
        return
    criteria = make_query_from_filter(sample_filter, require_meter=False)
    order = [("timestamp", pymongo.DESCENDING)]
    if limit:
        cursor = self.db.meter.find(criteria, limit=limit, sort=order)
    else:
        cursor = self.db.meter.find(criteria, sort=order)
    for doc in cursor:
        # Strip the database-generated ObjectId; it is an implementation
        # detail that stays inside the driver.
        del doc['_id']
        # Samples written before units were introduced get an empty one.
        doc['counter_unit'] = doc.get('counter_unit', '')
        yield models.Sample(**doc)
def _retrieve_samples(self, query):
    """Yield an api_models.Sample for every row returned by *query*."""
    for row in query.all():
        # The database primary key is intentionally not exposed; it is
        # an implementation detail of the storage driver.
        # Meter.sources holds exactly one source in the current
        # implementation, so flatten it to the single 'source' value
        # the caller expects.
        yield api_models.Sample(
            source=row.sources[0].id,
            counter_name=row.counter_name,
            counter_type=row.counter_type,
            counter_unit=row.counter_unit,
            counter_volume=row.counter_volume,
            user_id=row.user_id,
            project_id=row.project_id,
            resource_id=row.resource_id,
            timestamp=row.timestamp,
            recorded_at=row.recorded_at,
            resource_metadata=row.resource_metadata,
            message_id=row.message_id,
            message_signature=row.message_signature,
        )
def get_samples(self, sample_filter, limit=None):
    """Return an iterable of models.Sample instances.

    :param sample_filter: Filter.
    :param limit: Maximum number of results to return.
    """
    # A zero limit means "no results"; bail out before touching HBase.
    if limit == 0:
        return
    with self.conn_pool.connection() as conn:
        meter_table = conn.table(self.METER_TABLE)
        q, start, stop, columns = (
            hbase_utils.make_sample_query_from_filter(sample_filter,
                                                      require_meter=False))
        LOG.debug("Query Meter Table: %s", q)
        # The limit is pushed down into the HBase scan itself, so the
        # server stops producing rows once it is reached.
        gen = meter_table.scan(filter=q, row_start=start, row_stop=stop,
                               limit=limit, columns=columns)
        for ignored, meter in gen:
            d_meter = hbase_utils.deserialize_entry(meter)[0]
            # counter_volume may deserialize as a non-float type;
            # normalize it — TODO confirm the stored type against
            # hbase_utils.deserialize_entry.
            d_meter['message']['counter_volume'] = (float(
                d_meter['message']['counter_volume']))
            # recorded_at lives outside the 'message' payload; graft it
            # in so the Sample model receives it as a keyword.
            d_meter['message']['recorded_at'] = d_meter['recorded_at']
            yield models.Sample(**d_meter['message'])
def get_samples(self, event_filter):
    """Return an iterable of models.Sample instances.

    :param event_filter: constraints describing which samples to fetch.
    """
    q, start, stop = make_query_from_filter(event_filter,
                                            require_meter=False)
    # Pass the value as a lazy logging argument instead of eagerly
    # %-formatting it, so the message is only rendered when DEBUG
    # logging is actually enabled.
    LOG.debug("q: %s", q)
    gen = self.meter.scan(filter=q, row_start=start, row_stop=stop)
    for ignored, meter in gen:
        # Each row stores the whole sample as JSON under the
        # 'f:message' column; decode it and restore the timestamp
        # string to a datetime before building the model.
        meter = json.loads(meter['f:message'])
        meter['timestamp'] = timeutils.parse_strtime(meter['timestamp'])
        yield models.Sample(**meter)
def get_samples(self, sample_filter):
    """Return an iterable of samples as created by
    :func:`ceilometer.meter.meter_message_from_counter`.
    """
    criteria = make_query_from_filter(sample_filter, require_meter=False)
    for doc in self.db.meter.find(criteria):
        # The ObjectId the database attached at insert time is an
        # implementation detail; remove it before the document leaves
        # the driver.
        del doc['_id']
        yield models.Sample(**doc)
def test_query_samples_timestamp_gt_lt(self):
    sample = storage_models.Sample(
        counter_name="instance",
        counter_type="gauge",
        counter_unit="instance",
        counter_volume=1,
        project_id="123",
        user_id="456",
        resource_id="789",
        resource_metadata={},
        source="openstack",
        recorded_at=timeutils.utcnow(),
        timestamp=timeutils.utcnow(),
        message_id="0",
        message_signature='',
    )
    # Two identical batches: query_samples is expected to invoke
    # get_samples once per sub-filter and merge the results.
    pending_batches = [[sample], [sample]]

    def _fake_get_samples(*args, **kwargs):
        return pending_batches.pop()

    with mock.patch("ceilometer.monasca_client.Client"):
        conn = impl_monasca.Connection("127.0.0.1:8080")
        with mock.patch.object(conn, 'get_samples') as fake:
            fake.side_effect = _fake_get_samples
            start = datetime.datetime(2014, 10, 24, 13, 52, 42)
            end = datetime.datetime(2014, 10, 24, 14, 52, 42)
            time_clause = {'or': [{'>': {"timestamp": start}},
                                  {'<': {"timestamp": end}}]}
            query = {'and': [{'=': {'counter_name': 'instance'}},
                             time_clause]}
            result = conn.query_samples(query, None, 100)
            self.assertEqual(2, len(result))
            self.assertEqual(2, fake.call_count)
def _retrieve_samples(self, query, orderby, limit):
    """Yield models.Sample objects for documents matching *query*."""
    find_kwargs = {'sort': orderby}
    if limit is not None:
        find_kwargs['limit'] = limit
    for doc in self.db.meter.find(query, **find_kwargs):
        # Strip the ObjectId the database generated on insert; it is
        # an implementation detail that stays inside the driver.
        del doc['_id']
        # Samples written before units were introduced get an empty one.
        doc['counter_unit'] = doc.get('counter_unit', '')
        yield models.Sample(**doc)
def get_samples(self, sample_filter, limit=None):
    """Return an iterable of model.Sample instances.

    :param sample_filter: Filter.
    :param limit: Maximum number of results to return.
    """
    # A zero limit can never produce results.
    if limit == 0:
        return
    criteria = make_query_from_filter(sample_filter, require_meter=False)
    # pymongo treats limit(0) as "no limit", which matches the
    # limit=None contract here (limit == 0 was handled above).
    cursor = self.db.meter.find(criteria).limit(limit or 0)
    for doc in cursor:
        # Drop the database-generated ObjectId before the document
        # leaves the driver.
        del doc['_id']
        yield models.Sample(**doc)
def point_to_sample(point):
    """Transform the point mapping into a models.Sample object."""
    # 'meter', 'value' and 'timestamp' are required keys; the rest
    # degrade gracefully to None (or '' for the unit).
    recorded = (utils.sanitize_timestamp(point['recorded_at'])
                if "recorded_at" in point else None)
    return models.Sample(
        point.get("source"),
        point["meter"],
        point.get("type"),
        point.get("unit", ""),
        float(point["value"]),
        point.get("user_id"),
        point.get("project_id"),
        point.get("resource_id"),
        utils.sanitize_timestamp(point["timestamp"]),
        transform_metadata(point),
        point.get("message_id"),
        point.get("message_signature"),
        recorded_at=recorded)
def get_samples(sample_filter, limit=None):
    """Return an iterable of api_models.Samples.

    :param sample_filter: Filter.
    :param limit: Maximum number of results to return.
    """
    # A zero limit can never produce results.
    if limit == 0:
        return
    session = sqlalchemy_session.get_session()
    query = make_query_from_filter(session,
                                   session.query(models.Meter),
                                   sample_filter,
                                   require_meter=False)
    if limit:
        query = query.limit(limit)
    rows = query.from_self().order_by(desc(models.Meter.timestamp)).all()
    for row in rows:
        # The database-generated id is an implementation detail and is
        # intentionally not copied into the API model.
        # Meter.sources holds exactly one source in the current
        # implementation; collapse it to the single 'source' value the
        # caller expects.
        yield api_models.Sample(
            source=row.sources[0].id,
            counter_name=row.counter_name,
            counter_type=row.counter_type,
            counter_unit=row.counter_unit,
            counter_volume=row.counter_volume,
            user_id=row.user_id,
            project_id=row.project_id,
            resource_id=row.resource_id,
            timestamp=row.timestamp,
            resource_metadata=row.resource_metadata,
            message_id=row.message_id,
            message_signature=row.message_signature,
        )
def _retrieve_samples(query):
    """Yield an api_models.Sample for every row returned by *query*."""
    for row in query.all():
        # The database primary key is deliberately left out of the API
        # model; it is an implementation detail of the driver.
        yield api_models.Sample(
            source=row.source_id,
            counter_name=row.counter_name,
            counter_type=row.counter_type,
            counter_unit=row.counter_unit,
            counter_volume=row.counter_volume,
            user_id=row.user_id,
            project_id=row.project_id,
            resource_id=row.resource_id,
            timestamp=row.timestamp,
            recorded_at=row.recorded_at,
            resource_metadata=row.resource_metadata,
            message_id=row.message_id,
            message_signature=row.message_signature,
        )
def get_samples(self, sample_filter, limit=None):
    """Return an iterable of models.Sample instances.

    :param sample_filter: Filter.
    :param limit: Maximum number of results to return.
    """
    # A zero limit can never yield anything; return before opening a
    # connection or starting an HBase scan (matches the guard used by
    # the other get_samples implementations).
    if limit == 0:
        return
    with self.conn_pool.connection() as conn:
        meter_table = conn.table(self.METER_TABLE)
        q, start, stop = make_sample_query_from_filter(sample_filter,
                                                       require_meter=False)
        # Pass the query as a lazy logging argument instead of eagerly
        # %-formatting it, so the message is only rendered when DEBUG
        # logging is enabled.
        LOG.debug(_("Query Meter Table: %s"), q)
        gen = meter_table.scan(filter=q, row_start=start, row_stop=stop)
        for ignored, meter in gen:
            # Enforce the limit client-side: stop once it is exhausted.
            if limit is not None:
                if limit == 0:
                    break
                else:
                    limit -= 1
            d_meter = deserialize_entry(meter)[0]
            # recorded_at lives outside the 'message' payload; graft it
            # in so the Sample model receives it as a keyword.
            d_meter['message']['recorded_at'] = d_meter['recorded_at']
            yield models.Sample(**d_meter['message'])
def test_query_samples(self, mdf_mock):
    SAMPLES = [[
        storage_models.Sample(
            counter_name="instance",
            counter_type="gauge",
            counter_unit="instance",
            counter_volume=1,
            project_id="123",
            user_id="456",
            resource_id="789",
            resource_metadata={},
            source="openstack",
            recorded_at=timeutils.utcnow(),
            timestamp=timeutils.utcnow(),
            message_id="0",
            message_signature='',)
    ]] * 2
    samples = SAMPLES[:]

    def _get_samples(*args, **kwargs):
        # NOTE: this closure reads the *current* binding of 'samples'
        # in the enclosing scope — that name is rebound several times
        # below (to query results and back to fresh copies of SAMPLES),
        # so the order of the statements that follow is significant.
        return samples.pop()

    with mock.patch("ceilometer.monasca_client.Client"):
        conn = impl_monasca.Connection("127.0.0.1:8080")
        with mock.patch.object(conn, 'get_samples') as gsm:
            gsm.side_effect = _get_samples
            # An 'or' query should fan out into one get_samples call
            # per disjunct, merging both batches.
            query = {'or': [{'=': {"project_id": "123"}},
                            {'=': {"user_id": "456"}}]}
            samples = conn.query_samples(query, None, 100)
            self.assertEqual(2, len(samples))
            self.assertEqual(2, gsm.call_count)
            # Refill the pop source, then check that a non-matching
            # 'and' filter ('counter_volume > 2') drops every sample
            # while still issuing exactly one more get_samples call.
            samples = SAMPLES[:]
            query = {'and': [{'=': {"project_id": "123"}},
                             {'>': {"counter_volume": 2}}]}
            samples = conn.query_samples(query, None, 100)
            self.assertEqual(0, len(samples))
            self.assertEqual(3, gsm.call_count)
def make_sample(data):
    """Transform HBase fields to Sample model."""
    message = json.loads(data['f:message'])
    # The timestamp is serialized as a string inside the JSON payload;
    # restore it to a datetime before building the model.
    message['timestamp'] = timeutils.parse_strtime(message['timestamp'])
    return models.Sample(**message)
def get_samples(self, sample_filter, limit=None):
    """Return an iterable of dictionaries containing sample information.

    {
      'source': source of the resource,
      'counter_name': name of the resource,
      'counter_type': type of the sample (gauge, delta, cumulative),
      'counter_unit': unit of the sample,
      'counter_volume': volume of the sample,
      'user_id': UUID of user owning the resource,
      'project_id': UUID of project owning the resource,
      'resource_id': UUID of the resource,
      'timestamp': timestamp of the sample,
      'resource_metadata': metadata of the sample,
      'message_id': message ID of the sample,
      'message_signature': message signature of the sample,
      'recorded_at': time the sample was recorded
    }

    :param sample_filter: constraints for the sample search.
    :param limit: Maximum number of results to return.
    """
    # Monasca requires a metric name; refuse filterless queries.
    if not sample_filter or not sample_filter.meter:
        raise ceilometer.NotImplementedError(
            "Supply meter name at the least")
    # Only 'ge'/'le' timestamp operators are supported by this backend.
    if (sample_filter.start_timestamp_op and
            sample_filter.start_timestamp_op != 'ge'):
        raise ceilometer.NotImplementedError(
            ('Start time op %s '
             'not implemented') % sample_filter.start_timestamp_op)
    if (sample_filter.end_timestamp_op and
            sample_filter.end_timestamp_op != 'le'):
        raise ceilometer.NotImplementedError(
            ('End time op %s '
             'not implemented') % sample_filter.end_timestamp_op)
    q = {}
    if sample_filter.metaquery:
        q = self._convert_metaquery(sample_filter.metaquery)
    if sample_filter.message_id:
        raise ceilometer.NotImplementedError('message_id not '
                                             'implemented '
                                             'in get_samples')
    # NOTE(review): the filter object is mutated in place here — its
    # timestamps are overwritten with ISO-8601 strings, which callers
    # reusing the same filter would observe. Epoch start is the default
    # lower bound when none is given.
    if not sample_filter.start_timestamp:
        sample_filter.start_timestamp = \
            timeutils.isotime(datetime.datetime(1970, 1, 1))
    else:
        sample_filter.start_timestamp = \
            timeutils.isotime(sample_filter.start_timestamp)
    if sample_filter.end_timestamp:
        sample_filter.end_timestamp = \
            timeutils.isotime(sample_filter.end_timestamp)
    # Only dimensions the caller actually supplied are forwarded.
    _dimensions = dict(user_id=sample_filter.user,
                       project_id=sample_filter.project,
                       resource_id=sample_filter.resource,
                       source=sample_filter.source)
    _dimensions = {k:
                   v for k, v in _dimensions.items() if v is not None}
    _search_args = dict(
        name=sample_filter.meter,
        start_time=sample_filter.start_timestamp,
        start_timestamp_op=(sample_filter.start_timestamp_op),
        end_time=sample_filter.end_timestamp,
        end_timestamp_op=sample_filter.end_timestamp_op,
        limit=limit,
        merge_metrics=True,
        dimensions=_dimensions)
    # Drop unset arguments so the client applies its own defaults.
    _search_args = {k: v for k, v in _search_args.items()
                    if v is not None}
    for sample in self.mc.measurements_list(**_search_args):
        LOG.debug(_('Retrieved sample: %s'), sample)
        d = sample['dimensions']
        for measurement in sample['measurements']:
            meas_dict = self._convert_to_dict(measurement,
                                              sample['columns'])
            vm = meas_dict['value_meta']
            # Metaquery filtering happens client-side against the
            # measurement's value_meta.
            if not self._match_metaquery_to_value_meta(q, vm):
                continue
            yield api_models.Sample(
                source=d.get('source'),
                counter_name=sample['name'],
                counter_type=d.get('type'),
                counter_unit=d.get('unit'),
                counter_volume=meas_dict['value'],
                user_id=d.get('user_id'),
                project_id=d.get('project_id'),
                resource_id=d.get('resource_id'),
                timestamp=timeutils.parse_isotime(meas_dict['timestamp']),
                resource_metadata=meas_dict['value_meta'],
                message_id=sample['id'],
                message_signature='',
                # Monasca does not keep a separate recording time, so
                # the measurement timestamp is reused for recorded_at.
                recorded_at=(timeutils.parse_isotime(
                    meas_dict['timestamp'])))
def get_samples(self, sample_filter, limit=None):
    """Return an iterable of dictionaries containing sample information.

    {
      'source': source of the resource,
      'counter_name': name of the resource,
      'counter_type': type of the sample (gauge, delta, cumulative),
      'counter_unit': unit of the sample,
      'counter_volume': volume of the sample,
      'user_id': UUID of user owning the resource,
      'project_id': UUID of project owning the resource,
      'resource_id': UUID of the resource,
      'timestamp': timestamp of the sample,
      'resource_metadata': metadata of the sample,
      'message_id': message ID of the sample,
      'message_signature': message signature of the sample,
      'recorded_at': time the sample was recorded
    }

    :param sample_filter: constraints for the sample search.
    :param limit: Maximum number of results to return.
    """
    # A zero limit can never yield anything.
    if limit == 0:
        return
    # Monasca requires a metric name; refuse filterless queries.
    if not sample_filter or not sample_filter.meter:
        raise ceilometer.NotImplementedError(
            "Supply meter name at the least")
    # Only 'ge'/'le' timestamp operators are supported by this backend.
    if (sample_filter.start_timestamp_op and
            sample_filter.start_timestamp_op != 'ge'):
        raise ceilometer.NotImplementedError(('Start time op %s '
                                              'not implemented') %
                                             sample_filter.
                                             start_timestamp_op)
    if (sample_filter.end_timestamp_op and
            sample_filter.end_timestamp_op != 'le'):
        raise ceilometer.NotImplementedError(('End time op %s '
                                              'not implemented') %
                                             sample_filter.
                                             end_timestamp_op)
    q = {}
    if sample_filter.metaquery:
        q = self._convert_metaquery(sample_filter.metaquery)
    if sample_filter.message_id:
        raise ceilometer.NotImplementedError('message_id not '
                                             'implemented '
                                             'in get_samples')
    # NOTE(review): the filter object is mutated in place — missing
    # bounds are filled with epoch start / "now", which callers reusing
    # the same filter would observe.
    if not sample_filter.start_timestamp:
        sample_filter.start_timestamp = datetime.datetime(1970, 1, 1)
    if not sample_filter.end_timestamp:
        sample_filter.end_timestamp = datetime.datetime.utcnow()
    _dimensions = dict(
        user_id=sample_filter.user,
        project_id=sample_filter.project,
        resource_id=sample_filter.resource,
        source=sample_filter.source,
        # Dynamic sample filter attributes, these fields are useful for
        # filtering result.
        unit=getattr(sample_filter, 'unit', None),
        type=getattr(sample_filter, 'type', None),
    )
    # Only dimensions the caller actually supplied are forwarded.
    _dimensions = {k: v for k, v in _dimensions.items() if v is not None}
    _metric_args = dict(name=sample_filter.meter,
                        dimensions=_dimensions)
    start_ts = timeutils.isotime(sample_filter.start_timestamp)
    end_ts = timeutils.isotime(sample_filter.end_timestamp)
    _search_args = dict(
        start_time=start_ts,
        start_timestamp_op=sample_filter.start_timestamp_op,
        end_time=end_ts,
        end_timestamp_op=sample_filter.end_timestamp_op,
        merge_metrics=False
    )
    result_count = 0
    # Measurements are fetched per matching metric (merge_metrics is
    # False), so each metric's dimensions are queried individually.
    for metric in self.mc.metrics_list(
            **_metric_args):
        _search_args['name'] = metric['name']
        _search_args['dimensions'] = metric['dimensions']
        # Drop unset arguments so the client applies its own defaults.
        _search_args = {k: v for k, v in _search_args.items()
                        if v is not None}
        for sample in self.mc.measurements_list(**_search_args):
            d = sample['dimensions']
            for meas in sample['measurements']:
                m = self._convert_to_dict(
                    meas, sample['columns'])
                vm = m['value_meta']
                # Metaquery filtering happens client-side against the
                # measurement's value_meta.
                if not self._match_metaquery_to_value_meta(q, vm):
                    continue
                result_count += 1
                yield api_models.Sample(
                    source=d.get('source'),
                    counter_name=sample['name'],
                    counter_type=d.get('type'),
                    counter_unit=d.get('unit'),
                    counter_volume=m['value'],
                    user_id=d.get('user_id'),
                    project_id=d.get('project_id'),
                    resource_id=d.get('resource_id'),
                    timestamp=timeutils.parse_isotime(m['timestamp']),
                    resource_metadata=m['value_meta'],
                    message_id=sample['id'],
                    message_signature='',
                    # Monasca keeps no separate recording time; reuse
                    # the measurement timestamp for recorded_at.
                    recorded_at=(timeutils.parse_isotime(m['timestamp'])))
                # The limit is enforced client-side across all metrics.
                if result_count == limit:
                    return