def test_convert(self):
  data_set = metrics_pb2.MetricsDataSet()
  data_set.metric_name = 'foo'
  data = data_set.data.add()
  data.bool_value = True
  data = data_set.data.add()
  data.bool_value = False
  data = data_set.data.add()
  data.int64_value = 200
  data = data_set.data.add()
  data.double_value = 123.456

  popo = pb_to_popo.convert(data_set)
  expected = {
      'metric_name': 'foo',
      'data': [
          {'bool_value': True},
          {'bool_value': False},
          {'int64_value': 200L},
          {'double_value': 123.456},
      ],
  }
  self.assertDictEqual(expected, popo)
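
# Illustrative sketch only, not pb_to_popo's actual implementation: one common
# way to convert a protobuf message into plain dicts/lists/scalars, using the
# standard protobuf ListFields() API. Names here are hypothetical.
from google.protobuf.descriptor import FieldDescriptor

def _convert_sketch(pb):
  """Recursively converts a protobuf message into plain Python objects."""
  def _value(field, value):
    # Nested messages are converted recursively; scalars pass through as-is.
    if field.type == FieldDescriptor.TYPE_MESSAGE:
      return _convert_sketch(value)
    return value

  out = {}
  for field, value in pb.ListFields():
    if field.label == FieldDescriptor.LABEL_REPEATED:
      out[field.name] = [_value(field, v) for v in value]
    else:
      out[field.name] = _value(field, value)
  return out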
def test_populate_data_set(self):
  interface.state.metric_name_prefix = '/infra/test/'
  scenarios = [
      (metrics.CounterMetric, 'desc', metrics_pb2.CUMULATIVE),
      (metrics.GaugeMetric, 'desc', metrics_pb2.GAUGE),
  ]
  for m_ctor, desc, stream_kind in scenarios:
    m = m_ctor(m_ctor.__name__, desc, None,
               units=metrics.MetricsDataUnits.SECONDS)
    data_set = metrics_pb2.MetricsDataSet()
    m.populate_data_set(data_set)

    self.assertEqual(stream_kind, data_set.stream_kind)
    self.assertEqual('/infra/test/%s' % m_ctor.__name__, data_set.metric_name)
    self.assertEqual(desc, data_set.description)
    self.assertEqual('s', data_set.annotations.unit)
def _generate_proto():
  """Generates MetricsPayload messages for global_monitor.send()."""
  proto = metrics_pb2.MetricsPayload()

  # Key: Target; value: MetricsCollection.
  collections = {}

  # Key: (Target, metric name) tuple; value: MetricsDataSet.
  data_sets = {}

  count = 0
  for (target, metric, start_time, end_time,
       fields_values) in state.store.get_all():
    for fields, value in fields_values.iteritems():
      if count >= METRICS_DATA_LENGTH_LIMIT:
        yield proto
        proto = metrics_pb2.MetricsPayload()
        collections.clear()
        data_sets.clear()
        count = 0

      if target not in collections:
        collections[target] = proto.metrics_collection.add()
        target.populate_target_pb(collections[target])
      collection = collections[target]

      key = (target, metric.name)
      new_data_set = None
      if key not in data_sets:
        new_data_set = metrics_pb2.MetricsDataSet()
        metric.populate_data_set(new_data_set)

      data = metrics_pb2.MetricsData()
      metric.populate_data(data, start_time, end_time, fields, value)

      # All required data protos have been successfully populated. Now we can
      # insert them into the serialized proto and bookkeeping data structures.
      if new_data_set is not None:
        collection.metrics_data_set.add().CopyFrom(new_data_set)
        data_sets[key] = collection.metrics_data_set[-1]
      data_sets[key].data.add().CopyFrom(data)
      count += 1

  if count > 0:
    yield proto
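
# Hypothetical consumption sketch (the real flush loop lives elsewhere in the
# library; state.global_monitor is assumed here based on the docstring above).
# Each yielded MetricsPayload stays under METRICS_DATA_LENGTH_LIMIT data
# points, so a large metrics store is sent as several smaller payloads.
def _flush_sketch():
  for payload in _generate_proto():
    state.global_monitor.send(payload)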
def test_populate_field_descriptors(self):
  data_set_pb = metrics_pb2.MetricsDataSet()
  m = metrics.Metric('test', 'test', [
      metrics.IntegerField('a'),
      metrics.BooleanField('b'),
      metrics.StringField('c'),
  ])
  m._populate_field_descriptors(data_set_pb)

  field_type = metrics_pb2.MetricsDataSet.MetricFieldDescriptor
  self.assertEqual(3, len(data_set_pb.field_descriptor))
  self.assertEqual('a', data_set_pb.field_descriptor[0].name)
  self.assertEqual(field_type.INT64,
                   data_set_pb.field_descriptor[0].field_type)
  self.assertEqual('b', data_set_pb.field_descriptor[1].name)
  self.assertEqual(field_type.BOOL,
                   data_set_pb.field_descriptor[1].field_type)
  self.assertEqual('c', data_set_pb.field_descriptor[2].name)
  self.assertEqual(field_type.STRING,
                   data_set_pb.field_descriptor[2].field_type)
def _generate_proto():
  """Generates MetricsPayload messages for global_monitor.send()."""
  proto = metrics_pb2.MetricsPayload()

  # Key: Target; value: MetricsCollection.
  collections = {}  # pylint: disable=redefined-outer-name

  # Key: (Target, metric name) tuple; value: MetricsDataSet.
  data_sets = {}

  count = 0
  for (target, metric, start_times, end_time,
       fields_values) in state.store.get_all():
    for fields, value in six.iteritems(fields_values):
      # By default, the start time of all data points for a single stream
      # is the time of the first value change in the stream, until
      # metric.reset() is invoked.
      #
      # e.g.,
      # At 00:00.
      # {value: 1,
      #  fields: ('metric:result': 'success', 'metric:command': 'get_name'),
      #  start_timestamp=0, end_timestamp=0}
      #
      # At 00:01.
      # {value: 1,
      #  fields: ('metric:result': 'success', 'metric:command': 'get_name'),
      #  start_timestamp=0, end_timestamp=1}
      #
      # At 00:02.
      # {value: 2,
      #  fields: ('metric:result': 'success', 'metric:command': 'get_name'),
      #  start_timestamp=0, end_timestamp=2}
      #
      # This is important for cumulative metrics, because the monitoring
      # backend detects the restart of a monitoring target and inserts a
      # reset point to make Delta()/Rate() computation results accurate.
      #
      # If a given metric has its own start_time, which can be set via
      # metric.dangerously_set_start_time(), then all data points are
      # reported with the metric-level start_time.
      #
      # Otherwise, data points are reported with the time of the first
      # value change.
      start_time = metric.start_time or start_times.get(fields, end_time)
      if count >= METRICS_DATA_LENGTH_LIMIT:
        yield proto
        proto = metrics_pb2.MetricsPayload()
        collections.clear()
        data_sets.clear()
        count = 0

      if target not in collections:
        collections[target] = proto.metrics_collection.add()
        if isinstance(target, tuple):
          _populate_root_labels(collections[target].root_labels, target)
        else:
          target.populate_target_pb(collections[target])
      collection = collections[target]

      key = (target, metric.name)
      new_data_set = None
      if key not in data_sets:
        new_data_set = metrics_pb2.MetricsDataSet()
        metric.populate_data_set(new_data_set)

      data = metrics_pb2.MetricsData()
      metric.populate_data(data, start_time, end_time, fields, value)

      # All required data protos have been successfully populated. Now we can
      # insert them into the serialized proto and bookkeeping data structures.
      if new_data_set is not None:
        collection.metrics_data_set.add().CopyFrom(new_data_set)
        data_sets[key] = collection.metrics_data_set[-1]
      data_sets[key].data.add().CopyFrom(data)
      count += 1

  if count > 0:
    yield proto
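
# Hedged usage sketch of the start_time semantics described in the comment
# above; the metric name and timestamp are hypothetical values.
requests = metrics.CounterMetric('requests', 'Requests handled.', None)

# Without an explicit start time, the stream's start_time is the time of the
# first increment (the "first value change" fallback above).
requests.increment()

# With a metric-level start time, every reported point uses it instead.
requests.dangerously_set_start_time(1533000000.0)  # epoch seconds, hypothetical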