def test_constructor_defaults(self):
    """A BaseMeasure built without ``unit`` stores the args and defaults unit to None."""
    name = "testName"
    description = "testMeasure"
    measure = measure_module.BaseMeasure(name=name, description=description)
    # The explicitly passed constructor arguments should round-trip.
    self.assertEqual(name, measure.name)
    self.assertEqual(description, measure.description)
    # ``unit`` was omitted, so it must default to None.
    self.assertIsNone(measure.unit)
def test_constructor_explicit(self):
    """All three constructor arguments are stored verbatim on the measure."""
    measure = measure_module.BaseMeasure(
        name="testName", description="testMeasure", unit="testUnit")
    # Check each attribute against the value passed to the constructor.
    expected_attrs = (
        ("name", "testName"),
        ("description", "testMeasure"),
        ("unit", "testUnit"),
    )
    for attr, expected in expected_attrs:
        self.assertEqual(expected, getattr(measure, attr))
def _record_metrics(self, metrics):
    """Convert opencensus metric protobufs into ViewData and export them.

    Each metric protobuf is expected to carry a ``metric_descriptor`` and a
    list of ``timeseries`` (presumably opencensus proto messages — confirm
    against the caller). A View is registered lazily per descriptor name,
    the latest ViewData for that measure is filled with aggregation data
    from every point, and the changed ViewData list is handed to the
    exporter in one batch at the end.

    Args:
        metrics: iterable of metric protobuf messages to convert.

    Raises:
        ValueError: if a point is none of int64/double/distribution
            (i.e. a Summary, which is not supported here).
    """
    # The list of view data is what we are going to use for the
    # final export to exporter.
    view_data_changed: List[ViewData] = []

    # Walk the protobufs and convert them to ViewData
    for metric in metrics:
        descriptor = metric.metric_descriptor
        timeseries = metric.timeseries

        # Nothing to record for a metric with no series.
        if len(timeseries) == 0:
            continue

        columns = [label_key.key for label_key in descriptor.label_keys]
        # Whole-second resolution; sub-second precision is dropped.
        start_time = timeseries[0].start_timestamp.seconds

        # Create the view and view_data
        measure = measure_module.BaseMeasure(
            descriptor.name, descriptor.description, descriptor.unit)
        view = self.view_manager.measure_to_view_map.get_view(
            descriptor.name, None)
        if not view:
            # First time we see this descriptor: register a View for it.
            view = View(
                descriptor.name,
                descriptor.description,
                columns,
                measure,
                aggregation=None)
            self.view_manager.measure_to_view_map.register_view(
                view, start_time)
        # NOTE(review): reaches into a private map of the view manager;
        # [-1] takes the most recently registered ViewData for the measure.
        view_data = (self.view_manager.measure_to_view_map.
            _measure_to_view_data_list_map[measure.name][-1])
        view_data_changed.append(view_data)

        # Create the aggregation and fill in our stats
        for series in timeseries:
            # Label values become the tag key for the aggregation map.
            tag_vals = tuple(val.value for val in series.label_values)
            for point in series.points:
                if point.HasField("int64_value"):
                    data = CountAggregationData(point.int64_value)
                elif point.HasField("double_value"):
                    data = LastValueAggregationData(
                        ValueDouble, point.double_value)
                elif point.HasField("distribution_value"):
                    dist_value = point.distribution_value
                    counts_per_bucket = [
                        bucket.count for bucket in dist_value.buckets
                    ]
                    bucket_bounds = (
                        dist_value.bucket_options.explicit.bounds)
                    # First argument is the mean (sum / count).
                    # NOTE(review): assumes count > 0 — an empty
                    # distribution would raise ZeroDivisionError; verify
                    # upstream never emits one.
                    data = DistributionAggregationData(
                        dist_value.sum / dist_value.count,
                        dist_value.count,
                        dist_value.sum_of_squared_deviation,
                        counts_per_bucket, bucket_bounds)
                else:
                    raise ValueError("Summary is not supported")

                # Later points for the same tag values overwrite earlier
                # ones within this batch.
                view_data.tag_value_aggregation_data_map[tag_vals] = data

    # Finally, export all the values
    self.view_manager.measure_to_view_map.export(view_data_changed)