def test_translate_sum_double(self):
    expected = ExportMetricsServiceRequest(
        resource_metrics=[
            pb2.ResourceMetrics(
                resource=OTLPResource(
                    attributes=[
                        KeyValue(key="a", value=AnyValue(int_value=1)),
                        KeyValue(
                            key="b", value=AnyValue(bool_value=False)
                        ),
                    ]
                ),
                instrumentation_library_metrics=[
                    pb2.InstrumentationLibraryMetrics(
                        instrumentation_library=InstrumentationLibrary(
                            name="first_name", version="first_version"
                        ),
                        metrics=[
                            pb2.Metric(
                                name="sum_double",
                                unit="s",
                                description="foo",
                                sum=pb2.Sum(
                                    data_points=[
                                        pb2.NumberDataPoint(
                                            attributes=[
                                                KeyValue(
                                                    key="a",
                                                    value=AnyValue(
                                                        int_value=1
                                                    ),
                                                ),
                                                KeyValue(
                                                    key="b",
                                                    value=AnyValue(
                                                        bool_value=True
                                                    ),
                                                ),
                                            ],
                                            start_time_unix_nano=1641946015139533244,
                                            time_unix_nano=1641946016139533244,
                                            as_double=2.98,
                                        )
                                    ],
                                    aggregation_temporality=AggregationTemporality.CUMULATIVE,
                                    is_monotonic=True,
                                ),
                            )
                        ],
                    )
                ],
            )
        ]
    )
    # pylint: disable=protected-access
    actual = self.exporter._translate_data([self.metrics["sum_double"]])
    self.assertEqual(expected, actual)
def test_translate_gauge_double(self):
    expected = ExportMetricsServiceRequest(
        resource_metrics=[
            pb2.ResourceMetrics(
                resource=OTLPResource(
                    attributes=[
                        KeyValue(key="a", value=AnyValue(int_value=1)),
                        KeyValue(
                            key="b", value=AnyValue(bool_value=False)
                        ),
                    ]
                ),
                instrumentation_library_metrics=[
                    pb2.InstrumentationLibraryMetrics(
                        instrumentation_library=InstrumentationLibrary(
                            name="first_name", version="first_version"
                        ),
                        metrics=[
                            pb2.Metric(
                                name="gauge_double",
                                unit="s",
                                description="foo",
                                gauge=pb2.Gauge(
                                    data_points=[
                                        pb2.NumberDataPoint(
                                            attributes=[
                                                KeyValue(
                                                    key="a",
                                                    value=AnyValue(
                                                        int_value=1
                                                    ),
                                                ),
                                                KeyValue(
                                                    key="b",
                                                    value=AnyValue(
                                                        bool_value=True
                                                    ),
                                                ),
                                            ],
                                            time_unix_nano=1641946016139533244,
                                            as_double=52.028,
                                        )
                                    ],
                                ),
                            )
                        ],
                    )
                ],
            )
        ]
    )
    # pylint: disable=protected-access
    actual = self.exporter._translate_data([self.metrics["gauge_double"]])
    self.assertEqual(expected, actual)
def _translate_data(
    self, data: Sequence[Metric]
) -> ExportMetricsServiceRequest:
    sdk_resource_instrumentation_library_metrics = {}

    for metric in data:
        resource = metric.resource
        instrumentation_library_map = (
            sdk_resource_instrumentation_library_metrics.get(resource, {})
        )
        if not instrumentation_library_map:
            sdk_resource_instrumentation_library_metrics[
                resource
            ] = instrumentation_library_map

        instrumentation_library_metrics = instrumentation_library_map.get(
            metric.instrumentation_info
        )
        if not instrumentation_library_metrics:
            if metric.instrumentation_info is not None:
                instrumentation_library_map[
                    metric.instrumentation_info
                ] = pb2.InstrumentationLibraryMetrics(
                    instrumentation_library=InstrumentationLibrary(
                        name=metric.instrumentation_info.name,
                        version=metric.instrumentation_info.version,
                    )
                )
            else:
                instrumentation_library_map[
                    metric.instrumentation_info
                ] = pb2.InstrumentationLibraryMetrics()

        instrumentation_library_metrics = instrumentation_library_map.get(
            metric.instrumentation_info
        )

        pbmetric = pb2.Metric(
            name=metric.name,
            description=metric.description,
            unit=metric.unit,
        )
        if isinstance(metric.point, Gauge):
            pt = pb2.NumberDataPoint(
                attributes=self._translate_attributes(metric.attributes),
                time_unix_nano=metric.point.time_unix_nano,
            )
            if isinstance(metric.point.value, int):
                pt.as_int = metric.point.value
            else:
                pt.as_double = metric.point.value
            pbmetric.gauge.data_points.append(pt)
        elif isinstance(metric.point, Histogram):
            pt = pb2.HistogramDataPoint(
                attributes=self._translate_attributes(metric.attributes),
                time_unix_nano=metric.point.time_unix_nano,
                start_time_unix_nano=metric.point.start_time_unix_nano,
                count=sum(metric.point.bucket_counts),
                sum=metric.point.sum,
                bucket_counts=metric.point.bucket_counts,
                explicit_bounds=metric.point.explicit_bounds,
            )
            pbmetric.histogram.aggregation_temporality = (
                metric.point.aggregation_temporality
            )
            pbmetric.histogram.data_points.append(pt)
        elif isinstance(metric.point, Sum):
            pt = pb2.NumberDataPoint(
                attributes=self._translate_attributes(metric.attributes),
                start_time_unix_nano=metric.point.start_time_unix_nano,
                time_unix_nano=metric.point.time_unix_nano,
            )
            if isinstance(metric.point.value, int):
                pt.as_int = metric.point.value
            else:
                pt.as_double = metric.point.value
            # note that because sum is a message-type field, it cannot be
            # assigned directly; its fields must be set individually rather
            # than instantiating a pb2.Sum and assigning it in one step
            pbmetric.sum.aggregation_temporality = (
                metric.point.aggregation_temporality
            )
            pbmetric.sum.is_monotonic = metric.point.is_monotonic
            pbmetric.sum.data_points.append(pt)
        else:
            logger.warning("unsupported datapoint type %s", metric.point)
            continue

        instrumentation_library_metrics.metrics.append(pbmetric)

    return ExportMetricsServiceRequest(
        resource_metrics=get_resource_data(
            sdk_resource_instrumentation_library_metrics,
            pb2.ResourceMetrics,
            "metrics",
        )
    )
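# Illustrative note, not part of the exporter: "sum" above is a protobuf
# message field, so a direct assignment such as ``pbmetric.sum = pb2.Sum(...)``
# raises an AttributeError. Setting the scalar fields individually (as done in
# _translate_data) is one option; a sketch of the CopyFrom alternative,
# assuming the same ``metric``, ``pt``, and ``pbmetric`` locals as above,
# would look like this:
#
#     pbmetric.sum.CopyFrom(
#         pb2.Sum(
#             data_points=[pt],
#             aggregation_temporality=metric.point.aggregation_temporality,
#             is_monotonic=metric.point.is_monotonic,
#         )
#     )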