def test_get_collector_metric_type(self):
    result = metrics_exporter.get_collector_metric_type(
        Counter("testName", "testDescription", "unit", int, None))
    self.assertIs(result, metrics_pb2.MetricDescriptor.CUMULATIVE_INT64)

    result = metrics_exporter.get_collector_metric_type(
        Counter("testName", "testDescription", "unit", float, None))
    self.assertIs(result, metrics_pb2.MetricDescriptor.CUMULATIVE_DOUBLE)

    result = metrics_exporter.get_collector_metric_type(
        Measure("testName", "testDescription", "unit", None, None))
    self.assertIs(result, metrics_pb2.MetricDescriptor.UNSPECIFIED)
def test_create_timeseries(self):
    def create_label(name, value):
        label = Label()
        label.name = name
        label.value = value
        return label

    sum_aggregator = SumAggregator()
    sum_aggregator.update(5)
    sum_aggregator.take_checkpoint()
    export_record = ExportRecord(
        Counter("testname", "testdesc", "testunit", int, None),
        get_dict_as_key({"record_name": "record_value"}),
        sum_aggregator,
        Resource({"resource_name": "resource_value"}),
    )

    expected_timeseries = TimeSeries()
    expected_timeseries.labels.append(create_label("__name__", "testname"))
    expected_timeseries.labels.append(
        create_label("resource_name", "resource_value"))
    expected_timeseries.labels.append(
        create_label("record_name", "record_value"))
    sample = expected_timeseries.samples.add()
    sample.timestamp = int(sum_aggregator.last_update_timestamp / 1000000)
    sample.value = 5.0

    timeseries = self.exporter._create_timeseries(
        export_record, "testname", 5.0)
    self.assertEqual(timeseries, expected_timeseries)
def test_convert_from_min_max_sum_count(self):
    min_max_sum_count_record = ExportRecord(
        Counter("testname", "testdesc", "testunit", int, None),
        None,
        MinMaxSumCountAggregator(),
        Resource({}),
    )
    min_max_sum_count_record.aggregator.update(5)
    min_max_sum_count_record.aggregator.update(1)
    min_max_sum_count_record.aggregator.take_checkpoint()

    expected_min_timeseries = self.exporter._create_timeseries(
        min_max_sum_count_record, "testname_min", 1.0)
    expected_max_timeseries = self.exporter._create_timeseries(
        min_max_sum_count_record, "testname_max", 5.0)
    expected_sum_timeseries = self.exporter._create_timeseries(
        min_max_sum_count_record, "testname_sum", 6.0)
    expected_count_timeseries = self.exporter._create_timeseries(
        min_max_sum_count_record, "testname_count", 2.0)

    timeseries = self.exporter._convert_from_min_max_sum_count(
        min_max_sum_count_record)
    self.assertEqual(timeseries[0], expected_min_timeseries)
    self.assertEqual(timeseries[1], expected_max_timeseries)
    self.assertEqual(timeseries[2], expected_sum_timeseries)
    self.assertEqual(timeseries[3], expected_count_timeseries)
def test_convert_from_value_observer(self):
    value_observer_record = ExportRecord(
        Counter("testname", "testdesc", "testunit", int, None),
        None,
        ValueObserverAggregator(),
        Resource({}),
    )
    value_observer_record.aggregator.update(5)
    value_observer_record.aggregator.update(1)
    value_observer_record.aggregator.update(2)
    value_observer_record.aggregator.take_checkpoint()

    expected_min_timeseries = self.exporter._create_timeseries(
        value_observer_record, "testname_min", 1.0)
    expected_max_timeseries = self.exporter._create_timeseries(
        value_observer_record, "testname_max", 5.0)
    expected_sum_timeseries = self.exporter._create_timeseries(
        value_observer_record, "testname_sum", 8.0)
    expected_count_timeseries = self.exporter._create_timeseries(
        value_observer_record, "testname_count", 3.0)
    expected_last_timeseries = self.exporter._create_timeseries(
        value_observer_record, "testname_last", 2.0)

    timeseries = self.exporter._convert_from_value_observer(
        value_observer_record)
    self.assertEqual(timeseries[0], expected_min_timeseries)
    self.assertEqual(timeseries[1], expected_max_timeseries)
    self.assertEqual(timeseries[2], expected_sum_timeseries)
    self.assertEqual(timeseries[3], expected_count_timeseries)
    self.assertEqual(timeseries[4], expected_last_timeseries)
def test_valid_convert_to_timeseries(self):
    test_records = [
        ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            SumAggregator(),
            Resource({}),
        ),
        ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            MinMaxSumCountAggregator(),
            Resource({}),
        ),
        ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            HistogramAggregator(),
            Resource({}),
        ),
        ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            LastValueAggregator(),
            Resource({}),
        ),
        ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            ValueObserverAggregator(),
            Resource({}),
        ),
    ]
    for record in test_records:
        record.aggregator.update(5)
        record.aggregator.take_checkpoint()

    data = self.exporter._convert_to_timeseries(test_records)

    self.assertIsInstance(data, list)
    # 13 series total: sum (1) + min_max_sum_count (4) + histogram (2)
    # + last_value (1) + value_observer (5).
    self.assertEqual(len(data), 13)
    for timeseries in data:
        self.assertIsInstance(timeseries, TimeSeries)
def test_valid_export(self, mock_post):
    mock_post.return_value.configure_mock(**{"status_code": 200})
    test_metric = Counter("testname", "testdesc", "testunit", int, None)
    labels = get_dict_as_key({"environment": "testing"})
    record = ExportRecord(
        test_metric, labels, SumAggregator(), Resource({}))

    result = self.exporter.export([record])
    self.assertIs(result, MetricsExportResult.SUCCESS)
    self.assertEqual(mock_post.call_count, 1)

    result = self.exporter.export([])
    self.assertIs(result, MetricsExportResult.SUCCESS)
def test_convert_from_last_value(self):
    last_value_record = ExportRecord(
        Counter("testname", "testdesc", "testunit", int, None),
        None,
        LastValueAggregator(),
        Resource({}),
    )
    last_value_record.aggregator.update(1)
    last_value_record.aggregator.update(5)
    last_value_record.aggregator.take_checkpoint()

    expected_timeseries = self.exporter._create_timeseries(
        last_value_record, "testname_last", 5.0)
    timeseries = self.exporter._convert_from_last_value(last_value_record)
    self.assertEqual(timeseries[0], expected_timeseries)
def test_convert_from_sum(self):
    sum_record = ExportRecord(
        Counter("testname", "testdesc", "testunit", int, None),
        None,
        SumAggregator(),
        Resource({}),
    )
    sum_record.aggregator.update(3)
    sum_record.aggregator.update(2)
    sum_record.aggregator.take_checkpoint()

    expected_timeseries = self.exporter._create_timeseries(
        sum_record, "testname_sum", 5.0)
    timeseries = self.exporter._convert_from_sum(sum_record)
    self.assertEqual(timeseries[0], expected_timeseries)
def setUp(self):
    self.exporter = OTLPMetricsExporter()
    resource = SDKResource(OrderedDict([("a", 1), ("b", False)]))

    self.counter_metric_record = MetricRecord(
        Counter(
            "a",
            "b",
            "c",
            int,
            MeterProvider(resource=resource).get_meter(__name__),
            ("d",),
        ),
        OrderedDict([("e", "f")]),
        SumAggregator(),
        resource,
    )
def setUp(self):
    self.exporter = OTLPMetricsExporter(insecure=True)
    resource = SDKResource(OrderedDict([("a", 1), ("b", False)]))

    self.counter_metric_record = MetricRecord(
        Counter(
            "c",
            "d",
            "e",
            int,
            MeterProvider(resource=resource).get_meter(__name__),
            ("f",),
        ),
        [("g", "h")],
        SumAggregator(),
        resource,
    )

    Configuration._reset()  # pylint: disable=protected-access
def test_convert_from_histogram(self):
    histogram_record = ExportRecord(
        Counter("testname", "testdesc", "testunit", int, None),
        None,
        HistogramAggregator(),
        Resource({}),
    )
    histogram_record.aggregator.update(5)
    histogram_record.aggregator.update(2)
    histogram_record.aggregator.update(-1)
    histogram_record.aggregator.take_checkpoint()

    expected_le_0_timeseries = self.exporter._create_timeseries(
        histogram_record, "testname_histogram", 1.0, ("le", "0"))
    expected_le_inf_timeseries = self.exporter._create_timeseries(
        histogram_record, "testname_histogram", 2.0, ("le", "+Inf"))

    timeseries = self.exporter._convert_from_histogram(histogram_record)
    self.assertEqual(timeseries[0], expected_le_0_timeseries)
    self.assertEqual(timeseries[1], expected_le_inf_timeseries)
def test_translate_counter_export_record(self, mock_time_ns):
    mock_time_ns.configure_mock(**{"return_value": 1})

    counter_export_record = ExportRecord(
        Counter("c", "d", "e", int, self.meter, ("f",)),
        [("g", "h")],
        SumAggregator(),
        self.resource,
    )

    counter_export_record.aggregator.checkpoint = 1
    counter_export_record.aggregator.initial_checkpoint_timestamp = 1
    counter_export_record.aggregator.last_update_timestamp = 1

    expected = ExportMetricsServiceRequest(
        resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(
                    attributes=[
                        KeyValue(key="a", value=AnyValue(int_value=1)),
                        KeyValue(
                            key="b", value=AnyValue(bool_value=False)
                        ),
                    ]
                ),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(
                        instrumentation_library=InstrumentationLibrary(
                            name="name", version="version",
                        ),
                        metrics=[
                            OTLPMetric(
                                name="c",
                                description="d",
                                unit="e",
                                int_sum=IntSum(
                                    data_points=[
                                        IntDataPoint(
                                            labels=[
                                                StringKeyValue(
                                                    key="g", value="h"
                                                )
                                            ],
                                            value=1,
                                            time_unix_nano=1,
                                            start_time_unix_nano=1,
                                        )
                                    ],
                                    aggregation_temporality=(
                                        AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE
                                    ),
                                    is_monotonic=True,
                                ),
                            )
                        ],
                    )
                ],
            )
        ]
    )

    # pylint: disable=protected-access
    actual = self.exporter._translate_data([counter_export_record])
    self.assertEqual(expected, actual)