def test_get_collector_point(self):
    """get_collector_point converts counter MetricRecords into proto
    Points and raises TypeError for unsupported instrument types
    (ValueRecorder)."""
    aggregator = aggregate.SumAggregator()
    int_counter = self._meter.create_metric(
        "testName", "testDescription", "unit", int, Counter
    )
    float_counter = self._meter.create_metric(
        "testName", "testDescription", "unit", float, Counter
    )
    valuerecorder = self._meter.create_metric(
        "testName", "testDescription", "unit", float, ValueRecorder
    )
    # A fresh (never-updated) int counter exports as int64 zero.
    result = metrics_exporter.get_collector_point(
        MetricRecord(int_counter, self._key_labels, aggregator)
    )
    self.assertIsInstance(result, metrics_pb2.Point)
    self.assertIsInstance(result.timestamp, Timestamp)
    self.assertEqual(result.int64_value, 0)
    # After a checkpoint, a float counter exports its checkpointed value.
    aggregator.update(123.5)
    aggregator.take_checkpoint()
    result = metrics_exporter.get_collector_point(
        MetricRecord(float_counter, self._key_labels, aggregator)
    )
    self.assertEqual(result.double_value, 123.5)
    # Fix: pass the callable and its argument separately so assertRaises
    # performs the call itself. The original invoked
    # get_collector_point(...) eagerly, so the expected TypeError was
    # raised outside the assertion and failed the test.
    self.assertRaises(
        TypeError,
        metrics_exporter.get_collector_point,
        MetricRecord(valuerecorder, self._key_labels, aggregator),
    )
def test_get_collector_point(self):
    """get_collector_point converts counter MetricRecords into proto
    Points and raises TypeError for unsupported instrument types
    (Measure)."""
    aggregator = aggregate.CounterAggregator()
    label_set = self._meter.get_label_set({"environment": "staging"})
    int_counter = self._meter.create_metric(
        "testName", "testDescription", "unit", int, Counter
    )
    float_counter = self._meter.create_metric(
        "testName", "testDescription", "unit", float, Counter
    )
    measure = self._meter.create_metric(
        "testName", "testDescription", "unit", float, Measure
    )
    # A fresh (never-updated) int counter exports as int64 zero.
    result = metrics_exporter.get_collector_point(
        MetricRecord(aggregator, label_set, int_counter)
    )
    self.assertIsInstance(result, metrics_pb2.Point)
    self.assertIsInstance(result.timestamp, Timestamp)
    self.assertEqual(result.int64_value, 0)
    # After a checkpoint, a float counter exports its checkpointed value.
    aggregator.update(123.5)
    aggregator.take_checkpoint()
    result = metrics_exporter.get_collector_point(
        MetricRecord(aggregator, label_set, float_counter)
    )
    self.assertEqual(result.double_value, 123.5)
    # Fix: pass the callable and its argument separately so assertRaises
    # performs the call itself. The original invoked
    # get_collector_point(...) eagerly, so the expected TypeError was
    # raised outside the assertion and failed the test.
    self.assertRaises(
        TypeError,
        metrics_exporter.get_collector_point,
        MetricRecord(aggregator, label_set, measure),
    )
def test_value_recorder_to_envelope(self):
    """A checkpointed ValueRecorder record maps to a well-formed
    Application Insights metric envelope with device/cloud tags set."""
    aggregator = MinMaxSumCountAggregator()
    aggregator.update(123)
    aggregator.take_checkpoint()
    record = MetricRecord(
        self._test_value_recorder, self._test_labels, aggregator
    )

    envelope = self._exporter._metric_to_envelope(record)

    # Envelope header fields.
    self.assertIsInstance(envelope, Envelope)
    self.assertEqual(envelope.ver, 1)
    self.assertEqual(envelope.name, "Microsoft.ApplicationInsights.Metric")
    self.assertEqual(
        envelope.time, ns_to_iso_str(aggregator.last_update_timestamp)
    )
    self.assertEqual(envelope.sample_rate, None)
    self.assertEqual(envelope.seq, None)
    self.assertEqual(envelope.ikey, "1234abcd-5678-4efa-8abc-1234567890ab")
    self.assertEqual(envelope.flags, None)

    # Payload: exactly one data point carrying the recorded value.
    self.assertIsInstance(envelope.data, Data)
    base_data = envelope.data.base_data
    self.assertIsInstance(base_data, MetricData)
    self.assertEqual(base_data.ver, 2)
    self.assertEqual(len(base_data.metrics), 1)
    data_point = base_data.metrics[0]
    self.assertIsInstance(data_point, DataPoint)
    self.assertEqual(data_point.ns, "testdesc")
    self.assertEqual(data_point.name, "testname")
    self.assertEqual(data_point.value, 1)
    self.assertEqual(base_data.properties["environment"], "staging")

    # Context tags populated from the SDK environment.
    for tag in (
        "ai.cloud.role",
        "ai.cloud.roleInstance",
        "ai.device.id",
        "ai.device.locale",
        "ai.device.osVersion",
        "ai.device.type",
        "ai.internal.sdkVersion",
    ):
        self.assertIsNotNone(envelope.tags[tag])
def test_export(self):
    """export() returns SUCCESS and forwards metrics plus node metadata
    (including the configured host name) to the collector client."""
    collector_client = mock.MagicMock()
    export_stub = mock.MagicMock()
    collector_client.Export = export_stub
    exporter = metrics_exporter.CollectorMetricsExporter(
        client=collector_client, host_name="testHostName"
    )
    metric = self._meter.create_metric(
        "testname", "testdesc", "unit", int, Counter, ["environment"]
    )
    record = MetricRecord(
        aggregate.CounterAggregator(), self._test_label_set, metric
    )

    self.assertIs(exporter.export([record]), MetricsExportResult.SUCCESS)

    # Inspect the request generator handed to the Export stub.
    # pylint: disable=unsubscriptable-object
    service_request = next(export_stub.call_args[0][0])
    self.assertEqual(len(getattr(service_request, "metrics")), 1)
    node = getattr(service_request, "node")
    self.assertIsNotNone(getattr(node, "library_info"))
    self.assertIsNotNone(getattr(node, "service_info"))
    identifier = getattr(node, "identifier")
    self.assertEqual(getattr(identifier, "host_name"), "testHostName")
def test_export_histogram(self):
    """Exporting a histogram record produces one DISTRIBUTION time
    series whose bucket bounds and counts mirror the aggregator
    checkpoint."""
    client = mock.Mock()
    # Freeze time_ns so the computed interval timestamps are
    # deterministic for the assertion below.
    with mock.patch(
        "opentelemetry.exporter.cloud_monitoring.time_ns",
        lambda: NANOS_PER_SECOND,
    ):
        exporter = CloudMonitoringMetricsExporter(
            project_id=self.project_id, client=client
        )
        exporter.project_name = self.project_name
        client.create_metric_descriptor.return_value = MetricDescriptor(
            **{
                "name": None,
                "type": "custom.googleapis.com/OpenTelemetry/name",
                "display_name": "name",
                "description": "description",
                "labels": [],
                "metric_kind": "CUMULATIVE",
                "value_type": "DISTRIBUTION",
            }
        )
        # Checkpoint buckets keyed by upper bound (">" is the overflow
        # bucket): counts 1, 2, 4, 3 — total 10.
        aggregator = HistogramAggregator(config={"bounds": [2, 4, 6]})
        aggregator.checkpoint = OrderedDict(
            [(2, 1), (4, 2), (6, 4), (">", 3)]
        )
        aggregator.last_update_timestamp = (
            WRITE_INTERVAL + 1
        ) * NANOS_PER_SECOND
        exporter.export(
            [
                MetricRecord(
                    MockMetric(meter=MockMeter()),
                    (),
                    aggregator,
                )
            ]
        )
        # Expected series: explicit buckets [2.0, 4.0, 6.0] with the
        # checkpoint counts, over the frozen interval.
        series = TimeSeries()
        series.metric.type = "custom.googleapis.com/OpenTelemetry/name"
        point = {
            "interval": {
                "start_time": {"seconds": 1},
                "end_time": {"seconds": 11},
            },
            "value": {
                "distribution_value": {
                    "count": 10,
                    "bucket_options": {
                        "explicit_buckets": {"bounds": [2.0, 4.0, 6.0]}
                    },
                    "bucket_counts": [1, 2, 4, 3],
                }
            },
        }
        series.points.add(**point)
        client.create_time_series.assert_has_calls(
            [mock.call(self.project_name, [series])]
        )
def test_export(self):
    """ConsoleMetricsExporter writes the formatted record to stdout."""
    provider = metrics.MeterProvider()
    meter = provider.get_meter(__name__)
    exporter = ConsoleMetricsExporter()
    counter = metrics.Counter(
        "available memory",
        "available memory",
        "bytes",
        int,
        meter,
        ("environment",),
    )
    labels = {"environment": "staging"}
    aggregator = SumAggregator()
    record = MetricRecord(counter, labels, aggregator, provider.resource)
    # Expected console line mirrors the exporter's repr-style format.
    expected = '{}(data="{}", labels="{}", value={}, resource={})'.format(
        ConsoleMetricsExporter.__name__,
        counter,
        labels,
        aggregator.checkpoint,
        provider.resource.attributes,
    )
    with mock.patch("sys.stdout") as stdout_mock:
        exporter.export([record])
        stdout_mock.write.assert_any_call(expected)
def test_export(self):
    """Exporting a checkpointed counter through a real gRPC transport
    returns SUCCESS."""
    transport = metric_service_grpc_transport.MetricServiceGrpcTransport(
        channel=grpc.insecure_channel(self.address)
    )
    exporter = CloudMonitoringMetricsExporter(
        self.project_id, client=MetricServiceClient(transport=transport)
    )
    meter = metrics.MeterProvider().get_meter(__name__)
    counter = meter.create_metric(
        name="name",
        description="desc",
        unit="1",
        value_type=int,
        metric_type=metrics.Counter,
    )
    aggregator = SumAggregator()
    aggregator.checkpoint = 1
    # Timestamp beyond the write interval so the point is not throttled.
    aggregator.last_update_timestamp = (
        WRITE_INTERVAL + 2
    ) * NANOS_PER_SECOND
    record = MetricRecord(counter, labels=(), aggregator=aggregator)
    self.assertEqual(exporter.export([record]), MetricsExportResult.SUCCESS)
def test_export(self):
    """export() hands one metric to the OpenCensus client along with
    node metadata and reports SUCCESS."""
    oc_client = mock.MagicMock()
    export_stub = mock.MagicMock()
    oc_client.Export = export_stub
    exporter = metrics_exporter.OpenCensusMetricsExporter(
        client=oc_client, host_name="testHostName"
    )
    counter = self._meter.create_counter(
        "testname", "testdesc", "unit", int, self._labels.keys(),
    )
    record = MetricRecord(
        counter,
        self._key_labels,
        aggregate.SumAggregator(),
        metrics.get_meter_provider().resource,
    )

    self.assertIs(exporter.export([record]), MetricsExportResult.SUCCESS)

    # Inspect the request generator handed to the Export stub.
    # pylint: disable=unsubscriptable-object
    service_request = next(export_stub.call_args[0][0])
    self.assertEqual(len(getattr(service_request, "metrics")), 1)
    node = getattr(service_request, "node")
    self.assertIsNotNone(getattr(node, "library_info"))
    self.assertIsNotNone(getattr(node, "service_info"))
    identifier = getattr(node, "identifier")
    self.assertEqual(getattr(identifier, "host_name"), "testHostName")
def test_counter_to_prometheus(self):
    """Counter records are sanitized ("@" -> "_") and exposed as a
    CounterMetricFamily with the collector's prefix."""
    meter = get_meter_provider().get_meter(__name__)
    counter = meter.create_metric(
        "test@name",
        "testdesc",
        "unit",
        int,
        metrics.Counter,
        ["environment@", "os"],
    )
    labels = meter.get_label_set(
        {"environment@": "staging", "os": "Windows"}
    )
    aggregator = CounterAggregator()
    aggregator.update(123)
    aggregator.take_checkpoint()
    collector = CustomCollector("testprefix")
    collector.add_metrics_data([MetricRecord(aggregator, labels, counter)])

    for family in collector.collect():
        self.assertEqual(type(family), CounterMetricFamily)
        self.assertEqual(family.name, "testprefix_test_name")
        self.assertEqual(family.documentation, "testdesc")
        self.assertTrue(len(family.samples) == 1)
        sample = family.samples[0]
        self.assertEqual(sample.value, 123)
        self.assertTrue(len(sample.labels) == 2)
        self.assertEqual(sample.labels["environment_"], "staging")
        self.assertEqual(sample.labels["os"], "Windows")
def test_get_value_observer_metric_descriptor(self):
    """A ValueObserver record yields a GAUGE/INT64 metric descriptor."""
    client = mock.Mock()
    exporter = CloudMonitoringMetricsExporter(
        project_id=self.project_id, client=client
    )
    exporter.project_name = self.project_name
    observer_record = MetricRecord(
        MockMetric(),
        (),
        ValueObserverAggregator(),
        Resource.create_empty(),
    )

    exporter._get_metric_descriptor(observer_record)

    expected = MetricDescriptor(
        name=None,
        type="custom.googleapis.com/OpenTelemetry/name",
        display_name="name",
        description="description",
        labels=[],
        metric_kind="GAUGE",
        value_type="INT64",
    )
    client.create_metric_descriptor.assert_called_with(
        self.project_name, expected
    )
def test_export(self, mte, transmit):
    """A successful transmission maps to MetricsExportResult.SUCCESS."""
    mte.return_value = Envelope()
    transmit.return_value = ExportResult.SUCCESS
    record = MetricRecord(
        SumAggregator(), self._test_labels, self._test_metric
    )
    self.assertEqual(
        self._exporter.export([record]), MetricsExportResult.SUCCESS
    )
def test_unique_identifier(self):
    """Two exporters created with add_unique_identifier=True must tag
    their descriptors with the identifier label and emit distinct
    identifier values on their time series."""
    client = mock.Mock()
    exporter1 = CloudMonitoringMetricsExporter(
        project_id=self.project_id,
        client=client,
        add_unique_identifier=True,
    )
    exporter2 = CloudMonitoringMetricsExporter(
        project_id=self.project_id,
        client=client,
        add_unique_identifier=True,
    )
    exporter1.project_name = self.project_name
    exporter2.project_name = self.project_name
    client.create_metric_descriptor.return_value = MetricDescriptor(
        **{
            "name": None,
            "type": "custom.googleapis.com/OpenTelemetry/name",
            "display_name": "name",
            "description": "description",
            "labels": [
                LabelDescriptor(
                    key=UNIQUE_IDENTIFIER_KEY, value_type="STRING"
                ),
            ],
            "metric_kind": "CUMULATIVE",
            "value_type": "DOUBLE",
        }
    )
    sum_agg_one = SumAggregator()
    sum_agg_one.update(1)
    metric_record = MetricRecord(
        MockMetric(),
        (),
        sum_agg_one,
    )
    # Export the same record through both exporters; the shared mock
    # client records one descriptor call and one series call per export.
    exporter1.export([metric_record])
    exporter2.export([metric_record])
    # Both descriptor calls carry the identifier label key
    # (call_args[0][1] is the MetricDescriptor positional argument).
    (
        first_call,
        second_call,
    ) = client.create_metric_descriptor.call_args_list
    self.assertEqual(first_call[0][1].labels[0].key, UNIQUE_IDENTIFIER_KEY)
    self.assertEqual(
        second_call[0][1].labels[0].key, UNIQUE_IDENTIFIER_KEY
    )
    # The identifier values on the emitted series must differ between
    # the two exporter instances.
    first_call, second_call = client.create_time_series.call_args_list
    self.assertNotEqual(
        first_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
        second_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
    )
def test_export_exception(self, mte, transmit, logger_mock):
    """An exception during transmission yields FAILURE and is logged."""
    mte.return_value = Envelope()
    transmit.side_effect = throw(Exception)
    record = MetricRecord(
        SumAggregator(), self._test_labels, self._test_metric
    )
    result = self._exporter.export([record])
    self.assertEqual(result, MetricsExportResult.FAILURE)
    self.assertEqual(logger_mock.exception.called, True)
def test_export(self):
    """Exported records are queued on the Prometheus collector and the
    call reports SUCCESS."""
    with self._registry_register_patch:
        record = MetricRecord(
            CounterAggregator(), self._test_label_set, self._test_metric
        )
        exporter = PrometheusMetricsExporter()
        export_result = exporter.export([record])
        # pylint: disable=protected-access
        self.assertEqual(len(exporter._collector._metrics_to_export), 1)
        self.assertIs(export_result, MetricsExportResult.SUCCESS)
def checkpoint_set(self) -> Sequence[MetricRecord]:
    """Return a snapshot of this batcher's state as MetricRecords.

    One record is produced per (instrument, labels) pair currently
    present in the batch map, wrapping the pair's aggregator.
    """
    return [
        MetricRecord(instrument, labels, aggregator)
        for (instrument, labels), aggregator in self._batch_map.items()
    ]
def test_export(self):
    """A SUCCESS result from _transmit surfaces as
    MetricsExportResult.SUCCESS."""
    record = MetricRecord(
        CounterAggregator(), self._test_labels, self._test_metric
    )
    with mock.patch(
        "azure_monitor.export.metrics.AzureMonitorMetricsExporter._transmit"
    ) as transmit:  # noqa: E501
        transmit.return_value = ExportResult.SUCCESS
        result = self._exporter.export([record])
    self.assertEqual(result, MetricsExportResult.SUCCESS)
def test_live_metric_envelope_documents(self):
    """Documents queued on the span processor are attached to the live
    metrics envelope, with the telemetry item's properties and
    measurements merged into the document's properties dict."""
    aggregator = SumAggregator()
    aggregator.update(123)
    aggregator.take_checkpoint()
    record = MetricRecord(self._test_metric, self._test_labels, aggregator)
    exporter = LiveMetricsExporter(
        instrumentation_key=self._instrumentation_key,
        span_processor=self._span_processor,
    )
    # Build a dependency telemetry item with two properties and two
    # measurements; measurement values are strings here because only
    # the merged keys/values are asserted below.
    request_data = RemoteDependency(
        name="testName",
        id="",
        result_code="testResultCode",
        duration="testDuration",
        success=True,
        properties={},
        measurements={},
    )
    request_data.properties["test_property1"] = "test_property1Value"
    request_data.properties["test_property2"] = "test_property2Value"
    request_data.measurements[
        "test_measurement1"] = "test_measurement1Value"
    request_data.measurements[
        "test_measurement2"] = "test_measurement2Value"
    test_envelope = Envelope(
        data=Data(
            base_type="RemoteDependencyData", base_data=request_data
        )
    )
    # Queue the document on the span processor the exporter drains.
    self._span_processor.documents.append(test_envelope)
    envelope = exporter._metric_to_live_metrics_envelope([record])
    self.assertIsInstance(envelope, LiveMetricEnvelope)
    self.assertEqual(len(envelope.documents), 1)
    self.assertEqual(
        envelope.documents[0].quickpulse_type,
        "DependencyTelemetryDocument",
    )
    self.assertEqual(
        envelope.documents[0].document_type, "RemoteDependency"
    )
    self.assertEqual(envelope.documents[0].version, "1.0")
    self.assertEqual(envelope.documents[0].operation_id, "")
    # Two properties + two measurements flatten into four entries.
    self.assertEqual(len(envelope.documents[0].properties), 4)
    self.assertEqual(
        envelope.documents[0].properties["test_measurement1"],
        "test_measurement1Value",
    )
    self.assertEqual(
        envelope.documents[0].properties["test_measurement2"],
        "test_measurement2Value",
    )
    self.assertEqual(
        envelope.documents[0].properties["test_property1"],
        "test_property1Value",
    )
    self.assertEqual(
        envelope.documents[0].properties["test_property2"],
        "test_property2Value",
    )
def test_invalid_metric(self):
    """An unsupported metric type is skipped with a warning rather than
    raising during collection."""
    meter = get_meter_provider().get_meter(__name__)
    metric = meter.create_metric(
        "tesname", "testdesc", "unit", int, TestMetric
    )
    label_set = meter.get_label_set({"environment": "staging"})
    collector = CustomCollector("testprefix")
    collector.add_metrics_data([MetricRecord(None, label_set, metric)])
    collector.collect()
    self.assertLogs("opentelemetry.ext.prometheus", level="WARNING")
def test_export_failed_retryable(self, mte, transmit):
    """A retryable transmit failure persists the envelope to local
    storage and reports FAILURE."""
    transmit.return_value = ExportResult.FAILED_RETRYABLE
    mte.return_value = Envelope()
    exporter = self._exporter
    put_mock = mock.Mock()
    exporter.storage.put = put_mock
    record = MetricRecord(
        SumAggregator(), self._test_labels, self._test_metric
    )
    self.assertEqual(
        exporter.export([record]), MetricsExportResult.FAILURE
    )
    self.assertEqual(put_mock.call_count, 1)
def test_export_exception(self, logger_mock):
    """An exception raised inside _transmit maps to FAILED_NOT_RETRYABLE
    and is logged via logger.exception."""
    record = MetricRecord(
        CounterAggregator(), self._test_labels, self._test_metric
    )
    with mock.patch(
        "azure_monitor.export.metrics.AzureMonitorMetricsExporter._transmit",
        throw(Exception),
    ):  # noqa: E501
        result = self._exporter.export([record])
    self.assertEqual(result, MetricsExportResult.FAILED_NOT_RETRYABLE)
    self.assertEqual(logger_mock.exception.called, True)
def test_invalid_metric(self):
    """Unsupported metric types produce a warning log instead of an
    error during collection."""
    meter = get_meter_provider().get_meter(__name__)
    metric = meter.create_metric(
        "tesname", "testdesc", "unit", int, StubMetric
    )
    key_labels = get_dict_as_key({"environment": "staging"})
    collector = CustomCollector("testprefix")
    collector.add_metrics_data([MetricRecord(metric, key_labels, None)])
    collector.collect()
    self.assertLogs("opentelemetry.exporter.prometheus", level="WARNING")
def test_export(self):
    """Exported records land in the collector's pending list and the
    call reports SUCCESS."""
    with self._registry_register_patch:
        exporter = PrometheusMetricsExporter()
        record = MetricRecord(
            self._test_metric,
            self._labels_key,
            SumAggregator(),
            get_meter_provider().resource,
        )
        export_result = exporter.export([record])
        # pylint: disable=protected-access
        self.assertEqual(len(exporter._collector._metrics_to_export), 1)
        self.assertIs(export_result, MetricsExportResult.SUCCESS)
def test_export_exception(self):
    """A raising LiveMetricsSender.post causes export() to report
    FAILURE."""
    record = MetricRecord(
        self._test_metric, self._test_labels, SumAggregator()
    )
    exporter = LiveMetricsExporter(
        instrumentation_key=self._instrumentation_key,
        span_processor=self._span_processor,
    )
    with mock.patch(
        "azure_monitor.sdk.auto_collection.live_metrics.sender.LiveMetricsSender.post",
        throw(Exception),
    ):
        result = exporter.export([record])
    self.assertEqual(result, MetricsExportResult.FAILURE)
def test_export_failed_retryable(self):
    """A retryable transmit failure stores the envelope for later retry
    and reports FAILED_RETRYABLE."""
    record = MetricRecord(
        CounterAggregator(), self._test_labels, self._test_metric
    )
    exporter = self._exporter
    with mock.patch(
        "azure_monitor.export.metrics.AzureMonitorMetricsExporter._transmit"
    ) as transmit:  # noqa: E501
        transmit.return_value = ExportResult.FAILED_RETRYABLE
        put_mock = mock.Mock()
        exporter.storage.put = put_mock
        result = exporter.export([record])
    self.assertEqual(result, MetricsExportResult.FAILED_RETRYABLE)
    self.assertEqual(put_mock.call_count, 1)
def test_export_failed(self):
    """An HTTP 400 from the live-metrics endpoint maps to FAILURE."""
    record = MetricRecord(
        self._test_metric, self._test_labels, SumAggregator()
    )
    exporter = LiveMetricsExporter(
        instrumentation_key=self._instrumentation_key,
        span_processor=self._span_processor,
    )
    with mock.patch(
        "azure_monitor.sdk.auto_collection.live_metrics.sender.LiveMetricsSender.post"
    ) as request:
        bad_response = requests.Response()
        bad_response.status_code = 400
        request.return_value = bad_response
        result = exporter.export([record])
    self.assertEqual(result, MetricsExportResult.FAILURE)
def test_live_metric_envelope_counter(self):
    """A checkpointed counter becomes a single weighted live metric with
    no attached documents."""
    aggregator = SumAggregator()
    aggregator.update(123)
    aggregator.take_checkpoint()
    exporter = LiveMetricsExporter(
        instrumentation_key=self._instrumentation_key,
        span_processor=self._span_processor,
    )
    envelope = exporter._metric_to_live_metrics_envelope(
        [MetricRecord(self._test_metric, self._test_labels, aggregator)]
    )
    self.assertIsInstance(envelope, LiveMetricEnvelope)
    self.assertEqual(envelope.documents, [])
    live_metric = envelope.metrics[0]
    self.assertEqual(live_metric.name, "testname")
    self.assertEqual(live_metric.value, 123)
    self.assertEqual(live_metric.weight, 1)
def checkpoint_set(self) -> Sequence[MetricRecord]:
    """Return a snapshot of this processor's state as MetricRecords.

    One record is produced per batch-map entry; each record carries the
    entry's instrument, labels, aggregator, and the processor resource.
    """
    return [
        MetricRecord(instrument, labels, aggregator, self._resource)
        for (
            (instrument, _agg_type, _, labels),
            aggregator,
        ) in self._batch_map.items()
    ]
def setUp(self):
    """Create an OTLP exporter and a counter MetricRecord fixture."""
    self.exporter = OTLPMetricsExporter()
    resource = SDKResource(OrderedDict([("a", 1), ("b", False)]))
    meter = MeterProvider(resource=resource).get_meter(__name__)
    counter = Counter("a", "b", "c", int, meter, ("d",))
    self.counter_metric_record = MetricRecord(
        counter,
        OrderedDict([("e", "f")]),
        SumAggregator(),
        resource,
    )
def test_translate_to_collector(self):
    """A checkpointed int counter translates into one OpenCensus proto
    Metric with a CUMULATIVE_INT64 descriptor, one timeseries, and one
    point carrying the checkpointed value and timestamp."""
    test_metric = self._meter.create_metric(
        "testname", "testdesc", "unit", int, Counter,
    )
    aggregator = aggregate.SumAggregator()
    aggregator.update(123)
    aggregator.take_checkpoint()
    record = MetricRecord(test_metric, self._key_labels, aggregator,)
    output_metrics = metrics_exporter.translate_to_collector([record])
    self.assertEqual(len(output_metrics), 1)
    self.assertIsInstance(output_metrics[0], metrics_pb2.Metric)
    # Descriptor fields mirror the created metric.
    self.assertEqual(output_metrics[0].metric_descriptor.name, "testname")
    self.assertEqual(
        output_metrics[0].metric_descriptor.description, "testdesc"
    )
    self.assertEqual(output_metrics[0].metric_descriptor.unit, "unit")
    self.assertEqual(
        output_metrics[0].metric_descriptor.type,
        metrics_pb2.MetricDescriptor.CUMULATIVE_INT64,
    )
    # Label keys/values come from the record's labels
    # (self._key_labels — presumably {"environment": "staging"};
    # set up outside this view).
    self.assertEqual(
        len(output_metrics[0].metric_descriptor.label_keys), 1
    )
    self.assertEqual(
        output_metrics[0].metric_descriptor.label_keys[0].key,
        "environment",
    )
    self.assertEqual(len(output_metrics[0].timeseries), 1)
    self.assertEqual(len(output_metrics[0].timeseries[0].label_values), 1)
    self.assertEqual(
        output_metrics[0].timeseries[0].label_values[0].has_value, True
    )
    self.assertEqual(
        output_metrics[0].timeseries[0].label_values[0].value, "staging"
    )
    # The point's timestamp is the aggregator's last update split into
    # whole seconds and remaining nanoseconds.
    self.assertEqual(len(output_metrics[0].timeseries[0].points), 1)
    self.assertEqual(
        output_metrics[0].timeseries[0].points[0].timestamp.seconds,
        record.aggregator.last_update_timestamp // 1000000000,
    )
    self.assertEqual(
        output_metrics[0].timeseries[0].points[0].timestamp.nanos,
        record.aggregator.last_update_timestamp % 1000000000,
    )
    self.assertEqual(
        output_metrics[0].timeseries[0].points[0].int64_value, 123
    )
def test_export_value_observer(self):
    """Exporting a ValueObserver record creates a GAUGE series whose
    single point carries the checkpoint's last value with equal
    start/end interval times."""
    client = mock.Mock()
    # Freeze time_ns so the interval timestamps are deterministic.
    with mock.patch(
        "opentelemetry.exporter.cloud_monitoring.time_ns",
        lambda: NANOS_PER_SECOND,
    ):
        exporter = CloudMonitoringMetricsExporter(
            project_id=self.project_id, client=client
        )
        exporter.project_name = self.project_name
        client.create_metric_descriptor.return_value = MetricDescriptor(
            **{
                "name": None,
                "type": "custom.googleapis.com/OpenTelemetry/name",
                "display_name": "name",
                "description": "description",
                "labels": [],
                "metric_kind": "GAUGE",
                "value_type": "INT64",
            }
        )
        aggregator = ValueObserverAggregator()
        # Checkpoint tuple fields — the final field (5) is what the
        # exported point must carry.
        aggregator.checkpoint = aggregator._TYPE(1, 2, 3, 4, 5)
        aggregator.last_update_timestamp = (
            WRITE_INTERVAL + 1
        ) * NANOS_PER_SECOND
        exporter.export(
            [
                MetricRecord(
                    MockMetric(meter=mock_meter()),
                    (),
                    aggregator,
                    Resource.create_empty(),
                )
            ]
        )
        # Expected series: gauge point with value 5; for gauges the
        # interval start equals the interval end.
        series = TimeSeries()
        series.metric_kind = MetricDescriptor.MetricKind.GAUGE
        series.metric.type = "custom.googleapis.com/OpenTelemetry/name"
        point = series.points.add()
        point.value.int64_value = 5
        point.interval.end_time.seconds = WRITE_INTERVAL + 1
        point.interval.end_time.nanos = 0
        point.interval.start_time.seconds = WRITE_INTERVAL + 1
        point.interval.start_time.nanos = 0
        client.create_time_series.assert_has_calls(
            [mock.call(self.project_name, [series])]
        )