Example #1
0
 def test_ungrouped_batcher_process_exists(self):
     """An existing (metric, labels) entry is replaced by the processed record."""
     meter = metrics.MeterProvider().get_meter(__name__)
     batcher = UngroupedBatcher(True)
     existing_agg = SumAggregator()
     incoming_agg = SumAggregator()
     incoming_agg.update(1.0)
     metric = metrics.Counter(
         "available memory",
         "available memory",
         "bytes",
         int,
         meter,
         ("environment", ),
     )
     labels = ()
     batcher._batch_map = {(metric, labels): existing_agg}
     record = metrics.Record(metric, labels, incoming_agg)
     batcher.process(record)
     self.assertEqual(len(batcher._batch_map), 1)
     stored = batcher._batch_map.get((metric, labels))
     self.assertIsNotNone(stored)
     # Processing checkpoints the incoming value and resets current.
     self.assertEqual(stored.current, 0)
     self.assertEqual(stored.checkpoint, 1.0)
Example #2
0
    def test_export(self):
        """Exporting one sum record over a real gRPC channel reports SUCCESS."""
        channel = grpc.insecure_channel(self.address)
        transport = metric_service_grpc_transport.MetricServiceGrpcTransport(
            channel=channel)
        exporter = CloudMonitoringMetricsExporter(
            self.project_id, client=MetricServiceClient(transport=transport))

        counter = metrics.MeterProvider().get_meter(__name__).create_metric(
            name="name",
            description="desc",
            unit="1",
            value_type=int,
            metric_type=metrics.Counter,
        )

        aggregator = SumAggregator()
        aggregator.checkpoint = 1
        # Stamp the point far enough past WRITE_INTERVAL that it is sent.
        aggregator.last_update_timestamp = (
            (WRITE_INTERVAL + 2) * NANOS_PER_SECOND)

        record = MetricRecord(counter, labels=(), aggregator=aggregator)
        result = exporter.export([record])

        self.assertEqual(result, MetricsExportResult.SUCCESS)
 def test_update(self, time_mock):
     """update() accumulates values and records the (mocked) update time."""
     time_mock.return_value = 123
     aggregator = SumAggregator()
     for value in (1.0, 2.0):
         aggregator.update(value)
     self.assertEqual(aggregator.current, 3.0)
     self.assertEqual(aggregator.last_update_timestamp, 123)
    def test_unique_identifier(self):
        """Two exporters with add_unique_identifier emit distinct identifier labels."""
        client = mock.Mock()
        exporter1 = CloudMonitoringMetricsExporter(
            project_id=self.project_id,
            client=client,
            add_unique_identifier=True,
        )
        exporter2 = CloudMonitoringMetricsExporter(
            project_id=self.project_id,
            client=client,
            add_unique_identifier=True,
        )
        for exporter in (exporter1, exporter2):
            exporter.project_name = self.project_name

        client.create_metric_descriptor.return_value = MetricDescriptor(
            name=None,
            type="custom.googleapis.com/OpenTelemetry/name",
            display_name="name",
            description="description",
            labels=[
                LabelDescriptor(
                    key=UNIQUE_IDENTIFIER_KEY, value_type="STRING"),
            ],
            metric_kind="CUMULATIVE",
            value_type="DOUBLE",
        )

        aggregator = SumAggregator()
        aggregator.update(1)
        metric_record = MetricRecord(
            MockMetric(),
            (),
            aggregator,
        )
        exporter1.export([metric_record])
        exporter2.export([metric_record])

        # Unpacking also asserts exactly two descriptor calls were made.
        (
            first_call,
            second_call,
        ) = client.create_metric_descriptor.call_args_list
        self.assertEqual(first_call[0][1].labels[0].key, UNIQUE_IDENTIFIER_KEY)
        self.assertEqual(second_call[0][1].labels[0].key,
                         UNIQUE_IDENTIFIER_KEY)

        first_call, second_call = client.create_time_series.call_args_list
        # The random identifier values must differ between the two exporters.
        self.assertNotEqual(
            first_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
            second_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
        )
    def test_concurrent_update(self):
        """Updates from two concurrent threads all land in the checkpoint."""
        aggregator = SumAggregator()

        with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
            futures = [
                executor.submit(self.call_update, aggregator)
                for _ in range(2)
            ]
            expected_total = sum(future.result() for future in futures)

        aggregator.take_checkpoint()
        self.assertEqual(expected_total, aggregator.checkpoint)
 def test_merge(self):
     """merge() adds checkpoints and takes the newer update timestamp."""
     target = SumAggregator()
     source = SumAggregator()
     target.checkpoint = 1.0
     source.checkpoint = 3.0
     source.last_update_timestamp = 123
     target.merge(source)
     self.assertEqual(target.checkpoint, 4.0)
     self.assertEqual(target.last_update_timestamp, 123)
Example #7
0
 def test_finished_collection_stateless(self):
     """A stateless batcher drops all entries after finished_collection()."""
     meter = metrics.MeterProvider().get_meter(__name__)
     batcher = Batcher(False)
     aggregator = SumAggregator()
     aggregator.update(1.0)
     metric = metrics.Counter("available memory", "available memory",
                              "bytes", int, meter)
     labels = ()
     batcher._batch_map = {
         (metric, SumAggregator, tuple(), labels): aggregator
     }
     batcher.finished_collection()
     self.assertEqual(len(batcher._batch_map), 0)
 def test_finished_collection_stateful(self):
     """A stateful processor keeps its entries after finished_collection()."""
     meter_provider = metrics.MeterProvider()
     meter = meter_provider.get_meter(__name__)
     processor = Processor(True, meter_provider.resource)
     aggregator = SumAggregator()
     aggregator.update(1.0)
     metric = metrics.Counter("available memory", "available memory",
                              "bytes", int, meter)
     labels = ()
     processor._batch_map = {
         (metric, SumAggregator, tuple(), labels): aggregator
     }
     processor.finished_collection()
     self.assertEqual(len(processor._batch_map), 1)
Example #9
0
    def aggregator_for(self, instrument_type: Type[InstrumentT]) -> Aggregator:
        """Returns an aggregator based on metric instrument type.

        Aggregators keep track of and updates values when metrics get updated.
        """
        # pylint:disable=R0201
        # Checked in order; the first matching instrument type wins.
        dispatch = (
            ((Counter, UpDownCounter), SumAggregator),
            ((SumObserver, UpDownSumObserver), LastValueAggregator),
            (ValueRecorder, MinMaxSumCountAggregator),
            (ValueObserver, ValueObserverAggregator),
        )
        for instrument_types, aggregator_class in dispatch:
            if issubclass(instrument_type, instrument_types):
                return aggregator_class()
        # TODO: Add other aggregators
        return SumAggregator()
 def test_export(self):
     """ConsoleMetricsExporter writes the formatted record to stdout."""
     meter_provider = metrics.MeterProvider()
     meter = meter_provider.get_meter(__name__)
     exporter = ConsoleMetricsExporter()
     metric = metrics.Counter(
         "available memory",
         "available memory",
         "bytes",
         int,
         meter,
         ("environment", ),
     )
     labels = {"environment": "staging"}
     aggregator = SumAggregator()
     record = MetricRecord(metric, labels, aggregator,
                           meter_provider.resource)
     expected_line = '{}(data="{}", labels="{}", value={}, resource={})'.format(
         ConsoleMetricsExporter.__name__,
         metric,
         labels,
         aggregator.checkpoint,
         meter_provider.resource.attributes,
     )
     with mock.patch("sys.stdout") as mock_stdout:
         exporter.export([record])
         mock_stdout.write.assert_any_call(expected_line)
Example #11
0
 def test_checkpoint_set(self):
     """checkpoint_set() yields one record per batch-map entry."""
     meter = metrics.MeterProvider().get_meter(__name__)
     batcher = Batcher(True)
     aggregator = SumAggregator()
     aggregator.update(1.0)
     metric = metrics.Counter("available memory", "available memory",
                              "bytes", int, meter)
     labels = ()
     batcher._batch_map = {
         (metric, SumAggregator, tuple(), labels): aggregator
     }
     records = batcher.checkpoint_set()
     self.assertEqual(len(records), 1)
     record = records[0]
     self.assertEqual(record.instrument, metric)
     self.assertEqual(record.labels, labels)
     self.assertEqual(record.aggregator, aggregator)
Example #12
0
 def test_batcher_process_not_stateful(self):
     """Processing into an empty map checkpoints the incoming aggregator."""
     meter = metrics.MeterProvider().get_meter(__name__)
     batcher = Batcher(True)
     aggregator = SumAggregator()
     aggregator.update(1.0)
     metric = metrics.Counter("available memory", "available memory",
                              "bytes", int, meter)
     labels = ()
     batch_key = (metric, SumAggregator, tuple(), labels)
     batcher._batch_map = {}
     record = metrics.Record(metric, labels, aggregator)
     batcher.process(record)
     self.assertEqual(len(batcher._batch_map), 1)
     stored = batcher._batch_map.get(batch_key)
     self.assertIsNotNone(stored)
     # Processing checkpoints the value and resets the running total.
     self.assertEqual(stored.current, 0)
     self.assertEqual(stored.checkpoint, 1.0)
Example #13
0
 def test_export(self, mte, transmit):
     """A successful transmission maps to MetricsExportResult.SUCCESS.

     ``mte`` mocks envelope creation and ``transmit`` mocks the send,
     so no network traffic occurs.
     """
     # NOTE(review): argument order (aggregator, labels, metric) differs
     # from other MetricRecord(...) call sites in this file — confirm
     # against the MetricRecord signature used by this exporter.
     record = MetricRecord(SumAggregator(), self._test_labels,
                           self._test_metric)
     exporter = self._exporter
     mte.return_value = Envelope()
     transmit.return_value = ExportResult.SUCCESS
     result = exporter.export([record])
     self.assertEqual(result, MetricsExportResult.SUCCESS)
Example #14
0
    def test_translate_updowncounter_export_record(self, mock_time_ns):
        """An UpDownCounter export record translates to an OTLP IntSum metric.

        ``time_ns`` is mocked to 1 so every timestamp field in the expected
        protobuf below is deterministic.
        """
        mock_time_ns.configure_mock(**{"return_value": 1})

        counter_export_record = ExportRecord(
            UpDownCounter("c", "d", "e", int, self.meter),
            [("g", "h")],
            SumAggregator(),
            self.resource,
        )

        # Pin the aggregator state so the expected request is exact.
        counter_export_record.aggregator.checkpoint = 1
        counter_export_record.aggregator.initial_checkpoint_timestamp = 1
        counter_export_record.aggregator.last_update_timestamp = 1

        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(
                        instrumentation_library=InstrumentationLibrary(
                            name="name",
                            version="version",
                        ),
                        metrics=[
                            OTLPMetric(
                                name="c",
                                description="d",
                                unit="e",
                                int_sum=IntSum(
                                    data_points=[
                                        IntDataPoint(
                                            labels=[
                                                StringKeyValue(key="g",
                                                               value="h")
                                            ],
                                            value=1,
                                            time_unix_nano=1,
                                            start_time_unix_nano=1,
                                        )
                                    ],
                                    aggregation_temporality=(
                                        AggregationTemporality.
                                        AGGREGATION_TEMPORALITY_CUMULATIVE),
                                ),
                            )
                        ],
                    )
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([counter_export_record])

        self.assertEqual(expected, actual)
Example #15
0
 def test_export_exception(self, mte, transmit, logger_mock):
     """An exception during transmit maps to FAILURE and is logged."""
     # NOTE(review): argument order (aggregator, labels, metric) differs
     # from other MetricRecord(...) call sites in this file — confirm
     # against the MetricRecord signature used by this exporter.
     record = MetricRecord(SumAggregator(), self._test_labels,
                           self._test_metric)
     exporter = self._exporter
     mte.return_value = Envelope()
     transmit.side_effect = throw(Exception)
     result = exporter.export([record])
     self.assertEqual(result, MetricsExportResult.FAILURE)
     # The exporter must log the exception rather than propagate it.
     self.assertEqual(logger_mock.exception.called, True)
Example #16
0
 def test_processor_process_not_exists(self):
     """Processing a new accumulation creates a checkpointed map entry."""
     meter_provider = metrics.MeterProvider()
     meter = meter_provider.get_meter(__name__)
     processor = Processor(True, meter_provider.resource)
     aggregator = SumAggregator()
     aggregator.update(1.0)
     metric = metrics.Counter("available memory", "available memory",
                              "bytes", int, meter)
     labels = ()
     batch_key = (metric, SumAggregator, tuple(), labels)
     processor._batch_map = {}
     accumulation = metrics.Accumulation(metric, labels, aggregator)
     processor.process(accumulation)
     self.assertEqual(len(processor._batch_map), 1)
     stored = processor._batch_map.get(batch_key)
     self.assertIsNotNone(stored)
     self.assertEqual(stored.current, 0)
     self.assertEqual(stored.checkpoint, 1.0)
 def test_export(self):
     """Exporting a record registers it with the Prometheus collector."""
     with self._registry_register_patch:
         exporter = PrometheusMetricsExporter()
         record = MetricRecord(
             self._test_metric, self._labels_key, SumAggregator(),
         )
         export_result = exporter.export([record])
         self.assertIs(export_result, MetricsExportResult.SUCCESS)
         # pylint: disable=protected-access
         self.assertEqual(
             len(exporter._collector._metrics_to_export), 1)
Example #18
0
 def test_finished_collection_stateful(self):
     """A stateful batcher retains its entries after finished_collection()."""
     meter = metrics.MeterProvider().get_meter(__name__)
     batcher = UngroupedBatcher(True)
     aggregator = SumAggregator()
     aggregator.update(1.0)
     metric = metrics.Counter(
         "available memory",
         "available memory",
         "bytes",
         int,
         meter,
         ("environment", ),
     )
     labels = ()
     batcher._batch_map = {(metric, labels): aggregator}
     batcher.finished_collection()
     self.assertEqual(len(batcher._batch_map), 1)
Example #19
0
 def test_export_failed_retryable(self, mte, transmit):
     """A retryable transmit failure maps to FAILURE and stores the batch.

     The record must be put into local storage exactly once for retry.
     """
     # NOTE(review): argument order (aggregator, labels, metric) differs
     # from other MetricRecord(...) call sites in this file — confirm
     # against the MetricRecord signature used by this exporter.
     record = MetricRecord(SumAggregator(), self._test_labels,
                           self._test_metric)
     exporter = self._exporter
     transmit.return_value = ExportResult.FAILED_RETRYABLE
     mte.return_value = Envelope()
     storage_mock = mock.Mock()
     exporter.storage.put = storage_mock
     result = exporter.export([record])
     self.assertEqual(result, MetricsExportResult.FAILURE)
     self.assertEqual(storage_mock.call_count, 1)
    def test_valid_export(self, mock_post):
        """A 200 response maps to SUCCESS; an empty batch also succeeds."""
        mock_post.return_value.configure_mock(**{"status_code": 200})
        test_metric = Counter("testname", "testdesc", "testunit", int, None)
        labels = get_dict_as_key({"environment": "testing"})
        record = ExportRecord(test_metric, labels, SumAggregator(),
                              Resource({}))
        self.assertIs(self.exporter.export([record]),
                      MetricsExportResult.SUCCESS)
        self.assertEqual(mock_post.call_count, 1)

        # An empty record list is a no-op success.
        self.assertIs(self.exporter.export([]),
                      MetricsExportResult.SUCCESS)
Example #21
0
    def test_create_timeseries(self):
        """_create_timeseries builds labels from metric name, resource and record."""

        def make_label(name, value):
            label = Label()
            label.name = name
            label.value = value
            return label

        sum_aggregator = SumAggregator()
        sum_aggregator.update(5)
        sum_aggregator.take_checkpoint()
        export_record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            get_dict_as_key({"record_name": "record_value"}),
            sum_aggregator,
            Resource({"resource_name": "resource_value"}),
        )

        expected_timeseries = TimeSeries()
        for key, value in (
                ("__name__", "testname"),
                ("resource_name", "resource_value"),
                ("record_name", "record_value"),
        ):
            expected_timeseries.labels.append(make_label(key, value))

        sample = expected_timeseries.samples.add()
        # Assumes last_update_timestamp is nanoseconds, scaled to ms — TODO confirm.
        sample.timestamp = int(sum_aggregator.last_update_timestamp / 1000000)
        sample.value = 5.0

        actual = self.exporter._create_timeseries(export_record,
                                                  "testname", 5.0)
        self.assertEqual(actual, expected_timeseries)
Example #22
0
    def test_metric_to_envelope(self):
        """_metric_to_envelope produces a well-formed Application Insights envelope."""
        aggregator = SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = MetricRecord(self._test_metric, self._test_labels, aggregator)
        envelope = self._exporter._metric_to_envelope(record)

        self.assertIsInstance(envelope, Envelope)
        self.assertEqual(envelope.ver, 1)
        self.assertEqual(envelope.name, "Microsoft.ApplicationInsights.Metric")
        self.assertEqual(envelope.time,
                         ns_to_iso_str(aggregator.last_update_timestamp))
        self.assertEqual(envelope.sample_rate, None)
        self.assertEqual(envelope.seq, None)
        self.assertEqual(envelope.ikey, "1234abcd-5678-4efa-8abc-1234567890ab")
        self.assertEqual(envelope.flags, None)

        self.assertIsInstance(envelope.data, Data)
        base_data = envelope.data.base_data
        self.assertIsInstance(base_data, MetricData)
        self.assertEqual(base_data.ver, 2)
        self.assertEqual(len(base_data.metrics), 1)
        data_point = base_data.metrics[0]
        self.assertIsInstance(data_point, DataPoint)
        self.assertEqual(data_point.ns, "testdesc")
        self.assertEqual(data_point.name, "testname")
        self.assertEqual(data_point.value, 123)
        self.assertEqual(base_data.properties["environment"], "staging")

        # Context tags are populated from the SDK's environment detection.
        for tag in (
                "ai.cloud.role",
                "ai.cloud.roleInstance",
                "ai.device.id",
                "ai.device.locale",
                "ai.device.osVersion",
                "ai.device.type",
                "ai.internal.sdkVersion",
        ):
            self.assertIsNotNone(envelope.tags[tag])
Example #23
0
    def test_counter_to_prometheus(self):
        """Counter records become CounterMetricFamily with sanitized names."""
        meter = get_meter_provider().get_meter(__name__)
        metric = meter.create_counter(
            "test@name",
            "testdesc",
            "unit",
            int,
        )
        labels = {"environment@": "staging", "os": "Windows"}
        aggregator = SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = ExportRecord(metric, get_dict_as_key(labels), aggregator,
                              get_meter_provider().resource)
        collector = CustomCollector("testprefix")
        collector.add_metrics_data([record])

        for prometheus_metric in collector.collect():
            self.assertEqual(type(prometheus_metric), CounterMetricFamily)
            # "@" is not a valid Prometheus name character; it becomes "_".
            self.assertEqual(prometheus_metric.name, "testprefix_test_name")
            self.assertEqual(prometheus_metric.documentation, "testdesc")
            samples = prometheus_metric.samples
            self.assertTrue(len(samples) == 1)
            sample = samples[0]
            self.assertEqual(sample.value, 123)
            self.assertTrue(len(sample.labels) == 2)
            self.assertEqual(sample.labels["environment_"], "staging")
            self.assertEqual(sample.labels["os"], "Windows")
Example #24
0
 def test_export_exception(self):
     """An exception while posting live metrics maps to FAILURE."""
     exporter = LiveMetricsExporter(
         instrumentation_key=self._instrumentation_key,
         span_processor=self._span_processor,
     )
     record = MetricRecord(self._test_metric, self._test_labels,
                           SumAggregator())
     with mock.patch(
             "azure_monitor.sdk.auto_collection.live_metrics.sender.LiveMetricsSender.post",
             throw(Exception),
     ):
         self.assertEqual(exporter.export([record]),
                          MetricsExportResult.FAILURE)
Example #25
0
 def test_export_failed(self):
     """A non-success HTTP response from the sender maps to FAILURE."""
     exporter = LiveMetricsExporter(
         instrumentation_key=self._instrumentation_key,
         span_processor=self._span_processor,
     )
     record = MetricRecord(self._test_metric, self._test_labels,
                           SumAggregator())
     with mock.patch(
             "azure_monitor.sdk.auto_collection.live_metrics.sender.LiveMetricsSender.post"
     ) as request:
         bad_response = requests.Response()
         bad_response.status_code = 400
         request.return_value = bad_response
         self.assertEqual(exporter.export([record]),
                          MetricsExportResult.FAILURE)
Example #26
0
    def test_convert_from_sum(self):
        """_convert_from_sum emits a timeseries named "<name>_sum"."""
        sum_record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            SumAggregator(),
            Resource({}),
        )
        for value in (3, 2):
            sum_record.aggregator.update(value)
        sum_record.aggregator.take_checkpoint()

        expected_timeseries = self.exporter._create_timeseries(
            sum_record, "testname_sum", 5.0)
        converted = self.exporter._convert_from_sum(sum_record)
        self.assertEqual(converted[0], expected_timeseries)
 def setUp(self):
     """Create an OTLP exporter and one counter export record for the tests."""
     self.exporter = OTLPMetricsExporter()
     resource = SDKResource(OrderedDict([("a", 1), ("b", False)]))
     counter = Counter(
         "a",
         "b",
         "c",
         int,
         MeterProvider(resource=resource, ).get_meter(__name__),
         ("d", ),
     )
     self.counter_metric_record = MetricRecord(
         counter,
         OrderedDict([("e", "f")]),
         SumAggregator(),
         resource,
     )
Example #28
0
 def test_live_metric_envelope_documents(self):
     """Collected span documents are carried into the live-metrics envelope."""
     aggregator = SumAggregator()
     aggregator.update(123)
     aggregator.take_checkpoint()
     record = MetricRecord(self._test_metric, self._test_labels, aggregator)
     exporter = LiveMetricsExporter(
         instrumentation_key=self._instrumentation_key,
         span_processor=self._span_processor,
     )
     request_data = RemoteDependency(
         name="testName",
         id="",
         result_code="testResultCode",
         duration="testDuration",
         success=True,
         properties={},
         measurements={},
     )
     request_data.properties.update({
         "test_property1": "test_property1Value",
         "test_property2": "test_property2Value",
     })
     request_data.measurements.update({
         "test_measurement1": "test_measurement1Value",
         "test_measurement2": "test_measurement2Value",
     })
     test_envelope = Envelope(data=Data(base_type="RemoteDependencyData",
                                        base_data=request_data))
     self._span_processor.documents.append(test_envelope)
     envelope = exporter._metric_to_live_metrics_envelope([record])
     self.assertIsInstance(envelope, LiveMetricEnvelope)
     self.assertEqual(len(envelope.documents), 1)
     document = envelope.documents[0]
     self.assertEqual(document.quickpulse_type,
                      "DependencyTelemetryDocument")
     self.assertEqual(document.document_type, "RemoteDependency")
     self.assertEqual(document.version, "1.0")
     self.assertEqual(document.operation_id, "")
     # Both properties and measurements are merged into document.properties.
     self.assertEqual(len(document.properties), 4)
     for key, value in (
             ("test_measurement1", "test_measurement1Value"),
             ("test_measurement2", "test_measurement2Value"),
             ("test_property1", "test_property1Value"),
             ("test_property2", "test_property2Value"),
     ):
         self.assertEqual(document.properties[key], value)
    def setUp(self):
        """Build an insecure OTLP exporter and a counter record; reset config."""
        self.exporter = OTLPMetricsExporter(insecure=True)
        resource = SDKResource(OrderedDict([("a", 1), ("b", False)]))

        counter = Counter(
            "c",
            "d",
            "e",
            int,
            MeterProvider(resource=resource, ).get_meter(__name__),
            ("f", ),
        )
        self.counter_metric_record = MetricRecord(
            counter,
            [("g", "h")],
            SumAggregator(),
            resource,
        )

        Configuration._reset()  # pylint: disable=protected-access
    def test_concurrent_update_and_checkpoint(self):
        """Checkpointing while another thread updates never loses counts."""
        aggregator = SumAggregator()
        running_total = 0

        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
            future = executor.submit(self.call_update, aggregator)

            # Repeatedly drain checkpoints while the updater thread runs.
            while not future.done():
                aggregator.take_checkpoint()
                running_total += aggregator.checkpoint

        # Collect whatever remained after the thread finished.
        aggregator.take_checkpoint()
        running_total += aggregator.checkpoint

        self.assertEqual(future.result(), running_total)