Example #1
    def test_update(self, time_mock):
        # time_mock is supplied by a mock.patch decorator (not shown) that stubs the aggregator's time source.
        time_mock.return_value = 123
        sum_agg = SumAggregator()
        sum_agg.update(1.0)
        sum_agg.update(2.0)
        self.assertEqual(sum_agg.current, 3.0)
        self.assertEqual(sum_agg.last_update_timestamp, 123)
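For readers unfamiliar with the aggregator API used throughout these examples, here is a minimal, self-contained sketch of the accumulate-then-checkpoint pattern the test above exercises. ToySumAggregator is a hypothetical stand-in written for illustration, not the SDK's SumAggregator.

import time

class ToySumAggregator:
    """Toy stand-in: accumulates into `current`; `take_checkpoint` moves the sum aside."""

    def __init__(self):
        self.current = 0.0
        self.checkpoint = 0.0
        self.last_update_timestamp = 0

    def update(self, value):
        # Add the new measurement and remember when it arrived (nanoseconds).
        self.current += value
        self.last_update_timestamp = time.time_ns()

    def take_checkpoint(self):
        # Freeze the running sum for export and start a new interval.
        self.checkpoint = self.current
        self.current = 0.0

agg = ToySumAggregator()
agg.update(1.0)
agg.update(2.0)
assert agg.current == 3.0
agg.take_checkpoint()
assert (agg.current, agg.checkpoint) == (0.0, 3.0)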
Example #2
    def test_metric_to_envelope(self):
        aggregator = SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = MetricRecord(self._test_metric, self._test_labels, aggregator)
        exporter = self._exporter
        envelope = exporter._metric_to_envelope(record)
        self.assertIsInstance(envelope, Envelope)
        self.assertEqual(envelope.ver, 1)
        self.assertEqual(envelope.name, "Microsoft.ApplicationInsights.Metric")
        self.assertEqual(envelope.time,
                         ns_to_iso_str(aggregator.last_update_timestamp))
        self.assertEqual(envelope.sample_rate, None)
        self.assertEqual(envelope.seq, None)
        self.assertEqual(envelope.ikey, "1234abcd-5678-4efa-8abc-1234567890ab")
        self.assertEqual(envelope.flags, None)

        self.assertIsInstance(envelope.data, Data)
        self.assertIsInstance(envelope.data.base_data, MetricData)
        self.assertEqual(envelope.data.base_data.ver, 2)
        self.assertEqual(len(envelope.data.base_data.metrics), 1)
        self.assertIsInstance(envelope.data.base_data.metrics[0], DataPoint)
        self.assertEqual(envelope.data.base_data.metrics[0].ns, "testdesc")
        self.assertEqual(envelope.data.base_data.metrics[0].name, "testname")
        self.assertEqual(envelope.data.base_data.metrics[0].value, 123)
        self.assertEqual(envelope.data.base_data.properties["environment"],
                         "staging")
        self.assertIsNotNone(envelope.tags["ai.cloud.role"])
        self.assertIsNotNone(envelope.tags["ai.cloud.roleInstance"])
        self.assertIsNotNone(envelope.tags["ai.device.id"])
        self.assertIsNotNone(envelope.tags["ai.device.locale"])
        self.assertIsNotNone(envelope.tags["ai.device.osVersion"])
        self.assertIsNotNone(envelope.tags["ai.device.type"])
        self.assertIsNotNone(envelope.tags["ai.internal.sdkVersion"])
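The envelope.time assertion relies on converting the aggregator's nanosecond timestamp into an ISO 8601 string. A rough sketch of such a conversion, assuming a UTC epoch timestamp in nanoseconds (an illustrative helper, not the exporter's ns_to_iso_str):

from datetime import datetime, timezone

def nanoseconds_to_iso(timestamp_ns: int) -> str:
    # Scale nanoseconds down to seconds, then format as an ISO 8601 UTC string.
    return datetime.fromtimestamp(timestamp_ns / 1e9, tz=timezone.utc).isoformat()

print(nanoseconds_to_iso(1_609_459_200_000_000_000))  # 2021-01-01T00:00:00+00:00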
Example #3
    def test_counter_to_prometheus(self):
        meter = get_meter_provider().get_meter(__name__)
        metric = meter.create_counter(
            "test@name",
            "testdesc",
            "unit",
            int,
        )
        labels = {"environment@": "staging", "os": "Windows"}
        key_labels = get_dict_as_key(labels)
        aggregator = SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = ExportRecord(metric, key_labels, aggregator,
                              get_meter_provider().resource)
        collector = CustomCollector("testprefix")
        collector.add_metrics_data([record])

        for prometheus_metric in collector.collect():
            self.assertEqual(type(prometheus_metric), CounterMetricFamily)
            self.assertEqual(prometheus_metric.name, "testprefix_test_name")
            self.assertEqual(prometheus_metric.documentation, "testdesc")
            self.assertTrue(len(prometheus_metric.samples) == 1)
            self.assertEqual(prometheus_metric.samples[0].value, 123)
            self.assertTrue(len(prometheus_metric.samples[0].labels) == 2)
            self.assertEqual(
                prometheus_metric.samples[0].labels["environment_"], "staging")
            self.assertEqual(prometheus_metric.samples[0].labels["os"],
                             "Windows")
Example #4
    def test_ungrouped_batcher_process_exists(self):
        meter = metrics.MeterProvider().get_meter(__name__)
        batcher = UngroupedBatcher(True)
        aggregator = SumAggregator()
        aggregator2 = SumAggregator()
        metric = metrics.Counter(
            "available memory",
            "available memory",
            "bytes",
            int,
            meter,
            ("environment", ),
        )
        labels = ()
        _batch_map = {}
        _batch_map[(metric, labels)] = aggregator
        aggregator2.update(1.0)
        batcher._batch_map = _batch_map
        record = metrics.Record(metric, labels, aggregator2)
        # process() checkpoints the record's aggregator and merges it with the entry
        # already stored under (metric, labels).
        batcher.process(record)
        self.assertEqual(len(batcher._batch_map), 1)
        self.assertIsNotNone(batcher._batch_map.get((metric, labels)))
        self.assertEqual(batcher._batch_map.get((metric, labels)).current, 0)
        self.assertEqual(
            batcher._batch_map.get((metric, labels)).checkpoint, 1.0)
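A compact sketch of the behaviour these assertions pin down: process() checkpoints the incoming record's aggregator and folds its checkpoint into whatever is stored under the batch key, so the stored aggregator ends with current == 0 and checkpoint == 1.0. Hypothetical code, not the UngroupedBatcher implementation:

class ToyAggregator:
    def __init__(self):
        self.current = 0.0
        self.checkpoint = 0.0

    def update(self, value):
        self.current += value

    def take_checkpoint(self):
        self.checkpoint, self.current = self.current, 0.0

def process(batch_map, batch_key, incoming):
    # Checkpoint the incoming aggregator, then fold it into the stored entry (if any).
    incoming.take_checkpoint()
    stored = batch_map.setdefault(batch_key, ToyAggregator())
    stored.checkpoint += incoming.checkpoint
    return batch_map

incoming = ToyAggregator()
incoming.update(1.0)
batch_map = {("available memory", ()): ToyAggregator()}
process(batch_map, ("available memory", ()), incoming)
stored = batch_map[("available memory", ())]
assert (stored.current, stored.checkpoint) == (0.0, 1.0)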
Example #5
    def test_create_timeseries(self):
        def create_label(name, value):
            label = Label()
            label.name = name
            label.value = value
            return label

        sum_aggregator = SumAggregator()
        sum_aggregator.update(5)
        sum_aggregator.take_checkpoint()
        export_record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            get_dict_as_key({"record_name": "record_value"}),
            sum_aggregator,
            Resource({"resource_name": "resource_value"}),
        )

        expected_timeseries = TimeSeries()
        expected_timeseries.labels.append(create_label("__name__", "testname"))
        expected_timeseries.labels.append(
            create_label("resource_name", "resource_value"))
        expected_timeseries.labels.append(
            create_label("record_name", "record_value"))

        sample = expected_timeseries.samples.add()
        sample.timestamp = int(sum_aggregator.last_update_timestamp / 1000000)
        sample.value = 5.0

        timeseries = self.exporter._create_timeseries(export_record,
                                                      "testname", 5.0)
        self.assertEqual(timeseries, expected_timeseries)
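Two details worth noting in the expected TimeSeries: sample timestamps are in milliseconds (hence the division of the nanosecond last_update_timestamp by 1000000), and labels are appended in a fixed order: the metric name under __name__, then resource labels, then record labels. A small illustrative sketch of both conventions, using plain Python instead of the protobuf types:

def ns_to_ms(timestamp_ns: int) -> int:
    # Remote Write samples carry millisecond timestamps.
    return int(timestamp_ns // 1_000_000)

def build_labels(metric_name, resource_labels, record_labels):
    # Label order mirrors the expected TimeSeries above.
    labels = [("__name__", metric_name)]
    labels += list(resource_labels.items())
    labels += list(record_labels.items())
    return labels

assert ns_to_ms(1_609_459_200_123_456_789) == 1_609_459_200_123
print(build_labels("testname",
                   {"resource_name": "resource_value"},
                   {"record_name": "record_value"}))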
Example #6
    def test_unique_identifier(self):
        client = mock.Mock()
        exporter1 = CloudMonitoringMetricsExporter(
            project_id=self.project_id,
            client=client,
            add_unique_identifier=True,
        )
        exporter2 = CloudMonitoringMetricsExporter(
            project_id=self.project_id,
            client=client,
            add_unique_identifier=True,
        )
        exporter1.project_name = self.project_name
        exporter2.project_name = self.project_name

        client.create_metric_descriptor.return_value = MetricDescriptor(
            **{
                "name": None,
                "type": "custom.googleapis.com/OpenTelemetry/name",
                "display_name": "name",
                "description": "description",
                "labels": [
                    LabelDescriptor(key=UNIQUE_IDENTIFIER_KEY, value_type="STRING"),
                ],
                "metric_kind": "CUMULATIVE",
                "value_type": "DOUBLE",
            })

        sum_agg_one = SumAggregator()
        sum_agg_one.update(1)
        metric_record = MetricRecord(
            MockMetric(),
            (),
            sum_agg_one,
        )
        exporter1.export([metric_record])
        exporter2.export([metric_record])

        (
            first_call,
            second_call,
        ) = client.create_metric_descriptor.call_args_list
        self.assertEqual(first_call[0][1].labels[0].key, UNIQUE_IDENTIFIER_KEY)
        self.assertEqual(second_call[0][1].labels[0].key,
                         UNIQUE_IDENTIFIER_KEY)

        first_call, second_call = client.create_time_series.call_args_list
        self.assertNotEqual(
            first_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
            second_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
        )
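The point of add_unique_identifier=True is that two exporter instances reporting the same metric never write to the same Cloud Monitoring time series: each instance gets its own identifier label. A rough sketch of that idea (the key name and id format here are assumptions for illustration, not the exporter's actual constants):

import uuid

ASSUMED_IDENTIFIER_KEY = "opentelemetry_exporter_id"  # hypothetical label key

def make_identifier_labels():
    # Each call yields a fresh identifier, so two exporters get distinct label values.
    return {ASSUMED_IDENTIFIER_KEY: uuid.uuid4().hex[:8]}

labels_one = make_identifier_labels()
labels_two = make_identifier_labels()
assert labels_one[ASSUMED_IDENTIFIER_KEY] != labels_two[ASSUMED_IDENTIFIER_KEY]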
Example #7
    def test_live_metric_envelope_documents(self):
        aggregator = SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = MetricRecord(self._test_metric, self._test_labels, aggregator)
        exporter = LiveMetricsExporter(
            instrumentation_key=self._instrumentation_key,
            span_processor=self._span_processor,
        )
        request_data = RemoteDependency(
            name="testName",
            id="",
            result_code="testResultCode",
            duration="testDuration",
            success=True,
            properties={},
            measurements={},
        )
        request_data.properties["test_property1"] = "test_property1Value"
        request_data.properties["test_property2"] = "test_property2Value"
        request_data.measurements["test_measurement1"] = "test_measurement1Value"
        request_data.measurements["test_measurement2"] = "test_measurement2Value"
        test_envelope = Envelope(
            data=Data(base_type="RemoteDependencyData", base_data=request_data))
        self._span_processor.documents.append(test_envelope)
        envelope = exporter._metric_to_live_metrics_envelope([record])
        self.assertIsInstance(envelope, LiveMetricEnvelope)
        self.assertEqual(len(envelope.documents), 1)
        self.assertEqual(
            envelope.documents[0].quickpulse_type,
            "DependencyTelemetryDocument",
        )
        self.assertEqual(envelope.documents[0].document_type, "RemoteDependency")
        self.assertEqual(envelope.documents[0].version, "1.0")
        self.assertEqual(envelope.documents[0].operation_id, "")
        self.assertEqual(len(envelope.documents[0].properties), 4)
        self.assertEqual(
            envelope.documents[0].properties["test_measurement1"],
            "test_measurement1Value",
        )
        self.assertEqual(
            envelope.documents[0].properties["test_measurement2"],
            "test_measurement2Value",
        )
        self.assertEqual(
            envelope.documents[0].properties["test_property1"],
            "test_property1Value",
        )
        self.assertEqual(
            envelope.documents[0].properties["test_property2"],
            "test_property2Value",
        )
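The assertion that the document carries four properties shows that the exporter folds a dependency's measurements and properties into a single properties mapping on the live-metrics document. A minimal sketch of that merge (illustrative only):

def merge_document_properties(properties, measurements):
    # Combine both dictionaries; measurements are reported alongside ordinary properties.
    merged = dict(properties)
    merged.update(measurements)
    return merged

merged = merge_document_properties(
    {"test_property1": "test_property1Value", "test_property2": "test_property2Value"},
    {"test_measurement1": "test_measurement1Value",
     "test_measurement2": "test_measurement2Value"},
)
assert len(merged) == 4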
Example #8
    def test_finished_collection_stateless(self):
        meter = metrics.MeterProvider().get_meter(__name__)
        batcher = Batcher(False)
        aggregator = SumAggregator()
        metric = metrics.Counter(
            "available memory", "available memory", "bytes", int, meter)
        aggregator.update(1.0)
        labels = ()
        _batch_map = {}
        _batch_map[(metric, SumAggregator, tuple(), labels)] = aggregator
        batcher._batch_map = _batch_map
        batcher.finished_collection()
        self.assertEqual(len(batcher._batch_map), 0)
Example #9
    def test_finished_collection_stateful(self):
        meter_provider = metrics.MeterProvider()
        meter = meter_provider.get_meter(__name__)
        processor = Processor(True, meter_provider.resource)
        aggregator = SumAggregator()
        metric = metrics.Counter(
            "available memory", "available memory", "bytes", int, meter)
        aggregator.update(1.0)
        labels = ()
        _batch_map = {}
        _batch_map[(metric, SumAggregator, tuple(), labels)] = aggregator
        processor._batch_map = _batch_map
        processor.finished_collection()
        self.assertEqual(len(processor._batch_map), 1)
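Examples #8 and #9 contrast the two modes: a stateless batcher drops its batch map after a collection cycle, while a stateful processor keeps accumulating across cycles. A toy illustration of that single difference (hypothetical class, not the SDK's):

class ToyProcessor:
    def __init__(self, stateful):
        self.stateful = stateful
        self._batch_map = {}

    def finished_collection(self):
        # Stateless: start every collection interval from scratch.
        if not self.stateful:
            self._batch_map = {}

stateless = ToyProcessor(stateful=False)
stateless._batch_map[("available memory", ())] = object()
stateless.finished_collection()
assert len(stateless._batch_map) == 0

stateful = ToyProcessor(stateful=True)
stateful._batch_map[("available memory", ())] = object()
stateful.finished_collection()
assert len(stateful._batch_map) == 1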
Example #10
    def test_live_metric_envelope_counter(self):
        aggregator = SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = MetricRecord(self._test_metric, self._test_labels, aggregator)
        exporter = LiveMetricsExporter(
            instrumentation_key=self._instrumentation_key,
            span_processor=self._span_processor,
        )

        envelope = exporter._metric_to_live_metrics_envelope([record])
        self.assertIsInstance(envelope, LiveMetricEnvelope)
        self.assertEqual(envelope.documents, [])
        self.assertEqual(envelope.metrics[0].name, "testname")
        self.assertEqual(envelope.metrics[0].value, 123)
        self.assertEqual(envelope.metrics[0].weight, 1)
Example #11
    def test_checkpoint_set(self):
        meter = metrics.MeterProvider().get_meter(__name__)
        batcher = Batcher(True)
        aggregator = SumAggregator()
        metric = metrics.Counter(
            "available memory", "available memory", "bytes", int, meter)
        aggregator.update(1.0)
        labels = ()
        _batch_map = {}
        _batch_map[(metric, SumAggregator, tuple(), labels)] = aggregator
        batcher._batch_map = _batch_map
        records = batcher.checkpoint_set()
        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].instrument, metric)
        self.assertEqual(records[0].labels, labels)
        self.assertEqual(records[0].aggregator, aggregator)
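checkpoint_set walks the batch map and turns each entry back into an export record carrying the instrument, its labels, and the aggregator. A sketch of that traversal, with plain tuples standing in for the SDK's record type:

def checkpoint_set(batch_map):
    # One (instrument, labels, aggregator) record per batch-map entry.
    return [
        (instrument, labels, aggregator)
        for (instrument, _agg_class, _resource, labels), aggregator in batch_map.items()
    ]

batch_map = {("available memory", "SumAggregator", (), ()): "aggregator"}
records = checkpoint_set(batch_map)
assert records == [("available memory", (), "aggregator")]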
Example #12
    def test_batcher_process_not_stateful(self):
        meter = metrics.MeterProvider().get_meter(__name__)
        batcher = Batcher(True)
        aggregator = SumAggregator()
        metric = metrics.Counter(
            "available memory", "available memory", "bytes", int, meter)
        labels = ()
        _batch_map = {}
        batch_key = (metric, SumAggregator, tuple(), labels)
        aggregator.update(1.0)
        batcher._batch_map = _batch_map
        record = metrics.Record(metric, labels, aggregator)
        batcher.process(record)
        self.assertEqual(len(batcher._batch_map), 1)
        self.assertIsNotNone(batcher._batch_map.get(batch_key))
        self.assertEqual(batcher._batch_map.get(batch_key).current, 0)
        self.assertEqual(batcher._batch_map.get(batch_key).checkpoint, 1.0)
Example #13
    def test_processor_process_not_exists(self):
        meter_provider = metrics.MeterProvider()
        meter = meter_provider.get_meter(__name__)
        processor = Processor(True, meter_provider.resource)
        aggregator = SumAggregator()
        metric = metrics.Counter(
            "available memory", "available memory", "bytes", int, meter)
        labels = ()
        _batch_map = {}
        batch_key = (metric, SumAggregator, tuple(), labels)
        aggregator.update(1.0)
        processor._batch_map = _batch_map
        accumulation = metrics.Accumulation(metric, labels, aggregator)
        processor.process(accumulation)
        self.assertEqual(len(processor._batch_map), 1)
        self.assertIsNotNone(processor._batch_map.get(batch_key))
        self.assertEqual(processor._batch_map.get(batch_key).current, 0)
        self.assertEqual(processor._batch_map.get(batch_key).checkpoint, 1.0)
Example #14
    def test_finished_collection_stateful(self):
        meter = metrics.MeterProvider().get_meter(__name__)
        batcher = UngroupedBatcher(True)
        aggregator = SumAggregator()
        metric = metrics.Counter(
            "available memory",
            "available memory",
            "bytes",
            int,
            meter,
            ("environment", ),
        )
        aggregator.update(1.0)
        labels = ()
        _batch_map = {}
        _batch_map[(metric, labels)] = aggregator
        batcher._batch_map = _batch_map
        batcher.finished_collection()
        self.assertEqual(len(batcher._batch_map), 1)
Example #15
    def test_checkpoint(self):
        sum_agg = SumAggregator()
        sum_agg.update(2.0)
        sum_agg.take_checkpoint()
        self.assertEqual(sum_agg.current, 0)
        self.assertEqual(sum_agg.checkpoint, 2.0)