def test_get_collector_point(self):
     """get_collector_point maps a counter checkpoint to a typed
     protobuf Point (int64 vs double by value type) and raises
     TypeError for unsupported instruments such as valuerecorders.
     """
     aggregator = aggregate.SumAggregator()
     int_counter = self._meter.create_counter(
         "testName",
         "testDescription",
         "unit",
         int,
     )
     float_counter = self._meter.create_counter(
         "testName",
         "testDescription",
         "unit",
         float,
     )
     valuerecorder = self._meter.create_valuerecorder(
         "testName",
         "testDescription",
         "unit",
         float,
     )
     result = metrics_exporter.get_collector_point(
         ExportRecord(
             int_counter,
             self._key_labels,
             aggregator,
             metrics.get_meter_provider().resource,
         ))
     self.assertIsInstance(result, metrics_pb2.Point)
     self.assertIsInstance(result.timestamp, Timestamp)
     # No update/take_checkpoint yet, so the int checkpoint is still 0.
     self.assertEqual(result.int64_value, 0)
     aggregator.update(123.5)
     aggregator.take_checkpoint()
     result = metrics_exporter.get_collector_point(
         ExportRecord(
             float_counter,
             self._key_labels,
             aggregator,
             metrics.get_meter_provider().resource,
         ))
     self.assertEqual(result.double_value, 123.5)
     # BUG FIX: pass the callable and its argument separately so
     # assertRaises invokes it.  The original called
     # get_collector_point(...) inline, which raised the TypeError
     # *before* assertRaises could catch it, erroring the test.
     self.assertRaises(
         TypeError,
         metrics_exporter.get_collector_point,
         ExportRecord(
             valuerecorder,
             self._key_labels,
             aggregator,
             metrics.get_meter_provider().resource,
         ),
     )
# Example #2
    def test_convert_from_min_max_sum_count(self):
        """A MinMaxSumCount record converts to min/max/sum/count series,
        in that order."""
        record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            MinMaxSumCountAggregator(),
            Resource({}),
        )
        for measurement in (5, 1):
            record.aggregator.update(measurement)
        record.aggregator.take_checkpoint()

        # Expected series, in the order the converter emits them.
        expected = [
            self.exporter._create_timeseries(record, name, value)
            for name, value in (
                ("testname_min", 1.0),
                ("testname_max", 5.0),
                ("testname_sum", 6.0),
                ("testname_count", 2.0),
            )
        ]
        timeseries = self.exporter._convert_from_min_max_sum_count(record)
        for index, wanted in enumerate(expected):
            self.assertEqual(timeseries[index], wanted)
# Example #3
    def test_convert_from_value_observer(self):
        """A ValueObserver record converts to min/max/sum/count/last
        series, in that order."""
        record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            ValueObserverAggregator(),
            Resource({}),
        )
        for measurement in (5, 1, 2):
            record.aggregator.update(measurement)
        record.aggregator.take_checkpoint()

        # Expected series, in the order the converter emits them.
        expected = [
            self.exporter._create_timeseries(record, name, value)
            for name, value in (
                ("testname_min", 1.0),
                ("testname_max", 5.0),
                ("testname_sum", 8.0),
                ("testname_count", 3.0),
                ("testname_last", 2.0),
            )
        ]
        timeseries = self.exporter._convert_from_value_observer(record)
        for index, wanted in enumerate(expected):
            self.assertEqual(timeseries[index], wanted)
# Example #4
    def test_create_timeseries(self):
        """_create_timeseries merges the metric name, resource labels and
        record labels into one TimeSeries carrying a single sample."""
        sum_aggregator = SumAggregator()
        sum_aggregator.update(5)
        sum_aggregator.take_checkpoint()
        export_record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            get_dict_as_key({"record_name": "record_value"}),
            sum_aggregator,
            Resource({"resource_name": "resource_value"}),
        )

        expected_timeseries = TimeSeries()
        # Label order matters: metric name first, then resource labels,
        # then record labels.
        for label_name, label_value in (
            ("__name__", "testname"),
            ("resource_name", "resource_value"),
            ("record_name", "record_value"),
        ):
            label = Label()
            label.name = label_name
            label.value = label_value
            expected_timeseries.labels.append(label)

        sample = expected_timeseries.samples.add()
        # Sample timestamps are the aggregator timestamp divided by 1e6
        # (presumably ns -> ms — confirm against the exporter).
        sample.timestamp = int(sum_aggregator.last_update_timestamp / 1000000)
        sample.value = 5.0

        timeseries = self.exporter._create_timeseries(export_record,
                                                      "testname", 5.0)
        self.assertEqual(timeseries, expected_timeseries)
 def test_get_value_observer_metric_descriptor(self):
     """A ValueObserver record produces a GAUGE/INT64 metric descriptor."""
     client = mock.Mock()
     exporter = CloudMonitoringMetricsExporter(project_id=self.project_id,
                                               client=client)
     exporter.project_name = self.project_name
     exporter._get_metric_descriptor(
         ExportRecord(
             MockMetric(),
             (),
             ValueObserverAggregator(),
             Resource.create_empty(),
         ))
     expected_descriptor = MetricDescriptor(
         name=None,
         type="custom.googleapis.com/OpenTelemetry/name",
         display_name="name",
         description="description",
         labels=[],
         metric_kind="GAUGE",
         value_type="INT64",
     )
     client.create_metric_descriptor.assert_called_with(
         self.project_name, expected_descriptor)
# Example #6
    def test_counter_to_prometheus(self):
        """Counter records are exposed as a CounterMetricFamily whose
        metric and label names have ``@`` sanitized to ``_``."""
        meter = get_meter_provider().get_meter(__name__)
        metric = meter.create_counter(
            "test@name",
            "testdesc",
            "unit",
            int,
        )
        labels = {"environment@": "staging", "os": "Windows"}
        record = ExportRecord(metric, get_dict_as_key(labels),
                              SumAggregator(),
                              get_meter_provider().resource)
        # The record holds a reference to the aggregator, so updating it
        # through the record is equivalent to updating it beforehand.
        record.aggregator.update(123)
        record.aggregator.take_checkpoint()
        collector = CustomCollector("testprefix")
        collector.add_metrics_data([record])

        for prometheus_metric in collector.collect():
            self.assertEqual(type(prometheus_metric), CounterMetricFamily)
            self.assertEqual(prometheus_metric.name, "testprefix_test_name")
            self.assertEqual(prometheus_metric.documentation, "testdesc")
            samples = prometheus_metric.samples
            self.assertEqual(len(samples), 1)
            self.assertEqual(samples[0].value, 123)
            self.assertEqual(len(samples[0].labels), 2)
            self.assertEqual(samples[0].labels["environment_"], "staging")
            self.assertEqual(samples[0].labels["os"], "Windows")
# Example #7
 def test_export(self):
     """ConsoleMetricsExporter writes a formatted record to stdout."""
     meter_provider = metrics.MeterProvider()
     meter = meter_provider.get_meter(__name__)
     exporter = ConsoleMetricsExporter()
     metric = metrics.Counter(
         "available memory",
         "available memory",
         "bytes",
         int,
         meter,
         ("environment", ),
     )
     labels = {"environment": "staging"}
     aggregator = SumAggregator()
     record = ExportRecord(metric, labels, aggregator,
                           meter_provider.resource)
     # Mirror the exact line the console exporter is expected to emit.
     expected_output = (
         f'{ConsoleMetricsExporter.__name__}(data="{metric}", '
         f'labels="{labels}", value={aggregator.checkpoint}, '
         f'resource={meter_provider.resource.attributes})'
     )
     with mock.patch("sys.stdout") as mock_stdout:
         exporter.export([record])
         mock_stdout.write.assert_any_call(expected_output)
    def test_export(self):
        """export() forwards records through the gRPC client's Export
        call, reports SUCCESS, and attaches node/host metadata."""
        mock_export = mock.MagicMock()
        mock_client = mock.MagicMock()
        mock_client.Export = mock_export
        collector_exporter = metrics_exporter.OpenCensusMetricsExporter(
            client=mock_client, host_name="testHostName"
        )
        record = ExportRecord(
            self._meter.create_counter(
                "testname", "testdesc", "unit", int,
            ),
            self._key_labels,
            aggregate.SumAggregator(),
            metrics.get_meter_provider().resource,
        )

        self.assertIs(collector_exporter.export([record]),
                      MetricsExportResult.SUCCESS)
        # pylint: disable=unsubscriptable-object
        export_arg = mock_export.call_args[0]
        # Export is called with an iterator of service requests.
        service_request = next(export_arg[0])
        self.assertEqual(len(service_request.metrics), 1)
        node = service_request.node
        self.assertIsNotNone(node.library_info)
        self.assertIsNotNone(node.service_info)
        self.assertEqual(node.identifier.host_name, "testHostName")
# Example #9
    def test_translate_updowncounter_export_record(self, mock_time_ns):
        """An UpDownCounter export record translates to an OTLP IntSum
        metric with cumulative aggregation temporality.
        """
        # Pin time_ns so the timestamps in the expected request are
        # deterministic.
        mock_time_ns.configure_mock(**{"return_value": 1})

        counter_export_record = ExportRecord(
            UpDownCounter("c", "d", "e", int, self.meter),
            [("g", "h")],
            SumAggregator(),
            self.resource,
        )

        # Force known checkpoint value and timestamps on the aggregator
        # so the expected IntDataPoint below matches exactly.
        counter_export_record.aggregator.checkpoint = 1
        counter_export_record.aggregator.initial_checkpoint_timestamp = 1
        counter_export_record.aggregator.last_update_timestamp = 1

        # Resource attributes (a=1, b=False) come from self.resource,
        # presumably built in setUp — confirm against the fixture.
        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(
                        instrumentation_library=InstrumentationLibrary(
                            name="name",
                            version="version",
                        ),
                        metrics=[
                            OTLPMetric(
                                name="c",
                                description="d",
                                unit="e",
                                int_sum=IntSum(
                                    data_points=[
                                        IntDataPoint(
                                            labels=[
                                                StringKeyValue(key="g",
                                                               value="h")
                                            ],
                                            value=1,
                                            time_unix_nano=1,
                                            start_time_unix_nano=1,
                                        )
                                    ],
                                    aggregation_temporality=(
                                        AggregationTemporality.
                                        AGGREGATION_TEMPORALITY_CUMULATIVE),
                                ),
                            )
                        ],
                    )
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([counter_export_record])

        self.assertEqual(expected, actual)
    def test_unique_identifier(self):
        """With add_unique_identifier=True, two exporters attach distinct
        unique-identifier label values to otherwise identical series.
        """
        client = mock.Mock()
        exporter1 = CloudMonitoringMetricsExporter(
            project_id=self.project_id,
            client=client,
            add_unique_identifier=True,
        )
        exporter2 = CloudMonitoringMetricsExporter(
            project_id=self.project_id,
            client=client,
            add_unique_identifier=True,
        )
        exporter1.project_name = self.project_name
        exporter2.project_name = self.project_name

        client.create_metric_descriptor.return_value = MetricDescriptor(
            **{
                "name":
                None,
                "type":
                "custom.googleapis.com/OpenTelemetry/name",
                "display_name":
                "name",
                "description":
                "description",
                "labels": [
                    LabelDescriptor(key=UNIQUE_IDENTIFIER_KEY,
                                    value_type="STRING"),
                ],
                "metric_kind":
                "CUMULATIVE",
                "value_type":
                "DOUBLE",
            })

        sum_agg_one = SumAggregator()
        sum_agg_one.update(1)
        metric_record = ExportRecord(MockMetric(), (), sum_agg_one,
                                     Resource.create_empty())
        # Export the same record through both exporters; each should
        # register its own descriptor and time series.
        exporter1.export([metric_record])
        exporter2.export([metric_record])

        # Both descriptors carry the unique-identifier label key.
        (
            first_call,
            second_call,
        ) = client.create_metric_descriptor.call_args_list
        self.assertEqual(first_call[0][1].labels[0].key, UNIQUE_IDENTIFIER_KEY)
        self.assertEqual(second_call[0][1].labels[0].key,
                         UNIQUE_IDENTIFIER_KEY)

        # But each exporter's series must have a *different* identifier
        # value, so their data cannot collide.
        first_call, second_call = client.create_time_series.call_args_list
        self.assertNotEqual(
            first_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
            second_call[0][1][0].metric.labels[UNIQUE_IDENTIFIER_KEY],
        )
# Example #11
 def test_invalid_metric(self):
     """A record with an unsupported metric/aggregator is skipped and a
     warning is logged instead of raising."""
     meter = get_meter_provider().get_meter(__name__)
     stub_metric = StubMetric("tesname", "testdesc", "unit", int, meter)
     key_labels = get_dict_as_key({"environment": "staging"})
     record = ExportRecord(stub_metric, key_labels, None,
                           get_meter_provider().resource)
     collector = CustomCollector("testprefix")
     collector.add_metrics_data([record])
     collector.collect()
     self.assertLogs("opentelemetry.exporter.prometheus", level="WARNING")
# Example #12
 def test_valid_convert_to_timeseries(self):
     """Every supported aggregator type converts to TimeSeries; the five
     records below expand to 13 series in total."""
     aggregator_types = (
         SumAggregator,
         MinMaxSumCountAggregator,
         HistogramAggregator,
         LastValueAggregator,
         ValueObserverAggregator,
     )
     test_records = [
         ExportRecord(
             Counter("testname", "testdesc", "testunit", int, None),
             None,
             aggregator_type(),
             Resource({}),
         )
         for aggregator_type in aggregator_types
     ]
     for record in test_records:
         record.aggregator.update(5)
         record.aggregator.take_checkpoint()
     data = self.exporter._convert_to_timeseries(test_records)
     self.assertIsInstance(data, list)
     self.assertEqual(len(data), 13)
     for timeseries in data:
         self.assertIsInstance(timeseries, TimeSeries)
    def test_valid_export(self, mock_post):
        """A record batch POSTed with a 200 response yields SUCCESS; an
        empty batch also reports SUCCESS."""
        mock_post.return_value.configure_mock(**{"status_code": 200})
        record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            get_dict_as_key({"environment": "testing"}),
            SumAggregator(),
            Resource({}),
        )
        self.assertIs(self.exporter.export([record]),
                      MetricsExportResult.SUCCESS)
        self.assertEqual(mock_post.call_count, 1)

        self.assertIs(self.exporter.export([]),
                      MetricsExportResult.SUCCESS)
# Example #14
 def test_export(self):
     """Exporting a record queues it on the Prometheus collector and
     reports SUCCESS."""
     with self._registry_register_patch:
         record = ExportRecord(
             self._test_metric,
             self._labels_key,
             SumAggregator(),
             get_meter_provider().resource,
         )
         exporter = PrometheusMetricsExporter()
         result = exporter.export([record])
         self.assertIs(result, MetricsExportResult.SUCCESS)
         # pylint: disable=protected-access
         self.assertEqual(len(exporter._collector._metrics_to_export), 1)
    def checkpoint_set(self) -> Sequence[ExportRecord]:
        """Returns a list of ExportRecords used for exporting.

        The list of ExportRecords is a snapshot created from the current
        data in all of the aggregators in this processor.
        """
        # Batch-map keys are (instrument, aggregator_type, _, labels)
        # tuples; only the instrument and labels are needed here.
        return [
            ExportRecord(instrument, labels, aggregator, self._resource)
            for (instrument, _, _, labels), aggregator
            in self._batch_map.items()
        ]
# Example #16
    def test_convert_from_sum(self):
        """A Sum record converts to a single ``testname_sum`` series
        carrying the checkpointed total."""
        sum_record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            SumAggregator(),
            Resource({}),
        )
        for measurement in (3, 2):
            sum_record.aggregator.update(measurement)
        sum_record.aggregator.take_checkpoint()

        expected = self.exporter._create_timeseries(
            sum_record, "testname_sum", 5.0)
        self.assertEqual(
            self.exporter._convert_from_sum(sum_record)[0], expected)
# Example #17
    def test_convert_from_last_value(self):
        """A LastValue record converts to a ``testname_last`` series with
        the most recent measurement."""
        last_value_record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            LastValueAggregator(),
            Resource({}),
        )
        for measurement in (1, 5):
            last_value_record.aggregator.update(measurement)
        last_value_record.aggregator.take_checkpoint()

        expected = self.exporter._create_timeseries(
            last_value_record, "testname_last", 5.0)
        self.assertEqual(
            self.exporter._convert_from_last_value(last_value_record)[0],
            expected)
    def test_export_value_observer(self):
        """A ValueObserver record exports as a GAUGE point whose value is
        the checkpoint's last element and whose interval start == end.
        """
        client = mock.Mock()

        # Freeze the exporter's clock at one second past the epoch.
        with mock.patch(
                "opentelemetry.exporter.cloud_monitoring.time_ns",
                lambda: NANOS_PER_SECOND,
        ):
            exporter = CloudMonitoringMetricsExporter(
                project_id=self.project_id, client=client)

        exporter.project_name = self.project_name

        client.create_metric_descriptor.return_value = MetricDescriptor(
            **{
                "name": None,
                "type": "custom.googleapis.com/OpenTelemetry/name",
                "display_name": "name",
                "description": "description",
                "labels": [],
                "metric_kind": "GAUGE",
                "value_type": "INT64",
            })

        aggregator = ValueObserverAggregator()
        # _TYPE is the aggregator's checkpoint tuple type; the asserted
        # point value below (5) is its final element — presumably the
        # "last" field, confirm against the aggregator definition.
        aggregator.checkpoint = aggregator._TYPE(1, 2, 3, 4, 5)
        aggregator.last_update_timestamp = (WRITE_INTERVAL +
                                            1) * NANOS_PER_SECOND
        exporter.export([
            ExportRecord(
                MockMetric(meter=mock_meter()),
                (),
                aggregator,
                Resource.create_empty(),
            )
        ])

        # Gauge points have a zero-length interval: start == end.
        series = TimeSeries()
        series.metric_kind = MetricDescriptor.MetricKind.GAUGE
        series.metric.type = "custom.googleapis.com/OpenTelemetry/name"
        point = series.points.add()
        point.value.int64_value = 5
        point.interval.end_time.seconds = WRITE_INTERVAL + 1
        point.interval.end_time.nanos = 0
        point.interval.start_time.seconds = WRITE_INTERVAL + 1
        point.interval.start_time.nanos = 0
        client.create_time_series.assert_has_calls(
            [mock.call(self.project_name, [series])])
# Example #19
 def test_min_max_sum_aggregator_to_prometheus(self):
     """MinMaxSumCount output is exposed as Prometheus ``_count`` and
     ``_sum`` series with the sanitized metric name."""
     meter = get_meter_provider().get_meter(__name__)
     metric = meter.create_valuerecorder("test@name", "testdesc", "unit",
                                         int, [])
     aggregator = MinMaxSumCountAggregator()
     for measurement in (123, 456):
         aggregator.update(measurement)
     aggregator.take_checkpoint()
     record = ExportRecord(metric, get_dict_as_key({}), aggregator,
                           get_meter_provider().resource)
     collector = CustomCollector("testprefix")
     collector.add_metrics_data([record])
     result = generate_latest(collector).decode("utf-8")
     self.assertIn("testprefix_test_name_count 2.0", result)
     self.assertIn("testprefix_test_name_sum 579.0", result)
    def setUp(self):
        """Build an OTLP exporter and a counter export record fixture
        with a fixed resource (a=1, b=False) for the translation tests.
        """
        self.exporter = OTLPMetricsExporter(insecure=True)
        # OrderedDict keeps the resource attribute order deterministic
        # for the expected protobuf comparisons.
        resource = SDKResource(OrderedDict([("a", 1), ("b", False)]))

        self.counter_export_record = ExportRecord(
            Counter(
                "c",
                "d",
                "e",
                int,
                MeterProvider(resource=resource, ).get_meter(__name__),
                ("f", ),
            ),
            [("g", "h")],
            SumAggregator(),
            resource,
        )

        # Reset global configuration so tests don't leak state.
        Configuration._reset()  # pylint: disable=protected-access
# Example #21
    def test_convert_from_histogram(self):
        """A histogram record yields one series per bucket, labelled with
        the ``le`` (less-or-equal) bound."""
        histogram_record = ExportRecord(
            Counter("testname", "testdesc", "testunit", int, None),
            None,
            HistogramAggregator(),
            Resource({}),
        )
        for measurement in (5, 2, -1):
            histogram_record.aggregator.update(measurement)
        histogram_record.aggregator.take_checkpoint()

        expected = [
            self.exporter._create_timeseries(
                histogram_record, "testname_histogram", count, ("le", bound))
            for bound, count in (("0", 1.0), ("+Inf", 2.0))
        ]
        timeseries = self.exporter._convert_from_histogram(histogram_record)
        for index, wanted in enumerate(expected):
            self.assertEqual(timeseries[index], wanted)
 def test_invalid_export(self):
     """Exporting a record with no metric/aggregator reports FAILURE."""
     bad_record = ExportRecord(None, None, None, None)
     self.assertIs(self.exporter.export([bad_record]),
                   MetricsExportResult.FAILURE)
    def test_get_metric_descriptor(self):
        """_get_metric_descriptor creates descriptors for supported
        aggregators, caches them per metric, maps label value types to
        STRING/INT64/BOOL, and drops labels with other value types.
        """
        client = mock.Mock()
        exporter = CloudMonitoringMetricsExporter(project_id=self.project_id,
                                                  client=client)
        exporter.project_name = self.project_name

        # Unsupported aggregators yield no descriptor at all.
        self.assertIsNone(
            exporter._get_metric_descriptor(
                ExportRecord(
                    MockMetric(),
                    (),
                    UnsupportedAggregator(),
                    Resource.create_empty(),
                )))

        record = ExportRecord(
            MockMetric(),
            (("label1", "value1"), ),
            SumAggregator(),
            Resource.create_empty(),
        )
        metric_descriptor = exporter._get_metric_descriptor(record)
        client.create_metric_descriptor.assert_called_with(
            self.project_name,
            MetricDescriptor(
                **{
                    "name": None,
                    "type": "custom.googleapis.com/OpenTelemetry/name",
                    "display_name": "name",
                    "description": "description",
                    "labels":
                    [LabelDescriptor(key="label1", value_type="STRING")],
                    "metric_kind": "CUMULATIVE",
                    "value_type": "INT64",
                }),
        )

        # Getting a cached metric descriptor shouldn't use another call
        cached_metric_descriptor = exporter._get_metric_descriptor(record)
        self.assertEqual(client.create_metric_descriptor.call_count, 1)
        self.assertEqual(metric_descriptor, cached_metric_descriptor)

        # Drop labels with values that aren't string, int or bool
        # (label2's dict value disappears from the descriptor below).
        exporter._get_metric_descriptor(
            ExportRecord(
                MockMetric(name="name2", value_type=float),
                (
                    ("label1", "value1"),
                    ("label2", dict()),
                    ("label3", 3),
                    ("label4", False),
                ),
                SumAggregator(),
                Resource.create_empty(),
            ))
        client.create_metric_descriptor.assert_called_with(
            self.project_name,
            MetricDescriptor(
                **{
                    "name":
                    None,
                    "type":
                    "custom.googleapis.com/OpenTelemetry/name2",
                    "display_name":
                    "name2",
                    "description":
                    "description",
                    "labels": [
                        LabelDescriptor(key="label1", value_type="STRING"),
                        LabelDescriptor(key="label3", value_type="INT64"),
                        LabelDescriptor(key="label4", value_type="BOOL"),
                    ],
                    "metric_kind":
                    "CUMULATIVE",
                    "value_type":
                    "DOUBLE",
                }),
        )
    def test_export(self):
        """End-to-end export: unsupported aggregators are skipped,
        supported records map to cumulative TimeSeries with stringified
        labels and a GCP monitored resource, repeat exports with the same
        labels inside the write interval are dropped, and new label sets
        are exported normally.
        """
        client = mock.Mock()

        # Freeze the exporter's clock at one second past the epoch.
        with mock.patch(
                "opentelemetry.exporter.cloud_monitoring.time_ns",
                lambda: NANOS_PER_SECOND,
        ):
            exporter = CloudMonitoringMetricsExporter(
                project_id=self.project_id, client=client)

        exporter.project_name = self.project_name

        # Records with unsupported aggregators produce no time series.
        exporter.export([
            ExportRecord(
                MockMetric(),
                (("label1", "value1"), ),
                UnsupportedAggregator(),
                Resource.create_empty(),
            )
        ])
        client.create_time_series.assert_not_called()

        client.create_metric_descriptor.return_value = MetricDescriptor(
            **{
                "name":
                None,
                "type":
                "custom.googleapis.com/OpenTelemetry/name",
                "display_name":
                "name",
                "description":
                "description",
                "labels": [
                    LabelDescriptor(key="label1", value_type="STRING"),
                    LabelDescriptor(key="label2", value_type="INT64"),
                ],
                "metric_kind":
                "CUMULATIVE",
                "value_type":
                "DOUBLE",
            })

        # gcp.resource_type selects the monitored-resource mapping;
        # "not_gcp_resource"/"extra_info" should be ignored by it.
        resource = Resource(
            attributes={
                "cloud.account.id": 123,
                "host.id": "host",
                "cloud.zone": "US",
                "cloud.provider": "gcp",
                "extra_info": "extra",
                "gcp.resource_type": "gce_instance",
                "not_gcp_resource": "value",
            })

        sum_agg_one = SumAggregator()
        sum_agg_one.checkpoint = 1
        sum_agg_one.last_update_timestamp = (WRITE_INTERVAL +
                                             1) * NANOS_PER_SECOND
        exporter.export([
            ExportRecord(
                MockMetric(meter=mock_meter()),
                (
                    ("label1", "value1"),
                    ("label2", 1),
                ),
                sum_agg_one,
                resource,
            ),
            ExportRecord(
                MockMetric(meter=mock_meter()),
                (
                    ("label1", "value2"),
                    ("label2", 2),
                ),
                sum_agg_one,
                resource,
            ),
        ])
        # cloud.account.id/host.id/cloud.zone map onto the gce_instance
        # monitored-resource labels; values are stringified.
        expected_resource = MonitoredResource(
            type="gce_instance",
            labels={
                "project_id": "123",
                "instance_id": "host",
                "zone": "US"
            },
        )

        series1 = TimeSeries(resource=expected_resource)
        series1.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
        series1.metric.type = "custom.googleapis.com/OpenTelemetry/name"
        series1.metric.labels["label1"] = "value1"
        series1.metric.labels["label2"] = "1"
        point = series1.points.add()
        point.value.int64_value = 1
        point.interval.end_time.seconds = WRITE_INTERVAL + 1
        point.interval.end_time.nanos = 0
        point.interval.start_time.seconds = 1
        point.interval.start_time.nanos = 0

        series2 = TimeSeries(resource=expected_resource)
        series2.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
        series2.metric.type = "custom.googleapis.com/OpenTelemetry/name"
        series2.metric.labels["label1"] = "value2"
        series2.metric.labels["label2"] = "2"
        point = series2.points.add()
        point.value.int64_value = 1
        point.interval.end_time.seconds = WRITE_INTERVAL + 1
        point.interval.end_time.nanos = 0
        point.interval.start_time.seconds = 1
        point.interval.start_time.nanos = 0

        client.create_time_series.assert_has_calls(
            [mock.call(self.project_name, [series1, series2])])

        # Attempting to export too soon after another export with the exact
        # same labels leads to it being dropped

        sum_agg_two = SumAggregator()
        sum_agg_two.checkpoint = 1
        sum_agg_two.last_update_timestamp = (WRITE_INTERVAL +
                                             2) * NANOS_PER_SECOND
        exporter.export([
            ExportRecord(
                MockMetric(),
                (
                    ("label1", "value1"),
                    ("label2", 1),
                ),
                sum_agg_two,
                Resource.create_empty(),
            ),
            ExportRecord(
                MockMetric(),
                (
                    ("label1", "value2"),
                    ("label2", 2),
                ),
                sum_agg_two,
                Resource.create_empty(),
            ),
        ])
        # Still only the first create_time_series call: both records
        # above were dropped.
        self.assertEqual(client.create_time_series.call_count, 1)

        # But exporting with different labels is fine
        sum_agg_two.checkpoint = 2
        exporter.export([
            ExportRecord(
                MockMetric(),
                (
                    ("label1", "changed_label"),
                    ("label2", 2),
                ),
                sum_agg_two,
                Resource.create_empty(),
            ),
        ])
        series3 = TimeSeries()
        series3.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
        series3.metric.type = "custom.googleapis.com/OpenTelemetry/name"
        series3.metric.labels["label1"] = "changed_label"
        series3.metric.labels["label2"] = "2"
        point = series3.points.add()
        point.value.int64_value = 2
        point.interval.end_time.seconds = WRITE_INTERVAL + 2
        point.interval.end_time.nanos = 0
        point.interval.start_time.seconds = 1
        point.interval.start_time.nanos = 0

        client.create_time_series.assert_has_calls([
            mock.call(self.project_name, [series1, series2]),
            mock.call(self.project_name, [series3]),
        ])
    def test_export_histogram(self):
        """Histogram records are exported as cumulative DISTRIBUTION series.

        A checkpointed HistogramAggregator with explicit bounds [2, 4, 6]
        must be translated into a single TimeSeries whose distribution
        value carries the bucket bounds and per-bucket counts from the
        aggregator checkpoint.
        """
        client = mock.Mock()

        # Freeze the exporter's notion of "now" so that the cumulative
        # interval's start time is exactly 1 second.
        with mock.patch(
                "opentelemetry.exporter.cloud_monitoring.time_ns",
                lambda: NANOS_PER_SECOND,
        ):
            exporter = CloudMonitoringMetricsExporter(
                project_id=self.project_id, client=client)

        exporter.project_name = self.project_name

        client.create_metric_descriptor.return_value = MetricDescriptor(
            **{
                "name": None,
                "type": "custom.googleapis.com/OpenTelemetry/name",
                "display_name": "name",
                "description": "description",
                "labels": [],
                "metric_kind": "CUMULATIVE",
                "value_type": "DISTRIBUTION",
            })

        aggregator = HistogramAggregator(config={"bounds": [2, 4, 6]})
        aggregator.checkpoint = OrderedDict([(2, 1), (4, 2), (6, 4), (">", 3)])
        aggregator.last_update_timestamp = (WRITE_INTERVAL +
                                            1) * NANOS_PER_SECOND
        exporter.export([
            ExportRecord(
                MockMetric(meter=mock_meter()),
                (),
                aggregator,
                Resource.create_empty(),
            )
        ])

        series = TimeSeries()
        series.metric_kind = MetricDescriptor.MetricKind.CUMULATIVE
        series.metric.type = "custom.googleapis.com/OpenTelemetry/name"
        point = {
            "interval": {
                "start_time": {
                    "seconds": 1
                },
                # End time follows from last_update_timestamp above; use
                # WRITE_INTERVAL + 1 (as the sibling tests do) instead of
                # a magic 11 so the expectation stays correct if
                # WRITE_INTERVAL ever changes.
                "end_time": {
                    "seconds": WRITE_INTERVAL + 1
                },
            },
            "value": {
                "distribution_value": {
                    # Total of all bucket counts: 1 + 2 + 4 + 3.
                    "count": 10,
                    "bucket_options": {
                        "explicit_buckets": {
                            "bounds": [2.0, 4.0, 6.0]
                        }
                    },
                    "bucket_counts": [1, 2, 4, 3],
                }
            },
        }
        series.points.add(**point)
        client.create_time_series.assert_has_calls(
            [mock.call(self.project_name, [series])])
# Example #26
# 0
 def test_invalid_convert_to_timeseries(self):
     """A record built entirely of Nones must yield an empty series list."""
     records = [ExportRecord(None, None, None, Resource({}))]
     converted = self.exporter._convert_to_timeseries(records)
     self.assertIsInstance(converted, list)
     self.assertEqual(len(converted), 0)
    def test_stateless_times(self):
        """Stateless metrics get back-to-back, non-overlapping intervals.

        The first export's interval starts at the exporter start time; a
        second export of the same stateless metric must start 1ms after
        the previous export's end time.
        """
        client = mock.Mock()
        # Pin time_ns so the exporter's start time is exactly 1 second.
        with mock.patch(
                "opentelemetry.exporter.cloud_monitoring.time_ns",
                lambda: NANOS_PER_SECOND,
        ):
            exporter = CloudMonitoringMetricsExporter(
                project_id=self.project_id,
                client=client,
            )

        descriptor_kwargs = {
            "name": None,
            "type": "custom.googleapis.com/OpenTelemetry/name",
            "display_name": "name",
            "description": "description",
            "labels": [
                LabelDescriptor(key=UNIQUE_IDENTIFIER_KEY,
                                value_type="STRING"),
            ],
            "metric_kind": "CUMULATIVE",
            "value_type": "DOUBLE",
        }
        client.create_metric_descriptor.return_value = MetricDescriptor(
            **descriptor_kwargs)

        sum_agg = SumAggregator()
        sum_agg.checkpoint = 1
        sum_agg.last_update_timestamp = (WRITE_INTERVAL +
                                         1) * NANOS_PER_SECOND

        exporter.export([
            ExportRecord(MockMetric(stateful=False), (), sum_agg,
                         Resource.create_empty())
        ])

        first_call = client.create_time_series.call_args_list[0]
        first_interval = first_call[0][1][0].points[0].interval

        # The first series starts at the exporter start time (1s, 0ns).
        self.assertEqual(first_interval.start_time.seconds, 1)
        self.assertEqual(first_interval.start_time.nanos, 0)
        self.assertEqual(first_interval.end_time.seconds, WRITE_INTERVAL + 1)

        sum_agg.last_update_timestamp = (WRITE_INTERVAL * 2 +
                                         2) * NANOS_PER_SECOND

        exporter.export([
            ExportRecord(MockMetric(stateful=False), (), sum_agg,
                         Resource.create_empty())
        ])

        second_call = client.create_time_series.call_args_list[1]
        second_interval = second_call[0][1][0].points[0].interval

        # 1ms ahead of end time of last export
        self.assertEqual(second_interval.start_time.seconds,
                         WRITE_INTERVAL + 1)
        self.assertEqual(second_interval.start_time.nanos, 1e6)
        self.assertEqual(second_interval.end_time.seconds,
                         WRITE_INTERVAL * 2 + 2)
    def test_translate_to_collector(self):
        """translate_to_collector maps an ExportRecord into an OpenCensus
        metrics_pb2.Metric: descriptor fields, resource labels
        (stringified), label values, and the checkpointed point all carry
        over.
        """
        test_metric = self._meter.create_counter(
            "testcollector", "testdesc", "unit", int,
        )
        aggregator = aggregate.SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = ExportRecord(
            test_metric,
            self._key_labels,
            aggregator,
            metrics.get_meter_provider().resource,
        )
        start_timestamp = Timestamp()
        output_metrics = metrics_exporter.translate_to_collector(
            [record], start_timestamp,
        )
        # One record in -> one proto Metric out.
        self.assertEqual(len(output_metrics), 1)
        self.assertIsInstance(output_metrics[0], metrics_pb2.Metric)
        # Descriptor fields mirror the counter's creation arguments; an
        # int-valued counter maps to the CUMULATIVE_INT64 descriptor type.
        self.assertEqual(
            output_metrics[0].metric_descriptor.name, "testcollector"
        )
        self.assertEqual(
            output_metrics[0].metric_descriptor.description, "testdesc"
        )
        self.assertEqual(output_metrics[0].metric_descriptor.unit, "unit")
        self.assertEqual(
            output_metrics[0].metric_descriptor.type,
            metrics_pb2.MetricDescriptor.CUMULATIVE_INT64,
        )
        # Label keys come from self._key_labels — presumably
        # (("environment", ...), ("number", ...)) set up in setUp; confirm
        # against the fixture if this test moves.
        self.assertEqual(
            len(output_metrics[0].metric_descriptor.label_keys), 2
        )
        self.assertEqual(
            output_metrics[0].metric_descriptor.label_keys[0].key,
            "environment",
        )
        self.assertEqual(
            output_metrics[0].metric_descriptor.label_keys[1].key, "number",
        )

        # Resource labels are copied over, with non-string values (int,
        # bool) converted to their str() form.
        self.assertIsNotNone(output_metrics[0].resource)
        self.assertEqual(
            output_metrics[0].resource.type, "",
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_str_value"],
            self._resource_labels["key_with_str_value"],
        )
        self.assertIsInstance(
            output_metrics[0].resource.labels["key_with_int_val"], str,
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_int_val"],
            str(self._resource_labels["key_with_int_val"]),
        )
        self.assertIsInstance(
            output_metrics[0].resource.labels["key_with_true"], str,
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_true"],
            str(self._resource_labels["key_with_true"]),
        )

        # A single timeseries with one point: the supplied start timestamp,
        # the aggregator's last-update time split into seconds/nanos, and
        # the checkpointed sum (123).
        self.assertEqual(len(output_metrics[0].timeseries), 1)
        self.assertEqual(len(output_metrics[0].timeseries[0].label_values), 2)
        self.assertEqual(
            output_metrics[0].timeseries[0].start_timestamp, start_timestamp
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].label_values[0].has_value, True
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].label_values[0].value, "staging"
        )
        self.assertEqual(len(output_metrics[0].timeseries[0].points), 1)
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].timestamp.seconds,
            record.aggregator.last_update_timestamp // 1000000000,
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].timestamp.nanos,
            record.aggregator.last_update_timestamp % 1000000000,
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].int64_value, 123
        )