def _get_data_points(sdk_metric: MetricRecord,
                     data_point_class: Type[DataPointT]) -> List[DataPointT]:
    """Build one data point per bound instrument of *sdk_metric*.

    For each (label, bound_counter) pair the label tuples are converted to
    ``StringKeyValue`` entries; the point's value comes from the aggregator
    of the first view data whose labels match the bound instrument's.
    """
    points = []

    for label, bound_counter in (
            sdk_metric.instrument.bound_instruments.items()):

        kv_pairs = [
            StringKeyValue(key=k, value=v) for k, v in label
        ]

        # Only the first view data with matching labels contributes a point.
        for view_data in bound_counter.view_datas:
            if view_data.labels == label:
                points.append(
                    data_point_class(
                        labels=kv_pairs,
                        value=view_data.aggregator.current,
                    ))
                break

    return points
def _get_data_points(export_record: ExportRecord,
                     data_point_class: Type[DataPointT]) -> List[DataPointT]:
    """Convert *export_record* into a single-element list of data points.

    The point's value is read from the record's aggregator checkpoint and
    its labels are stringified into ``StringKeyValue`` pairs.

    Raises:
        Exception: if the aggregator type is unsupported (MinMaxSumCount,
            Histogram) or unrecognized.
    """
    if isinstance(export_record.aggregator, SumAggregator):
        value = export_record.aggregator.checkpoint

    elif isinstance(export_record.aggregator, MinMaxSumCountAggregator):
        # FIXME: How are values to be interpreted from this aggregator?
        raise Exception("MinMaxSumCount aggregator data not supported")

    elif isinstance(export_record.aggregator, HistogramAggregator):
        # FIXME: How are values to be interpreted from this aggregator?
        raise Exception("Histogram aggregator data not supported")

    elif isinstance(export_record.aggregator, LastValueAggregator):
        value = export_record.aggregator.checkpoint

    elif isinstance(export_record.aggregator, ValueObserverAggregator):
        value = export_record.aggregator.checkpoint.last

    else:
        # Previously an unknown aggregator fell through here, leaving
        # `value` unbound and raising UnboundLocalError below; fail
        # explicitly with a descriptive message instead.
        raise Exception(
            "Unsupported aggregator type: {}".format(
                type(export_record.aggregator).__name__))

    return [
        data_point_class(
            labels=[
                StringKeyValue(key=str(label_key), value=str(label_value))
                for label_key, label_value in export_record.labels
            ],
            value=value,
            start_time_unix_nano=(
                export_record.aggregator.initial_checkpoint_timestamp),
            time_unix_nano=(export_record.aggregator.last_update_timestamp),
        )
    ]
# Example #3
# 0
    def test_translate_updowncounter_export_record(self, mock_time_ns):
        """An UpDownCounter record translates to a cumulative IntSum metric."""
        mock_time_ns.configure_mock(**{"return_value": 1})

        record = ExportRecord(
            UpDownCounter("c", "d", "e", int, self.meter),
            [("g", "h")],
            SumAggregator(),
            self.resource,
        )
        record.aggregator.checkpoint = 1
        record.aggregator.initial_checkpoint_timestamp = 1
        record.aggregator.last_update_timestamp = 1

        # Build the expected OTLP request bottom-up for readability.
        data_point = IntDataPoint(
            labels=[StringKeyValue(key="g", value="h")],
            value=1,
            time_unix_nano=1,
            start_time_unix_nano=1,
        )
        metric = OTLPMetric(
            name="c",
            description="d",
            unit="e",
            int_sum=IntSum(
                data_points=[data_point],
                aggregation_temporality=(
                    AggregationTemporality.
                    AGGREGATION_TEMPORALITY_CUMULATIVE),
            ),
        )
        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(
                        instrumentation_library=InstrumentationLibrary(
                            name="name",
                            version="version",
                        ),
                        metrics=[metric],
                    )
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([record])

        self.assertEqual(expected, actual)
    def test_translate_metrics(self, mock_time_ns):
        """A counter metric record translates to a delta, monotonic IntSum."""
        # pylint: disable=no-member
        mock_time_ns.configure_mock(**{"return_value": 1})

        aggregator = self.counter_metric_record.aggregator
        aggregator.checkpoint = 1
        aggregator.initial_checkpoint_timestamp = 1
        aggregator.last_update_timestamp = 1

        # Build the expected OTLP request bottom-up for readability.
        data_point = IntDataPoint(
            labels=[StringKeyValue(key="g", value="h")],
            value=1,
            time_unix_nano=1,
            start_time_unix_nano=1,
        )
        metric = OTLPMetric(
            name="c",
            description="d",
            unit="e",
            int_sum=IntSum(
                data_points=[data_point],
                aggregation_temporality=(
                    AggregationTemporality.
                    AGGREGATION_TEMPORALITY_DELTA),
                is_monotonic=True,
            ),
        )
        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(metrics=[metric])
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([self.counter_metric_record])

        self.assertEqual(expected, actual)
def _get_data_points(
    export_record: ExportRecord,
    data_point_class: Type[DataPointT],
    aggregation_temporality: int,
) -> List[DataPointT]:
    """Convert *export_record* into a single-element list of data points.

    The point's value is read from the record's aggregator checkpoint; its
    start timestamp depends on *aggregation_temporality* (first timestamp
    for cumulative, checkpoint start for delta).

    Raises:
        Exception: if the aggregator type is unsupported (MinMaxSumCount,
            Histogram) or unrecognized.
    """
    if isinstance(export_record.aggregator, SumAggregator):
        value = export_record.aggregator.checkpoint

    elif isinstance(export_record.aggregator, MinMaxSumCountAggregator):
        # FIXME: How are values to be interpreted from this aggregator?
        raise Exception("MinMaxSumCount aggregator data not supported")

    elif isinstance(export_record.aggregator, HistogramAggregator):
        # FIXME: How are values to be interpreted from this aggregator?
        raise Exception("Histogram aggregator data not supported")

    elif isinstance(export_record.aggregator, LastValueAggregator):
        value = export_record.aggregator.checkpoint

    elif isinstance(export_record.aggregator, ValueObserverAggregator):
        value = export_record.aggregator.checkpoint.last

    else:
        # Previously an unknown aggregator fell through here, leaving
        # `value` unbound and raising UnboundLocalError below; fail
        # explicitly with a descriptive message instead.
        raise Exception(
            "Unsupported aggregator type: {}".format(
                type(export_record.aggregator).__name__))

    if aggregation_temporality == (
        AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE
    ):
        start_time_unix_nano = export_record.aggregator.first_timestamp
    else:
        start_time_unix_nano = (
            export_record.aggregator.initial_checkpoint_timestamp
        )

    return [
        data_point_class(
            labels=[
                StringKeyValue(key=str(label_key), value=str(label_value))
                for label_key, label_value in export_record.labels
            ],
            value=value,
            start_time_unix_nano=start_time_unix_nano,
            time_unix_nano=(export_record.aggregator.last_update_timestamp),
        )
    ]
    def test_translate_metrics(self):
        """A counter add translates to a delta, monotonic IntSum point."""
        # pylint: disable=no-member
        self.counter_metric_record.instrument.add(1, OrderedDict([("a", "b")]))

        # Build the expected OTLP request bottom-up for readability.
        data_point = IntDataPoint(
            labels=[StringKeyValue(key="a", value="b")],
            value=1,
        )
        metric = OTLPMetric(
            name="a",
            description="b",
            unit="c",
            int_sum=IntSum(
                data_points=[data_point],
                aggregation_temporality=(
                    AggregationTemporality.
                    AGGREGATION_TEMPORALITY_DELTA),
                is_monotonic=True,
            ),
        )
        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(metrics=[metric])
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([self.counter_metric_record])

        self.assertEqual(expected, actual)
    def test_translate_metrics(self):
        """A counter add translates to a collector INT64 delta data point."""
        # pylint: disable=no-member
        self.counter_metric_record.instrument.add(1, OrderedDict([("a", "b")]))

        # Build the expected collector request bottom-up for readability.
        descriptor = MetricDescriptor(
            name="a",
            description="b",
            unit="c",
            type=MetricDescriptor.Type.INT64,
            temporality=(
                MetricDescriptor.Temporality.DELTA),
        )
        data_point = Int64DataPoint(
            labels=[StringKeyValue(key="a", value="b")],
            value=1,
        )
        metric = CollectorMetric(
            metric_descriptor=descriptor,
            int64_data_points=[data_point],
        )
        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=CollectorResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(metrics=[metric])
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([self.counter_metric_record])

        self.assertEqual(expected, actual)