Example No. 1
    def test_translate_updowncounter_export_record(self, mock_time_ns):
        mock_time_ns.configure_mock(**{"return_value": 1})

        counter_export_record = ExportRecord(
            UpDownCounter("c", "d", "e", int, self.meter),
            [("g", "h")],
            SumAggregator(),
            self.resource,
        )

        counter_export_record.aggregator.checkpoint = 1
        counter_export_record.aggregator.initial_checkpoint_timestamp = 1
        counter_export_record.aggregator.last_update_timestamp = 1

        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(
                        instrumentation_library=InstrumentationLibrary(
                            name="name",
                            version="version",
                        ),
                        metrics=[
                            OTLPMetric(
                                name="c",
                                description="d",
                                unit="e",
                                int_sum=IntSum(
                                    data_points=[
                                        IntDataPoint(
                                            labels=[
                                                StringKeyValue(key="g",
                                                               value="h")
                                            ],
                                            value=1,
                                            time_unix_nano=1,
                                            start_time_unix_nano=1,
                                        )
                                    ],
                                    aggregation_temporality=(
                                        AggregationTemporality.
                                        AGGREGATION_TEMPORALITY_CUMULATIVE),
                                ),
                            )
                        ],
                    )
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([counter_export_record])

        self.assertEqual(expected, actual)
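
The test above receives a `mock_time_ns` argument, which suggests it is decorated with `unittest.mock.patch` over the SDK's `time_ns` helper (the patch target is not shown), and it relies on `self.exporter`, `self.meter`, and `self.resource` from a `setUp` fixture the listing omits. A minimal sketch of such a fixture, with class names, constructor arguments, and import paths assumed rather than taken from this project:

# Sketch only: an assumed setUp fixture inferred from the attributes the test
# uses (self.exporter, self.meter, self.resource). Class names, constructor
# arguments and import paths are assumptions and may differ by SDK version.
import unittest
from collections import OrderedDict

from opentelemetry.exporter.otlp.metrics_exporter import OTLPMetricsExporter
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.resources import Resource as SDKResource


class TestOTLPMetricsExporter(unittest.TestCase):
    def setUp(self):
        # Attribute values chosen to match the expected KeyValue pairs
        # ("a" -> 1, "b" -> False) asserted in the test above.
        self.resource = SDKResource(OrderedDict([("a", 1), ("b", False)]))
        # "name"/"version" match the expected InstrumentationLibrary fields.
        self.meter = MeterProvider(resource=self.resource).get_meter(
            "name", "version"
        )
        self.exporter = OTLPMetricsExporter()
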
Example No. 2

    def test_translate_metrics(self, mock_time_ns):
        # pylint: disable=no-member

        mock_time_ns.configure_mock(**{"return_value": 1})

        self.counter_metric_record.aggregator.checkpoint = 1
        self.counter_metric_record.aggregator.initial_checkpoint_timestamp = 1
        self.counter_metric_record.aggregator.last_update_timestamp = 1

        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(metrics=[
                        OTLPMetric(
                            name="c",
                            description="d",
                            unit="e",
                            int_sum=IntSum(
                                data_points=[
                                    IntDataPoint(
                                        labels=[
                                            StringKeyValue(key="g", value="h")
                                        ],
                                        value=1,
                                        time_unix_nano=1,
                                        start_time_unix_nano=1,
                                    )
                                ],
                                aggregation_temporality=(
                                    AggregationTemporality.
                                    AGGREGATION_TEMPORALITY_DELTA),
                                is_monotonic=True,
                            ),
                        )
                    ])
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([self.counter_metric_record])

        self.assertEqual(expected, actual)
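
The `expected` messages in these examples are built from generated OTLP protobuf classes whose imports the listing also omits. Assuming the `opentelemetry-proto` Python package at roughly proto v0.5.0 (module layout may differ in later releases), the imports would look something like:

# Assumed imports for the OTLP protobuf classes used in the expected
# messages above; module paths follow the opentelemetry-proto Python
# package around proto v0.5 and may differ in other releases.
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import (
    ExportMetricsServiceRequest,
)
from opentelemetry.proto.common.v1.common_pb2 import (
    AnyValue,
    InstrumentationLibrary,
    KeyValue,
    StringKeyValue,
)
from opentelemetry.proto.metrics.v1.metrics_pb2 import (
    AggregationTemporality,
    InstrumentationLibraryMetrics,
    IntDataPoint,
    IntSum,
    ResourceMetrics,
)
from opentelemetry.proto.metrics.v1.metrics_pb2 import Metric as OTLPMetric
from opentelemetry.proto.resource.v1.resource_pb2 import (
    Resource as OTLPResource,
)
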
Example No. 3

    def test_translate_metrics(self):
        # pylint: disable=no-member

        self.counter_metric_record.instrument.add(1, OrderedDict([("a", "b")]))

        expected = ExportMetricsServiceRequest(resource_metrics=[
            ResourceMetrics(
                resource=OTLPResource(attributes=[
                    KeyValue(key="a", value=AnyValue(int_value=1)),
                    KeyValue(key="b", value=AnyValue(bool_value=False)),
                ]),
                instrumentation_library_metrics=[
                    InstrumentationLibraryMetrics(metrics=[
                        OTLPMetric(
                            name="a",
                            description="b",
                            unit="c",
                            int_sum=IntSum(
                                data_points=[
                                    IntDataPoint(
                                        labels=[
                                            StringKeyValue(key="a", value="b")
                                        ],
                                        value=1,
                                    )
                                ],
                                aggregation_temporality=(
                                    AggregationTemporality.
                                    AGGREGATION_TEMPORALITY_DELTA),
                                is_monotonic=True,
                            ),
                        )
                    ])
                ],
            )
        ])

        # pylint: disable=protected-access
        actual = self.exporter._translate_data([self.counter_metric_record])

        self.assertEqual(expected, actual)
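
These tests call the private `_translate_data` directly. In normal use the translation runs inside the exporter's `export()` when the SDK's push controller collects metric records. A hedged end-to-end sketch, assuming the 0.x metrics SDK API (`PushController`, `create_counter`) and the `opentelemetry.exporter.otlp` import path, both of which may differ between releases:

# Hedged end-to-end sketch: how the exporter is normally driven so that
# collected records reach export() (and, internally, _translate_data).
# Import paths, the PushController wiring and create_counter's signature are
# assumptions about the 0.x metrics SDK and may differ between releases.
from opentelemetry.exporter.otlp.metrics_exporter import OTLPMetricsExporter
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export.controller import PushController

meter_provider = MeterProvider()
meter = meter_provider.get_meter(__name__)

exporter = OTLPMetricsExporter()  # endpoint/credentials left at defaults
controller = PushController(meter, exporter, 5)  # push every 5 seconds

requests_counter = meter.create_counter(
    name="requests",
    description="number of requests",
    unit="1",
    value_type=int,
)
requests_counter.add(1, {"environment": "staging"})
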
Example No. 4
    def _translate_data(
            self, data: Sequence[MetricRecord]) -> ExportMetricsServiceRequest:
        # pylint: disable=too-many-locals,no-member
        # pylint: disable=attribute-defined-outside-init

        sdk_resource_instrumentation_library_metrics = {}

        # The criteria to decide how to translate data are based on this
        # table taken directly from OpenTelemetry Proto v0.5.0:

        # TODO: Update table after the decision on:
        # https://github.com/open-telemetry/opentelemetry-specification/issues/731.
        # By default, metrics recorded using the OpenTelemetry API are exported as
        # (the table does not include MeasurementValueType to avoid extra rows):
        #
        #   Instrument         Type
        #   ----------------------------------------------
        #   Counter            Sum(aggregation_temporality=delta;is_monotonic=true)
        #   UpDownCounter      Sum(aggregation_temporality=delta;is_monotonic=false)
        #   ValueRecorder      TBD
        #   SumObserver        Sum(aggregation_temporality=cumulative;is_monotonic=true)
        #   UpDownSumObserver  Sum(aggregation_temporality=cumulative;is_monotonic=false)
        #   ValueObserver      Gauge()
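        # For example, an int-valued Counter record is translated below into
        # OTLPMetric(int_sum=IntSum(..., aggregation_temporality=DELTA,
        # is_monotonic=True)), while an int-valued ValueObserver becomes
        # OTLPMetric(int_gauge=IntGauge(...)).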
        for sdk_metric in data:

            if sdk_metric.resource not in (
                    sdk_resource_instrumentation_library_metrics):
                sdk_resource_instrumentation_library_metrics[
                    sdk_metric.resource] = InstrumentationLibraryMetrics()

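            # Map the record's Python value type (int or float) to the
            # matching OTLP protobuf classes for sums, gauges and data points.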
            type_class = {
                int: {
                    "sum": {
                        "class": IntSum,
                        "argument": "int_sum"
                    },
                    "gauge": {
                        "class": IntGauge,
                        "argument": "int_gauge"
                    },
                    "data_point_class": IntDataPoint,
                },
                float: {
                    "sum": {
                        "class": DoubleSum,
                        "argument": "double_sum"
                    },
                    "gauge": {
                        "class": DoubleGauge,
                        "argument": "double_gauge",
                    },
                    "data_point_class": DoubleDataPoint,
                },
            }

            value_type = sdk_metric.instrument.value_type

            sum_class = type_class[value_type]["sum"]["class"]
            gauge_class = type_class[value_type]["gauge"]["class"]
            data_point_class = type_class[value_type]["data_point_class"]

            if isinstance(sdk_metric.instrument, Counter):
                otlp_metric_data = sum_class(
                    data_points=_get_data_points(sdk_metric, data_point_class),
                    aggregation_temporality=(
                        AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA),
                    is_monotonic=True,
                )
                argument = type_class[value_type]["sum"]["argument"]

            elif isinstance(sdk_metric.instrument, UpDownCounter):
                otlp_metric_data = sum_class(
                    data_points=_get_data_points(sdk_metric, data_point_class),
                    aggregation_temporality=(
                        AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA),
                    is_monotonic=False,
                )
                argument = type_class[value_type]["sum"]["argument"]

            elif isinstance(sdk_metric.instrument, ValueRecorder):
                logger.warning("Skipping exporting of ValueRecorder metric")
                continue

            elif isinstance(sdk_metric.instrument, SumObserver):
                otlp_metric_data = sum_class(
                    data_points=_get_data_points(sdk_metric, data_point_class),
                    aggregation_temporality=(
                        AggregationTemporality.
                        AGGREGATION_TEMPORALITY_CUMULATIVE),
                    is_monotonic=True,
                )
                argument = type_class[value_type]["sum"]["argument"]

            elif isinstance(sdk_metric.instrument, UpDownSumObserver):
                otlp_metric_data = sum_class(
                    data_points=_get_data_points(sdk_metric, data_point_class),
                    aggregation_temporality=(
                        AggregationTemporality.
                        AGGREGATION_TEMPORALITY_CUMULATIVE),
                    is_monotonic=False,
                )
                argument = type_class[value_type]["sum"]["argument"]

            elif isinstance(sdk_metric.instrument, ValueObserver):
                otlp_metric_data = gauge_class(
                    data_points=_get_data_points(sdk_metric, data_point_class))
                argument = type_class[value_type]["gauge"]["argument"]

            sdk_resource_instrumentation_library_metrics[
                sdk_metric.resource].metrics.append(
                    OTLPMetric(
                        **{
                            "name": sdk_metric.instrument.name,
                            "description": sdk_metric.instrument.description,
                            "unit": sdk_metric.instrument.unit,
                            argument: otlp_metric_data,
                        }))

        return ExportMetricsServiceRequest(resource_metrics=_get_resource_data(
            sdk_resource_instrumentation_library_metrics,
            ResourceMetrics,
            "metrics",
        ))
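
`_translate_data` relies on two helpers that are outside this snippet, `_get_data_points` and `_get_resource_data`. Judging from the tests above, where the aggregator's checkpoint and timestamps end up directly in each data point, `_get_data_points` plausibly looks like the sketch below; the exact signature and field handling are assumptions, not the project's actual code.

# Sketch only: a plausible _get_data_points, inferred from the tests above
# (each label pair becomes a StringKeyValue; the point's value and timestamps
# come from the record's aggregator). Not the project's actual implementation.
def _get_data_points(sdk_metric, data_point_class):
    return [
        data_point_class(
            labels=[
                StringKeyValue(key=str(key), value=str(value))
                for key, value in sdk_metric.labels
            ],
            value=sdk_metric.aggregator.checkpoint,
            start_time_unix_nano=(
                sdk_metric.aggregator.initial_checkpoint_timestamp
            ),
            time_unix_nano=sdk_metric.aggregator.last_update_timestamp,
        )
    ]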