# Example 1
def translate_to_collector(
    metric_records: Sequence[MetricRecord], ) -> Sequence[metrics_pb2.Metric]:
    """Convert SDK ``MetricRecord`` objects into OpenCensus-proto Metrics.

    Args:
        metric_records: metric records produced by the metrics SDK.

    Returns:
        A list of ``metrics_pb2.Metric``, one per input record, each holding
        the record's descriptor and a single TimeSeries with one point.
    """
    collector_metrics = []
    for metric_record in metric_records:

        label_values = []
        label_keys = []
        # Each label is a (key, value) tuple.
        for label_tuple in metric_record.labels:
            label_keys.append(metrics_pb2.LabelKey(key=label_tuple[0]))
            # Protobuf string fields reject non-str values, so coerce any
            # non-None value to str (matches the ExportRecord-based variant
            # of this translator). None is passed through, which leaves the
            # proto field unset while has_value records the absence.
            label_value = label_tuple[1]
            label_values.append(
                metrics_pb2.LabelValue(
                    has_value=label_value is not None,
                    value=str(label_value) if label_value is not None
                    else None))

        metric_descriptor = metrics_pb2.MetricDescriptor(
            name=metric_record.metric.name,
            description=metric_record.metric.description,
            unit=metric_record.metric.unit,
            type=get_collector_metric_type(metric_record.metric),
            label_keys=label_keys,
        )

        # One TimeSeries per record, carrying exactly one point.
        timeseries = metrics_pb2.TimeSeries(
            label_values=label_values,
            points=[get_collector_point(metric_record)],
        )
        collector_metrics.append(
            metrics_pb2.Metric(metric_descriptor=metric_descriptor,
                               timeseries=[timeseries]))
    return collector_metrics
# Example 2
def _get_time_series_list_proto(series_list):
    """Build a ``metrics_pb2.TimeSeries`` proto for every series given.

    Each series contributes its start timestamp (converted from its
    datetime-string form), its label values, and its points.
    """
    proto_list = []
    for series in series_list:
        ts_proto = metrics_pb2.TimeSeries(
            start_timestamp=utils.proto_ts_from_datetime_str(
                series.start_timestamp),
            label_values=_get_label_values_proto(series.label_values),
            points=_get_points_proto(series.points),
        )
        proto_list.append(ts_proto)
    return proto_list
# Example 3
def translate_to_collector(
    export_records: Sequence[ExportRecord],
    exporter_start_timestamp: Timestamp,
) -> Sequence[metrics_pb2.Metric]:
    """Translate SDK ``ExportRecord`` objects into OpenCensus-proto Metrics.

    Args:
        export_records: records to translate, one Metric proto is emitted
            per record.
        exporter_start_timestamp: timestamp used as each TimeSeries'
            start_timestamp when the record's processor is stateful
            (cumulative aggregation).

    Returns:
        A list of ``metrics_pb2.Metric`` protos.
    """
    result = []
    for record in export_records:

        keys = []
        values = []
        # One pass over the labels yields both the descriptor's keys and
        # the series' values. Values are always stringified for the proto
        # string field; has_value separately records whether the original
        # value was present.
        for name, val in record.labels:
            keys.append(metrics_pb2.LabelKey(key=name))
            values.append(
                metrics_pb2.LabelValue(
                    has_value=val is not None,
                    value=str(val),
                )
            )

        descriptor = metrics_pb2.MetricDescriptor(
            name=record.instrument.name,
            description=record.instrument.description,
            unit=record.instrument.unit,
            type=get_collector_metric_type(record.instrument),
            label_keys=keys,
        )

        # Stateful (cumulative) processors report from the exporter's start
        # time; stateless ones leave the start timestamp unset.
        start = (
            exporter_start_timestamp
            if record.instrument.meter.processor.stateful
            else None
        )

        series = metrics_pb2.TimeSeries(
            label_values=values,
            points=[get_collector_point(record)],
            start_timestamp=start,
        )
        result.append(
            metrics_pb2.Metric(
                metric_descriptor=descriptor,
                timeseries=[series],
                resource=get_resource(record),
            )
        )
    return result
    def test_export_view_data(self):
        """End-to-end check: a recorded view is exported as the expected
        MetricDescriptor and TimeSeries protos via the agent exporter.
        """
        # Build one ViewData for the distribution view and record a single
        # measurement of 2 against an empty tag map.
        v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                           start_time=TEST_TIME_STR,
                                           end_time=TEST_TIME_STR)
        v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None)
        view_data = [v_data]
        view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)]

        # Export through a mocked RPC handler so we can inspect the request
        # that would have been sent to the agent.
        handler = mock.Mock(spec=ocagent.ExportRpcHandler)
        ocagent.StatsExporter(handler).export_metrics(view_data)

        # Descriptor: cumulative distribution with the view's single label key.
        self.assertEqual(
            handler.send.call_args[0][0].metrics[0].metric_descriptor,
            metrics_pb2.MetricDescriptor(
                name=VIDEO_SIZE_VIEW_NAME,
                description='processed video size over time',
                unit='By',
                type=metrics_pb2.MetricDescriptor.CUMULATIVE_DISTRIBUTION,
                label_keys=[metrics_pb2.LabelKey(key=FRONTEND_KEY)]))

        # TimeSeries: the tag was never set (has_value=False) and the single
        # recorded value of 2 lands in the first distribution bucket
        # (bounds are 16 MiB and 256 MiB).
        self.assertEqual(
            handler.send.call_args[0][0].metrics[0].timeseries[0],
            metrics_pb2.TimeSeries(
                start_timestamp=timestamp_pb2.Timestamp(seconds=1545699723,
                                                        nanos=4000),
                label_values=[metrics_pb2.LabelValue(has_value=False)],
                points=[
                    metrics_pb2.Point(
                        timestamp=timestamp_pb2.Timestamp(seconds=1545699723,
                                                          nanos=4000),
                        distribution_value=metrics_pb2.DistributionValue(
                            sum=2,
                            count=1,
                            bucket_options=metrics_pb2.DistributionValue.
                            BucketOptions(
                                explicit=metrics_pb2.DistributionValue.
                                BucketOptions.Explicit(
                                    bounds=[16.0 * MiB, 256.0 * MiB])),
                            buckets=[
                                metrics_pb2.DistributionValue.Bucket(count=1),
                                metrics_pb2.DistributionValue.Bucket(),
                                metrics_pb2.DistributionValue.Bucket(),
                            ]))
                ]))