def _get_points_proto(points):
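    """Convert a list of OpenCensus points into metrics_pb2.Point protos.

    Each point's timestamp is copied over and its value is mapped onto the
    matching proto field (int64, double or distribution).
    """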
    protos = []
    for point in points:
        proto = metrics_pb2.Point(
            timestamp=utils.proto_ts_from_datetime(point.timestamp))

        if isinstance(point.value, value.ValueLong):
            proto.int64_value = int(point.value.value)
        elif isinstance(point.value, value.ValueDouble):
            proto.double_value = float(point.value.value)
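        # Distribution points carry aggregated stats plus optional explicit
        # bucket boundaries, all of which map onto DistributionValue fields.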
        elif isinstance(point.value, value.ValueDistribution):
            proto.distribution_value.MergeFrom(
                metrics_pb2.DistributionValue(
                    sum=point.value.sum,
                    count=point.value.count,
                    sum_of_squared_deviation=(
                        point.value.sum_of_squared_deviation),
                    bucket_options=(
                        _get_bucket_options_proto(point.value.bucket_options)
                        if point.value.bucket_options else None),
                    buckets=_get_buckets_proto(point.value.buckets)))

        # TODO: handle SUMMARY metrics, #567
        else:  # pragma: NO COVER
            raise TypeError('Unsupported metric type: {}'.format(
                type(point.value)))
        protos.append(proto)
    return protos
    def test_export_view_data(self):
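        """Exported metrics should match the recorded view data."""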
        v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                           start_time=TEST_TIME_STR,
                                           end_time=TEST_TIME_STR)
        v_data.record(context=tag_map_module.TagMap(), value=2,
                      timestamp=None)
        metrics = [metric_utils.view_data_to_metric(v_data, TEST_TIME)]

        handler = mock.Mock(spec=ocagent.ExportRpcHandler)
        ocagent.StatsExporter(handler).export_metrics(metrics)

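        # The descriptor sent to the handler should mirror the view's name,
        # description, unit, aggregation type and label keys.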
        self.assertEqual(
            handler.send.call_args[0][0].metrics[0].metric_descriptor,
            metrics_pb2.MetricDescriptor(
                name=VIDEO_SIZE_VIEW_NAME,
                description='processed video size over time',
                unit='By',
                type=metrics_pb2.MetricDescriptor.CUMULATIVE_DISTRIBUTION,
                label_keys=[metrics_pb2.LabelKey(key=FRONTEND_KEY)]))

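        # A single time series is expected, holding one distribution point
        # whose recorded value (2 bytes) lands in the first explicit bucket.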
        self.assertEqual(
            handler.send.call_args[0][0].metrics[0].timeseries[0],
            metrics_pb2.TimeSeries(
                start_timestamp=timestamp_pb2.Timestamp(seconds=1545699723,
                                                        nanos=4000),
                label_values=[metrics_pb2.LabelValue(has_value=False)],
                points=[
                    metrics_pb2.Point(
                        timestamp=timestamp_pb2.Timestamp(seconds=1545699723,
                                                          nanos=4000),
                        distribution_value=metrics_pb2.DistributionValue(
                            sum=2,
                            count=1,
                            bucket_options=(
                                metrics_pb2.DistributionValue.BucketOptions(
                                    explicit=metrics_pb2.DistributionValue
                                    .BucketOptions.Explicit(
                                        bounds=[16.0 * MiB, 256.0 * MiB]))),
                            buckets=[
                                metrics_pb2.DistributionValue.Bucket(count=1),
                                metrics_pb2.DistributionValue.Bucket(),
                                metrics_pb2.DistributionValue.Bucket(),
                            ]))
                ]))