# Excerpted test methods from the opencensus-python stats and exporter test
# suites. Each method belongs to a unittest.TestCase class in the original
# test modules; the surrounding class definitions, patch decorators, and
# module-level constants (VIDEO_SIZE_VIEW, REGISTERED_VIEW, TEST_TIME,
# TEST_TIME_STR, MiB, FRONTEND_KEY, ...) are not shown here. The module
# aliases used below (view_data_module, aggregation_module,
# aggregation_data_module, view_module, measure_module, metric_utils,
# tag_map_module, utils, prometheus, stackdriver, ocagent, metrics_pb2,
# timestamp_pb2, mock, datetime) come from the imports of the original
# test files.
def test_collector_add_view_data(self):
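     """View data added via add_view_data should be stored in the Prometheus
     collector's view_name_to_data_map, keyed by the registered view's name."""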
     registry = mock.Mock()
     start_time = datetime.utcnow()
     end_time = datetime.utcnow()
     view_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                           start_time=start_time,
                                           end_time=end_time)
     options = prometheus.Options("test1", 8001, "localhost", registry)
     collector = prometheus.Collector(options=options)
     collector.register_view(VIDEO_SIZE_VIEW)
     collector.add_view_data(view_data)
     view_name_to_data_map = {list(REGISTERED_VIEW)[0]: view_data}
     collector.collect()
     self.assertEqual(view_name_to_data_map,
                      collector.view_name_to_data_map)
 def test_export_with_data(self):
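     """Exporting non-empty view data should hand the data off to the
     Stackdriver exporter's transport."""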
     client = mock.Mock()
     transport = mock.Mock()
     start_time = datetime.utcnow()
     end_time = datetime.utcnow()
     v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                        start_time=start_time,
                                        end_time=end_time)
     view_data = [v_data]
     option = stackdriver.Options(project_id="project-test")
     exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                     client=client,
                                                     transport=transport)
     exporter.export(view_data)
     self.assertTrue(exporter.transport.export.called)
Example #3
    def test_record_with_attachment(self):
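        """Recording a value with attachments should keep those attachments
        on the exemplar stored for the recorded value."""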
        boundaries = [1, 2, 3]
        distribution_aggregation = aggregation_module.DistributionAggregation(
            boundaries=boundaries)
        name = "testName"
        description = "testMeasure"
        unit = "testUnit"

        measure = measure_module.MeasureInt(name=name,
                                            description=description,
                                            unit=unit)

        description = "testMeasure"
        columns = ["key1", "key2"]

        view = view_module.View(name=name,
                                description=description,
                                columns=columns,
                                measure=measure,
                                aggregation=distribution_aggregation)

        start_time = datetime.utcnow()
        attachments = {"One": "one", "Two": "two"}
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)
        context = mock.Mock()
        context.map = {'key1': 'val1', 'key2': 'val2'}
        time = utils.to_iso_str()
        value = 1

        view_data.record(context=context,
                         value=value,
                         timestamp=time,
                         attachments=attachments)
        tag_values = view_data.get_tag_values(tags=context.map,
                                              columns=view.columns)
        tuple_vals = tuple(tag_values)

        self.assertEqual(['val1', 'val2'], tag_values)
        self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
        self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
        self.assertIsNotNone(
            view_data.tag_value_aggregation_data_map[tuple_vals])
        self.assertEqual(
            attachments, view_data.tag_value_aggregation_data_map[tuple_vals].
            exemplars[1].attachments)
Example #4
    def test_record_with_multi_keys(self):
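        """Records with distinct tag-value combinations should aggregate
        into separate entries, while repeated records with the same
        combination accumulate in one sum."""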
        measure = mock.Mock()
        sum_aggregation = aggregation_module.SumAggregation()
        view = view_module.View(
            "test_view", "description", ['key1', 'key2'], measure, sum_aggregation)
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)
        context = mock.Mock()
        context.map = {'key1': 'val1', 'key2': 'val2'}
        time = datetime.utcnow().isoformat() + 'Z'
        value = 1
        self.assertEqual({}, view_data.tag_value_aggregation_data_map)

        view_data.record(context=context, value=value, timestamp=time, attachments=None)
        tag_values = view_data.get_tag_values(
            tags=context.map, columns=view.columns)
        tuple_vals = tuple(tag_values)
        self.assertEqual(['val1', 'val2'], tag_values)
        self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
        self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
        self.assertIsNotNone(view_data.tag_value_aggregation_data_map[tuple_vals])
        sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
        self.assertEqual(1, sum_data.sum_data)

        context_2 = mock.Mock()
        context_2.map = {'key1': 'val3', 'key2': 'val2'}
        time_2 = datetime.utcnow().isoformat() + 'Z'
        value_2 = 2
        view_data.record(context=context_2, value=value_2, timestamp=time_2, attachments=None)
        tag_values_2 = view_data.get_tag_values(
            tags=context_2.map, columns=view.columns)
        tuple_vals_2 = tuple(tag_values_2)
        self.assertEqual(['val3', 'val2'], tag_values_2)
        self.assertTrue(tuple_vals_2 in view_data.tag_value_aggregation_data_map)
        sum_data_2 = view_data.tag_value_aggregation_data_map.get(tuple_vals_2)
        self.assertEqual(2, sum_data_2.sum_data)

        time_3 = datetime.utcnow().isoformat() + 'Z'
        value_3 = 3
        # Use the same context {'key1': 'val1', 'key2': 'val2'}.
        # Record to entry [(val1, val2), sum=1].
        view_data.record(context=context, value=value_3, timestamp=time_3, attachments=None)
        self.assertEqual(4, sum_data.sum_data)
        # The other entry should remain unchanged.
        self.assertEqual(2, sum_data_2.sum_data)
    def test_create_timeseries_from_distribution(self):
        """Check for explicit 0-bound bucket for SD export."""
        agg = aggregation_module.DistributionAggregation(
            aggregation_type=aggregation_module.Type.DISTRIBUTION)

        view = view_module.View(
            name="example.org/test_view",
            description="example.org/test_view",
            columns=['tag_key'],
            measure=mock.Mock(),
            aggregation=agg,
        )

        v_data = view_data_module.ViewData(
            view=view,
            start_time=TEST_TIME_STR,
            end_time=TEST_TIME_STR,
        )

        # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8]
        dad = aggregation_data_module.DistributionAggregationData(
            mean_data=4.5,
            count_data=100,
            sum_of_sqd_deviations=825,
            counts_per_bucket=[20, 20, 20, 20, 20],
            bounds=[2, 4, 6, 8],
            exemplars={mock.Mock()
                       for ii in range(5)})
        v_data._tag_value_aggregation_data_map = {('tag_value', ): dad}

        v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

        exporter = stackdriver.StackdriverStatsExporter()
        time_series_list = exporter.create_time_series_list(v_data)
        self.assertEqual(len(time_series_list), 1)
        [time_series] = time_series_list

        self.check_labels(time_series.metric.labels, {'tag_key': 'tag_value'},
                          include_opencensus=True)
        self.assertEqual(len(time_series.points), 1)
        [point] = time_series.points
        dv = point.value.distribution_value
        self.assertEqual(100, dv.count)
        self.assertEqual(825.0, dv.sum_of_squared_deviation)
        self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts)
        self.assertEqual([0, 2, 4, 6, 8],
                         dv.bucket_options.explicit_buckets.bounds)
Example #6
    def test_get_tag_values(self):
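        """get_tag_values should return the value for each view column, and
        None for columns missing from the tag map."""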
        view = mock.Mock()
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)

        tags = {'testTag1': 'testVal1'}
        columns = ['testTag1']
        tag_values = view_data.get_tag_values(tags, columns)
        self.assertEqual(['testVal1'], tag_values)

        tags = {'testTag1': 'testVal1'}
        columns = ['testTag2']
        tag_values = view_data.get_tag_values(tags, columns)
        self.assertEqual([None], tag_values)
    def test_export_double_point_value(self):
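        """A float sum exported through the OC agent stats exporter should
        appear as the point's double_value."""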
        view = view_module.View('', '', [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                aggregation_module.SumAggregation())
        v_data = view_data_module.ViewData(view=view,
                                           start_time=TEST_TIME_STR,
                                           end_time=TEST_TIME_STR)
        v_data.record(context=tag_map_module.TagMap(),
                      value=2.5,
                      timestamp=None)
        view_data = [v_data]
        view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)]

        handler = mock.Mock(spec=ocagent.ExportRpcHandler)
        ocagent.StatsExporter(handler).export_metrics(view_data)
        self.assertEqual(
            handler.send.call_args[0]
            [0].metrics[0].timeseries[0].points[0].double_value, 2.5)
    def test_export_view_data(self):
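        """Exporting VIDEO_SIZE_VIEW data through the OC agent exporter should
        produce the expected cumulative-distribution MetricDescriptor and
        TimeSeries."""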
        v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                           start_time=TEST_TIME_STR,
                                           end_time=TEST_TIME_STR)
        v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None)
        view_data = [v_data]
        view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)]

        handler = mock.Mock(spec=ocagent.ExportRpcHandler)
        ocagent.StatsExporter(handler).export_metrics(view_data)

        self.assertEqual(
            handler.send.call_args[0][0].metrics[0].metric_descriptor,
            metrics_pb2.MetricDescriptor(
                name=VIDEO_SIZE_VIEW_NAME,
                description='processed video size over time',
                unit='By',
                type=metrics_pb2.MetricDescriptor.CUMULATIVE_DISTRIBUTION,
                label_keys=[metrics_pb2.LabelKey(key=FRONTEND_KEY)]))

        self.assertEqual(
            handler.send.call_args[0][0].metrics[0].timeseries[0],
            metrics_pb2.TimeSeries(
                start_timestamp=timestamp_pb2.Timestamp(seconds=1545699723,
                                                        nanos=4000),
                label_values=[metrics_pb2.LabelValue(has_value=False)],
                points=[
                    metrics_pb2.Point(
                        timestamp=timestamp_pb2.Timestamp(seconds=1545699723,
                                                          nanos=4000),
                        distribution_value=metrics_pb2.DistributionValue(
                            sum=2,
                            count=1,
                            bucket_options=metrics_pb2.DistributionValue.
                            BucketOptions(
                                explicit=metrics_pb2.DistributionValue.
                                BucketOptions.Explicit(
                                    bounds=[16.0 * MiB, 256.0 * MiB])),
                            buckets=[
                                metrics_pb2.DistributionValue.Bucket(count=1),
                                metrics_pb2.DistributionValue.Bucket(),
                                metrics_pb2.DistributionValue.Bucket(),
                            ]))
                ]))
Example #9
    def test_record(self):
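        """record() should key aggregation data by the tuple of tag values
        for the view's columns and leave other tuples untouched until data
        is added for them."""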
        view = mock.Mock()
        view.columns = ['key1']
        view.aggregation = mock.Mock()
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)

        context = mock.Mock()
        context.map = {'key1': 'val1', 'key2': 'val2'}
        time = datetime.utcnow().isoformat() + 'Z'
        value = 1
        self.assertEqual({}, view_data.tag_value_aggregation_data_map)

        view_data.record(context=context, value=value, timestamp=time)
        tag_values = view_data.get_tag_values(tags=context.map,
                                              columns=view.columns)
        tuple_vals = tuple(tag_values)
        self.assertEqual(['val1'], tag_values)
        self.assertIsNotNone(view_data.tag_value_aggregation_data_map)

        self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
        self.assertIsNotNone(
            view_data.tag_value_aggregation_data_map[tuple_vals])
        self.assertIsNotNone(
            view_data.tag_value_aggregation_data_map.get(tuple_vals).add(
                value))

        view_data.record(context=context, value=value, timestamp=time)
        tag_values.append('val2')
        tuple_vals_2 = tuple(['val2'])
        self.assertFalse(
            tuple_vals_2 in view_data.tag_value_aggregation_data_map)
        view_data.tag_value_aggregation_data_map[
            tuple_vals_2] = view.aggregation
        self.assertEqual(
            view_data.tag_value_aggregation_data_map.get(tuple_vals_2),
            view_data.view.aggregation)
        self.assertIsNotNone(
            view_data.tag_value_aggregation_data_map.get(tuple_vals_2).add(
                value))
Example #10
    def test_get_tag_map(self):
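        """get_tag_map should return a non-empty context as-is and fall back
        to the last stored tag map when given an empty context."""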
        view = mock.Mock()
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)
        test_context_1 = {'key1': 'val1'}
        context_map_1 = view_data.get_tag_map(context=test_context_1)
        self.assertEqual(test_context_1, view_data.tag_map)
        self.assertEqual(test_context_1, context_map_1)

        test_context_2 = {'key1': 'val2'}
        context_map_2 = view_data.get_tag_map(context=test_context_2)
        self.assertEqual(test_context_2, context_map_2)

        test_context_3 = {}
        context_map_3 = view_data.get_tag_map(context=test_context_3)
        self.assertEqual({'key1': 'val2'}, context_map_3)
Example #11
 def test_record_with_none_context(self):
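     """Recording with context=None should yield None for every column's
     tag value while still aggregating the recorded value."""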
     measure = mock.Mock(spec=measure_module.MeasureInt)
     sum_aggregation = aggregation_module.SumAggregation()
     view = view_module.View("test_view", "description", ['key1', 'key2'],
                             measure, sum_aggregation)
     start_time = datetime.utcnow()
     end_time = datetime.utcnow()
     view_data = view_data_module.ViewData(view=view,
                                           start_time=start_time,
                                           end_time=end_time)
     time = utils.to_iso_str()
     value = 4
     view_data.record(context=None,
                      value=value,
                      timestamp=time,
                      attachments=None)
     tag_values = view_data.get_tag_values(tags={}, columns=view.columns)
     tuple_vals = tuple(tag_values)
     self.assertEqual([None, None], tag_values)
     self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
     sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
     self.assertEqual(4, sum_data.sum_data)
Example #12
 def test_record_with_missing_key_in_context(self):
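     """A tag key missing from the context should appear as None in the tag
     value tuple, and the value should still be aggregated under it."""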
     measure = mock.Mock()
     sum_aggregation = aggregation_module.SumAggregation()
     view = view_module.View(
         "test_view", "description", ['key1', 'key2'], measure, sum_aggregation)
     start_time = datetime.utcnow()
     end_time = datetime.utcnow()
     view_data = view_data_module.ViewData(view=view,
                                           start_time=start_time,
                                           end_time=end_time)
     context = mock.Mock()
     context.map = {'key1': 'val1', 'key3': 'val3'}  # key2 is not in the context.
     time = datetime.utcnow().isoformat() + 'Z'
     value = 4
     view_data.record(context=context, value=value, timestamp=time, attachments=None)
     tag_values = view_data.get_tag_values(
         tags=context.map, columns=view.columns)
     tuple_vals = tuple(tag_values)
     self.assertEqual(['val1', None], tag_values)
     self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
     sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
     self.assertEqual(4, sum_data.sum_data)
Example #13
    def test_create_batched_time_series(self, monitor_resource_mock):
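        """Batching one view datum with batch size 1 should yield a single
        batch containing a single time series with the custom OpenCensus
        metric type.

        Note: the monitor_resource_mock parameter implies a mock.patch
        decorator on this method in the full test module; the decorator is
        not shown in this excerpt.
        """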
        client = mock.Mock()
        v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                           start_time=TEST_TIME,
                                           end_time=TEST_TIME)
        v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None)
        view_data = [v_data]

        option = stackdriver.Options(project_id="project-test")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)

        time_series_batches = exporter.create_batched_time_series(view_data, 1)

        self.assertEqual(len(time_series_batches), 1)
        [time_series_batch] = time_series_batches
        self.assertEqual(len(time_series_batch), 1)
        [time_series] = time_series_batch
        self.assertEqual(
            time_series.metric.type,
            'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME)
        self.assertCorrectLabels(time_series.metric.labels, {},
                                 include_opencensus=True)