def setup_open_census():
    """Configure OpenCensus stats for the Flask app and export to Stackdriver.

    Creates the response-time measure, the tag keys used to slice it, a
    last-value view over them, and registers a Stackdriver exporter built
    from the PROJECT_ID environment variable.
    """
    census_stats = stats.Stats()

    # Measure for request latency, recorded in milliseconds.
    app.m_response_ms = measure_module.MeasureFloat(
        "flask_response_time", "The request duration", "ms")

    # Tag keys used to break the latency measure down.
    app.key_method = tag_key_module.TagKey("method")
    # Create the status key
    app.key_status = tag_key_module.TagKey("status")
    # Create the error key
    app.key_error = tag_key_module.TagKey("error")

    app.view_manager = census_stats.view_manager
    app.stats_recorder = census_stats.stats_recorder

    latency_view = view.View(
        "response_time",
        "The time it took to respond",
        [app.key_method, app.key_status, app.key_error],
        app.m_response_ms,
        aggregation.LastValueAggregation())

    # Export to Stackdriver under the configured GCP project.
    exporter_options = stackdriver.Options(project_id=os.getenv('PROJECT_ID'))
    app.exporter = stackdriver.new_stats_exporter(options=exporter_options)

    app.view_manager.register_exporter(app.exporter)
    app.view_manager.register_view(latency_view)
    def do_test_view_data_to_metric(self, aggregation_class,
                                    value_type, metric_descriptor_type):
        """Test that ViewDatas are converted correctly into Metrics.

        This test doesn't check that the various aggregation data `to_point`
        methods handle the point conversion correctly, just that converted
        Point is included in the Metric, and the metric has the expected
        structure, descriptor, and labels.

        Args:
            aggregation_class: aggregation class to mock on the view.
            value_type: class expected for the converted point's value.
            metric_descriptor_type: descriptor type reported by the mocked
                aggregation; the converted metric must carry the same type.
        """
        start_time = datetime.datetime(2019, 1, 25, 11, 12, 13)
        current_time = datetime.datetime(2019, 1, 25, 12, 13, 14)

        mock_measure = mock.Mock(spec=measure.MeasureFloat)
        mock_aggregation = mock.Mock(spec=aggregation_class)
        mock_aggregation.get_metric_type.return_value = metric_descriptor_type

        # View with two tag columns so label keys/values can be verified.
        vv = view.View(
            name=mock.Mock(),
            description=mock.Mock(),
            columns=[tag_key.TagKey('k1'), tag_key.TagKey('k2')],
            measure=mock_measure,
            aggregation=mock_aggregation)

        vd = mock.Mock(spec=view_data.ViewData)
        vd.view = vv
        vd.start_time = start_time

        mock_point = mock.Mock(spec=point.Point)
        mock_point.value = mock.Mock(spec=value_type)

        # Aggregation data whose to_point() yields the mocked point above.
        mock_agg = mock.Mock(spec=aggregation_data.SumAggregationData)
        mock_agg.to_point.return_value = mock_point

        # One entry: tag values ('v1', 'v2') map to the mocked aggregation.
        vd.tag_value_aggregation_data_map = {
            (tag_value.TagValue('v1'), tag_value.TagValue('v2')): mock_agg
        }

        metric = metric_utils.view_data_to_metric(vd, current_time)
        # Conversion must sample the point at the supplied current time.
        mock_agg.to_point.assert_called_once_with(current_time)

        # Descriptor mirrors the view's name/description/unit/type and keys.
        self.assertEqual(metric.descriptor.name, vv.name)
        self.assertEqual(metric.descriptor.description, vv.description)
        self.assertEqual(metric.descriptor.unit, vv.measure.unit)
        self.assertEqual(metric.descriptor.type, metric_descriptor_type)
        self.assertListEqual(
            [lk.key for lk in metric.descriptor.label_keys],
            ['k1', 'k2'])

        # Exactly one time series, carrying the tag values and the point.
        self.assertEqual(len(metric.time_series), 1)
        [ts] = metric.time_series
        self.assertEqual(ts.start_timestamp, start_time)
        self.assertListEqual(
            [lv.value for lv in ts.label_values],
            ['v1', 'v2'])
        self.assertEqual(len(ts.points), 1)
        [pt] = ts.points
        self.assertEqual(pt, mock_point)
 def test_is_valid(self):
     """Check TagKey name validation: empty, simple, over-long, non-ASCII."""
     # A very long key name, expected to exceed the maximum allowed length.
     test_key3 = 'e9nnb1ixRnvzBH1TUonCG5IsV3ba2PMKjAbSxdLFFpgxFKhZHfi92ajNH6EARaK9FGGShk2EeZ4XObwqIPBwi7j4ZSRR1ZWXtS15keA1h4c9CxeAdakcxxUN0YH6mLJ0BygwRbdbMSeOIPWLo7iyGCil4njKOxH6HF7k0aN4BQl03HQZoXe0t0gd5xKQW37ePNA4FRVZlbLbib3GCF7BeKeA0DKMtuRu27r2hDGEFAmvqh3JEnqOy4gDbhFubaLblr4R4GOHo'
     # Empty names are invalid.
     tag_key1 = tag_key_module.TagKey('')
     self.assertFalse(tag_key1.is_valid_name(tag_key1.name))
     # A simple ASCII name is valid.
     tag_key2 = tag_key_module.TagKey('testKey')
     self.assertTrue(tag_key2.is_valid_name(tag_key2.name))
     # Over-long names are invalid.
     tag_key3 = tag_key_module.TagKey(test_key3)
     self.assertFalse(tag_key3.is_valid_name(tag_key3.name))
     # Non-ASCII characters are invalid.
     tag_key4 = tag_key_module.TagKey('Æ!01kr')
     # BUG FIX: originally asserted via tag_key3 (copy-paste); validate
     # tag_key4 itself so this case is actually exercised.
     self.assertFalse(tag_key4.is_valid_name(tag_key4.name))
    def test_create_timeseries_multiple_tags(self):
        """Check that exporter creates timeseries for multiple tag values.

        create_time_series_list should return a time series for each set of
        values in the tag value aggregation map.
        """
        agg = aggregation_module.CountAggregation(
            aggregation_type=aggregation_module.Type.COUNT)

        # View with two tag columns so each map entry carries two labels.
        view = view_module.View(
            name="example.org/test_view",
            description="example.org/test_view",
            columns=[
                tag_key_module.TagKey('color'),
                tag_key_module.TagKey('shape')
            ],
            measure=mock.Mock(),
            aggregation=agg,
        )

        v_data = view_data_module.ViewData(
            view=view,
            start_time=TEST_TIME_STR,
            end_time=TEST_TIME_STR,
        )

        # Two distinct tag-value tuples -> two expected time series.
        rs_count = aggregation_data_module.CountAggregationData(10)
        bc_count = aggregation_data_module.CountAggregationData(20)
        v_data._tag_value_aggregation_data_map = {
            ('red', 'square'): rs_count,
            ('blue', 'circle'): bc_count,
        }

        # Convert the view data into a metric before exporting.
        v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

        exporter = stackdriver.StackdriverStatsExporter()
        time_series_list = exporter.create_time_series_list(v_data)

        self.assertEqual(len(time_series_list), 2)
        self.assertEqual(len(time_series_list[0].points), 1)
        self.assertEqual(len(time_series_list[1].points), 1)

        # Output order is not guaranteed; index the series by 'color' label.
        ts_by_color = {
            ts.metric.labels.get('color'): ts
            for ts in time_series_list
        }
        rs_ts = ts_by_color['red']
        bc_ts = ts_by_color['blue']
        self.assertEqual(rs_ts.metric.labels.get('shape'), 'square')
        self.assertEqual(bc_ts.metric.labels.get('shape'), 'circle')
        self.assertEqual(rs_ts.points[0].value.int64_value, 10)
        self.assertEqual(bc_ts.points[0].value.int64_value, 20)
Beispiel #5
0
    def _record(self, metric_point: MetricPoint,
                measurement_map: MeasurementMap):
        """Record a single metric point to export.

        NOTE: When this method is called, the caller should acquire a lock.

        Args:
            metric_point(MetricPoint) metric point defined in common.proto
            measurement_map(MeasurementMap): Measurement map to record metrics.
        """
        metric = self._registry.get(metric_point.metric_name)
        # Metrics should be always registered dynamically.
        assert metric

        # Convert the raw tag dict into an OpenCensus TagMap.
        tag_map = tag_map_module.TagMap()
        for tag_name, raw_value in metric_point.tags.items():
            tag_map.insert(tag_key_module.TagKey(tag_name),
                           tag_value_module.TagValue(raw_value))

        measurement_map.measure_float_put(metric.measure, metric_point.value)
        # NOTE: When we record this metric, timestamp will be renewed.
        measurement_map.record(tag_map)
Beispiel #6
0
    def __init__(self, app=None, blacklist_paths=None, exporter=None):
        """Initialize the middleware.

        Args:
            app: Flask application to instrument. May be None; callers can
                supply it later (the `app=None` default implies this).
            blacklist_paths: request paths excluded from measurement.
            exporter: stats exporter used for the recorded measurements.
        """
        self.app = app
        self.blacklist_paths = blacklist_paths
        self.exporter = exporter

        # BUG FIX: the original set attributes on self.app *before* the
        # `is not None` check below, so the documented app=None default
        # crashed with AttributeError. Guard all app access together.
        if self.app is not None:
            # Measure for request latency, recorded in milliseconds.
            self.app.m_response_ms = measure_module.MeasureFloat(
                "flask_response_time", "The request duration", "ms")

            self.app.key_method = tag_key_module.TagKey("method")
            # Create the status key
            self.app.key_status = tag_key_module.TagKey("status")
            # Create the error key
            self.app.key_error = tag_key_module.TagKey("error")

            self.init_app(app)
    def test_stats_record_async(self):
        """Record a distribution measurement and verify the Stackdriver
        metric descriptor appears via the async exporter.

        Integration test: talks to the live Stackdriver monitoring API for
        the PROJECT project.
        """
        # We are using sufix in order to prevent cached objects
        # (per-process suffix keeps names unique across test runs).
        sufix = str(os.getpid())

        tag_key = "SampleKeyAsyncTest%s" % sufix
        measure_name = "SampleMeasureNameAsyncTest%s" % sufix
        measure_description = "SampleDescriptionAsyncTest%s" % sufix
        view_name = "SampleViewNameAsyncTest%s" % sufix
        view_description = "SampleViewDescriptionAsyncTest%s" % sufix

        FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key)
        VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt(
            measure_name, measure_description, "By")
        VIDEO_SIZE_VIEW_NAME_ASYNC = view_name
        VIDEO_SIZE_DISTRIBUTION_ASYNC =\
            aggregation_module.DistributionAggregation(
                [0.0, 16.0 * MiB, 256.0 * MiB]
            )
        VIDEO_SIZE_VIEW_ASYNC = view_module.View(
            VIDEO_SIZE_VIEW_NAME_ASYNC, view_description, [FRONTEND_KEY_ASYNC],
            VIDEO_SIZE_MEASURE_ASYNC, VIDEO_SIZE_DISTRIBUTION_ASYNC)

        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=PROJECT))
        view_manager.register_exporter(exporter)

        # Register view.
        view_manager.register_view(VIDEO_SIZE_VIEW_ASYNC)

        # Sleep for [0, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Process video.
        # Record the processed video size.
        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY_ASYNC, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE_ASYNC, 25 * MiB)

        measure_map.record(tag_map)

        # Poll Stackdriver until the descriptor shows up (export is async);
        # retry with a fixed wait up to RETRY_MAX_ATTEMPT attempts.
        @retry(wait_fixed=RETRY_WAIT_PERIOD,
               stop_max_attempt_number=RETRY_MAX_ATTEMPT)
        def get_metric_descriptors(self, exporter, view_description):
            name = exporter.client.project_path(PROJECT)
            list_metrics_descriptors = exporter.client.list_metric_descriptors(
                name)
            # Find the descriptor matching our freshly created view.
            element = next((element for element in list_metrics_descriptors
                            if element.description == view_description), None)
            self.assertIsNotNone(element)
            self.assertEqual(element.description, view_description)
            self.assertEqual(element.unit, "By")

        get_metric_descriptors(self, exporter, view_description)
Beispiel #8
0
    def test_create_timeseries_something(self):
        """Check that exporter creates timeseries for multiple tag values.

        create_time_series_list should return a time series for each set of
        values in the tag value aggregation map.
        """

        # Mocked ViewData: a COUNT view with two tag columns.
        v_data = mock.Mock(spec=view_data_module.ViewData)
        v_data.view.name = "example.org/test_view"
        v_data.view.columns = [
            tag_key_module.TagKey('color'),
            tag_key_module.TagKey('shape')
        ]
        v_data.view.aggregation.aggregation_type = \
            aggregation_module.Type.COUNT
        v_data.start_time = TEST_TIME
        v_data.end_time = TEST_TIME

        # Two distinct tag-value tuples -> two expected time series.
        rs_count = aggregation_data_module.CountAggregationData(10)
        bc_count = aggregation_data_module.CountAggregationData(20)
        v_data.tag_value_aggregation_data_map = {
            ('red', 'square'): rs_count,
            ('blue', 'circle'): bc_count,
        }

        exporter = stackdriver.StackdriverStatsExporter(
            options=mock.Mock(),
            client=mock.Mock(),
        )
        # NOTE(review): this exporter version takes two extra (here empty)
        # positional arguments — presumably metric prefix/resource type.
        time_series_list = exporter.create_time_series_list(v_data, "", "")

        self.assertEqual(len(time_series_list), 2)
        self.assertEqual(len(time_series_list[0].points), 1)
        self.assertEqual(len(time_series_list[1].points), 1)

        # Output order is not guaranteed; index the series by 'color' label.
        ts_by_color = {
            ts.metric.labels.get('color'): ts
            for ts in time_series_list
        }
        rs_ts = ts_by_color['red']
        bc_ts = ts_by_color['blue']
        self.assertEqual(rs_ts.metric.labels.get('shape'), 'square')
        self.assertEqual(bc_ts.metric.labels.get('shape'), 'circle')
        self.assertEqual(rs_ts.points[0].value.int64_value, 10)
        self.assertEqual(bc_ts.points[0].value.int64_value, 20)
Beispiel #9
0
def test_gauge():
    """Gauge should expose its constructor arguments through __dict__()."""
    tag_keys = [tag_key_module.TagKey(str(idx)) for idx in range(10)]
    name = "name"
    description = "description"
    units = "units"

    gauge = Gauge(name, description, units, tag_keys)
    info = gauge.__dict__()

    assert info["name"] == name
    assert info["description"] == description
    assert info["units"] == units
    assert info["tags"] == tag_keys
Beispiel #10
0
def test_missing_def(cleanup_agent):
    # Make sure when metrics with description and units are reported,
    # agent updates its registry to include them.
    POINTS_DEF = [4, 5, 6]
    tag = {"TAG_KEY": "TAG_VALUE"}
    # Points reported WITHOUT description/units.
    metrics_points = [
        generate_metrics_point(
            str(i),
            float(i),
            i,
            tag,
        ) for i in POINTS_DEF
    ]

    # At first, metrics shouldn't have description and units.
    # record_metrics_points returns True when description/units are missing.
    assert metrics_agent.record_metrics_points(metrics_points) is True
    for i, metric_entry in zip(POINTS_DEF, metrics_agent.registry.items()):
        metric_name, metric_entry = metric_entry
        assert metric_name == metric_entry.name
        assert metric_entry.name == str(i)
        assert metric_entry.description == ""
        assert metric_entry.units == ""
        assert metric_entry.tags == [tag_key_module.TagKey(key) for key in tag]

    # The points are coming again with description and units.
    # Make sure they are updated.
    metrics_points = [
        generate_metrics_point(str(i),
                               float(i),
                               i,
                               tag,
                               description=str(i),
                               units=str(i)) for i in POINTS_DEF
    ]
    # Now nothing is missing, so the agent reports False.
    assert metrics_agent.record_metrics_points(metrics_points) is False
    for i, metric_entry in zip(POINTS_DEF, metrics_agent.registry.items()):
        metric_name, metric_entry = metric_entry
        assert metric_name == metric_entry.name
        assert metric_entry.name == str(i)
        assert metric_entry.description == str(i)
        assert metric_entry.units == str(i)
        assert metric_entry.tags == [tag_key_module.TagKey(key) for key in tag]
Beispiel #11
0
    def test_set_and_get_tag_map(self):
        """A tag map stored in the execution context is returned unchanged."""
        tag_map = tag_map_module.TagMap()
        tag_map.insert(tag_key_module.TagKey('key'),
                       tag_value_module.TagValue('value'))

        execution_context.set_current_tag_map(tag_map)

        self.assertEqual(execution_context.get_current_tag_map(), tag_map)
Beispiel #12
0
    def test_prometheus_stats(self):
        """Record counts for two methods and verify the scraped Prometheus
        text output contains the expected counter samples.

        Integration test: starts a Prometheus exporter on port 9303 and
        scrapes it over HTTP.
        """

        method_key = tag_key_module.TagKey("method")
        request_count_measure = measure_module.MeasureInt(
            "request_count", "number of requests", "1")
        request_count_view_name = "request_count_view"
        count_agg = aggregation_module.CountAggregation()
        request_count_view = view_module.View(
            request_count_view_name,
            "number of requests broken down by methods", [method_key],
            request_count_measure, count_agg)
        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = prometheus.new_stats_exporter(
            prometheus.Options(namespace="opencensus", port=9303))
        view_manager.register_exporter(exporter)

        view_manager.register_view(request_count_view)

        # Sleep for [1, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # One recording tagged "some method" -> expected count 1.0.
        method_value_1 = tag_value_module.TagValue("some method")
        tag_map_1 = tag_map_module.TagMap()
        tag_map_1.insert(method_key, method_value_1)
        measure_map_1 = stats_recorder.new_measurement_map()
        measure_map_1.measure_int_put(request_count_measure, 1)
        measure_map_1.record(tag_map_1)

        # Two recordings tagged "some other method" -> expected count 2.0.
        method_value_2 = tag_value_module.TagValue("some other method")
        tag_map_2 = tag_map_module.TagMap()
        tag_map_2.insert(method_key, method_value_2)
        measure_map_2 = stats_recorder.new_measurement_map()
        measure_map_2.measure_int_put(request_count_measure, 1)
        measure_map_2.record(tag_map_2)
        measure_map_2.record(tag_map_2)

        # Scrape the exporter; urllib differs between Python 2 and 3.
        if sys.version_info > (3, 0):
            import urllib.request
            contents = urllib.request.urlopen(
                "http://localhost:9303/metrics").read()
        else:
            import urllib2
            contents = urllib2.urlopen("http://localhost:9303/metrics").read()

        self.assertIn(b'# TYPE opencensus_request_count_view_total counter',
                      contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some method"} 1.0', contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some other method"} 2.0', contents)
    def test_prometheus_stats(self):
        """Record one video-size measurement and verify the Prometheus
        scrape output contains the counter.

        Integration test: starts a Prometheus exporter on port 9303.
        Imports are local to keep this snippet self-contained.
        """
        import random
        import time
        import sys

        from opencensus.stats import aggregation as aggregation_module
        from opencensus.stats.exporters import prometheus_exporter as prometheus
        from opencensus.stats import measure as measure_module
        from opencensus.stats import stats as stats_module
        from opencensus.stats import view as view_module
        from opencensus.tags import tag_key as tag_key_module
        from opencensus.tags import tag_map as tag_map_module
        from opencensus.tags import tag_value as tag_value_module

        MiB = 1 << 20
        FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
        VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
            "my.org/measures/video_size", "size of processed videos", "By")
        VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size"
        # NOTE(review): CountAggregation is given a bucket-like float
        # argument here; a DistributionAggregation may have been intended —
        # confirm against the exporter's expectations.
        VIDEO_SIZE_DISTRIBUTION = aggregation_module.CountAggregation(
            256.0 * MiB)
        VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                           "processed video size over time",
                                           [FRONTEND_KEY],
                                           VIDEO_SIZE_MEASURE,
                                           VIDEO_SIZE_DISTRIBUTION)
        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = prometheus.new_stats_exporter(prometheus.Options(namespace="opencensus", port=9303))
        view_manager.register_exporter(exporter)

        view_manager.register_view(VIDEO_SIZE_VIEW)

        # Sleep for [1, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Record one 25 MiB measurement with a random frontend tag value.
        tag_value = tag_value_module.TagValue(str(random.randint(1, 10000)))
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
        measure_map.record(tag_map)

        # Scrape the exporter; urllib differs between Python 2 and 3.
        if sys.version_info > (3, 0):
            import urllib.request
            contents = urllib.request.urlopen("http://localhost:9303/metrics").read()
        else:
            import urllib2
            contents = urllib2.urlopen("http://localhost:9303/metrics").read()

        self.assertIn(b'# TYPE opencensus_my.org/views/video_size counter', contents)
        self.assertIn(b'opencensus_my.org/views/video_size 268435456.0', contents)
    def test_stats_record_sync(self):
        """Record a distribution measurement, export synchronously, and
        verify the Stackdriver metric descriptor via check_sd_md.

        Integration test: talks to the live Stackdriver monitoring API.
        """
        # We are using sufix in order to prevent cached objects
        # NOTE(review): this uses os.getgid() (group id, constant per user)
        # while the async twin uses os.getpid() — confirm which was intended.
        sufix = str(os.getgid())

        tag_key = "SampleKeySyncTest%s" % sufix
        measure_name = "SampleMeasureNameSyncTest%s" % sufix
        measure_description = "SampleDescriptionSyncTest%s" % sufix
        view_name = "SampleViewNameSyncTest%s" % sufix
        view_description = "SampleViewDescriptionSyncTest%s" % sufix

        FRONTEND_KEY = tag_key_module.TagKey(tag_key)
        VIDEO_SIZE_MEASURE = measure_module.MeasureInt(measure_name,
                                                       measure_description,
                                                       "By")
        VIDEO_SIZE_VIEW_NAME = view_name
        VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
            [0.0, 16.0 * MiB, 256.0 * MiB])
        VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                           view_description, [FRONTEND_KEY],
                                           VIDEO_SIZE_MEASURE,
                                           VIDEO_SIZE_DISTRIBUTION)

        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        # Explicit client so the exporter can be driven synchronously below.
        client = monitoring_v3.MetricServiceClient()
        exporter = stackdriver.StackdriverStatsExporter(
            options=stackdriver.Options(project_id=PROJECT), client=client)
        view_manager.register_exporter(exporter)

        # Register view.
        view_manager.register_view(VIDEO_SIZE_VIEW)

        # Sleep for [0, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Process video.
        # Record the processed video size.
        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)

        measure_map.record(tag_map)
        # Push the recorded metrics immediately rather than waiting for the
        # background export interval.
        exporter.export_metrics(stats_module.stats.get_metrics())

        # Sleep for [0, 10] milliseconds to fake wait.
        time.sleep(random.randint(1, 10) / 1000.0)

        self.check_sd_md(exporter, view_description)
 def create_measurement_view(self, measurement_name):
     """Create a response-time measurement and a last-value view for it.

     Registers the view with this object's view manager and returns the
     underlying measurement so callers can record against it.
     """
     test_id_key = tag_key_module.TagKey("TEST_ID")
     measurement = measure_module.MeasureInt(
         f"gw_m_{measurement_name}_response",
         "response time of the home page", "s")
     last_value = aggregation_module.LastValueAggregation()
     response_view = view_module.View(
         f"views_{measurement_name}_response",
         f"glasswall {measurement_name} response time",
         [test_id_key], measurement, last_value)
     # Register view.
     self.view_manager.register_view(response_view)
     return measurement
Beispiel #16
0
    def test_stats_record_async(self):
        """Record a distribution measurement, let the background exporter
        push it, and verify the Stackdriver descriptor via check_sd_md.

        Integration test: talks to the live Stackdriver monitoring API.
        """
        # We are using suffix in order to prevent cached objects
        # (per-process suffix keeps names unique across test runs).
        suffix = str(os.getpid())

        tag_key = "SampleKeyAsyncTest%s" % suffix
        measure_name = "SampleMeasureNameAsyncTest%s" % suffix
        measure_description = "SampleDescriptionAsyncTest%s" % suffix
        view_name = "SampleViewNameAsyncTest%s" % suffix
        view_description = "SampleViewDescriptionAsyncTest%s" % suffix

        FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key)
        VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt(
            measure_name, measure_description, "By")
        VIDEO_SIZE_VIEW_NAME_ASYNC = view_name
        VIDEO_SIZE_DISTRIBUTION_ASYNC =\
            aggregation_module.DistributionAggregation(
                [0.0, 16.0 * MiB, 256.0 * MiB]
            )
        VIDEO_SIZE_VIEW_ASYNC = view_module.View(
            VIDEO_SIZE_VIEW_NAME_ASYNC, view_description, [FRONTEND_KEY_ASYNC],
            VIDEO_SIZE_MEASURE_ASYNC, VIDEO_SIZE_DISTRIBUTION_ASYNC)

        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=PROJECT))
        view_manager.register_exporter(exporter)

        # Register view.
        view_manager.register_view(VIDEO_SIZE_VIEW_ASYNC)

        # Sleep for [0, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Process video.
        # Record the processed video size.
        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY_ASYNC, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE_ASYNC, 25 * MiB)

        measure_map.record(tag_map)
        # Give the exporter thread enough time to export exactly once
        time.sleep(transport.DEFAULT_INTERVAL * 1.5)

        self.check_sd_md(exporter, view_description)
Beispiel #17
0
    def _record_gauge(self, gauge: Gauge, value: float, tags: dict):
        """Record `value` for `gauge`, labeled with the given tags.

        Lazily registers the gauge's view on first use, then records a
        single float measurement tagged with each key/value pair in `tags`.
        """
        view_data = self.view_manager.get_view(gauge.name)
        if not view_data:
            self.view_manager.register_view(gauge.view)
            # Reobtain the view.
        view = self.view_manager.get_view(gauge.name).view

        measurement_map = self.stats_recorder.new_measurement_map()
        # Convert the plain dict into an OpenCensus TagMap.
        tag_map = tag_map_module.TagMap()
        for key, tag_val in tags.items():
            tag_key = tag_key_module.TagKey(key)
            tag_value = tag_value_module.TagValue(tag_val)
            tag_map.insert(tag_key, tag_value)
        measurement_map.measure_float_put(view.measure, value)
        # NOTE: When we record this metric, timestamp will be renewed.
        measurement_map.record(tag_map)
Beispiel #18
0
def test_basic_e2e(cleanup_agent):
    """Basic end-to-end workflow for the metrics agent.

    Covers:
    - Metrics are reported.
    - Metrics are dynamically registered to the registry.
    - Metrics are accessible from Prometheus.
    """
    POINTS_DEF = [0, 1, 2]
    tag = {"TAG_KEY": "TAG_VALUE"}
    metrics_points = [
        generate_metrics_point(str(i),
                               float(i),
                               i,
                               tag,
                               description=str(i),
                               units=str(i)) for i in POINTS_DEF
    ]
    metrics_points_dict = {
        metric_point.metric_name: metric_point
        for metric_point in metrics_points
    }
    # False: no description/units information is missing from these points.
    assert metrics_agent.record_metrics_points(metrics_points) is False
    # Make sure all metrics are registered.
    for i, metric_entry in zip(POINTS_DEF, metrics_agent.registry.items()):
        metric_name, metric_entry = metric_entry
        assert metric_name == metric_entry.name
        assert metric_entry.name == str(i)
        assert metric_entry.description == str(i)
        assert metric_entry.units == str(i)
        assert metric_entry.tags == [tag_key_module.TagKey(key) for key in tag]

    # Make sure all metrics are available through a port.
    response = requests.get("http://localhost:{}".format(
        metrics_agent.metrics_export_port))
    response.raise_for_status()
    for line in response.text.split("\n"):
        for family in text_string_to_metric_families(line):
            metric_name = family.name

            if metric_name not in metrics_points_dict:
                continue

            if line.startswith("# HELP"):
                # description
                assert (family.documentation ==
                        metrics_points_dict[metric_name].description)
            else:
                for sample in family.samples:
                    # BUG FIX: the original bare `==` comparison discarded
                    # its result, so value mismatches never failed the test.
                    assert (metrics_points_dict[metric_name].value ==
                            sample.value)
Beispiel #19
0
    def _register_if_needed(self, metric_point: MetricPoint):
        """Register metrics if they are not registered.

        NOTE: When this method is called, the caller should acquire a lock.

        Unseen metrics:
            Register it with Gauge type metrics. Note that all metrics in
            the agent will be gauge because sampling is already done
            within cpp processes.
        Metrics that are missing description & units:
            In this case, we will notify cpp proceses that we need this
            information. Cpp processes will then report description and units
            of all metrics they have.

        Args:
            metric_point metric point defined in common.proto
        Return:
            True if given metrics are missing description and units.
            False otherwise.
        """
        metric_name = metric_point.metric_name
        metric_description = metric_point.description
        metric_units = metric_point.units
        # NOTE(review): indexing (not .get) implies self._registry returns
        # None for unknown keys — presumably a defaultdict; confirm.
        if self._registry[metric_name] is None:
            # First sighting: build a Gauge and register its view.
            tags = metric_point.tags
            metric_tags = []
            for tag_key in tags:
                metric_tags.append(tag_key_module.TagKey(tag_key))

            metric = Gauge(metric_name, metric_description, metric_units,
                           metric_tags)
            self._registry[metric_name] = metric
            self.view_manager.register_view(metric.view)

            # If there are missing description & unit information,
            # we should notify cpp processes that we need them.
            if not metric_description or not metric_units:
                self._missing_information = True

        # Backfill description/units onto the registered metric once known.
        if metric_description and metric_units:
            self._registry[metric_name].view._description = metric_description
            self._registry[
                metric_name].view.measure._description = metric_description
            self._registry[metric_name].view.measure._unit = metric_units
            self._missing_information = False
Beispiel #20
0
    def test_create_timeseries_invalid_aggregation(self):
        """Exporting a view whose aggregation type is NONE raises TypeError."""
        v_data = mock.Mock(spec=view_data_module.ViewData)
        v_data.view.name = "example.org/base_view"
        v_data.view.columns = [tag_key_module.TagKey('base_key')]
        # Type.NONE is not a concrete aggregation; the exporter must reject it.
        v_data.view.aggregation.aggregation_type = \
            aggregation_module.Type.NONE
        v_data.start_time = TEST_TIME
        v_data.end_time = TEST_TIME

        base_data = aggregation_data_module.BaseAggregationData(10)
        v_data.tag_value_aggregation_data_map = {
            (None, ): base_data,
        }

        exporter = stackdriver.StackdriverStatsExporter(
            options=mock.Mock(),
            client=mock.Mock(),
        )
        self.assertRaises(TypeError, exporter.create_time_series_list, v_data,
                          "", "")
    def test_constructor(self):
        """TagKey keeps the name it was constructed with."""
        name = 'key1'
        created = tag_key_module.TagKey(name)
        self.assertEqual(created.name, name)
import mock

from opencensus.__version__ import __version__
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.stats import view_data as view_data_module
from opencensus.stats.exporters import stackdriver_exporter as stackdriver
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module

# Shared fixtures for the Stackdriver exporter tests.
MiB = 1 << 20  # one mebibyte, used for distribution bucket boundaries
FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
FRONTEND_KEY_FLOAT = tag_key_module.TagKey("my.org/keys/frontend-FLOAT")
FRONTEND_KEY_INT = tag_key_module.TagKey("my.org/keys/frontend-INT")
# BUG FIX: this was a copy-paste of the "-INT" key name, making the STR key
# indistinguishable from the INT key; give it its own "-STR" name.
FRONTEND_KEY_STR = tag_key_module.TagKey("my.org/keys/frontend-STR")

VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "my.org/measure/video_size_test2", "size of processed videos", "By")

VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat(
    "my.org/measure/video_size_test-float", "size of processed videos-float",
    "By")

VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
Beispiel #23
0
 def __init__(self, name, description, unit, tags: List[str]):
     """Create a gauge metric backed by a last-value OpenCensus view.

     Args:
         name: metric (and view) name.
         description: human-readable description of the metric.
         unit: unit string for recorded values.
         tags: tag key names used to label recorded values.
     """
     self._measure = measure_module.MeasureInt(name, description, unit)
     # Convert the plain string tag names into TagKey view columns.
     tags = [tag_key_module.TagKey(tag) for tag in tags]
     # NOTE(review): `self.measure` is presumably a property exposing
     # `self._measure` — confirm it exists on this class.
     self._view = View(name, description, tags, self.measure,
                       aggregation.LastValueAggregation())
import random
import time

from opencensus.ext.prometheus import stats_exporter as prometheus
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module
from pprint import pprint

MiB = 1 << 20  # one mebibyte; used for distribution bucket boundaries
FRONTEND_KEY = tag_key_module.TagKey("myorg_keys_frontend")
VIDEO_SIZE_MEASURE = measure_module.MeasureInt("myorg_measures_video_size",
                                               "size of processed videos",
                                               "By")
VIDEO_SIZE_VIEW_NAME = "myorg_views_video_size"
# Bucket boundaries at 0, 16 MiB and 256 MiB.
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [0.0, 16.0 * MiB, 256.0 * MiB])
# View exported to Prometheus: video size distribution tagged by frontend.
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                   "processed video size over time",
                                   [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                   VIDEO_SIZE_DISTRIBUTION)


def main():
    stats = stats_module.Stats()
    view_manager = stats.view_manager
import unittest

from prometheus_client.core import Sample

from opencensus.ext.prometheus import stats_exporter as prometheus
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.stats import view_data as view_data_module
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module

MiB = 1 << 20  # one mebibyte; used for distribution bucket boundaries

# Tag keys shared by the prometheus exporter tests below.
FRONTEND_KEY = tag_key_module.TagKey("myorg_keys_frontend")
FRONTEND_KEY_FLOAT = tag_key_module.TagKey("myorg_keys_frontend_FLOAT")
FRONTEND_KEY_INT = tag_key_module.TagKey("myorg_keys_frontend_INT")
# BUG FIX: this was a copy-paste duplicate of the INT key
# ("myorg_keys_frontend_INT"), making the STR key indistinguishable
# from the INT key in recorded tag maps.
FRONTEND_KEY_STR = tag_key_module.TagKey("myorg_keys_frontend_STR")

VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "myorg_measure_video_size_test2", "size of processed videos", "By")

VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat(
    "myorg_measure_video_size_test_float", "size of processed videos float",
    "By")

VIDEO_SIZE_VIEW_NAME = "myorg_views_video_size_test2"
# Bucket boundaries at 16 MiB and 256 MiB.
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
    def __init__(self):
        """Define the OpenCensus tag keys and measures for gRPC client and
        server stats.

        NOTE(review): relies on ``self.count``, ``self.byte`` and
        ``self.millisecond`` unit strings being defined before this runs
        (presumably class attributes or set by a base class) -- confirm.
        """
        # Client Tags
        # gRPC server status code received, e.g. OK, CANCELLED, DEADLINE_EXCEEDED
        self.grpc_client_status = tag_key.TagKey("grpc_client_status")

        # Full gRPC method name, including package, service and method,
        # e.g. google.bigtable.v2.Bigtable/CheckAndMutateRow
        self.grpc_client_method = tag_key.TagKey("grpc_client_method")

        # Server Tags
        # gRPC server status code returned, e.g. OK, CANCELLED, DEADLINE_EXCEEDED
        self.grpc_server_status = tag_key.TagKey("grpc_server_status")

        # Full gRPC method name, including package, service and method,
        # e.g. com.exampleapi.v4.BookshelfService/Checkout
        self.grpc_server_method = tag_key.TagKey("grpc_server_method")
        """
        Client Measures 
        """
        # Number of messages sent in the RPC (always 1 for non-streaming RPCs)
        self.grpc_client_sent_messages_per_rpc = measure.MeasureInt(
            name="grpc.io/client/sent_messages_per_rpc",
            description="Number of messages sent in the RPC",
            unit=self.count)

        # Total bytes sent across all request messages per RPC
        self.grpc_client_sent_bytes_per_rpc = measure.MeasureFloat(
            name="grpc.io/client/sent_bytes_per_rpc",
            description="Total bytes sent across all request messages per RPC",
            unit=self.byte)

        # Number of response messages received per RPC (always 1 for non-streaming RPCs)
        self.grpc_client_received_messages_per_rpc = measure.MeasureInt(
            name="grpc.io/client/received_messages_per_rpc",
            description="Number of response messages received per RPC",
            unit=self.count)

        # Total bytes received across all response messages per RPC
        self.grpc_client_received_bytes_per_rpc = measure.MeasureFloat(
            name="grpc.io/client/received_bytes_per_rpc",
            description=
            "Total bytes received across all response messages per RPC",
            unit=self.byte)

        # Time between first byte of request sent to last byte of response received, or terminal error
        self.grpc_client_roundtrip_latency = measure.MeasureFloat(
            name="grpc.io/client/roundtrip_latency",
            description="Time between first byte of request sent to"
            " last byte of response received or terminal error.",
            unit=self.millisecond)

        # Propagated from the server and should have the same value as "grpc.io/server/latency"
        self.grpc_client_server_latency = measure.MeasureFloat(
            name="grpc.io/client/server_latency",
            description="Server latency in msecs",
            unit=self.millisecond)

        # The total number of client RPCs ever opened, including those that have not completed
        self.grpc_client_started_rpcs = measure.MeasureInt(
            name="grpc.io/client/started_rpcs",
            description="Number of started client RPCs.",
            unit=self.count)

        # Total messages sent per method
        self.grpc_client_sent_messages_per_method = measure.MeasureInt(
            name="grpc.io/client/sent_messages_per_method",
            description="Total messages sent per method.",
            unit=self.count)

        # Total messages received per method
        self.grpc_client_received_messages_per_method = measure.MeasureInt(
            name="grpc.io/client/received_messages_per_method",
            description="Total messages received per method.",
            unit=self.count)

        # Total bytes sent per method, recorded real-time as bytes are sent
        self.grpc_client_sent_bytes_per_method = measure.MeasureFloat(
            name="grpc.io/client/sent_bytes_per_method",
            description=
            "Total bytes sent per method, recorded real-time as bytes are sent.",
            unit=self.byte)

        # Total bytes received per method, recorded real-time as bytes are received
        self.grpc_client_received_bytes_per_method = measure.MeasureFloat(
            name="grpc.io/client/received_bytes_per_method",
            description="Total bytes received per method,"
            " recorded real-time as bytes are received.",
            unit=self.byte)
        """
        Server Measures 
        """
        # Number of messages received in each RPC. Has value 1 for non-streaming RPCs
        self.grpc_server_received_messages_per_rpc = measure.MeasureInt(
            name="grpc.io/server/received_messages_per_rpc",
            description="Number of messages received in each RPC",
            unit=self.count)

        # Total bytes received across all messages per RPC
        self.grpc_server_received_bytes_per_rpc = measure.MeasureFloat(
            name="grpc.io/server/received_bytes_per_rpc",
            description="Total bytes received across all messages per RPC",
            unit=self.byte)

        # Number of messages sent in each RPC. Has value 1 for non-streaming RPCs
        self.grpc_server_sent_messages_per_rpc = measure.MeasureInt(
            name="grpc.io/server/sent_messages_per_rpc",
            description="Number of messages sent in each RPC",
            unit=self.count)

        # Total bytes sent across all response messages per RPC
        self.grpc_server_sent_bytes_per_rpc = measure.MeasureFloat(
            name="grpc.io/server/sent_bytes_per_rpc",
            description="Total bytes sent across all response messages per RPC",
            unit=self.byte)

        # Time between first byte of request received to last byte of response sent, or terminal error
        self.grpc_server_server_latency = measure.MeasureFloat(
            name="grpc.io/server/server_latency",
            description="Time between first byte of request received"
            " to last byte of response sent or terminal error.",
            unit=self.millisecond)

        # The total number of server RPCs ever opened, including those that have not completed
        self.grpc_server_started_rpcs = measure.MeasureInt(
            name="grpc.io/server/started_rpcs",
            description="Number of started server RPCs.",
            unit=self.count)

        # Total messages sent per method
        self.grpc_server_sent_messages_per_method = measure.MeasureInt(
            name="grpc.io/server/sent_messages_per_method",
            description="Total messages sent per method.",
            unit=self.count)

        # Total messages received per method
        self.grpc_server_received_messages_per_method = measure.MeasureInt(
            name="grpc.io/server/received_messages_per_method",
            description="Total messages received per method.",
            unit=self.count)

        # Total bytes sent per method, recorded real-time as bytes are sent
        self.grpc_server_sent_bytes_per_method = measure.MeasureFloat(
            name="grpc.io/server/sent_bytes_per_method",
            description=
            "Total bytes sent per method, recorded real-time as bytes are sent.",
            unit=self.byte)

        # Total bytes received per method, recorded real-time as bytes are received
        self.grpc_server_received_bytes_per_method = measure.MeasureFloat(
            name="grpc.io/server/received_bytes_per_method",
            description=
            "Total bytes received per method, recorded real-time as bytes are received.",
            unit=self.byte)
# Beispiel #27 (extraction artifact: snippet separator)
# 0
from flask import Flask, g
from opencensus.stats.exporters import prometheus_exporter as prometheus
from opencensus.stats import measure as measure_module
from opencensus.stats import aggregation
from opencensus.stats import view
from opencensus.stats import stats
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module
import time
'''A simple flask app making use of the open census lib to measure metrics in prometheus'''

# Measure recording how long each flask request takes, in milliseconds.
m_response_ms = measure_module.MeasureFloat("flask_response_time",
                                            "The request duration", "ms")

# Create the method key
key_method = tag_key_module.TagKey("method")
# Create the status key
key_status = tag_key_module.TagKey("status")
# Create the error key
key_error = tag_key_module.TagKey("error")

app = Flask(__name__)


def setup_open_census():
    stats_stats = stats.Stats()
    app.view_manager = stats_stats.view_manager
    app.stats_recorder = stats_stats.stats_recorder
    response_time_view = view.View("response_time",
                                   "The time it took to respond",
                                   [key_method, key_status, key_error],
from opencensus.ext.azure.log_exporter import AzureLogHandler
# OpenCensus TraceCapture and Application Insights via Tracer
from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace.samplers import AlwaysOnSampler
from opencensus.trace.tracer import Tracer
from opencensus.trace.span import SpanKind

#log_prefix = os.path.basename(__file__) + ":"
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# sample speedtest.net output json as a string
sample_string = "{\"download\": 93579659.45913646, \"upload\": 94187295.64264823, \"ping\": 40.125, \"server\": {\"url\": \"http://speedtest.red5g.com:8080/speedtest/upload.php\", \"lat\": \"38.9047\", \"lon\": \"-77.0164\", \"name\": \"Washington, DC\", \"country\": \"United States\", \"cc\": \"US\", \"sponsor\": \"red5g.com\", \"id\": \"30471\", \"host\": \"speedtest.red5g.com:8080\", \"d\": 23.71681279068988, \"latency\": 9.125}, \"timestamp\": \"2021-03-01T13:18:16.460145Z\", \"bytes_sent\": 117825536, \"bytes_received\": 117376482, \"share\": null, \"client\": {\"ip\": \"108.48.69.33\", \"lat\": \"39.0828\", \"lon\": \"-77.1674\", \"isp\": \"Verizon Fios\", \"isprating\": \"3.7\", \"rating\": \"0\", \"ispdlavg\": \"0\", \"ispulavg\": \"0\", \"loggedin\": \"0\", \"country\": \"US\"}}"

# keys in the key map must also be in the view dimensions/columns to be exposed as customDimensions
tag_key_isp = tag_key.TagKey("client_isp")
tag_key_server_host = tag_key.TagKey("server_host")


def load_insights_key():
    """Return the Azure Application Insights instrumentation key.

    Reads ``config.ini`` from the current working directory.

    Raises:
        KeyError: if the ``[azure]`` section or the
            ``azure_instrumentation_key`` option is missing.
    """
    config = configparser.ConfigParser()
    config.read('config.ini')
    # Single lookup replaces the former no-op ``config['azure']`` statement
    # plus two more full section lookups.
    key = config['azure']['azure_instrumentation_key']
    # NOTE(review): this logs a credential at DEBUG level; consider
    # redacting it before enabling debug logging in production.
    logger.debug("Instrumentation key: %s", key)
    return key


# after this, everything sent to this view will end up in azure as a metric
def register_azure_exporter_with_view_manager(view_manager,
# See the License for the specific language governing permissions and
# limitations under the License.

import time
import random
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats.exporters import stackdriver_exporter as stackdriver
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module

MiB = 1 << 20  # one mebibyte; used for distribution bucket boundaries
FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "my.org/measure/video_size_test2", "size of processed videos", "By")
VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
# Bucket boundaries at 0, 16 MiB and 256 MiB.
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [0.0, 16.0 * MiB, 256.0 * MiB])
# View exported to Stackdriver: video size distribution tagged by frontend.
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                   "processed video size over time",
                                   [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                   VIDEO_SIZE_DISTRIBUTION)

# Module-level stats objects shared by the example code below.
stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

exporter = stackdriver.new_stats_exporter(
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import mock

from opencensus.stats import measure_to_view_map as measure_to_view_map_module
from opencensus.stats.aggregation import CountAggregation
from opencensus.stats.measure import BaseMeasure
from opencensus.stats.measure import MeasureInt
from opencensus.stats.view import View
from opencensus.stats.view_data import ViewData
from opencensus.tags import tag_key as tag_key_module

METHOD_KEY = tag_key_module.TagKey("method")
REQUEST_COUNT_MEASURE = MeasureInt("request_count", "number of requests", "1")
REQUEST_COUNT_VIEW_NAME = "request_count_view"
COUNT = CountAggregation()
# View counting requests, broken down by the "method" tag.
REQUEST_COUNT_VIEW = View(REQUEST_COUNT_VIEW_NAME,
                          "number of requests broken down by methods",
                          [METHOD_KEY], REQUEST_COUNT_MEASURE, COUNT)


class TestMeasureToViewMap(unittest.TestCase):
    @staticmethod
    def _get_target_class():
        """Return the class under test (MeasureToViewMap)."""
        target_class = measure_to_view_map_module.MeasureToViewMap
        return target_class

    def _make_one(self, *args, **kw):
        """Construct an instance of the class under test."""
        target_class = self._get_target_class()
        return target_class(*args, **kw)