def test_constructor_explicit(self):
        """A CountAggregation built with an explicit count exposes it via count_data."""
        initial_count = 4

        agg = aggregation_module.CountAggregation(count=initial_count)

        self.assertEqual(4, agg.count.count_data)
        self.assertEqual("count", agg.aggregation_type)
# --- Example #2 ---
    def test_prometheus_stats(self):
        """End-to-end check: counts recorded per tag value appear in the
        Prometheus scrape output with the expected totals."""
        method_key = tag_key_module.TagKey("method")
        request_count_measure = measure_module.MeasureInt(
            "request_count", "number of requests", "1")
        request_count_view_name = "request_count_view"
        count_agg = aggregation_module.CountAggregation()
        request_count_view = view_module.View(
            request_count_view_name,
            "number of requests broken down by methods", [method_key],
            request_count_measure, count_agg)

        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = prometheus.new_stats_exporter(
            prometheus.Options(namespace="opencensus", port=9303))
        view_manager.register_exporter(exporter)
        view_manager.register_view(request_count_view)

        # Give the exporter's HTTP server a brief moment to come up.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Record once for "some method" and twice for "some other method".
        for method_name, repeats in (("some method", 1),
                                     ("some other method", 2)):
            tag_map = tag_map_module.TagMap()
            tag_map.insert(method_key,
                           tag_value_module.TagValue(method_name))
            measure_map = stats_recorder.new_measurement_map()
            measure_map.measure_int_put(request_count_measure, 1)
            for _ in range(repeats):
                measure_map.record(tag_map)

        if sys.version_info > (3, 0):
            import urllib.request
            contents = urllib.request.urlopen(
                "http://localhost:9303/metrics").read()
        else:
            import urllib2
            contents = urllib2.urlopen("http://localhost:9303/metrics").read()

        self.assertIn(b'# TYPE opencensus_request_count_view_total counter',
                      contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some method"} 1.0', contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some other method"} 2.0', contents)
    def test_prometheus_stats(self):
        """Self-contained smoke test: register a count view, record one
        measurement, and scrape the Prometheus endpoint for the totals."""
        import random
        import sys
        import time

        from opencensus.stats import aggregation as aggregation_module
        from opencensus.stats.exporters import prometheus_exporter as prometheus
        from opencensus.stats import measure as measure_module
        from opencensus.stats import stats as stats_module
        from opencensus.stats import view as view_module
        from opencensus.tags import tag_key as tag_key_module
        from opencensus.tags import tag_map as tag_map_module
        from opencensus.tags import tag_value as tag_value_module

        MiB = 1 << 20
        FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
        VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
            "my.org/measures/video_size", "size of processed videos", "By")
        VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size"
        # NOTE(review): despite the name, this is a *count* aggregation
        # seeded at 256 MiB — confirm intent against the original example.
        VIDEO_SIZE_DISTRIBUTION = aggregation_module.CountAggregation(
            256.0 * MiB)
        VIDEO_SIZE_VIEW = view_module.View(
            VIDEO_SIZE_VIEW_NAME, "processed video size over time",
            [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION)

        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = prometheus.new_stats_exporter(
            prometheus.Options(namespace="opencensus", port=9303))
        view_manager.register_exporter(exporter)
        view_manager.register_view(VIDEO_SIZE_VIEW)

        # Give the exporter's HTTP server a brief moment to come up.
        time.sleep(random.randint(1, 10) / 1000.0)

        tag_map = tag_map_module.TagMap()
        tag_map.insert(
            FRONTEND_KEY,
            tag_value_module.TagValue(str(random.randint(1, 10000))))
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
        measure_map.record(tag_map)

        if sys.version_info > (3, 0):
            import urllib.request
            contents = urllib.request.urlopen(
                "http://localhost:9303/metrics").read()
        else:
            import urllib2
            contents = urllib2.urlopen("http://localhost:9303/metrics").read()

        self.assertIn(
            b'# TYPE opencensus_my.org/views/video_size counter', contents)
        self.assertIn(
            b'opencensus_my.org/views/video_size 268435456.0', contents)
    def test_create_timeseries_multiple_tags(self):
        """Check that exporter creates timeseries for multiple tag values.

        create_time_series_list should return a time series for each set of
        values in the tag value aggregation map.
        """
        agg = aggregation_module.CountAggregation(
            aggregation_type=aggregation_module.Type.COUNT)

        view = view_module.View(
            name="example.org/test_view",
            description="example.org/test_view",
            columns=[tag_key_module.TagKey('color'),
                     tag_key_module.TagKey('shape')],
            measure=mock.Mock(),
            aggregation=agg,
        )

        v_data = view_data_module.ViewData(
            view=view,
            start_time=TEST_TIME_STR,
            end_time=TEST_TIME_STR,
        )

        # Seed two distinct tag-value combinations with different counts.
        v_data._tag_value_aggregation_data_map = {
            ('red', 'square'):
                aggregation_data_module.CountAggregationData(10),
            ('blue', 'circle'):
                aggregation_data_module.CountAggregationData(20),
        }

        v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

        exporter = stackdriver.StackdriverStatsExporter()
        series = exporter.create_time_series_list(v_data)

        self.assertEqual(len(series), 2)
        for ts in series:
            self.assertEqual(len(ts.points), 1)

        by_color = {ts.metric.labels.get('color'): ts for ts in series}
        red_ts = by_color['red']
        blue_ts = by_color['blue']
        self.assertEqual(red_ts.metric.labels.get('shape'), 'square')
        self.assertEqual(blue_ts.metric.labels.get('shape'), 'circle')
        self.assertEqual(red_ts.points[0].value.int64_value, 10)
        self.assertEqual(blue_ts.points[0].value.int64_value, 20)
 def test_create_metric_descriptor_count(self):
     """A count view yields a non-None Stackdriver metric descriptor."""
     option = stackdriver.Options(
         project_id="project-test", metric_prefix="teste")
     exporter = stackdriver.StackdriverStatsExporter(
         options=option, client=mock.Mock())
     count_view = view_module.View(
         "view-count", "processed video size over time", [FRONTEND_KEY],
         VIDEO_SIZE_MEASURE, aggregation_module.CountAggregation(count=2))
     self.assertIsNotNone(exporter.create_metric_descriptor(count_view))
    def __init__(self, test_name, test_description=None):
        """Set up Azure metrics export plus per-test measures and views
        (event count, memory %, cpu %, error count)."""
        # oc will automatically search for the ENV VAR
        # 'APPLICATIONINSIGHTS_CONNECTION_STRING'
        self.exporter = metrics_exporter.new_metrics_exporter()
        self.stats = stats_module.stats
        self.view_manager = self.stats.view_manager
        self.stats_recorder = self.stats.stats_recorder
        self.azure_logger = get_azure_logger(test_name)
        self.name = test_name
        self.desc = test_description

        events_measure_name = "The number of events handled by " + self.name
        memory_measure_name = "memory usage percentage for " + self.name
        cpu_measure_name = "cpu usage percentage for " + self.name
        error_measure_name = "error count for " + self.name

        # Descriptions are only meaningful when a test description was given.
        if self.desc:
            events_measure_desc = "The number of events handled by " + self.desc
            memory_measure_desc = "memory usage percentage for " + self.desc
            cpu_measure_desc = "cpu usage percentage for " + self.desc
            error_measure_desc = "The number of errors happened while running the test for " + self.desc
        else:
            events_measure_desc = None
            memory_measure_desc = None
            cpu_measure_desc = None
            error_measure_desc = None

        self.events_measure = measure_module.MeasureInt(
            events_measure_name, events_measure_desc, "events")
        self.memory_measure = measure_module.MeasureFloat(
            memory_measure_name, memory_measure_desc)
        self.cpu_measure = measure_module.MeasureFloat(
            cpu_measure_name, cpu_measure_desc)
        self.error_measure = measure_module.MeasureInt(
            error_measure_name, error_measure_desc)

        self.events_measure_view = view_module.View(
            events_measure_name, events_measure_desc, [], self.events_measure,
            aggregation_module.SumAggregation())
        self.memory_measure_view = view_module.View(
            memory_measure_name, memory_measure_desc, [], self.memory_measure,
            aggregation_module.LastValueAggregation())
        self.cpu_measure_view = view_module.View(
            cpu_measure_name, cpu_measure_desc, [], self.cpu_measure,
            aggregation_module.LastValueAggregation())
        self.error_measure_view = view_module.View(
            error_measure_name, error_measure_desc, [], self.error_measure,
            aggregation_module.CountAggregation())

        for measure_view in (self.events_measure_view,
                             self.memory_measure_view,
                             self.cpu_measure_view,
                             self.error_measure_view):
            self.view_manager.register_view(measure_view)

        self.mmap = self.stats_recorder.new_measurement_map()
# --- Example #7 ---
 def __init__(self, export_metrics=False):
     """Register the date-check request counter view and, optionally,
     the Azure Application Insights metrics exporter."""
     self.nb_check_requests = measure_module.MeasureInt(
         "nb_check_requests",
         "number of dates check requests for all weeks", "nb")
     self.nb_check_requests_view = view_module.View(
         "nb_check_requests view",
         "number of dates check requests for all weeks", [],
         self.nb_check_requests, aggregation_module.CountAggregation())
     view_manager.register_view(self.nb_check_requests_view)
     self.mmap = stats_recorder.new_measurement_map()
     self.tmap = tag_map_module.TagMap()
     if export_metrics:
         view_manager.register_exporter(
             metrics_exporter.new_metrics_exporter(
                 connection_string=azure_insights.connection_string))
    def test_collector_to_metric_count(self):
        """A view with a count aggregation converts to a Prometheus counter
        metric carrying exactly one sample."""
        count_agg = aggregation_module.CountAggregation(256)
        count_view = view_module.View(
            VIDEO_SIZE_VIEW_NAME, "processed video size over time",
            [FRONTEND_KEY], VIDEO_SIZE_MEASURE, count_agg)
        registry = mock.Mock()
        options = prometheus.Options("test1", 8001, "localhost", registry)
        collector = prometheus.Collector(
            options=options, view_data=mock.Mock())
        collector.register_view(count_view)
        desc = collector.registered_views[list(REGISTERED_VIEW)[0]]
        metric = collector.to_metric(desc=desc, view=count_view)

        self.assertEqual(metric.name, desc['name'])
        self.assertEqual(metric.documentation, desc['documentation'])
        self.assertEqual('counter', metric.type)
        self.assertEqual(1, len(metric.samples))
# --- Example #9 ---
def enable_metrics_views():
    """Register the pymemcache call-count and latency-distribution views."""
    calls_view = view_module.View(
        "pymemcache/calls", "The number of calls",
        [key_method, key_error, key_status],
        m_calls,
        aggregation_module.CountAggregation())

    # Latency buckets (ms):
    # >=0, >=5, >=10, >=25, >=40, >=50, >=75, >=100, >=200, >=400,
    # >=600, >=800, >=1s, >=2s, >=4s, >=6s, >=10s, >=20s
    latency_view = view_module.View(
        "pymemcache/latency", "The distribution of the latencies",
        [key_method, key_error, key_status],
        m_latency_ms,
        aggregation_module.DistributionAggregation(
            [0, 5, 10, 25, 40, 50, 75, 100, 200, 400, 600, 800,
             1000, 2000, 4000, 6000, 10000, 20000]))

    view_manager = stats.Stats().view_manager
    for each_view in (calls_view, latency_view):
        view_manager.register_view(each_view)
# --- Example #10 ---
    def test_create_timeseries_str_tagvalue_count_aggregtation(
            self, monitor_resource_mock):
        """A count view with a string tag value yields one time series whose
        single point carries the pre-seeded count plus one recording."""
        view_manager, stats_recorder, exporter = \
            self.setup_create_timeseries_test()

        view_name1 = "view-name1"
        count_agg = aggregation_module.CountAggregation(count=2)
        view_manager.register_view(view_module.View(
            view_name1, "processed video size over time",
            [FRONTEND_KEY_INT], VIDEO_SIZE_MEASURE_2, count_agg))

        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY_INT, tag_value_module.TagValue("Abc"))

        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB)
        measure_map.record(tag_map)

        v_data = measure_map.measure_to_view_map.get_view(view_name1, None)

        time_series_list = exporter.create_time_series_list(
            v_data, "global", "kubernetes.io/myorg/")
        self.assertEqual(len(time_series_list), 1)
        time_series = time_series_list[0]
        self.assertEqual(time_series.metric.type,
                         "kubernetes.io/myorg/view-name1")
        self.assertCorrectLabels(time_series.metric.labels,
                                 {FRONTEND_KEY_INT_CLEAN: "Abc"},
                                 include_opencensus=True)
        self.assertIsNotNone(time_series.resource)

        self.assertEqual(len(time_series.points), 1)
        # Count was seeded at 2 and incremented once by record() above.
        expected_value = monitoring_v3.types.TypedValue()
        expected_value.int64_value = 3
        self.assertEqual(time_series.points[0].value, expected_value)
# --- Example #11 ---
    def __init__(self, dialog_id: str = None):
        """Build the booking dialog: prompts, the waterfall steps, and the
        OpenCensus instruments used to count bot defects."""
        super(BookingDialog, self).__init__(dialog_id
                                            or BookingDialog.__name__)

        waterfall_steps = [
            self.destination_step,
            self.origin_step,
            self.from_date_step,
            self.to_date_step,
            self.budget_step,
            self.confirm_step,
            self.final_step,
        ]
        self.add_dialog(TextPrompt(TextPrompt.__name__))
        self.add_dialog(ConfirmPrompt(ConfirmPrompt.__name__))
        self.add_dialog(DateResolverDialog(DateResolverDialog.__name__))
        self.add_dialog(
            WaterfallDialog(WaterfallDialog.__name__, waterfall_steps))

        self.initial_dialog_id = WaterfallDialog.__name__
        self.logger = None

        # OpenCensus stats plumbing for counting bot defects.
        self.stats = stats_module.stats
        self.view_manager = self.stats.view_manager
        self.stats_recorder = self.stats.stats_recorder
        self.bot_measure = measure_module.MeasureInt(
            "botdefects", "number of bot defects", "botdefects")
        self.bot_view = view_module.View(
            "defect view", "number of bot defects", [], self.bot_measure,
            aggregation_module.CountAggregation())
        self.view_manager.register_view(self.bot_view)
        self.mmap = self.stats_recorder.new_measurement_map()
        self.tmap = tag_map_module.TagMap()
        self.metrics_exporter = None
        self.message_history = set()
# A generic float measure ("things") shared by all of the views below.
MEASURE = measure_module.MeasureFloat("number", "A number!", "things")

# Views keyed by short name, grouped by aggregation kind.
GAUGE_VIEWS = {
    "last": view_module.View(
        "last",
        "A last value",
        ("tag",),
        MEASURE,
        aggregation_module.LastValueAggregation(),
    )
}
COUNT_VIEWS = {
    "count": view_module.View(
        "count", "A count", ("tag",), MEASURE, aggregation_module.CountAggregation()
    ),
    "sum": view_module.View(
        "sum", "A sum", ("tag",), MEASURE, aggregation_module.SumAggregation()
    ),
}
DISTRIBUTION_VIEWS = {
    "distribution": view_module.View(
        "distribution",
        "A distribution",
        ("tag",),
        MEASURE,
        aggregation_module.DistributionAggregation([50.0, 200.0]),
    )
}
# NOTE(review): empty at import time — presumably populated elsewhere; confirm.
VIEWS = {}
# --- Example #13 ---
from opencensus.ext.azure import metrics_exporter
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module

# Shared OpenCensus stats singletons used by main() below.
stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# Measure counting "carrots", exposed through a count-aggregated view.
CARROTS_MEASURE = measure_module.MeasureInt("carrots", "number of carrots",
                                            "carrots")
CARROTS_VIEW = view_module.View("carrots_view", "number of carrots", [],
                                CARROTS_MEASURE,
                                aggregation_module.CountAggregation())


def main():
    """Wire up the Azure metrics exporter and register the carrots view."""
    # Enable metrics
    # Set the interval in seconds in which you want to send metrics
    # TODO: you need to specify the instrumentation key in a connection string
    # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING
    # environment variable.
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)

    view_manager.register_view(CARROTS_VIEW)
    # Measurement/tag maps for recording; unused beyond creation here —
    # the example appears truncated at this point.
    mmap = stats_recorder.new_measurement_map()
    tmap = tag_map_module.TagMap()
# --- Example #14 ---
    def test_constructor_defaults(self):
        """Default-constructed CountAggregation starts at zero with COUNT type."""
        agg = aggregation_module.CountAggregation()

        self.assertEqual(0, agg.count.count_data)
        self.assertEqual(aggregation_module.Type.COUNT,
                         agg.aggregation_type)
def register_views():
    """Create and register the redispy calls/latency/key-length/value-length
    views with the global view manager."""
    all_tag_keys = [key_method, key_error, key_status]
    # NOTE(review): calls_view aggregates m_latency_ms rather than a
    # dedicated call-count measure — verify this is intentional.
    calls_view = view.View("redispy/calls", "The number of calls",
                           all_tag_keys, m_latency_ms,
                           aggregation.CountAggregation())

    latency_view = view.View(
        "redispy/latency",
        "The distribution of the latencies per method",
        all_tag_keys,
        m_latency_ms,
        aggregation.DistributionAggregation([
            # Latency in buckets (ms):
            # [
            #    >=0ms, >=5ms, >=10ms, >=25ms, >=40ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms,
            #    >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s, >=10s, >=20s, >=50s, >=100s
            # ]
            # NOTE(review): the last bound, 10e5 (= 1,000,000 ms = 1000s),
            # does not match the ">=100s" (1e5) listed above — confirm intent.
            0,
            5,
            10,
            25,
            40,
            50,
            75,
            1e2,
            2e2,
            4e2,
            6e2,
            8e2,
            1e3,
            2e3,
            4e3,
            6e3,
            1e4,
            2e4,
            5e4,
            10e5
        ]))

    key_lengths_view = view.View(
        "redispy/key_lengths",
        "The distribution of the key lengths",
        all_tag_keys,
        m_key_length,
        aggregation.DistributionAggregation([
            # Key length buckets (bytes):
            # [
            #   0B, 5B, 10B, 20B, 50B, 100B, 200B, 500B, 1000B, 2000B, 5000B
            # ]
            0,
            5,
            10,
            20,
            50,
            100,
            200,
            500,
            1000,
            2000,
            5000
        ]))

    value_lengths_view = view.View(
        "redispy/value_lengths",
        "The distribution of the value lengths",
        all_tag_keys,
        m_value_length,
        aggregation.DistributionAggregation([
            # Value length buckets (bytes):
            # [
            #   0B, 5B, 10B, 20B, 50B, 100B, 200B, 500B, 1000B, 2000B, 5000B, 10000B, 20000B
            # ]
            0,
            5,
            10,
            20,
            50,
            100,
            200,
            500,
            1000,
            2000,
            5000,
            10000,
            20000
        ]))
    view_manager = stats.stats.view_manager
    for each_view in [
            calls_view, latency_view, key_lengths_view, value_lengths_view
    ]:
        view_manager.register_view(each_view)
# --- Example #16 ---
 def test_new_aggregation_data_explicit(self):
     """new_aggregation_data carries the explicitly supplied starting count."""
     agg = aggregation_module.CountAggregation(count=4)
     self.assertEqual(4, agg.new_aggregation_data().count_data)
# --- Example #17 ---
 def test_new_aggregation_data_defaults(self):
     """new_aggregation_data defaults to a zero starting count."""
     agg = aggregation_module.CountAggregation()
     self.assertEqual(0, agg.new_aggregation_data().count_data)