コード例 #1
0
    def __init__(self, test_name, test_description=None):
        """Wire up OpenCensus measures/views for one stress-test run.

        :param test_name: short test name embedded in every measure title.
        :param test_description: optional human description; when falsy the
            measure descriptions are left as ``None``.
        """
        # The exporter discovers its connection string through the
        # APPLICATIONINSIGHTS_CONNECTION_STRING environment variable.
        self.exporter = metrics_exporter.new_metrics_exporter()
        self.stats = stats_module.stats
        self.view_manager = self.stats.view_manager
        self.stats_recorder = self.stats.stats_recorder
        self.azure_logger = get_azure_logger(test_name)
        self.name = test_name
        self.desc = test_description

        def _detail(prefix):
            # A long description is only produced when one was supplied.
            return prefix + self.desc if self.desc else None

        events_title = "The number of events handled by " + self.name
        events_detail = _detail("The number of events handled by ")
        memory_title = "memory usage percentage for " + self.name
        memory_detail = _detail("memory usage percentage for ")
        cpu_title = "cpu usage percentage for " + self.name
        cpu_detail = _detail("cpu usage percentage for ")
        error_title = "error count for " + self.name
        error_detail = _detail(
            "The number of errors happened while running the test for ")

        self.events_measure = measure_module.MeasureInt(
            events_title, events_detail, "events")
        self.memory_measure = measure_module.MeasureFloat(
            memory_title, memory_detail)
        self.cpu_measure = measure_module.MeasureFloat(
            cpu_title, cpu_detail)
        self.error_measure = measure_module.MeasureInt(
            error_title, error_detail)

        # One view per measure; no tag columns are used.
        self.events_measure_view = view_module.View(
            events_title, events_detail, [], self.events_measure,
            aggregation_module.SumAggregation())
        self.memory_measure_view = view_module.View(
            memory_title, memory_detail, [], self.memory_measure,
            aggregation_module.LastValueAggregation())
        self.cpu_measure_view = view_module.View(
            cpu_title, cpu_detail, [], self.cpu_measure,
            aggregation_module.LastValueAggregation())
        self.error_measure_view = view_module.View(
            error_title, error_detail, [], self.error_measure,
            aggregation_module.CountAggregation())

        for a_view in (self.events_measure_view, self.memory_measure_view,
                       self.cpu_measure_view, self.error_measure_view):
            self.view_manager.register_view(a_view)

        self.mmap = self.stats_recorder.new_measurement_map()
コード例 #2
0
    def track_metric(self, metric_name, metric_value):
        """Record an integer metric value, lazily creating its measure/view.

        :param metric_name: name of the metric (also reused as the measure's
            description and unit).
        :param metric_value: integer value to record.
        """
        if self.appinsights_key:
            print("Tracking metric:" + metric_name + ", Value: " +
                  str(metric_value))

            # First time we see this metric: create and cache its plumbing.
            if metric_name not in self.metrics:
                metrics_measure = measure_module.MeasureInt(
                    metric_name, metric_name, metric_name)
                metrics_view = view_module.View(
                    metric_name,
                    metric_name,
                    [],
                    metrics_measure,
                    aggregation_module.LastValueAggregation(
                        value=metric_value),
                )

                # NOTE(review): these use module-level view_manager /
                # stats_recorder, unlike sibling code that uses the instance
                # attributes — confirm the module globals exist.
                view_manager.register_view(metrics_view)
                mmap = stats_recorder.new_measurement_map()
                tmap = tag_map_module.TagMap()

                self.metrics[metric_name] = {
                    "measure": metrics_measure,
                    "measurement_map": mmap,
                    "tag_map": tmap,
                }

            # Reuse the cached measure / measurement map / tag map.
            measure = self.metrics[metric_name]["measure"]
            mmap = self.metrics[metric_name]["measurement_map"]
            tmap = self.metrics[metric_name]["tag_map"]
            print("Putting metric:" + metric_name + ", Value: " +
                  str(metric_value))
            mmap.measure_int_put(measure, metric_value)
            mmap.record(tmap)
コード例 #3
0
    def track_metric(self, metric_name, metric_value):
        """Record an integer metric value, lazily creating its measure/view.

        Exceptions are caught and printed so metric tracking can never
        break the caller.

        :param metric_name: name of the metric (also reused as the measure's
            description and unit).
        :param metric_value: integer value to record.
        """
        try:
            if self.appinsights_key:
                # First time we see this metric: create and cache its
                # plumbing.
                if metric_name not in self.metrics:
                    metrics_measure = measure_module.MeasureInt(
                        metric_name, metric_name, metric_name)
                    metrics_view = view_module.View(
                        metric_name,
                        metric_name,
                        [],
                        metrics_measure,
                        aggregation_module.LastValueAggregation(
                            value=metric_value),
                    )

                    self.view_manager.register_view(metrics_view)
                    mmap = self.stats_recorder.new_measurement_map()
                    tmap = tag_map_module.TagMap()

                    self.metrics[metric_name] = {
                        "measure": metrics_measure,
                        "measurement_map": mmap,
                        "tag_map": tmap,
                    }

                # Reuse the cached measure / measurement map / tag map.
                measure = self.metrics[metric_name]["measure"]
                mmap = self.metrics[metric_name]["measurement_map"]
                tmap = self.metrics[metric_name]["tag_map"]
                mmap.measure_int_put(measure, metric_value)
                mmap.record(tmap)
        except Exception as e:
            # Deliberate broad catch: metric failures must not propagate.
            print("Exception when tracking a metric:")
            print(e)
    def track_metric(self, metric_name, metric_value):
        """Record an integer metric value, lazily creating its measure/view.

        :param metric_name: name of the metric (also reused as the measure's
            description and unit).
        :param metric_value: integer value to record.
        """
        if self.appinsights_key:
            print("Tracking metric:" + metric_name + ", Value: " +
                  str(metric_value))

            # First time we see this metric: create and cache its plumbing.
            if metric_name not in self.metrics:
                metrics_measure = measure_module.MeasureInt(
                    metric_name, metric_name, metric_name)
                metrics_view = view_module.View(
                    metric_name, metric_name, [], metrics_measure,
                    aggregation_module.LastValueAggregation(
                        value=metric_value))

                # NOTE(review): these use module-level view_manager /
                # stats_recorder, unlike sibling code that uses the instance
                # attributes — confirm the module globals exist.
                view_manager.register_view(metrics_view)
                mmap = stats_recorder.new_measurement_map()
                tmap = tag_map_module.TagMap()

                self.metrics[metric_name] = {
                    'measure': metrics_measure,
                    'measurement_map': mmap,
                    'tag_map': tmap
                }

            # Reuse the cached measure / measurement map / tag map.
            measure = self.metrics[metric_name]['measure']
            mmap = self.metrics[metric_name]['measurement_map']
            tmap = self.metrics[metric_name]['tag_map']
            print("Putting metric:" + metric_name + ", Value: " +
                  str(metric_value))
            mmap.measure_int_put(measure, metric_value)
            mmap.record(tmap)
コード例 #5
0
    def test_constructor_defaults(self):
        """A MeasureInt built without a unit reports ``unit`` as None."""
        measure = measure_module.MeasureInt(
            name="testName", description="testMeasure")

        self.assertEqual(None, measure.unit)
コード例 #6
0
    def track_metric(self, metric_name, metric_value):
        """Record an integer metric value, lazily creating its measure/view.

        Exceptions are caught and printed so metric tracking can never
        break the caller.

        :param metric_name: name of the metric (also reused as the measure's
            description and unit).
        :param metric_value: integer value to record.
        """
        try:
            if self.appinsights_key:
                # First time we see this metric: create and cache its
                # plumbing.
                if metric_name not in self.metrics:
                    metrics_measure = measure_module.MeasureInt(
                        metric_name, metric_name, metric_name)
                    metrics_view = view_module.View(
                        metric_name, metric_name, [], metrics_measure,
                        aggregation_module.LastValueAggregation(
                            value=metric_value))

                    self.view_manager.register_view(metrics_view)
                    mmap = self.stats_recorder.new_measurement_map()
                    tmap = tag_map_module.TagMap()

                    self.metrics[metric_name] = {
                        'measure': metrics_measure,
                        'measurement_map': mmap,
                        'tag_map': tmap
                    }

                # Reuse the cached measure / measurement map / tag map.
                measure = self.metrics[metric_name]['measure']
                mmap = self.metrics[metric_name]['measurement_map']
                tmap = self.metrics[metric_name]['tag_map']
                mmap.measure_int_put(measure, metric_value)
                mmap.record(tmap)
        except Exception as e:
            # Deliberate broad catch: metric failures must not propagate.
            print('Exception when tracking a metric:')
            print(e)
コード例 #7
0
    def test_stats_record_async(self):
        """Record one measurement and verify, with retries, that the
        Stackdriver exporter produced a matching metric descriptor."""
        # Unique-per-process suffix so cached/registered objects from other
        # runs do not collide with this one.
        sufix = str(os.getpid())

        tag_key = "SampleKeyAsyncTest%s" % sufix
        measure_name = "SampleMeasureNameAsyncTest%s" % sufix
        measure_description = "SampleDescriptionAsyncTest%s" % sufix
        view_name = "SampleViewNameAsyncTest%s" % sufix
        view_description = "SampleViewDescriptionAsyncTest%s" % sufix

        FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key)
        # "By" is the UCUM code for bytes.
        VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt(
            measure_name, measure_description, "By")
        VIDEO_SIZE_VIEW_NAME_ASYNC = view_name
        VIDEO_SIZE_DISTRIBUTION_ASYNC =\
            aggregation_module.DistributionAggregation(
                [0.0, 16.0 * MiB, 256.0 * MiB]
            )
        VIDEO_SIZE_VIEW_ASYNC = view_module.View(
            VIDEO_SIZE_VIEW_NAME_ASYNC, view_description, [FRONTEND_KEY_ASYNC],
            VIDEO_SIZE_MEASURE_ASYNC, VIDEO_SIZE_DISTRIBUTION_ASYNC)

        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=PROJECT))
        view_manager.register_exporter(exporter)

        # Register view.
        view_manager.register_view(VIDEO_SIZE_VIEW_ASYNC)

        # Sleep for [0, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Process video.
        # Record the processed video size.
        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY_ASYNC, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE_ASYNC, 25 * MiB)

        measure_map.record(tag_map)

        # Export happens on a background thread, so poll the Stackdriver
        # API until the descriptor shows up (bounded by the retry
        # decorator's wait/attempt settings).
        @retry(wait_fixed=RETRY_WAIT_PERIOD,
               stop_max_attempt_number=RETRY_MAX_ATTEMPT)
        def get_metric_descriptors(self, exporter, view_description):
            # Find the descriptor whose description matches our view.
            name = exporter.client.project_path(PROJECT)
            list_metrics_descriptors = exporter.client.list_metric_descriptors(
                name)
            element = next((element for element in list_metrics_descriptors
                            if element.description == view_description), None)
            self.assertIsNotNone(element)
            self.assertEqual(element.description, view_description)
            self.assertEqual(element.unit, "By")

        get_metric_descriptors(self, exporter, view_description)
コード例 #8
0
    def test_constructor_explicit(self):
        """Explicitly-passed constructor arguments are stored verbatim."""
        measure = measure_module.MeasureInt(
            name="testName", description="testMeasure", unit="testUnit")

        self.assertEqual("testName", measure.name)
        self.assertEqual("testMeasure", measure.description)
        self.assertEqual("testUnit", measure.unit)
コード例 #9
0
    def test_prometheus_stats(self):
        """Record request counts for two methods and assert the Prometheus
        scrape endpoint reports the expected counter series."""

        method_key = tag_key_module.TagKey("method")
        request_count_measure = measure_module.MeasureInt(
            "request_count", "number of requests", "1")
        request_count_view_name = "request_count_view"
        count_agg = aggregation_module.CountAggregation()
        request_count_view = view_module.View(
            request_count_view_name,
            "number of requests broken down by methods", [method_key],
            request_count_measure, count_agg)
        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        # Serve metrics on localhost:9303 under the "opencensus" namespace.
        exporter = prometheus.new_stats_exporter(
            prometheus.Options(namespace="opencensus", port=9303))
        view_manager.register_exporter(exporter)

        view_manager.register_view(request_count_view)

        # Sleep for [1, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # One recording tagged "some method"...
        method_value_1 = tag_value_module.TagValue("some method")
        tag_map_1 = tag_map_module.TagMap()
        tag_map_1.insert(method_key, method_value_1)
        measure_map_1 = stats_recorder.new_measurement_map()
        measure_map_1.measure_int_put(request_count_measure, 1)
        measure_map_1.record(tag_map_1)

        # ...and two recordings tagged "some other method".
        method_value_2 = tag_value_module.TagValue("some other method")
        tag_map_2 = tag_map_module.TagMap()
        tag_map_2.insert(method_key, method_value_2)
        measure_map_2 = stats_recorder.new_measurement_map()
        measure_map_2.measure_int_put(request_count_measure, 1)
        measure_map_2.record(tag_map_2)
        measure_map_2.record(tag_map_2)

        # Scrape the exporter's HTTP endpoint (py2/py3 compatible).
        if sys.version_info > (3, 0):
            import urllib.request
            contents = urllib.request.urlopen(
                "http://localhost:9303/metrics").read()
        else:
            import urllib2
            contents = urllib2.urlopen("http://localhost:9303/metrics").read()

        self.assertIn(b'# TYPE opencensus_request_count_view_total counter',
                      contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some method"} 1.0', contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some other method"} 2.0', contents)
コード例 #10
0
    def test_prometheus_stats(self):
        """Self-contained Prometheus example: record one video-size value
        and scrape the local metrics endpoint."""
        import random
        import time
        import sys

        from opencensus.stats import aggregation as aggregation_module
        from opencensus.stats.exporters import prometheus_exporter as prometheus
        from opencensus.stats import measure as measure_module
        from opencensus.stats import stats as stats_module
        from opencensus.stats import view as view_module
        from opencensus.tags import tag_key as tag_key_module
        from opencensus.tags import tag_map as tag_map_module
        from opencensus.tags import tag_value as tag_value_module

        MiB = 1 << 20
        FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
        VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
            "my.org/measures/video_size", "size of processed videos", "By")
        VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size"
        # NOTE(review): despite the *_DISTRIBUTION name this is a
        # CountAggregation seeded with 256 MiB — confirm that a
        # DistributionAggregation was not intended here.
        VIDEO_SIZE_DISTRIBUTION = aggregation_module.CountAggregation(
            256.0 * MiB)
        VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                           "processed video size over time",
                                           [FRONTEND_KEY],
                                           VIDEO_SIZE_MEASURE,
                                           VIDEO_SIZE_DISTRIBUTION)
        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        # Serve metrics on localhost:9303 under the "opencensus" namespace.
        exporter = prometheus.new_stats_exporter(prometheus.Options(namespace="opencensus", port=9303))
        view_manager.register_exporter(exporter)

        view_manager.register_view(VIDEO_SIZE_VIEW)

        # Sleep for [1, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        tag_value = tag_value_module.TagValue(str(random.randint(1, 10000)))
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
        measure_map.record(tag_map)

        # Scrape the exporter's HTTP endpoint (py2/py3 compatible).
        if sys.version_info > (3, 0):
            import urllib.request
            contents = urllib.request.urlopen("http://localhost:9303/metrics").read()
        else:
            import urllib2
            contents = urllib2.urlopen("http://localhost:9303/metrics").read()

        # NOTE(review): Prometheus metric names cannot contain '.' or '/';
        # confirm these expected byte strings match what the exporter
        # actually emits after name sanitization.
        self.assertIn(b'# TYPE opencensus_my.org/views/video_size counter', contents)
        self.assertIn(b'opencensus_my.org/views/video_size 268435456.0', contents)
コード例 #11
0
    def test_stats_record_sync(self):
        """Record one measurement with a synchronous Stackdriver exporter
        and then verify the resulting metric descriptor."""
        # Unique suffix to prevent cached objects colliding between runs.
        # NOTE(review): this uses os.getgid() (group id, constant across
        # runs) while the async variant uses os.getpid() — confirm which
        # was intended.
        sufix = str(os.getgid())

        tag_key = "SampleKeySyncTest%s" % sufix
        measure_name = "SampleMeasureNameSyncTest%s" % sufix
        measure_description = "SampleDescriptionSyncTest%s" % sufix
        view_name = "SampleViewNameSyncTest%s" % sufix
        view_description = "SampleViewDescriptionSyncTest%s" % sufix

        FRONTEND_KEY = tag_key_module.TagKey(tag_key)
        # "By" is the UCUM code for bytes.
        VIDEO_SIZE_MEASURE = measure_module.MeasureInt(measure_name,
                                                       measure_description,
                                                       "By")
        VIDEO_SIZE_VIEW_NAME = view_name
        VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
            [0.0, 16.0 * MiB, 256.0 * MiB])
        VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                           view_description, [FRONTEND_KEY],
                                           VIDEO_SIZE_MEASURE,
                                           VIDEO_SIZE_DISTRIBUTION)

        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        client = monitoring_v3.MetricServiceClient()
        exporter = stackdriver.StackdriverStatsExporter(
            options=stackdriver.Options(project_id=PROJECT), client=client)
        view_manager.register_exporter(exporter)

        # Register view.
        view_manager.register_view(VIDEO_SIZE_VIEW)

        # Sleep for [0, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Process video.
        # Record the processed video size.
        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)

        measure_map.record(tag_map)
        # Synchronous export: push the accumulated metrics right now.
        exporter.export_metrics(stats_module.stats.get_metrics())

        # Sleep for [0, 10] milliseconds to fake wait.
        time.sleep(random.randint(1, 10) / 1000.0)

        self.check_sd_md(exporter, view_description)
コード例 #12
0
 def create_measurement_view(self, measurement_name):
     """Create a response-time measure plus its last-value view.

     The view is registered with the instance's view manager and the
     measure is returned for later recording.
     """
     test_id_key = tag_key_module.TagKey("TEST_ID")
     response_measure = measure_module.MeasureInt(
         f"gw_m_{measurement_name}_response",
         "response time of the home page", "s")
     last_value = aggregation_module.LastValueAggregation()
     response_view = view_module.View(
         f"views_{measurement_name}_response",
         f"glasswall {measurement_name} response time",
         [test_id_key], response_measure, last_value)
     # Register view.
     self.view_manager.register_view(response_view)
     return response_measure
コード例 #13
0
    def test_record_with_attachment(self):
        """Recording with attachments stores them on the exemplar for the
        recorded tag-value combination."""
        boundaries = [1, 2, 3]
        distribution = {1: "test"}
        distribution_aggregation = aggregation_module.DistributionAggregation(
            boundaries=boundaries, distribution=distribution)
        name = "testName"
        description = "testMeasure"
        unit = "testUnit"

        measure = measure_module.MeasureInt(name=name,
                                            description=description,
                                            unit=unit)

        columns = ["key1", "key2"]

        view = view_module.View(name=name,
                                description=description,
                                columns=columns,
                                measure=measure,
                                aggregation=distribution_aggregation)

        start_time = datetime.utcnow()
        attachments = {"One": "one", "Two": "two"}
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)
        # Bug fix: instantiate the mock (``mock.Mock()``).  Previously the
        # Mock *class* itself was assigned, so ``context.map`` set a class
        # attribute that leaked into every other Mock user in the suite.
        context = mock.Mock()
        context.map = {'key1': 'val1', 'key2': 'val2'}
        time = datetime.utcnow().isoformat() + 'Z'
        value = 1

        view_data.record(context=context,
                         value=value,
                         timestamp=time,
                         attachments=attachments)
        tag_values = view_data.get_tag_values(tags=context.map,
                                              columns=view.columns)
        tuple_vals = tuple(tag_values)

        self.assertEqual(['val1', 'val2'], tag_values)
        self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
        self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
        self.assertIsNotNone(
            view_data.tag_value_aggregation_data_map[tuple_vals])
        self.assertEqual(
            attachments, view_data.tag_value_aggregation_data_map[tuple_vals].
            exemplars[1].attachments)
コード例 #14
0
    def test_stats_record_async(self):
        """Record one measurement and let the background transport flush it
        to Stackdriver before checking the metric descriptor."""
        # Unique-per-process suffix so cached/registered objects from other
        # runs do not collide with this one.
        sufix = str(os.getpid())

        tag_key = "SampleKeyAsyncTest%s" % sufix
        measure_name = "SampleMeasureNameAsyncTest%s" % sufix
        measure_description = "SampleDescriptionAsyncTest%s" % sufix
        view_name = "SampleViewNameAsyncTest%s" % sufix
        view_description = "SampleViewDescriptionAsyncTest%s" % sufix

        FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key)
        # "By" is the UCUM code for bytes.
        VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt(
            measure_name, measure_description, "By")
        VIDEO_SIZE_VIEW_NAME_ASYNC = view_name
        VIDEO_SIZE_DISTRIBUTION_ASYNC =\
            aggregation_module.DistributionAggregation(
                [0.0, 16.0 * MiB, 256.0 * MiB]
            )
        VIDEO_SIZE_VIEW_ASYNC = view_module.View(
            VIDEO_SIZE_VIEW_NAME_ASYNC, view_description, [FRONTEND_KEY_ASYNC],
            VIDEO_SIZE_MEASURE_ASYNC, VIDEO_SIZE_DISTRIBUTION_ASYNC)

        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=PROJECT))
        view_manager.register_exporter(exporter)

        # Register view.
        view_manager.register_view(VIDEO_SIZE_VIEW_ASYNC)

        # Sleep for [0, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Process video.
        # Record the processed video size.
        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY_ASYNC, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE_ASYNC, 25 * MiB)

        measure_map.record(tag_map)
        # Give the exporter thread enough time to export exactly once
        time.sleep(transport.DEFAULT_INTERVAL * 1.5)

        self.check_sd_md(exporter, view_description)
コード例 #15
0
ファイル: metrics.py プロジェクト: 2ooom/bot-prefecture
 def __init__(self, export_metrics=False):
     """Build the check-request counter view and, optionally, wire up the
     Azure Application Insights exporter."""
     self.nb_check_requests = measure_module.MeasureInt(
         "nb_check_requests",
         "number of dates check requests for all weeks", "nb")
     self.nb_check_requests_view = view_module.View(
         "nb_check_requests view",
         "number of dates check requests for all weeks", [],
         self.nb_check_requests, aggregation_module.CountAggregation())
     view_manager.register_view(self.nb_check_requests_view)
     self.tmap = tag_map_module.TagMap()
     self.mmap = stats_recorder.new_measurement_map()
     if not export_metrics:
         return
     # Exporting requested: attach the Azure metrics exporter.
     exporter = metrics_exporter.new_metrics_exporter(
         connection_string=azure_insights.connection_string)
     view_manager.register_exporter(exporter)
コード例 #16
0
    def __init__(self, dialog_id: str = None):
        """Assemble the booking waterfall and its defect-metric plumbing."""
        super(BookingDialog, self).__init__(dialog_id
                                            or BookingDialog.__name__)

        # Sub-dialogs used by the waterfall steps below.
        for child in (TextPrompt(TextPrompt.__name__),
                      ConfirmPrompt(ConfirmPrompt.__name__),
                      DateResolverDialog(DateResolverDialog.__name__)):
            self.add_dialog(child)

        waterfall_steps = [
            self.destination_step,
            self.origin_step,
            self.from_date_step,
            self.to_date_step,
            self.budget_step,
            self.confirm_step,
            self.final_step,
        ]
        self.add_dialog(
            WaterfallDialog(WaterfallDialog.__name__, waterfall_steps))

        self.initial_dialog_id = WaterfallDialog.__name__
        self.logger = None

        # OpenCensus plumbing for counting bot defects.
        self.stats = stats_module.stats
        self.view_manager = self.stats.view_manager
        self.stats_recorder = self.stats.stats_recorder
        self.bot_measure = measure_module.MeasureInt(
            "botdefects", "number of bot defects", "botdefects")
        self.bot_view = view_module.View(
            "defect view", "number of bot defects", [],
            self.bot_measure, aggregation_module.CountAggregation())
        self.view_manager.register_view(self.bot_view)
        self.mmap = self.stats_recorder.new_measurement_map()
        self.tmap = tag_map_module.TagMap()
        self.metrics_exporter = None
        self.message_history = set()
コード例 #17
0
from flask import Blueprint, request

from recidiviz.big_query import view_update_manager
from recidiviz.utils import monitoring
from recidiviz.utils.auth import authenticate_request
from recidiviz.utils.environment import GCP_PROJECT_STAGING
from recidiviz.utils.metadata import local_project_id_override
from recidiviz.utils.params import get_bool_param_value
from recidiviz.validation.checks.check_resolver import checker_for_validation

from recidiviz.validation.configured_validations import get_all_validations, STATES_TO_VALIDATE
from recidiviz.validation.validation_models import DataValidationJob, DataValidationJobResult
from recidiviz.validation.views import view_config

# Counts individual validation failures; exported through OpenCensus.
m_failed_validations = measure.MeasureInt("validation/num_failures",
                                          "The number of failed validations",
                                          "1")

# Sums the failure count, broken down by region, check type and view id.
failed_validations_view = view.View(
    "recidiviz/validation/num_failures", "The sum of failed validations", [
        monitoring.TagKey.REGION, monitoring.TagKey.VALIDATION_CHECK_TYPE,
        monitoring.TagKey.VALIDATION_VIEW_ID
    ], m_failed_validations, aggregation.SumAggregation())

monitoring.register_views([failed_validations_view])

# Flask blueprint under which the /validate endpoint below is registered.
validation_manager_blueprint = Blueprint('validation_manager', __name__)


@validation_manager_blueprint.route('/validate')
@authenticate_request
コード例 #18
0
# limitations under the License.

import time

from opencensus.ext.azure import metrics_exporter
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module

# Module-level OpenCensus singletons shared by this example.
stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# Counter measure plus its count-aggregated view.
CARROTS_MEASURE = measure_module.MeasureInt("carrots", "number of carrots",
                                            "carrots")
CARROTS_VIEW = view_module.View("carrots_view", "number of carrots", [],
                                CARROTS_MEASURE,
                                aggregation_module.CountAggregation())


def main():
    """Register the Azure metrics exporter and the carrots view."""
    # Enable metrics
    # Set the interval in seconds in which you want to send metrics
    # TODO: you need to specify the instrumentation key in a connection string
    # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING
    # environment variable.
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)

    view_manager.register_view(CARROTS_VIEW)
コード例 #19
0
    ExportBigQueryViewConfig,
    ExportViewCollectionConfig,
)
from recidiviz.metrics.export.optimized_metric_big_query_view_exporter import (
    OptimizedMetricBigQueryViewExporter, )
from recidiviz.metrics.export.optimized_metric_big_query_view_export_validator import (
    OptimizedMetricBigQueryViewExportValidator, )
from recidiviz.metrics.export.view_export_cloud_task_manager import (
    ViewExportCloudTaskManager, )
from recidiviz.utils import metadata, monitoring
from recidiviz.utils.auth.gae import requires_gae_auth
from recidiviz.utils.params import get_str_param_value

# Counts each time a set of exported metric views fails validation.
m_failed_metric_export_validation = measure.MeasureInt(
    "bigquery/metric_view_export_manager/metric_view_export_validation_failure",
    "Counted every time a set of exported metric views fails validation",
    "1",
)

# Sums those validation failures, tagged by region and export name.
failed_metric_export_validation_view = opencensus_view.View(
    "bigquery/metric_view_export_manager/num_metric_view_export_validation_failure",
    "The sum of times a set of exported metric views fails validation",
    [monitoring.TagKey.REGION, monitoring.TagKey.METRIC_VIEW_EXPORT_NAME],
    m_failed_metric_export_validation,
    aggregation.SumAggregation(),
)

m_failed_metric_export_job = measure.MeasureInt(
    "bigquery/metric_view_export_manager/metric_view_export_job_failure",
    "Counted every time a set of exported metric views fails for non-validation reasons",
    "1",
コード例 #20
0
ファイル: metrics_agent.py プロジェクト: sanderland/ray
 def __init__(self, name, description, unit,
              tags: List[tag_key_module.TagKey]):
     """Wrap an OpenCensus int measure plus a last-value view over it.

     :param name: metric name, reused for both the measure and the view.
     :param description: human-readable description.
     :param unit: unit string for the measure.
     :param tags: tag keys the view aggregates by.
     """
     self._measure = measure_module.MeasureInt(name, description, unit)
     # NOTE(review): reads ``self.measure`` (no underscore) — assumes a
     # ``measure`` property exists elsewhere on this class; confirm,
     # otherwise this should be ``self._measure``.
     self._view = view.View(name, description, tags, self.measure,
                            aggregation.LastValueAggregation())
コード例 #21
0
ファイル: entity_enum.py プロジェクト: jazzPouls/pulse-data
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
# =============================================================================
"""Contains logic related to EntityEnums."""

import re
from typing import Dict, Optional, Type, TypeVar

from aenum import Enum, EnumMeta
from opencensus.stats import aggregation, measure, view

from recidiviz.common.str_field_utils import normalize
from recidiviz.utils import monitoring
from recidiviz.utils.types import ClsT

# Counts enum-parsing errors; summed by region and entity type below.
m_enum_errors = measure.MeasureInt("converter/enum_error_count",
                                   "The number of enum errors", "1")
enum_errors_view = view.View(
    "recidiviz/converter/enum_error_count",
    "The sum of enum errors",
    [monitoring.TagKey.REGION, monitoring.TagKey.ENTITY_TYPE],
    m_enum_errors,
    aggregation.SumAggregation(),
)
monitoring.register_views([enum_errors_view])


class EnumParsingError(Exception):
    """Raised if an MappableEnum can't be built from the provided string."""
    def __init__(self, cls: type, string_to_parse: str):
        msg = "Could not parse {0} when building {1}".format(
            string_to_parse, cls)
コード例 #22
0
"""Contains logic to match database entities with ingested entities."""

from abc import abstractmethod
from typing import List, Generic

from opencensus.stats import measure, view, aggregation

from recidiviz.persistence.database.session import Session
from recidiviz.persistence.entity.entities import EntityPersonType
from recidiviz.persistence.entity_matching.entity_matching_types import \
    MatchedEntities
from recidiviz.utils import monitoring

# Counts EntityMatchingErrors raised during entity matching, tagged by
# region and entity type so failures can be attributed per entity kind.
m_matching_errors = measure.MeasureInt(
    'persistence/entity_matching/error_count',
    'Number of EntityMatchingErrors thrown for a specific entity type', '1')

# Sum of matching errors per region and entity type.
# Fix: description string previously read "entit matching layer" (typo).
matching_errors_by_entity_view = view.View(
    'recidiviz/persistence/entity_matching/error_count',
    'Sum of the errors in the entity matching layer, by entity',
    [monitoring.TagKey.REGION, monitoring.TagKey.ENTITY_TYPE],
    m_matching_errors,
    aggregation.SumAggregation())

monitoring.register_views([matching_errors_by_entity_view])


class BaseEntityMatcher(Generic[EntityPersonType]):
    """Base class for all entity matchers."""
コード例 #23
0
from config import connection_string
from datetime import datetime
from opencensus.ext.azure import metrics_exporter
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module

# Grab the process-wide Stats singleton and its view manager / recorder.
stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# Count how many times the user is prompted; no tag keys, simple count.
prompt_measure = measure_module.MeasureInt("prompts", "number of prompts",
                                           "prompts")
prompt_view = view_module.View("prompt view", "number of prompts", [],
                               prompt_measure,
                               aggregation_module.CountAggregation())
# The view must be registered before measurements recorded against
# prompt_measure are aggregated.
view_manager.register_view(prompt_view)
mmap = stats_recorder.new_measurement_map()
tmap = tag_map_module.TagMap()

# Azure Monitor exporter; connection string comes from the local config
# module rather than the APPLICATIONINSIGHTS_CONNECTION_STRING env var.
exporter = metrics_exporter.new_metrics_exporter(
    connection_string=connection_string)

view_manager.register_exporter(exporter)


def prompt():
    """Block until the user presses enter, then record one prompt event."""
    input("Press enter.")
    mmap.measure_int_put(prompt_measure, 1)
    # NOTE(review): the measurement map is never flushed here — the upstream
    # Azure sample follows this with ``mmap.record(tmap)``. Confirm this
    # snippet was not truncated; without record() the put is not aggregated.
コード例 #24
0
 def __init__(self, name, description, unit, tags: List[str]):
     """Create a gauge metric, converting plain tag strings to TagKeys.

     Args:
         name: Metric name, used for both the measure and the view.
         description: Human-readable description of the metric.
         unit: Unit string for the measure (e.g. "1", "ms").
         tags: Tag names; each is wrapped in a TagKey before view creation.
     """
     self._measure = measure_module.MeasureInt(name, description, unit)
     tags = [tag_key_module.TagKey(tag) for tag in tags]
     # NOTE(review): `self.measure` (no underscore) presumably refers to a
     # property defined elsewhere in this class exposing `self._measure` —
     # confirm, otherwise this raises AttributeError.
     self._view = View(name, description, tags, self.measure,
                       aggregation.LastValueAggregation())
コード例 #25
0
import time

from opencensus.ext.prometheus import stats_exporter as prometheus
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module
from pprint import pprint

# One mebibyte in bytes; used for the distribution bucket boundaries below.
MiB = 1 << 20
FRONTEND_KEY = tag_key_module.TagKey("myorg_keys_frontend")
# Measures processed-video sizes in bytes ("By" is the UCUM code for bytes).
VIDEO_SIZE_MEASURE = measure_module.MeasureInt("myorg_measures_video_size",
                                               "size of processed videos",
                                               "By")
VIDEO_SIZE_VIEW_NAME = "myorg_views_video_size"
# Histogram buckets: [0, 16MiB), [16MiB, 256MiB), [256MiB, +inf).
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [0.0, 16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                   "processed video size over time",
                                   [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                   VIDEO_SIZE_DISTRIBUTION)


def main():
    """Create a fresh Stats instance and fetch its manager and recorder."""
    # NOTE(review): this function appears truncated by the snippet scrape —
    # the upstream Prometheus example goes on to register VIDEO_SIZE_VIEW,
    # register the exporter, and record measurements. Confirm against the
    # original file before relying on this body.
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
コード例 #26
0
ファイル: metrics.py プロジェクト: tonitopark/rcs_log_howto
# 2. Metrics

#Initialization
from datetime import datetime
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module

# Grab the process-wide Stats singleton and its view manager / recorder.
stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# Count of RCS log events; summed (not just counted) by the view below.
prompt_measure = measure_module.MeasureInt("rcs_log_num", "number of rcs_logs",
                                           "logs")
prompt_view = view_module.View("rcs_log_view_3", "number of logs", [],
                               prompt_measure,
                               aggregation_module.SumAggregation())
view_manager.register_view(prompt_view)
mmap = stats_recorder.new_measurement_map()
tmap = tag_map_module.TagMap()

# Register the metrics exporter

# Kusto query to find this metric in Application Insights:
##customMetrics
## | where name =='rcs_log_view_3'
from opencensus.ext.azure import metrics_exporter

# Azure Monitor exporter with an inline instrumentation key placeholder.
exporter = metrics_exporter.new_metrics_exporter(
    connection_string='InstrumentationKey=<Your Key>')
コード例 #27
0
from recidiviz.persistence import persistence
from recidiviz.persistence import datastore_ingest_info
from recidiviz.persistence.database.schema_utils import SchemaType
from recidiviz.persistence.database.sqlalchemy_database_key import SQLAlchemyDatabaseKey
from recidiviz.persistence.datastore_ingest_info import BatchIngestInfoData
from recidiviz.persistence.ingest_info_validator import ingest_info_validator
from recidiviz.utils import monitoring, regions
from recidiviz.utils.auth.gae import requires_gae_auth

# Fraction of failed tasks above which batch persistence is treated as failed.
FAILED_TASK_THRESHOLD = 0.1

batch_blueprint = Blueprint("batch", __name__)

# Counts batch persistence calls; surfaced through ``count_view`` below.
m_batch_count = measure.MeasureInt(
    "persistence/batch_persistence/batch_count",
    "The count of batch persistence calls",
    "1",
)

# Sum of batch persistence calls, tagged by region, status, and whether
# the batch was actually persisted.
count_view = view.View(
    "recidiviz/persistence/batch_persistence/batch_count",
    "The sum of batch persistence calls that occurred",
    [
        monitoring.TagKey.REGION,
        monitoring.TagKey.STATUS,
        monitoring.TagKey.PERSISTED,
    ],
    m_batch_count,
    aggregation.SumAggregation(),
)

monitoring.register_views([count_view])


class BatchPersistError(Exception):
    """Raised when there was an error with batch persistence."""
コード例 #28
0
from recidiviz.persistence.database.session_factory import SessionFactory
from recidiviz.persistence.database.base_schema import JailsBase
from recidiviz.persistence.database.schema_utils import \
    schema_base_for_system_level
from recidiviz.persistence.entity.county import entities as county_entities
from recidiviz.persistence.ingest_info_validator import ingest_info_validator
from recidiviz.persistence.database.schema.county import dao as county_dao
from recidiviz.persistence.entity_matching import entity_matching
from recidiviz.persistence.entity_validator import entity_validator
from recidiviz.persistence.database import database
from recidiviz.persistence.ingest_info_converter import ingest_info_converter
from recidiviz.persistence.ingest_info_converter.base_converter import \
    IngestInfoConversionResult
from recidiviz.utils import environment, monitoring

# Measures for the persistence layer: people written, aborted writes, errors.
m_people = measure.MeasureInt("persistence/num_people",
                              "The number of people persisted", "1")
m_aborts = measure.MeasureInt("persistence/num_aborts",
                              "The number of aborted writes", "1")
m_errors = measure.MeasureInt("persistence/num_errors", "The number of errors",
                              "1")
# Sum of people persisted, tagged by region and persisted flag.
people_persisted_view = view.View(
    "recidiviz/persistence/num_people", "The sum of people persisted",
    [monitoring.TagKey.REGION, monitoring.TagKey.PERSISTED], m_people,
    aggregation.SumAggregation())
# Sum of aborted writes, tagged by region and the abort reason.
aborted_writes_view = view.View(
    "recidiviz/persistence/num_aborts",
    "The sum of aborted writes to persistence",
    [monitoring.TagKey.REGION, monitoring.TagKey.REASON], m_aborts,
    aggregation.SumAggregation())
errors_persisted_view = view.View(
    "recidiviz/persistence/num_errors",
コード例 #29
0
# limitations under the License.

import time
import random
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats.exporters import stackdriver_exporter as stackdriver
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module

# One mebibyte in bytes; used for the distribution bucket boundaries below.
MiB = 1 << 20
FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
# Measures processed-video sizes in bytes ("By" is the UCUM code for bytes).
VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "my.org/measure/video_size_test2", "size of processed videos", "By")
VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
# Histogram buckets: [0, 16MiB), [16MiB, 256MiB), [256MiB, +inf).
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [0.0, 16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                   "processed video size over time",
                                   [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                   VIDEO_SIZE_DISTRIBUTION)

# Fresh Stats instance (not the module singleton) for this example.
stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# NOTE(review): project_id "opencenus-node" looks like a typo for
# "opencensus-node", but it is a live GCP project identifier — verify the
# intended project before changing it.
exporter = stackdriver.new_stats_exporter(
    stackdriver.Options(project_id="opencenus-node"))
view_manager.register_exporter(exporter)
# =============================================================================
"""Controller for updating raw state data latest views in BQ."""

import logging

from opencensus.stats import measure, view as opencensus_view, aggregation
from recidiviz.utils import monitoring

from recidiviz.big_query.big_query_client import BigQueryClient
from recidiviz.ingest.direct.controllers.direct_ingest_raw_file_import_manager import DirectIngestRegionRawFileConfig, \
    DirectIngestRawFileConfig
from recidiviz.ingest.direct.controllers.direct_ingest_big_query_view_types import \
    DirectIngestRawDataTableLatestView

# Counts every failure while updating raw-data "latest" views for a state.
m_failed_latest_views_update = measure.MeasureInt(
    "ingest/direct/controllers/direct_ingest_raw_data_table_latest_view_updater/update_views_for_state_failure",
    "Counted every time updating views for state fails", "1")

# Sum of view-update failures, tagged by the raw-data file tag so failures
# can be attributed to a specific file config.
failed_latest_view_updates_view = opencensus_view.View(
    "ingest/direct/controllers/direct_ingest_raw_data_table_latest_view_updater/num_update_views_for_state_failure",
    "The sum of times a view failed to update",
    [monitoring.TagKey.CREATE_UPDATE_RAW_DATA_LATEST_VIEWS_FILE_TAG],
    m_failed_latest_views_update, aggregation.SumAggregation())

monitoring.register_views([failed_latest_view_updates_view])


class DirectIngestRawDataTableLatestViewUpdater:
    """Controller for updating raw state data latest views in BQ."""
    def __init__(self,
                 state_code: str,