Example #1
    def test_constructor_explicit(self):
        sum = 1

        sum_aggregation = aggregation_module.SumAggregation(sum=sum)

        self.assertEqual(1, sum_aggregation.sum.sum_data)
        self.assertEqual("sum", sum_aggregation.aggregation_type)
Example #2
    def test_record_with_missing_key_in_context(self):
        measure = mock.Mock()
        sum_aggregation = aggregation_module.SumAggregation()
        view = view_module.View("test_view", "description", ['key1', 'key2'],
                                measure, sum_aggregation)
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)
        context = mock.Mock()
        context.map = {
            'key1': 'val1',
            'key3': 'val3'
        }  # key2 is not in the context.
        time = datetime.utcnow().isoformat() + 'Z'
        value = 4
        view_data.record(context=context,
                         value=value,
                         timestamp=time,
                         attachments=None)
        tag_values = view_data.get_tag_values(tags=context.map,
                                              columns=view.columns)
        tuple_vals = tuple(tag_values)
        self.assertEqual(['val1', None], tag_values)
        self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
        sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
        self.assertEqual(4, sum_data.sum_data)
Example #3
    def test_record_with_multi_keys(self):
        measure = mock.Mock()
        sum_aggregation = aggregation_module.SumAggregation()
        view = view_module.View("test_view", "description", ['key1', 'key2'],
                                measure, sum_aggregation)
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)
        context = mock.Mock()
        context.map = {'key1': 'val1', 'key2': 'val2'}
        time = datetime.utcnow().isoformat() + 'Z'
        value = 1
        self.assertEqual({}, view_data.tag_value_aggregation_data_map)

        view_data.record(context=context,
                         value=value,
                         timestamp=time,
                         attachments=None)
        tag_values = view_data.get_tag_values(tags=context.map,
                                              columns=view.columns)
        tuple_vals = tuple(tag_values)
        self.assertEqual(['val1', 'val2'], tag_values)
        self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
        self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
        self.assertIsNotNone(
            view_data.tag_value_aggregation_data_map[tuple_vals])
        sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
        self.assertEqual(1, sum_data.sum_data)

        context_2 = mock.Mock()
        context_2.map = {'key1': 'val3', 'key2': 'val2'}
        time_2 = datetime.utcnow().isoformat() + 'Z'
        value_2 = 2
        view_data.record(context=context_2,
                         value=value_2,
                         timestamp=time_2,
                         attachments=None)
        tag_values_2 = view_data.get_tag_values(tags=context_2.map,
                                                columns=view.columns)
        tuple_vals_2 = tuple(tag_values_2)
        self.assertEqual(['val3', 'val2'], tag_values_2)
        self.assertTrue(
            tuple_vals_2 in view_data.tag_value_aggregation_data_map)
        sum_data_2 = view_data.tag_value_aggregation_data_map.get(tuple_vals_2)
        self.assertEqual(2, sum_data_2.sum_data)

        time_3 = datetime.utcnow().isoformat() + 'Z'
        value_3 = 3
        # Use the same context {'key1': 'val1', 'key2': 'val2'}.
        # Record to entry [(val1, val2), sum=1].
        view_data.record(context=context,
                         value=value_3,
                         timestamp=time_3,
                         attachments=None)
        self.assertEqual(4, sum_data.sum_data)
        # The other entry should remain unchanged.
        self.assertEqual(2, sum_data_2.sum_data)
Example #4
    def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
        client = mock.Mock()

        option = stackdriver.Options(project_id="project-test",
                                     resource="global")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)

        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        if len(view_manager.measure_to_view_map.exporters) > 0:
            view_manager.unregister_exporter(
                view_manager.measure_to_view_map.exporters[0])

        view_manager.register_exporter(exporter)

        agg_3 = aggregation_module.SumAggregation(sum=2.2)
        view_name3 = "view-name3"
        new_view3 = view_module.View(view_name3,
                                     "processed video size over time",
                                     [FRONTEND_KEY_FLOAT],
                                     VIDEO_SIZE_MEASURE_FLOAT, agg_3)

        view_manager.register_view(new_view3)

        tag_value_float = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)

        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
        measure_map.record(tag_map)

        v_data = measure_map.measure_to_view_map.get_view(view_name3, None)

        v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

        time_series_list = exporter.create_time_series_list(v_data)
        self.assertEqual(len(time_series_list), 1)
        [time_series] = time_series_list
        self.assertEqual(time_series.metric.type,
                         "custom.googleapis.com/opencensus/view-name3")
        self.check_labels(time_series.metric.labels,
                          {FRONTEND_KEY_FLOAT_CLEAN: "1200"},
                          include_opencensus=True)
        self.assertIsNotNone(time_series.resource)

        self.assertEqual(len(time_series.points), 1)
        expected_value = monitoring_v3.types.TypedValue()
        expected_value.double_value = 2.2 + 25 * MiB
        self.assertEqual(time_series.points[0].value, expected_value)
Example #5
    def __init__(self, test_name, test_description=None):
        # oc will automatically search for the ENV VAR 'APPLICATIONINSIGHTS_CONNECTION_STRING'
        self.exporter = metrics_exporter.new_metrics_exporter()
        self.stats = stats_module.stats
        self.view_manager = self.stats.view_manager
        self.stats_recorder = self.stats.stats_recorder
        self.azure_logger = get_azure_logger(test_name)
        self.name = test_name
        self.desc = test_description

        events_measure_name = "The number of events handled by " + self.name
        events_measure_desc = "The number of events handled by " + self.desc if self.desc else None
        memory_measure_name = "memory usage percentage for " + self.name
        memory_measure_desc = "memory usage percentage for " + self.desc if self.desc else None
        cpu_measure_name = "cpu usage percentage for " + self.name
        cpu_measure_desc = "cpu usage percentage for " + self.desc if self.desc else None
        error_measure_name = "error count for " + self.name
        error_measure_desc = "The number of errors happened while running the test for " + self.desc if self.desc else None

        self.events_measure = measure_module.MeasureInt(
            events_measure_name, events_measure_desc, "events")
        self.memory_measure = measure_module.MeasureFloat(
            memory_measure_name, memory_measure_desc)
        self.cpu_measure = measure_module.MeasureFloat(cpu_measure_name,
                                                       cpu_measure_desc)
        self.error_measure = measure_module.MeasureInt(error_measure_name,
                                                       error_measure_desc)

        self.events_measure_view = view_module.View(
            events_measure_name, events_measure_desc, [], self.events_measure,
            aggregation_module.SumAggregation())

        self.memory_measure_view = view_module.View(
            memory_measure_name, memory_measure_desc, [], self.memory_measure,
            aggregation_module.LastValueAggregation())

        self.cpu_measure_view = view_module.View(
            cpu_measure_name, cpu_measure_desc, [], self.cpu_measure,
            aggregation_module.LastValueAggregation())

        self.error_measure_view = view_module.View(
            error_measure_name, error_measure_desc, [], self.error_measure,
            aggregation_module.CountAggregation())

        self.view_manager.register_view(self.events_measure_view)
        self.view_manager.register_view(self.memory_measure_view)
        self.view_manager.register_view(self.cpu_measure_view)
        self.view_manager.register_view(self.error_measure_view)

        self.mmap = self.stats_recorder.new_measurement_map()
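Everything above only registers views; recording happens through self.mmap. A hedged usage sketch follows (the class name PerfMonitor and the instance usage are assumptions, not part of the original file):

# Hypothetical usage of the class above: count one handled event and let
# the Azure metrics exporter created in __init__ ship the aggregated sum.
from opencensus.tags import tag_map as tag_map_module

monitor = PerfMonitor("stress-test")          # hypothetical class name
monitor.mmap.measure_int_put(monitor.events_measure, 1)
monitor.mmap.record(tag_map_module.TagMap())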
Example #6
    def test_create_metric_descriptor_sum_float(self):
        client = mock.Mock()
        option = stackdriver.Options(
            project_id="project-test", metric_prefix="teste")

        view_name_sum_float = "view-sum-float"
        agg_sum = aggregation_module.SumAggregation(sum=2)
        view_sum_float = view_module.View(
            view_name_sum_float, "processed video size over time",
            [FRONTEND_KEY_FLOAT], VIDEO_SIZE_MEASURE_FLOAT, agg_sum)
        exporter = stackdriver.StackdriverStatsExporter(
            options=option, client=client)
        desc = exporter.create_metric_descriptor(view_sum_float)
        self.assertIsNotNone(desc)
Example #7
    def test_create_metric_descriptor_sum_int(self):
        client = mock.Mock()
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        option = stackdriver.Options(project_id="project-test", metric_prefix="teste")

        view_name_sum_int = "view-sum-int"
        agg_sum = aggregation_module.SumAggregation(sum=2)
        view_sum_int = view_module.View(view_name_sum_int,
                                        "processed video size over time",
                                        [FRONTEND_KEY],
                                        VIDEO_SIZE_MEASURE,
                                        agg_sum)
        exporter = stackdriver.StackdriverStatsExporter(options=option, client=client)
        desc = exporter.create_metric_descriptor(view_sum_int)
        self.assertIsNotNone(desc)
Example #8
    def test_collector_to_metric_sum(self):
        agg = aggregation_module.SumAggregation(256.0)
        view = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                "processed video size over time",
                                [FRONTEND_KEY], VIDEO_SIZE_MEASURE, agg)
        registry = mock.Mock()
        view_data = mock.Mock()
        options = prometheus.Options("test1", 8001, "localhost", registry)
        collector = prometheus.Collector(options=options, view_data=view_data)
        collector.register_view(view)
        desc = collector.registered_views[list(REGISTERED_VIEW)[0]]
        metric = collector.to_metric(desc=desc, view=view)

        self.assertEqual(desc['name'], metric.name)
        self.assertEqual(desc['documentation'], metric.documentation)
        self.assertEqual('untyped', metric.type)
        self.assertEqual(1, len(metric.samples))
Example #9
    def test_export_double_point_value(self):
        view = view_module.View('', '', [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                aggregation_module.SumAggregation())
        v_data = view_data_module.ViewData(view=view,
                                           start_time=TEST_TIME_STR,
                                           end_time=TEST_TIME_STR)
        v_data.record(context=tag_map_module.TagMap(),
                      value=2.5,
                      timestamp=None)
        view_data = [metric_utils.view_data_to_metric(v_data, TEST_TIME)]

        handler = mock.Mock(spec=ocagent.ExportRpcHandler)
        ocagent.StatsExporter(handler).export_metrics(view_data)
        self.assertEqual(
            handler.send.call_args[0]
            [0].metrics[0].timeseries[0].points[0].double_value, 2.5)
Example #10
    def test_create_timeseries_disjoint_tags(self, monitoring_resource_mock):
        view_manager, stats_recorder, exporter = \
            self.setup_create_timeseries_test()

        # Register view with two tags
        view_name = "view-name"
        view = view_module.View(view_name, "test description",
                                [FRONTEND_KEY, FRONTEND_KEY_FLOAT],
                                VIDEO_SIZE_MEASURE,
                                aggregation_module.SumAggregation())

        view_manager.register_view(view)

        # Add point with one tag in common and one different tag
        measure_map = stats_recorder.new_measurement_map()
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200"))
        tag_map.insert(FRONTEND_KEY_STR, tag_value_module.TagValue("1800"))
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
        measure_map.record(tag_map)

        v_data = measure_map.measure_to_view_map.get_view(view_name, None)

        v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

        time_series_list = exporter.create_time_series_list(v_data)

        self.assertEqual(len(time_series_list), 1)
        [time_series] = time_series_list

        # Verify first time series
        self.assertEqual(time_series.resource.type, "global")
        self.assertEqual(time_series.metric.type,
                         "custom.googleapis.com/opencensus/" + view_name)
        self.check_labels(time_series.metric.labels,
                          {FRONTEND_KEY_CLEAN: "1200"},
                          include_opencensus=True)
        self.assertIsNotNone(time_series.resource)

        self.assertEqual(len(time_series.points), 1)
        expected_value = monitoring_v3.types.TypedValue()
        # TODO: #565
        expected_value.double_value = 25.0 * MiB
        self.assertEqual(time_series.points[0].value, expected_value)
Example #11
    def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
        client = mock.Mock()

        option = stackdriver.Options(project_id="project-test",
                                     resource="global")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)

        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        if len(view_manager.measure_to_view_map.exporters) > 0:
            view_manager.unregister_exporter(
                view_manager.measure_to_view_map.exporters[0])

        view_manager.register_exporter(exporter)

        agg_2 = aggregation_module.SumAggregation(sum=2.2)
        view_name2 = "view-name2"
        new_view2 = view_module.View(view_name2,
                                     "processed video size over time",
                                     [FRONTEND_KEY_FLOAT],
                                     VIDEO_SIZE_MEASURE_FLOAT, agg_2)

        view_manager.register_view(new_view2)

        tag_value_float = tag_value_module.TagValue("1200")

        tag_map = tag_map_module.TagMap()

        tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)

        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)

        measure_map.record(tag_map)

        v_data = measure_map.measure_to_view_map.get_view(view_name2, None)

        time_series_list = exporter.create_time_series_list(v_data, "global", "")
        self.assertIsNotNone(time_series_list)
        [time_series] = time_series_list
        self.assertEqual(time_series.metric.type,
                         "custom.googleapis.com/opencensus/view-name2")
Example #12
    def test_record_with_none_context(self):
        measure = mock.Mock(spec=measure_module.MeasureInt)
        sum_aggregation = aggregation_module.SumAggregation()
        view = view_module.View("test_view", "description", ['key1', 'key2'],
                                measure, sum_aggregation)
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        view_data = view_data_module.ViewData(view=view,
                                              start_time=start_time,
                                              end_time=end_time)
        time = utils.to_iso_str()
        value = 4
        view_data.record(context=None,
                         value=value,
                         timestamp=time,
                         attachments=None)
        tag_values = view_data.get_tag_values(tags={}, columns=view.columns)
        tuple_vals = tuple(tag_values)
        self.assertEqual([None, None], tag_values)
        self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
        sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
        self.assertEqual(4, sum_data.sum_data)
Example #13
    def test_new_aggregation_data_bad(self):
        measure = mock.Mock(spec=measure_module.BaseMeasure)
        sum_aggregation = aggregation_module.SumAggregation()
        with self.assertRaises(ValueError):
            sum_aggregation.new_aggregation_data(measure)
Example #14
FAILED_TASK_THRESHOLD = 0.1

batch_blueprint = Blueprint("batch", __name__)

m_batch_count = measure.MeasureInt(
    "persistence/batch_persistence/batch_count",
    "The count of batch persistence calls",
    "1",
)

count_view = view.View(
    "recidiviz/persistence/batch_persistence/batch_count",
    "The sum of batch persistence calls that occurred",
    [monitoring.TagKey.REGION, monitoring.TagKey.STATUS, monitoring.TagKey.PERSISTED],
    m_batch_count,
    aggregation.SumAggregation(),
)
monitoring.register_views([count_view])


class BatchPersistError(Exception):
    """Raised when there was an error with batch persistence."""

    def __init__(self, region: str, scrape_type: ScrapeType):
        msg_template = "Error when running '{}' for region {}"
        msg = msg_template.format(scrape_type, region)
        super().__init__(msg)


class DatastoreError(Exception):
    """Raised when there was an error with Datastore."""
Example #15
import pprint

from flask import Blueprint, request

from opencensus.stats import aggregation, measure, view
from recidiviz.ingest.models.scrape_key import ScrapeKey
from recidiviz.ingest.scrape import sessions
from recidiviz.ingest.scrape.task_params import QueueRequest
from recidiviz.utils import monitoring, regions
from recidiviz.utils.auth import authenticate_request

m_tasks = measure.MeasureInt("ingest/scrape/task_count",
                             "The count of scrape tasks that occurred", "1")

task_view = view.View("recidiviz/ingest/scrape/task_count",
                      "The sum of scrape tasks that occurred",
                      [monitoring.TagKey.REGION, monitoring.TagKey.STATUS],
                      m_tasks, aggregation.SumAggregation())
monitoring.register_views([task_view])

class RequestProcessingError(Exception):
    """Exception containing the request that failed to process"""

    def __init__(self, region: str, task: str, queue_request: QueueRequest):
        request_string = pprint.pformat(queue_request.to_serializable())
        msg = "Error when running '{}' for '{}' with request:\n{}".format(
            task, region, request_string)
        super(RequestProcessingError, self).__init__(msg)

worker = Blueprint('worker', __name__)

# NB: Region is part of the url so that request logs can be filtered on it.
@worker.route("/work/<region>", methods=['POST'])
Example #16
from opencensus.ext.azure import metrics_exporter
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module

stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

REQUEST_MEASURE = measure_module.MeasureFloat("Requests", "number of requests",
                                              "requests")
NUM_REQUESTS_VIEW = view_module.View("Number of Requests",
                                     "number of requests", ["url"],
                                     REQUEST_MEASURE,
                                     aggregation_module.SumAggregation())


def main():
    # Enable metrics
    # Set the interval in seconds in which you want to send metrics
    # TODO: you need to specify the instrumentation key in a connection string
    # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING
    # environment variable.
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)

    view_manager.register_view(NUM_REQUESTS_VIEW)
    mmap = stats_recorder.new_measurement_map()
    tmap = tag_map_module.TagMap()
    tmap.insert("url", "http://example.com")
Example #17
from recidiviz.persistence.ingest_info_validator import ingest_info_validator
from recidiviz.utils import monitoring, regions
from recidiviz.utils.auth import authenticate_request

FAILED_TASK_THRESHOLD = 0.1

batch_blueprint = Blueprint('batch', __name__)

m_batch_count = measure.MeasureInt("persistence/batch_persistence/batch_count",
                                   "The count of batch persistence calls", "1")

count_view = view.View("recidiviz/persistence/batch_persistence/batch_count",
                       "The sum of batch persistence calls that occurred", [
                           monitoring.TagKey.REGION, monitoring.TagKey.STATUS,
                           monitoring.TagKey.PERSISTED
                       ], m_batch_count, aggregation.SumAggregation())
monitoring.register_views([count_view])


class BatchPersistError(Exception):
    """Raised when there was an error with batch persistence."""
    def __init__(self, region: str, scrape_type: ScrapeType):
        msg_template = "Error when running '{}' for region {}"
        msg = msg_template.format(scrape_type, region)
        super(BatchPersistError, self).__init__(msg)


class DatastoreError(Exception):
    """Raised when there was an error with Datastore."""
    def __init__(self, region: str, call_type: str):
        msg_template = "Error when calling '{}' for region {} in Datastore."
Example #18
from typing import Dict, Optional

from aenum import Enum, EnumMeta
from opencensus.stats import aggregation, measure, view

from recidiviz.common.str_field_utils import normalize
from recidiviz.utils import monitoring

# TODO(ageiduschek): Should we change convert -> ingest_info_converter here?
m_enum_errors = measure.MeasureInt("converter/enum_error_count",
                                   "The number of enum errors", "1")
enum_errors_view = view.View("recidiviz/converter/enum_error_count",
                             "The sum of enum errors",
                             [monitoring.TagKey.REGION,
                              monitoring.TagKey.ENTITY_TYPE],
                             m_enum_errors, aggregation.SumAggregation())
monitoring.register_views([enum_errors_view])


class EnumParsingError(Exception):
    """Raised if an MappableEnum can't be built from the provided string."""

    def __init__(self, cls: type, string_to_parse: str):
        msg = "Could not parse {0} when building {1}".format(string_to_parse,
                                                             cls)
        self.entity_type = cls
        super().__init__(msg)


class EntityEnumMeta(EnumMeta):
    """Metaclass for mappable enums."""
Example #19
from recidiviz.persistence.persistence_utils import should_persist
from recidiviz.utils import monitoring

m_people = measure.MeasureInt("persistence/num_people",
                              "The number of people persisted", "1")
m_aborts = measure.MeasureInt("persistence/num_aborts",
                              "The number of aborted writes", "1")
m_errors = measure.MeasureInt("persistence/num_errors", "The number of errors",
                              "1")
m_retries = measure.MeasureInt(
    "persistence/num_transaction_retries",
    "The number of transaction retries due to serialization failures", "1")
people_persisted_view = view.View(
    "recidiviz/persistence/num_people", "The sum of people persisted",
    [monitoring.TagKey.REGION, monitoring.TagKey.PERSISTED], m_people,
    aggregation.SumAggregation())
aborted_writes_view = view.View(
    "recidiviz/persistence/num_aborts",
    "The sum of aborted writes to persistence",
    [monitoring.TagKey.REGION, monitoring.TagKey.REASON], m_aborts,
    aggregation.SumAggregation())
errors_persisted_view = view.View(
    "recidiviz/persistence/num_errors",
    "The sum of errors in the persistence layer",
    [monitoring.TagKey.REGION, monitoring.TagKey.ERROR], m_errors,
    aggregation.SumAggregation())
retried_transactions_view = view.View(
    "recidiviz/persistence/num_transaction_retries",
    "The total number of transaction retries", [monitoring.TagKey.REGION],
    m_retries, aggregation.SumAggregation())
monitoring.register_views([
    people_persisted_view, aborted_writes_view, errors_persisted_view,
    retried_transactions_view
])
Example #20
from recidiviz.utils.params import get_bool_param_value
from recidiviz.validation.checks.check_resolver import checker_for_validation

from recidiviz.validation.configured_validations import get_all_validations, STATES_TO_VALIDATE
from recidiviz.validation.validation_models import DataValidationJob, DataValidationJobResult
from recidiviz.validation.views import view_config

m_failed_validations = measure.MeasureInt("validation/num_failures",
                                          "The number of failed validations",
                                          "1")

failed_validations_view = view.View(
    "recidiviz/validation/num_failures", "The sum of failed validations", [
        monitoring.TagKey.REGION, monitoring.TagKey.VALIDATION_CHECK_TYPE,
        monitoring.TagKey.VALIDATION_VIEW_ID
    ], m_failed_validations, aggregation.SumAggregation())

monitoring.register_views([failed_validations_view])

validation_manager_blueprint = Blueprint('validation_manager', __name__)


@validation_manager_blueprint.route('/validate')
@authenticate_request
def handle_validation_request():
    """API endpoint to service data validation requests."""
    should_update_views = get_bool_param_value('should_update_views',
                                               request.args,
                                               default=False)
    failed_validations = execute_validation(
        should_update_views=should_update_views)
Example #21
GAUGE_VIEWS = {
    "last": view_module.View(
        "last",
        "A last value",
        ("tag",),
        MEASURE,
        aggregation_module.LastValueAggregation(),
    )
}
COUNT_VIEWS = {
    "count": view_module.View(
        "count", "A count", ("tag",), MEASURE, aggregation_module.CountAggregation()
    ),
    "sum": view_module.View(
        "sum", "A sum", ("tag",), MEASURE, aggregation_module.SumAggregation()
    ),
}
DISTRIBUTION_VIEWS = {
    "distribution": view_module.View(
        "distribution",
        "A distribution",
        ("tag",),
        MEASURE,
        aggregation_module.DistributionAggregation([50.0, 200.0]),
    )
}
VIEWS = {}
VIEWS.update(GAUGE_VIEWS)
VIEWS.update(COUNT_VIEWS)
VIEWS.update(DISTRIBUTION_VIEWS)
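These dicts only construct views. A short sketch (assumed; standard opencensus-python API) of registering them so recorded measurements are actually aggregated:

# Sketch: register every view defined above with the global view manager.
from opencensus.stats import stats as stats_module

view_manager = stats_module.stats.view_manager
for v in VIEWS.values():
    view_manager.register_view(v)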
Example #22
    def test_constructor_defaults(self):
        sum_aggregation = aggregation_module.SumAggregation()

        self.assertEqual(0, sum_aggregation.sum.sum_data)
        self.assertEqual(aggregation_module.Type.SUM,
                         sum_aggregation.aggregation_type)
Example #23
from recidiviz.persistence.database.session import Session
from recidiviz.persistence.entity.entities import EntityPersonType
from recidiviz.persistence.entity_matching.entity_matching_types import \
    MatchedEntities
from recidiviz.utils import monitoring

m_matching_errors = measure.MeasureInt(
    'persistence/entity_matching/error_count',
    'Number of EntityMatchingErrors thrown for a specific entity type', '1')

matching_errors_by_entity_view = view.View(
    'recidiviz/persistence/entity_matching/error_count',
    'Sum of the errors in the entity matching layer, by entity',
    [monitoring.TagKey.REGION, monitoring.TagKey.ENTITY_TYPE],
    m_matching_errors, aggregation.SumAggregation())

monitoring.register_views([matching_errors_by_entity_view])


class BaseEntityMatcher(Generic[EntityPersonType]):
    """Base class for all entity matchers."""
    @abstractmethod
    def run_match(self, session: Session, region: str,
                  ingested_people: List[EntityPersonType]) \
            -> MatchedEntities:
        """
        Attempts to match all people from |ingested_people| with corresponding
        people in our database for the given |region|. Returns a
        MatchedEntities object that contains the results of matching.
        """
Example #24
from recidiviz.big_query.big_query_client import BigQueryClient
from recidiviz.ingest.direct.controllers.direct_ingest_raw_file_import_manager import DirectIngestRegionRawFileConfig, \
    DirectIngestRawFileConfig
from recidiviz.ingest.direct.controllers.direct_ingest_big_query_view_types import \
    DirectIngestRawDataTableLatestView

m_failed_latest_views_update = measure.MeasureInt(
    "ingest/direct/controllers/direct_ingest_raw_data_table_latest_view_updater/update_views_for_state_failure",
    "Counted every time updating views for state fails", "1")

failed_latest_view_updates_view = opencensus_view.View(
    "ingest/direct/controllers/direct_ingest_raw_data_table_latest_view_updater/num_update_views_for_state_failure",
    "The sum of times a view failed to update",
    [monitoring.TagKey.CREATE_UPDATE_RAW_DATA_LATEST_VIEWS_FILE_TAG],
    m_failed_latest_views_update, aggregation.SumAggregation())

monitoring.register_views([failed_latest_view_updates_view])


class DirectIngestRawDataTableLatestViewUpdater:
    """Controller for updating raw state data latest views in BQ."""
    def __init__(self,
                 state_code: str,
                 project_id: str,
                 bq_client: BigQueryClient,
                 dry_run: bool = False):
        self.state_code = state_code
        self.project_id = project_id
        self.bq_client = bq_client
        self.dry_run = dry_run
Example #25
from recidiviz.validation.checks.check_resolver import checker_for_validation

from recidiviz.validation.configured_validations import get_all_validations, \
    get_validation_region_configs, get_validation_global_config
from recidiviz.validation.validation_models import DataValidationJob, DataValidationJobResult

m_failed_to_run_validations = measure.MeasureInt(
    "validation/num_fail_to_run",
    "The number of validations that failed to run entirely", "1")

failed_to_run_validations_view = view.View(
    "recidiviz/validation/num_fail_to_run",
    "The sum of validations that failed to run", [
        monitoring.TagKey.REGION, monitoring.TagKey.VALIDATION_CHECK_TYPE,
        monitoring.TagKey.VALIDATION_VIEW_ID
    ], m_failed_to_run_validations, aggregation.SumAggregation())

m_failed_validations = measure.MeasureInt("validation/num_failures",
                                          "The number of failed validations",
                                          "1")

failed_validations_view = view.View(
    "recidiviz/validation/num_failures", "The sum of failed validations", [
        monitoring.TagKey.REGION, monitoring.TagKey.VALIDATION_CHECK_TYPE,
        monitoring.TagKey.VALIDATION_VIEW_ID
    ], m_failed_validations, aggregation.SumAggregation())

monitoring.register_views(
    [failed_validations_view, failed_to_run_validations_view])

validation_manager_blueprint = Blueprint('validation_manager', __name__)
Example #26
    def test_new_aggregation_data_float(self):
        measure = mock.Mock(spec=measure_module.MeasureFloat)
        sum_aggregation = aggregation_module.SumAggregation()
        agg_data = sum_aggregation.new_aggregation_data(measure)
        self.assertEqual(0, agg_data.sum_data)
        self.assertEqual(value.ValueDouble, agg_data.value_type)
Example #27
    def test_new_aggregation_data_explicit(self):
        measure = mock.Mock(spec=measure_module.MeasureInt)
        sum_aggregation = aggregation_module.SumAggregation(sum=1)
        agg_data = sum_aggregation.new_aggregation_data(measure)
        self.assertEqual(1, agg_data.sum_data)
        self.assertEqual(value.ValueLong, agg_data.value_type)
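Examples 26 and 27 together pin down the value-type mapping. A compact sketch under the same imports as those tests:

# Sketch: SumAggregation derives its aggregation-data value type from
# the measure's type (MeasureInt -> ValueLong, MeasureFloat -> ValueDouble).
for spec, expected in [(measure_module.MeasureInt, value.ValueLong),
                       (measure_module.MeasureFloat, value.ValueDouble)]:
    m = mock.Mock(spec=spec)
    agg_data = aggregation_module.SumAggregation().new_aggregation_data(m)
    assert agg_data.value_type == expected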