Example #1
def setUp(self) -> None:
    self.database_key = SQLAlchemyDatabaseKey.for_schema(
        SchemaType.CASE_TRIAGE)
    local_postgres_helpers.use_on_disk_postgresql_database(
        self.database_key)
Example #2
def setUp(self) -> None:
    self.database_key = SQLAlchemyDatabaseKey.for_schema(SchemaType.JAILS)
    fakes.use_in_memory_sqlite_database(self.database_key)
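Examples #1 and #2 show the two fixture styles this codebase uses: an on-disk Postgres database where real Postgres behavior matters, and an in-memory SQLite database where speed matters. A minimal sketch that parameterizes the choice; the `use_sqlite` flag and the combined helper are illustrative, not part of the original snippets:

def setup_test_database(self, schema: SchemaType, use_sqlite: bool) -> None:
    # Hypothetical switch between the two patterns above; both helpers
    # consume the same database key.
    self.database_key = SQLAlchemyDatabaseKey.for_schema(schema)
    if use_sqlite:
        fakes.use_in_memory_sqlite_database(self.database_key)
    else:
        local_postgres_helpers.use_on_disk_postgresql_database(self.database_key)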
Example #3
def tearDown(self) -> None:
    local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
    local_postgres_helpers.teardown_on_disk_postgresql_database(
        SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE))
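This tearDown assumes that setUp stashed the overridden environment variables. A sketch of the full pairing, assembled from the helpers shown in Examples #3 and #4 (the combination itself is an assumption):

def setUp(self) -> None:
    self.database_key = SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE)
    # Example #4 shows this helper returning the env vars it replaced.
    self.overridden_env_vars = (
        local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars()
    )
    local_postgres_helpers.use_on_disk_postgresql_database(self.database_key)

def tearDown(self) -> None:
    local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
    local_postgres_helpers.teardown_on_disk_postgresql_database(self.database_key)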
Example #4
    def setUp(self) -> None:
        self.test_app = Flask(__name__)
        self.helpers = CaseTriageTestHelpers.from_test(self, self.test_app)
        self.test_client = self.helpers.test_client
        self.mock_segment_client = self.helpers.mock_segment_client

        self.database_key = SQLAlchemyDatabaseKey.for_schema(
            SchemaType.CASE_TRIAGE)
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
        db_url = local_postgres_helpers.postgres_db_url_from_env_vars()
        engine = setup_scoped_sessions(self.test_app, db_url)
        # Auto-generate all tables that exist in our schema in this database
        self.database_key.declarative_meta.metadata.create_all(engine)
        self.session = self.test_app.scoped_session

        # Add seed data
        self.officer = generate_fake_officer("officer_id_1",
                                             "*****@*****.**")
        self.officer_without_clients = generate_fake_officer(
            "officer_id_2", "*****@*****.**")
        self.client_1 = generate_fake_client(
            client_id="client_1",
            supervising_officer_id=self.officer.external_id,
        )
        self.client_2 = generate_fake_client(
            client_id="client_2",
            supervising_officer_id=self.officer.external_id,
            last_assessment_date=date(2021, 2, 2),
        )
        self.client_3 = generate_fake_client(
            client_id="client_3",
            supervising_officer_id=self.officer.external_id,
        )
        self.client_info_3 = generate_fake_client_info(
            client=self.client_3,
            preferred_name="Alex",
        )
        self.case_update_1 = generate_fake_case_update(
            self.client_1,
            self.officer.external_id,
            action_type=CaseUpdateActionType.COMPLETED_ASSESSMENT,
            last_version=serialize_client_case_version(
                CaseUpdateActionType.COMPLETED_ASSESSMENT,
                self.client_1).to_json(),
        )
        self.case_update_2 = generate_fake_case_update(
            self.client_2,
            self.officer_without_clients.external_id,
            action_type=CaseUpdateActionType.COMPLETED_ASSESSMENT,
            last_version=serialize_client_case_version(
                CaseUpdateActionType.COMPLETED_ASSESSMENT,
                self.client_2).to_json(),
        )
        self.case_update_3 = generate_fake_case_update(
            self.client_1,
            self.officer.external_id,
            action_type=CaseUpdateActionType.NOT_ON_CASELOAD,
            last_version=serialize_client_case_version(
                CaseUpdateActionType.NOT_ON_CASELOAD, self.client_1).to_json(),
        )
        self.client_2.most_recent_assessment_date = date(2022, 2, 2)

        self.opportunity_1 = generate_fake_etl_opportunity(
            officer_id=self.officer.external_id,
            person_external_id=self.client_1.person_external_id,
        )
        tomorrow = datetime.now() + timedelta(days=1)
        self.deferral_1 = generate_fake_reminder(self.opportunity_1, tomorrow)
        self.opportunity_2 = generate_fake_etl_opportunity(
            officer_id=self.officer.external_id,
            person_external_id=self.client_2.person_external_id,
        )
        # all generated fake clients have no employer
        self.num_unemployed_opportunities = 3

        self.session.add_all([
            self.officer,
            self.officer_without_clients,
            self.client_1,
            self.client_2,
            self.client_3,
            self.client_info_3,
            self.case_update_1,
            self.case_update_2,
            self.case_update_3,
            self.opportunity_1,
            self.deferral_1,
            self.opportunity_2,
        ])
        self.session.commit()
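The snippet ends without the matching tearDown; a plausible mirror image, assuming setUp also kept a handle to the engine (`self.engine = engine`) and reusing the restore helper from Example #3:

    def tearDown(self) -> None:
        # Mirror of the create_all() call in setUp; `self.engine` is an
        # assumed handle saved in setUp.
        self.database_key.declarative_meta.metadata.drop_all(self.engine)
        local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)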
Example #5
    def test_scrape_data_and_more_no_persist_second_time_persist(
        self,
        mock_get_more: Mock,
        mock_fetch: Mock,
        mock_populate: Mock,
        mock_write: Mock,
    ) -> None:
        populate_task = Task.evolve(TEST_TASK, task_type=constants.TaskType.SCRAPE_DATA)
        mock_get_more.return_value = [populate_task]
        mock_fetch.return_value = (TEST_HTML, {})
        mock_populate.return_value = ScrapedData(
            ingest_info=self.ii,
            persist=False,
        )
        start_time = datetime.datetime.now()
        t = Task.evolve(TEST_TASK, task_type=constants.TaskType.SCRAPE_DATA_AND_MORE)
        req = QueueRequest(
            scrape_type=constants.ScrapeType.BACKGROUND,
            next_task=t,
            scraper_start_time=start_time,
        )

        scraper = FakeScraper("test")
        scraper.BATCH_WRITES = False
        scraper._generic_scrape(req)

        # Should send the ii since we chose not to persist.
        expected_tasks = [
            QueueRequest(
                scrape_type=constants.ScrapeType.BACKGROUND,
                next_task=populate_task,
                scraper_start_time=start_time,
                ingest_info=self.ii,
            )
        ]

        self.assertEqual(mock_get_more.call_count, 1)
        self.assertEqual(mock_populate.call_count, 1)
        self.assertEqual(mock_write.call_count, 0)
        mock_get_more.assert_called_once_with(TEST_HTML, t)
        self.assertCountEqual(expected_tasks, scraper.tasks)

        mock_populate.return_value = ScrapedData(
            ingest_info=self.ii,
            persist=True,
        )
        scraper._generic_scrape(scraper.tasks[0])
        self.assertEqual(mock_get_more.call_count, 1)
        self.assertEqual(mock_populate.call_count, 2)
        self.assertEqual(mock_write.call_count, 1)

        expected_metadata = IngestMetadata(
            region=scraper.region.region_code,
            jurisdiction_id=scraper.region.jurisdiction_id,
            ingest_time=start_time,
            enum_overrides=scraper.get_enum_overrides(),
            system_level=SystemLevel.COUNTY,
            database_key=SQLAlchemyDatabaseKey.for_schema(SchemaType.JAILS),
        )
        expected_proto = convert_ingest_info_to_proto(self.ii)
        mock_write.assert_called_once_with(expected_proto, expected_metadata)
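The two-phase behavior this test asserts lives in Example #11's `_generic_scrape`: when `populate_data` returns `persist=False`, the accumulated ingest_info is threaded onto the next queued request instead of being written. Distilled from that code:

# Only send along ingest info if it will not be persisted now.
ingest_info_to_send = None
if scraped_data is not None and not scraped_data.persist:
    ingest_info_to_send = scraped_data.ingest_info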
Example #6
    def setUp(self) -> None:
        self.get_local_patcher = mock.patch(
            "recidiviz.case_triage.authorization.get_local_file",
            new=_test_get_local_file,
        )
        self.get_local_patcher.start()

        self.auth_store = AuthorizationStore()
        self.auth_store.refresh()

        self.database_key = SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE)
        local_postgres_helpers.use_on_disk_postgresql_database(self.database_key)

        self.case_triage_user = generate_fake_user_restrictions(
            "US_XX",
            "*****@*****.**",
            can_access_leadership_dashboard=False,
            can_access_case_triage=True,
        )
        self.dashboard_user = generate_fake_user_restrictions(
            "US_XX",
            "*****@*****.**",
            can_access_leadership_dashboard=True,
            can_access_case_triage=False,
        )
        self.both_user = generate_fake_user_restrictions(
            "US_XX",
            "*****@*****.**",
            can_access_leadership_dashboard=True,
            can_access_case_triage=True,
        )

        self.overridden_user = generate_fake_user_restrictions(
            "US_XX",
            "*****@*****.**",
            can_access_leadership_dashboard=True,
            can_access_case_triage=False,
        )

        self.both_user_different_state = generate_fake_user_restrictions(
            "US_YY",
            "*****@*****.**",
            can_access_leadership_dashboard=True,
            can_access_case_triage=True,
        )

        self.officer = generate_fake_officer(
            "test", "*****@*****.**", state_code="US_XX"
        )

        with SessionFactory.using_database(self.database_key) as session:
            session.expire_on_commit = False
            session.add_all(
                [
                    self.case_triage_user,
                    self.dashboard_user,
                    self.both_user,
                    self.overridden_user,
                    self.both_user_different_state,
                    self.officer,
                ]
            )
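`SessionFactory.using_database` appears to commit on exit here (Example #7 opts out by passing `autocommit=False`). Reading the seeded rows back would use the same context manager; the query below is illustrative, with `DashboardUserRestrictions` being the model the restriction fixtures map to in Examples #12-13:

with SessionFactory.using_database(database_key, autocommit=False) as session:
    # Illustrative read-back of the rows added above.
    restrictions = session.query(DashboardUserRestrictions).all()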
Example #7
def _get_mismatch_data_for_officer(
    officer_email: str,
) -> List[Dict[str, str]]:
    """Fetches the list of supervision mismatches on an officer's caseload for display
    in our email templates."""
    with SessionFactory.using_database(
        SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE), autocommit=False
    ) as session:
        try:
            officer = CaseTriageQuerier.officer_for_email(session, officer_email)
        except OfficerDoesNotExistError:
            return []

        try:
            policy_requirements = policy_requirements_for_state(
                StateCode(officer.state_code)
            )
        except Exception:
            # If for some reason we can't fetch the policy requirements, we should not show mismatches.
            return []

        user_context = UserContext(
            email=officer_email,
            authorization_store=AuthorizationStore(),  # empty store won't actually be leveraged
            current_user=officer,
        )
        opportunities = [
            opp.opportunity
            for opp in CaseTriageQuerier.opportunities_for_officer(
                session, user_context
            )
            if not opp.is_deferred()
            and opp.opportunity.opportunity_type
            == OpportunityType.OVERDUE_DOWNGRADE.value
        ]
        mismatches: List[Dict[str, str]] = []
        for opp in opportunities:
            client = CaseTriageQuerier.etl_client_for_officer(
                session, user_context, opp.person_external_id
            )

            client_name = json.loads(client.full_name)
            # TODO(#7957): We shouldn't be converting to title-case because there
            # are many names whose preferred casing is not that. Once we figure out
            # how to access the original name casing, we should use that wherever possible.
            given_names = client_name.get("given_names", "").title()
            surname = client_name.get("surname", "").title()
            full_name = " ".join([given_names, surname]).strip()
            mismatches.append(
                {
                    "name": full_name,
                    "person_external_id": client.person_external_id,
                    "last_score": opp.opportunity_metadata["assessmentScore"],
                    "last_assessment_date": opp.opportunity_metadata[
                        "latestAssessmentDate"
                    ],
                    "current_supervision_level": policy_requirements.get_supervision_level_name(
                        StateSupervisionLevel(client.supervision_level)
                    ),
                    "recommended_level": policy_requirements.get_supervision_level_name(
                        StateSupervisionLevel(
                            opp.opportunity_metadata["recommendedSupervisionLevel"]
                        )
                    ),
                }
            )

        mismatches.sort(key=lambda x: x["last_assessment_date"], reverse=True)
        if len(mismatches) > MAX_SUPERVISION_MISMATCHES_TO_SHOW:
            cutoff_date = date.today() - timedelta(
                days=IDEAL_SUPERVISION_MISMATCH_AGE_IN_DAYS
            )

            cutoff_index = len(mismatches) - MAX_SUPERVISION_MISMATCHES_TO_SHOW
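            # The list is sorted newest-first, so the scan below finds the
            # newest mismatch that has already aged past `cutoff_date` and
            # starts the window there; if none has, it falls back to the
            # oldest MAX_SUPERVISION_MISMATCHES_TO_SHOW entries.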
            for i in range(cutoff_index):
                if (
                    dateutil.parser.parse(mismatches[i]["last_assessment_date"]).date()
                    <= cutoff_date
                ):
                    cutoff_index = i
                    break

            return mismatches[
                cutoff_index : cutoff_index + MAX_SUPERVISION_MISMATCHES_TO_SHOW
            ]

        return mismatches
Example #8
def __init__(self, region_code: str, ingest_database_name: str):
    self.region_code = region_code.upper()
    self.database_key = SQLAlchemyDatabaseKey.for_schema(
        SchemaType.OPERATIONS)
    self.ingest_database_name = ingest_database_name
Example #9
app.secret_key = get_local_secret("case_triage_secret_key")
CSRFProtect(app).exempt(e2e_blueprint)
register_error_handlers(app)

limiter = Limiter(
    app,
    key_func=get_remote_address,
    default_limits=["15 per second"],
    storage_uri=get_rate_limit_storage_uri(),
)

if in_development():
    db_url = local_postgres_helpers.postgres_db_url_from_env_vars()
else:
    db_url = SQLAlchemyEngineManager.get_server_postgres_instance_url(
        database_key=SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE))
    app.config["SESSION_COOKIE_HTTPONLY"] = True
    app.config["SESSION_COOKIE_SECURE"] = True
    app.config["SESSION_COOKIE_SAMESITE"] = "Strict"

app.config["MAX_CONTENT_LENGTH"] = 16 * 1024 * 1024  # 16 MiB max body size
setup_scoped_sessions(app, db_url)


# Auth setup
def on_successful_authorization(payload: Dict[str, str], token: str) -> None:
    """
    Memoize the user's info (email_address, picture, etc) into our session
    """

    # Populate the session with user information; This could have changed since the last request
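The snippet is cut off here. One way such a handler might memoize the payload, purely as a hedged sketch (the Flask `session` import and the `user_info` key are assumptions, not shown above):

    # Hypothetical continuation: cache the verified Auth0 payload on the
    # Flask session so later requests can read it without re-verifying.
    session["user_info"] = payload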
Example #10
    def setUp(self) -> None:
        self.project_id_patcher = patch("recidiviz.utils.metadata.project_id")
        self.email_generation_patcher = patch(
            "recidiviz.reporting.email_generation.generate"
        )
        self.gcs_file_system_patcher = patch(
            "recidiviz.cloud_storage.gcsfs_factory.GcsfsFactory.build"
        )
        self.project_id_patcher.start().return_value = "recidiviz-test"
        self.mock_email_generation = self.email_generation_patcher.start()
        self.gcs_file_system = FakeGCSFileSystem()
        self.mock_gcs_file_system = self.gcs_file_system_patcher.start()
        self.mock_gcs_file_system.return_value = self.gcs_file_system

        self.get_secret_patcher = patch("recidiviz.utils.secrets.get_secret")
        self.get_secret_patcher.start()

        self.state_code = StateCode.US_ID

        self.database_key = SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE)
        local_postgres_helpers.use_on_disk_postgresql_database(self.database_key)

        self.officer = generate_fake_officer("officer_id_1", "*****@*****.**")
        self.client_downgradable_high = generate_fake_client(
            client_id="client_1",
            supervising_officer_id=self.officer.external_id,
            supervision_level=StateSupervisionLevel.HIGH,
            last_assessment_date=date(2021, 1, 2),
            assessment_score=1,
        )
        self.client_downgradable_medium_1 = generate_fake_client(
            client_id="client_2",
            supervising_officer_id=self.officer.external_id,
            supervision_level=StateSupervisionLevel.MEDIUM,
            last_assessment_date=date(2021, 1, 2),
            assessment_score=1,
        )
        self.client_downgradable_medium_2 = generate_fake_client(
            client_id="client_3",
            supervising_officer_id=self.officer.external_id,
            supervision_level=StateSupervisionLevel.MEDIUM,
            last_assessment_date=date(2021, 1, 2),
            assessment_score=1,
        )
        self.client_no_downgrade = generate_fake_client(
            client_id="client_4",
            supervising_officer_id=self.officer.external_id,
            supervision_level=StateSupervisionLevel.HIGH,
            last_assessment_date=date(2021, 1, 2),
            assessment_score=100,
        )
        self.opportunities = [
            generate_fake_etl_opportunity(
                officer_id=self.officer.external_id,
                person_external_id=client.person_external_id,
                opportunity_type=OpportunityType.OVERDUE_DOWNGRADE,
                opportunity_metadata={
                    "assessmentScore": client.assessment_score,
                    "latestAssessmentDate": str(client.most_recent_assessment_date),
                    "recommendedSupervisionLevel": "MINIMUM",
                },
            )
            for client in [
                self.client_downgradable_high,
                self.client_downgradable_medium_1,
                self.client_downgradable_medium_2,
            ]
        ]

        with SessionFactory.using_database(self.database_key) as session:
            session.expire_on_commit = False
            session.add_all(
                [
                    self.officer,
                    self.client_downgradable_high,
                    self.client_downgradable_medium_1,
                    self.client_downgradable_medium_2,
                    self.client_no_downgrade,
                    *self.opportunities,
                ]
            )

        self.top_opps_email_recipient_patcher = patch(
            "recidiviz.reporting.data_retrieval._top_opps_email_recipient_addresses"
        )
        self.top_opps_email_recipient_patcher.start().return_value = [
            self.officer.email_address
        ]
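The matching tearDown is not shown; a sketch of the cleanup, assuming each patcher started in setUp is stopped and the on-disk database is dropped with the helper from Example #3:

    def tearDown(self) -> None:
        # Stop every mock.patch started in setUp, then drop the test database.
        self.project_id_patcher.stop()
        self.email_generation_patcher.stop()
        self.gcs_file_system_patcher.stop()
        self.get_secret_patcher.stop()
        self.top_opps_email_recipient_patcher.stop()
        local_postgres_helpers.teardown_on_disk_postgresql_database(self.database_key)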
Example #11
    def _generic_scrape(self, request: QueueRequest) -> None:
        """
        General handler for all scrape tasks. This function is a generic entry
        point into all types of scrapes. It decides what to call based on the
        incoming request.

        Args:
            request: the QueueRequest passed along from the last scrape
                session, containing the next task to run.
        """
        try:
            task = request.next_task

            # Here we handle a special case where we weren't really sure
            # we were going to get data when we submitted a task, but then
            # we ended up with data, so no more requests are required,
            # just the content we already have.
            # TODO(#680): remove this
            if task.content is not None:
                content = self._parse_html_content(task.content)
                cookies = None
            else:
                post_data = task.post_data

                # Let the child transform the post_data if it wants before
                # sending the requests.  This hook is in here in case the
                # child did something like compress the post_data before
                # it put it on the queue.
                self.transform_post_data(post_data)

                # We always fetch some content before doing anything.
                # Note that we use get here for the post_data to return a
                # default value of None if this scraper doesn't set it.
                try:
                    content, cookies = self._fetch_content(
                        task.endpoint,
                        task.response_type,
                        headers=task.headers,
                        cookies=task.cookies,
                        params=task.params,
                        post_data=post_data,
                        json_data=task.json,
                    )
                except Exception as e:
                    raise ScraperFetchError(str(e)) from e

            scraped_data = None
            if self.should_scrape_data(task.task_type):
                # If we want to scrape data, we should either create an
                # ingest_info object or get the one that already exists.
                logging.info(
                    "Scraping data for [%s] and endpoint: [%s]",
                    self.region.region_code,
                    task.endpoint,
                )
                try:
                    scraped_data = self.populate_data(
                        content, task, request.ingest_info or IngestInfo())
                except Exception as e:
                    raise ScraperPopulateDataError(str(e)) from e

            if self.should_get_more_tasks(task.task_type):
                logging.info(
                    "Getting more tasks for [%s] and endpoint: [%s]",
                    self.region.region_code,
                    task.endpoint,
                )

                # Only send along ingest info if it will not be persisted now.
                ingest_info_to_send = None
                if scraped_data is not None and not scraped_data.persist:
                    ingest_info_to_send = scraped_data.ingest_info

                try:
                    # pylint: disable=assignment-from-no-return
                    next_tasks = self.get_more_tasks(content, task)
                except Exception as e:
                    raise ScraperGetMoreTasksError(str(e)) from e
                for next_task in next_tasks:
                    # Include cookies received from response, if any
                    if cookies:
                        cookies.update(next_task.cookies)
                        next_task = Task.evolve(next_task, cookies=cookies)
                    self.add_task(
                        "_generic_scrape",
                        QueueRequest(
                            scrape_type=request.scrape_type,
                            scraper_start_time=request.scraper_start_time,
                            next_task=next_task,
                            ingest_info=ingest_info_to_send,
                        ),
                    )

            if scraped_data is not None and scraped_data.persist:
                if scraped_data.ingest_info:
                    logging.info(
                        "Logging at most 4 people (were %d):",
                        len(scraped_data.ingest_info.people),
                    )
                    loop_count = min(
                        len(scraped_data.ingest_info.people),
                        constants.MAX_PEOPLE_TO_LOG,
                    )
                    for i in range(loop_count):
                        logging.info("[%s]",
                                     str(scraped_data.ingest_info.people[i]))
                    logging.info(
                        "Last seen time of person being set as: [%s]",
                        request.scraper_start_time,
                    )
                    metadata = IngestMetadata(
                        region=self.region.region_code,
                        jurisdiction_id=self.region.jurisdiction_id,
                        ingest_time=request.scraper_start_time,
                        enum_overrides=self.get_enum_overrides(),
                        system_level=SystemLevel.COUNTY,
                        database_key=SQLAlchemyDatabaseKey.for_schema(
                            SchemaType.JAILS),
                    )
                    if self.BATCH_WRITES:
                        logging.info(
                            "Queuing ingest_info ([%d] people) to "
                            "batch_persistence for [%s]",
                            len(scraped_data.ingest_info.people),
                            self.region.region_code,
                        )
                        scrape_key = ScrapeKey(self.region.region_code,
                                               request.scrape_type)
                        batch_persistence.write(
                            ingest_info=scraped_data.ingest_info,
                            scrape_key=scrape_key,
                            task=task,
                        )
                    else:
                        logging.info(
                            "Writing ingest_info ([%d] people) to the database"
                            " for [%s]",
                            len(scraped_data.ingest_info.people),
                            self.region.region_code,
                        )
                        persistence.write(
                            serialization.convert_ingest_info_to_proto(
                                scraped_data.ingest_info),
                            metadata,
                        )
                for sc in scraped_data.single_counts:
                    if not sc.date:
                        scrape_key = ScrapeKey(self.region.region_code,
                                               constants.ScrapeType.BACKGROUND)
                        session = sessions.get_current_session(scrape_key)
                        if session:
                            sc = attr.evolve(sc, date=session.start.date())
                    single_count.store_single_count(
                        sc, self.region.jurisdiction_id)
        except Exception as e:
            if self.BATCH_WRITES:
                scrape_key = ScrapeKey(self.region.region_code,
                                       request.scrape_type)
                batch_persistence.write_error(
                    error=str(e),
                    trace_id=get_trace_id_from_flask(),
                    task=task,
                    scrape_key=scrape_key,
                )
            raise e
Example #12
def update_auth0_user_metadata() -> Tuple[str, HTTPStatus]:
    """This endpoint is triggered from a GCS bucket when a new user restrictions file is created. It downloads the
     user restrictions file and updates each user's app_metadata with the restrictions from the file.

    This function first queries the Auth0 Management API to request all users and their user_ids by the
    list of email addresses that exist in the downloaded file. Then it iterates over those users and updates their
    app_metadata.

    Query parameters:
        region_code: (required) The region code that has the updated user restrictions file, e.g. US_MO

    Returns:
         Text indicating the results of the run and an HTTP status

    Raises:
        Nothing. Catch everything so that we can always return a response to the request
    """
    region_code = get_only_str_param_value("region_code",
                                           request.args,
                                           preserve_case=True)

    if not region_code:
        return (
            "Missing required region_code param",
            HTTPStatus.BAD_REQUEST,
        )

    try:
        _validate_region_code(region_code)
    except ValueError as error:
        logging.error(error)
        return str(error), HTTPStatus.BAD_REQUEST

    database_key = SQLAlchemyDatabaseKey.for_schema(
        schema_type=SchemaType.CASE_TRIAGE)
    with SessionFactory.using_database(database_key=database_key,
                                       autocommit=False) as session:
        try:
            all_user_restrictions = (session.query(
                DashboardUserRestrictions.restricted_user_email,
                DashboardUserRestrictions.allowed_supervision_location_ids,
                DashboardUserRestrictions.allowed_supervision_location_level,
                DashboardUserRestrictions.can_access_leadership_dashboard,
                DashboardUserRestrictions.can_access_case_triage,
            ).filter(DashboardUserRestrictions.state_code == region_code.upper(
            )).order_by(DashboardUserRestrictions.restricted_user_email).all())
            user_restrictions_by_email: Dict[str, Auth0AppMetadata] = {}
            for user_restrictions in all_user_restrictions:
                email = user_restrictions["restricted_user_email"].lower()
                user_restrictions_by_email[email] = _format_db_results(
                    user_restrictions)

            auth0 = Auth0Client()
            email_addresses = list(user_restrictions_by_email.keys())
            users = auth0.get_all_users_by_email_addresses(email_addresses)
            num_updated_users = 0

            for user in users:
                email = user.get("email", "")
                current_app_metadata = user.get("app_metadata", {})
                new_restrictions: Optional[
                    Auth0AppMetadata] = user_restrictions_by_email.get(email)
                current_restrictions = _normalize_current_restrictions(
                    current_app_metadata)

                if (new_restrictions is not None
                        and current_restrictions != new_restrictions):
                    num_updated_users += 1
                    auth0.update_user_app_metadata(
                        user_id=user.get("user_id", ""),
                        app_metadata=new_restrictions)

            return (
                f"Finished updating {num_updated_users} auth0 users with restrictions for region {region_code}",
                HTTPStatus.OK,
            )

        except Exception as error:
            logging.error(error)
            return (
                f"Error using Auth0 management API to update users: {error}",
                HTTPStatus.INTERNAL_SERVER_ERROR,
            )
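Because the loop above only writes back users whose computed restrictions differ from their current app_metadata, re-running the endpoint after a partial failure touches only the remaining users. A hypothetical invocation via a Flask test client; the route path is an assumption, since it is not shown in the snippet:

response = test_client.get("/auth/update_auth0_user_metadata?region_code=US_MO")
assert response.status_code == HTTPStatus.OK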
Example #13
def dashboard_user_restrictions_by_email(
) -> Tuple[Union[Auth0AppMetadata, str], HTTPStatus]:
    """This endpoint is accessed by a service account used by an Auth0 hook that is called at the pre-registration when
    a user first signs up for an account. Given a user email address in the request, it responds with
    the app_metadata that the hook will save on the user so that the UP dashboards can apply the appropriate
    restrictions.

    Query parameters:
        email_address: (required) The email address that requires a user restrictions lookup
        region_code: (required) The region code to use to lookup the user restrictions

    Returns:
         JSON response of the app_metadata associated with the given email address and an HTTP status

    Raises:
        Nothing. Catch everything so that we can always return a response to the request
    """
    email_address = get_only_str_param_value("email_address", request.args)
    region_code = get_only_str_param_value("region_code",
                                           request.args,
                                           preserve_case=True)

    try:
        if not email_address:
            return "Missing email_address param", HTTPStatus.BAD_REQUEST
        if not region_code:
            return "Missing region_code param", HTTPStatus.BAD_REQUEST
        _validate_region_code(region_code)
        validate_email_address(email_address)
    except ValueError as error:
        logging.error(error)
        return str(error), HTTPStatus.BAD_REQUEST

    database_key = SQLAlchemyDatabaseKey.for_schema(
        schema_type=SchemaType.CASE_TRIAGE)
    # TODO(#8046): Don't use the deprecated session fetcher
    session = SessionFactory.deprecated__for_database(
        database_key=database_key)
    try:
        user_restrictions = (session.query(
            DashboardUserRestrictions.allowed_supervision_location_ids,
            DashboardUserRestrictions.allowed_supervision_location_level,
            DashboardUserRestrictions.can_access_leadership_dashboard,
            DashboardUserRestrictions.can_access_case_triage,
        ).filter(
            DashboardUserRestrictions.state_code == region_code.upper(),
            func.lower(DashboardUserRestrictions.restricted_user_email) ==
            email_address.lower(),
        ).one())

        restrictions = _format_db_results(user_restrictions)

        return (restrictions, HTTPStatus.OK)

    except sqlalchemy.orm.exc.NoResultFound:
        return (
            f"User not found for email address {email_address} and region code {region_code}.",
            HTTPStatus.NOT_FOUND,
        )

    except Exception as error:
        logging.error(error)
        return (
            f"An error occurred while fetching dashboard user restrictions with the email {email_address} for "
            f"region_code {region_code}: {error}",
            HTTPStatus.INTERNAL_SERVER_ERROR,
        )

    finally:
        session.close()
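Per the TODO in the snippet, the deprecated session fetcher could be replaced by the context-manager pattern already used in Examples #7 and #12, which would also absorb the explicit `session.close()` in the `finally` block. A sketch with the same query:

with SessionFactory.using_database(
    database_key=database_key, autocommit=False
) as session:
    # Same query as above, scoped to the managed session.
    user_restrictions = (
        session.query(
            DashboardUserRestrictions.allowed_supervision_location_ids,
            DashboardUserRestrictions.allowed_supervision_location_level,
            DashboardUserRestrictions.can_access_leadership_dashboard,
            DashboardUserRestrictions.can_access_case_triage,
        )
        .filter(
            DashboardUserRestrictions.state_code == region_code.upper(),
            func.lower(DashboardUserRestrictions.restricted_user_email)
            == email_address.lower(),
        )
        .one()
    )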