Example no. 1
0
 def setUp(self) -> None:
     """Launches a local on-disk Postgres instance and connects an engine to it.

     Stores the database directory, the overridden env vars, and the engine on
     the test instance so a matching tearDown can undo everything.
     """
     helpers = local_postgres_helpers
     self.db_dir = helpers.start_on_disk_postgresql_database()
     self.overridden_env_vars = helpers.update_local_sqlalchemy_postgres_env_vars()
     self.engine = create_engine(helpers.postgres_db_url_from_env_vars())
Example no. 2
0
    def setUp(self) -> None:
        """Builds a Flask test app backed by a local Postgres database, creates
        the Case Triage schema tables, loads demo fixture data, and records one
        pre-existing case update for the first demo client."""
        self.test_app = Flask(__name__)
        self.helpers = CaseTriageTestHelpers.from_test(self, self.test_app)

        # Point SQLAlchemy env vars at the local test database.
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
        db_url = local_postgres_helpers.postgres_db_url_from_env_vars()
        engine = setup_scoped_sessions(self.test_app, db_url)
        # Auto-generate all tables that exist in our schema in this database.
        # (The original assigned self.database_key twice with identical values;
        # the redundant first assignment has been removed.)
        self.database_key = SQLAlchemyDatabaseKey.for_schema(
            SchemaType.CASE_TRIAGE)
        self.database_key.declarative_meta.metadata.create_all(engine)

        self.demo_clients = get_fixture_clients()
        self.demo_opportunities = get_fixture_opportunities()

        self.client_1 = self.demo_clients[0]

        # Seed a single completed-assessment case update as the demo user.
        with self.helpers.using_demo_user():
            self.helpers.create_case_update(
                self.client_1.person_external_id,
                CaseUpdateActionType.COMPLETED_ASSESSMENT.value,
            )
Example no. 3
0
 def setUp(self) -> None:
     """Boots a local on-disk Postgres database and prepares SQLAlchemy access.

     Uses `self.schema_type` (set by the concrete test class) to pick the
     canonical database key for the schema under test.
     """
     helpers = local_postgres_helpers
     self.db_dir = helpers.start_on_disk_postgresql_database()
     self.database_key = SQLAlchemyDatabaseKey.canonical_for_schema(self.schema_type)
     self.overridden_env_vars = helpers.update_local_sqlalchemy_postgres_env_vars()
     self.engine = create_engine(helpers.postgres_db_url_from_env_vars())
Example no. 4
0
    def test_direct_ingest_instance_status_contains_data_for_all_states(
            self) -> None:
        '''Enforces that after all migrations the set of direct ingest instance statuses
        matches the list of known states.

        If this test fails, you will likely have to add a new migration because a new state
        was recently created. To do so, first run:
        ```
        python -m recidiviz.tools.migrations.autogenerate_migration \
            --database OPERATIONS \
            --message add_us_xx
        ```

        This will generate a blank migration. You should then modify the migration, changing
        the `upgrade` method to look like:
        ```
        def upgrade() -> None:
            op.execute("""
                INSERT INTO direct_ingest_instance_status (region_code, instance, is_paused) VALUES
                ('US_XX', 'PRIMARY', TRUE),
                ('US_XX', 'SECONDARY', TRUE);
            """)
        ```

        Afterwards, this test should ideally pass.
        '''

        with runner(self.default_config(), self.engine) as r:
            r.migrate_up_to("head")

            # Build a fresh engine from the env vars so we read the
            # fully-migrated database.
            engine = create_engine(
                local_postgres_helpers.postgres_db_url_from_env_vars())

            # Use a context manager so the connection is always released
            # (the original leaked the connection).
            with engine.connect() as conn:
                rows = conn.execute(
                    "SELECT region_code, instance FROM direct_ingest_instance_status;"
                )

                # Group the state codes present in the table by ingest instance.
                instance_to_state_codes = defaultdict(set)
                for row in rows:
                    instance_to_state_codes[DirectIngestInstance(row[1])].add(
                        row[0])

            # Every known state region must have a status row in each instance.
            required_states = {
                name.upper()
                for name in get_existing_region_dir_names()
            }

            for instance in DirectIngestInstance:
                self.assertEqual(required_states,
                                 instance_to_state_codes[instance])
Example no. 5
0
    def fetch_all_enums(self) -> Dict[str, Set[str]]:
        """Returns a mapping of postgres enum type name to the set of that
        enum's labels, for all enum types in the `public` schema of the
        database pointed to by the local postgres env vars."""
        engine = create_engine(
            local_postgres_helpers.postgres_db_url_from_env_vars())

        # Use a context manager so the connection is released even if the
        # query raises (the original leaked the connection).
        with engine.connect() as conn:
            rows = conn.execute("""
        SELECT t.typname as enum_name,
            e.enumlabel as enum_value
        FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE
            n.nspname = 'public';
        """)

            enums = defaultdict(set)
            for row in rows:
                enums[row[0]].add(row[1])

        return enums
Example no. 6
0
def _get_old_enum_values(schema_type: SchemaType, enum_name: str) -> List[str]:
    """Fetches the current enum values for the given schema and enum name.

    Spins up a throwaway on-disk Postgres instance, migrates it to head, then
    reads the labels of the named enum type from the `public` schema. The
    temporary database and env-var overrides are always torn down, even when
    the migration or query fails.

    Args:
        schema_type: the schema whose migrations should be run.
        enum_name: the postgres enum type whose labels to fetch.

    Returns:
        The list of enum labels for `enum_name`.
    """
    # Setup temp pg database
    db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
    database_key = SQLAlchemyDatabaseKey.canonical_for_schema(schema_type)
    overridden_env_vars = (
        local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
    engine = create_engine(
        local_postgres_helpers.postgres_db_url_from_env_vars())

    try:
        # Fetch enums
        default_config = {
            "file": database_key.alembic_file,
            "script_location": database_key.migrations_location,
        }
        with runner(default_config, engine) as r:
            r.migrate_up_to("head")
        # SECURITY NOTE: enum_name is interpolated directly into the SQL.
        # This is only acceptable because callers pass internally-defined
        # enum names, never user input — do not expose this to untrusted data.
        # The connection is scoped with a context manager so it is always
        # closed (the original leaked it).
        with engine.connect() as conn:
            rows = conn.execute(f"""
        SELECT e.enumlabel as enum_value
        FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE
            n.nspname = 'public'
            AND t.typname = '{enum_name}';
        """)
            enums = [row[0] for row in rows]
    finally:
        # Teardown temp pg database
        local_postgres_helpers.restore_local_env_vars(overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            db_dir)

    return enums
Example no. 7
0
from recidiviz.utils.flask_exception import FlaskException
from recidiviz.utils.timer import RepeatedTimer

# Flask setup
# Serve the compiled Case Triage frontend from this package-relative build dir.
static_folder = os.path.abspath(
    os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "../../frontends/case-triage/build/",
    ))

app = Flask(__name__, static_folder=static_folder)
app.secret_key = get_local_secret("case_triage_secret_key")
# Enable CSRF protection on state-changing requests app-wide.
CSRFProtect(app)

if in_development():
    # Local development talks to the on-disk Postgres configured via env vars.
    db_url = local_postgres_helpers.postgres_db_url_from_env_vars()
else:
    db_url = SQLAlchemyEngineManager.get_server_postgres_instance_url(
        schema_type=SchemaType.CASE_TRIAGE)
    # Harden session cookies outside of development.
    app.config["SESSION_COOKIE_HTTPONLY"] = True
    app.config["SESSION_COOKIE_SECURE"] = True
    app.config["SESSION_COOKIE_SAMESITE"] = "Strict"

# Bind a scoped SQLAlchemy session to the app for the chosen database URL.
setup_scoped_sessions(app, db_url)


# Auth setup
def on_successful_authorization(_payload: Dict[str, str], token: str) -> None:
    """
    Memoize the user's info (email_address, picture, etc) into our session
    Expose the user on the flask request global
Example no. 8
0
    def setUp(self) -> None:
        """Builds a Flask test app backed by a local Postgres database and
        seeds it with two officers, three clients, case updates, an
        opportunity deferral, and ETL opportunities used by this class's
        tests."""
        self.test_app = Flask(__name__)
        self.helpers = CaseTriageTestHelpers.from_test(self, self.test_app)
        self.test_client = self.helpers.test_client
        self.mock_segment_client = self.helpers.mock_segment_client

        self.database_key = SQLAlchemyDatabaseKey.for_schema(
            SchemaType.CASE_TRIAGE)
        # Point SQLAlchemy env vars at the local test database.
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
        db_url = local_postgres_helpers.postgres_db_url_from_env_vars()
        engine = setup_scoped_sessions(self.test_app, db_url)
        # Auto-generate all tables that exist in our schema in this database
        self.database_key.declarative_meta.metadata.create_all(engine)
        self.session = self.test_app.scoped_session

        # Add seed data
        # officer supervises clients 1-3; officer_without_clients has none.
        self.officer = generate_fake_officer("officer_id_1",
                                             "*****@*****.**")
        self.officer_without_clients = generate_fake_officer(
            "officer_id_2", "*****@*****.**")
        self.client_1 = generate_fake_client(
            client_id="client_1",
            supervising_officer_id=self.officer.external_id,
        )
        self.client_2 = generate_fake_client(
            client_id="client_2",
            supervising_officer_id=self.officer.external_id,
            last_assessment_date=date(2021, 2, 2),
        )
        self.client_3 = generate_fake_client(
            client_id="client_3",
            supervising_officer_id=self.officer.external_id,
        )
        self.client_info_3 = generate_fake_client_info(
            client=self.client_3,
            preferred_name="Alex",
        )
        self.case_update_1 = generate_fake_case_update(
            self.client_1,
            self.officer.external_id,
            action_type=CaseUpdateActionType.COMPLETED_ASSESSMENT,
            last_version=serialize_client_case_version(
                CaseUpdateActionType.COMPLETED_ASSESSMENT,
                self.client_1).to_json(),
        )
        # NOTE(review): case_update_2 is attributed to officer_without_clients
        # even though client_2 is supervised by self.officer — presumably
        # deliberate for cross-officer test cases; confirm.
        self.case_update_2 = generate_fake_case_update(
            self.client_2,
            self.officer_without_clients.external_id,
            action_type=CaseUpdateActionType.COMPLETED_ASSESSMENT,
            last_version=serialize_client_case_version(
                CaseUpdateActionType.COMPLETED_ASSESSMENT,
                self.client_2).to_json(),
        )
        self.case_update_3 = generate_fake_case_update(
            self.client_1,
            self.officer.external_id,
            action_type=CaseUpdateActionType.NOT_ON_CASELOAD,
            last_version=serialize_client_case_version(
                CaseUpdateActionType.NOT_ON_CASELOAD, self.client_1).to_json(),
        )
        # Move client_2's assessment date past the one recorded in
        # case_update_2 above (2021-02-02 → 2022-02-02).
        self.client_2.most_recent_assessment_date = date(2022, 2, 2)

        # Opportunities: one deferred until tomorrow, one outstanding.
        self.opportunity_1 = generate_fake_etl_opportunity(
            officer_id=self.officer.external_id,
            person_external_id=self.client_1.person_external_id,
        )
        tomorrow = datetime.now() + timedelta(days=1)
        self.deferral_1 = generate_fake_reminder(self.opportunity_1, tomorrow)
        self.opportunity_2 = generate_fake_etl_opportunity(
            officer_id=self.officer.external_id,
            person_external_id=self.client_2.person_external_id,
        )
        # all generated fake clients have no employer
        self.num_unemployed_opportunities = 3

        # Persist all seed entities in one transaction.
        self.session.add_all([
            self.officer,
            self.officer_without_clients,
            self.client_1,
            self.client_2,
            self.client_3,
            self.client_info_3,
            self.case_update_1,
            self.case_update_2,
            self.case_update_3,
            self.opportunity_1,
            self.deferral_1,
            self.opportunity_2,
        ])
        self.session.commit()
Example no. 9
0
    def setUp(self) -> None:
        """Builds a Flask test app with the api blueprint registered, a JSON
        error handler for FlaskException, a local Postgres-backed session, and
        seed data: two officers, two clients, and two case updates."""
        self.test_app = Flask(__name__)
        self.test_app.register_blueprint(api)
        self.test_client = self.test_app.test_client()

        @self.test_app.errorhandler(FlaskException)
        def _handle_auth_error(ex: FlaskException) -> Response:
            # Render FlaskException as JSON with its own status code.
            response = jsonify({
                "code": ex.code,
                "description": ex.description,
            })
            response.status_code = ex.status_code
            return response

        # Point SQLAlchemy env vars at the local test database.
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
        db_url = local_postgres_helpers.postgres_db_url_from_env_vars()
        setup_scoped_sessions(self.test_app, db_url)

        # Add seed data
        self.officer_1 = generate_fake_officer("officer_id_1")
        self.officer_2 = generate_fake_officer("officer_id_2")
        self.client_1 = generate_fake_client(
            client_id="client_1",
            supervising_officer_id=self.officer_1.external_id,
        )
        self.client_2 = generate_fake_client(
            client_id="client_2",
            supervising_officer_id=self.officer_1.external_id,
            last_assessment_date=date(2021, 2, 2),
        )
        # One completed-assessment update per client.
        self.case_update_1 = CaseUpdate(
            person_external_id=self.client_1.person_external_id,
            officer_external_id=self.client_1.supervising_officer_external_id,
            state_code=self.client_1.state_code,
            update_metadata={
                "actions":
                CaseUpdatesInterface.serialize_actions(
                    self.client_1,
                    [CaseUpdateActionType.COMPLETED_ASSESSMENT],
                ),
            },
        )
        self.case_update_2 = CaseUpdate(
            person_external_id=self.client_2.person_external_id,
            officer_external_id=self.client_2.supervising_officer_external_id,
            state_code=self.client_2.state_code,
            update_metadata={
                "actions":
                CaseUpdatesInterface.serialize_actions(
                    self.client_2,
                    [CaseUpdateActionType.COMPLETED_ASSESSMENT],
                ),
            },
        )
        # Move client_2's assessment date past the one used in case_update_2.
        self.client_2.most_recent_assessment_date = date(2022, 2, 2)

        # NOTE(review): self.officer_2 is created but never added to the
        # session — presumably deliberate (an officer absent from the DB);
        # confirm against the tests that use it.
        sess = SessionFactory.for_schema_base(CaseTriageBase)
        sess.add(self.officer_1)
        sess.add(self.client_1)
        sess.add(self.client_2)
        sess.add(self.case_update_1)
        sess.add(self.case_update_2)
        sess.commit()