Example 1
0
    def test_enums_match_schema(self):
        """Checks that the enums created by migrations match the schema definitions."""
        # Run every migration forward against the current database.
        with runner(self.default_config()) as migration_runner:
            migration_runner.migrate_up_to('head')

        # Snapshot the enums as the migrations produced them.
        migration_enums = self.fetch_all_enums()

        # Tear down the migration database and stand up a fresh postgres
        # instance so the schema can be loaded directly from the models.
        local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            self.db_dir)

        self.db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())

        local_postgres_helpers.use_on_disk_postgresql_database(
            SQLAlchemyEngineManager.declarative_method_for_schema(
                self.schema_type))

        # Snapshot the enums as the schema declares them.
        schema_enums = self.fetch_all_enums()

        # Both snapshots must contain the same enums with the same values.
        self.assertEqual(len(migration_enums), len(schema_enums))
        for enum_name in migration_enums:
            migration_values = migration_enums[enum_name]
            schema_values = schema_enums[enum_name]
            self.assertEqual(len(migration_values),
                             len(schema_values),
                             msg=f'{enum_name} lengths differ')
            self.assertEqual(len(migration_values),
                             len(migration_values.intersection(schema_values)),
                             msg=f'{enum_name} values differ')
Example 2
0
    def test_enums_match_schema(self) -> None:
        """Checks that the enums created by migrations match the schema definitions."""
        # Run every migration forward against the current database.
        with runner(self.default_config(), self.engine) as migration_runner:
            migration_runner.migrate_up_to("head")

        # Snapshot the enums as the migrations produced them.
        migration_enums = self.fetch_all_enums()

        # Tear down the migration database and stand up a fresh postgres
        # instance so the schema can be loaded directly from the models.
        local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            self.db_dir)

        self.db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())

        local_postgres_helpers.use_on_disk_postgresql_database(
            self.database_key)

        # Snapshot the enums as the schema declares them.
        schema_enums = self.fetch_all_enums()

        # Both snapshots must contain the same enums with the same values.
        self.assertEqual(len(migration_enums), len(schema_enums))
        for enum_name in migration_enums:
            self.assertCountEqual(migration_enums[enum_name],
                                  schema_enums[enum_name])

        # Cleanup needed for this method.
        local_postgres_helpers.teardown_on_disk_postgresql_database(
            self.database_key)
Example 3
0
def alembic_runner(alembic_config, alembic_engine):
    """Produce an alembic migration context in which to execute alembic tests."""
    # Imported lazily so pytest-alembic is only required when this fixture runs.
    import pytest_alembic

    with pytest_alembic.runner(config=alembic_config,
                               engine=alembic_engine) as migration_context:
        yield migration_context
Example 4
0
    def test_direct_ingest_instance_status_contains_data_for_all_states(
            self) -> None:
        '''Enforces that after all migrations the set of direct ingest instance statuses
        matches the list of known states.

        If this test fails, you will likely have to add a new migration because a new state
        was recently created. To do so, first run:
        ```
        python -m recidiviz.tools.migrations.autogenerate_migration \
            --database OPERATIONS \
            --message add_us_xx
        ```

        This will generate a blank migration. You should then modify the migration, changing
        the `upgrade` method to look like:
        ```
        def upgrade() -> None:
            op.execute("""
                INSERT INTO direct_ingest_instance_status (region_code, instance, is_paused) VALUES
                ('US_XX', 'PRIMARY', TRUE),
                ('US_XX', 'SECONDARY', TRUE);
            """)
        ```

        Afterwards, this test should ideally pass.
        '''

        with runner(self.default_config(), self.engine) as r:
            r.migrate_up_to("head")

            engine = create_engine(
                local_postgres_helpers.postgres_db_url_from_env_vars())

            # Use the connection as a context manager so it is always closed,
            # even when an assertion below fails (the original leaked it).
            with engine.connect() as conn:
                rows = conn.execute(
                    "SELECT region_code, instance FROM direct_ingest_instance_status;"
                )

                # Group the state codes that have a status row by ingest instance.
                instance_to_state_codes = defaultdict(set)
                for row in rows:
                    instance_to_state_codes[DirectIngestInstance(row[1])].add(
                        row[0])

            # Every known region directory must have a row for every instance.
            required_states = {
                name.upper()
                for name in get_existing_region_dir_names()
            }

            for instance in DirectIngestInstance:
                self.assertEqual(required_states,
                                 instance_to_state_codes[instance])
Example 5
0
 def test_up_down(self) -> None:
     """Enforce that migrations can be run all the way up, back, and up again."""
     with runner(self.default_config(), self.engine) as r:
         all_revisions = r.history.revisions
         r.migrate_up_to("head")
         # Walk back down through every revision, newest first.
         for rev in reversed(all_revisions):
             try:
                 r.migrate_down_to(rev)
             except Exception:
                 self.fail(f"Migrate down failed at revision: {rev}")
         # Then walk back up, oldest first.
         for rev in all_revisions:
             try:
                 r.migrate_up_to(rev)
             except Exception:
                 self.fail(f"Migrate back up failed at revision: {rev}")
Example 6
0
    def test_migrate_matches_defs(self):
        """Enforces that after all migrations, database state matches known models

        Important note: This test will not detect changes made to enums that have failed to
        be incorporated by existing migrations. It only reliably handles table schema.
        """
        def verify_is_empty(_, __, directives):
            # An autogenerated revision with no upgrade operations means the
            # migrated database already matches the declared models.
            if not directives[0].upgrade_ops.is_empty():
                raise RuntimeError('migration should be empty')

        with runner(self.default_config()) as r:
            r.migrate_up_to('head')
            r.generate_revision(message="test_rev",
                                autogenerate=True,
                                process_revision_directives=verify_is_empty)
Example 7
0
    def setUp(self) -> None:
        """Builds the Case Triage database up to head via alembic migrations."""
        self.db_key = SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE)
        self.env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())

        # We need to build up the database using the migrations instead of
        # by just loading from the internal representation because the different
        # methods induce different orders.
        # The migration order is the one seen in staging/prod, as well as what
        # we do in development.
        engine = SQLAlchemyEngineManager.init_engine_for_postgres_instance(
            database_key=self.db_key,
            db_url=local_postgres_helpers.on_disk_postgres_db_url(),
        )
        alembic_config = {
            "file": self.db_key.alembic_file,
            "script_location": self.db_key.migrations_location,
        }
        with runner(alembic_config, engine) as r:
            r.migrate_up_to("head")
Example 8
0
    def test_migrate_matches_defs(self) -> None:
        """Enforces that after all migrations, database state matches known models

        Important note: This test will not detect changes made to enums that have failed to
        be incorporated by existing migrations. It only reliably handles table schema.
        """
        def verify_is_empty(_, __, directives) -> None:
            # The autogenerated script must contain no upgrade operations;
            # anything present means the models drifted from the migrations.
            script = directives[0]
            if not script.upgrade_ops.is_empty():
                raise RuntimeError(
                    "expected empty autogenerated migration. actual contained these operations:\n"
                    f"{render_python_code(script.upgrade_ops)}")

        with runner(self.default_config(), self.engine) as r:
            r.migrate_up_to("head")
            r.generate_revision(message="test_rev",
                                autogenerate=True,
                                process_revision_directives=verify_is_empty)
Example 9
0
def _get_old_enum_values(schema_type: SchemaType, enum_name: str) -> List[str]:
    """Fetches the current enum values for the given schema and enum name.

    Spins up a temporary on-disk postgres database, runs all migrations for the
    schema against it, then reads the labels of the named enum type out of the
    postgres catalog. The temporary database and env vars are always torn down,
    even if the migrations or query fail.
    """
    # Setup temp pg database
    db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
    database_key = SQLAlchemyDatabaseKey.canonical_for_schema(schema_type)
    overridden_env_vars = (
        local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
    engine = create_engine(
        local_postgres_helpers.postgres_db_url_from_env_vars())

    try:
        # Run all migrations so the enum types exist before we query them.
        default_config = {
            "file": database_key.alembic_file,
            "script_location": database_key.migrations_location,
        }
        with runner(default_config, engine) as r:
            r.migrate_up_to("head")
        # Use the connection as a context manager so it is closed
        # deterministically (the original leaked it).
        # NOTE: enum_name is interpolated directly into the SQL. This is only
        # acceptable because callers pass internally-defined enum names, never
        # user input.
        with engine.connect() as conn:
            rows = conn.execute(f"""
        SELECT e.enumlabel as enum_value
        FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE
            n.nspname = 'public'
            AND t.typname = '{enum_name}';
        """)
            enums = [row[0] for row in rows]
    finally:
        # Teardown temp pg database
        engine.dispose()
        local_postgres_helpers.restore_local_env_vars(overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            db_dir)

    return enums
Example 10
0
 def test_single_head_revision(self) -> None:
     """Enforce that there is exactly one head revision."""
     # A branched migration history would yield multiple heads.
     with runner(self.default_config(), self.engine) as migration_runner:
         self.assertEqual(1, len(migration_runner.heads))
Example 11
0
 def test_full_upgrade(self) -> None:
     """Enforce that migrations can be run forward to completion."""
     # Upgrading to "head" applies every revision in order; any failure raises.
     with runner(self.default_config(), self.engine) as migration_runner:
         migration_runner.migrate_up_to("head")
Example 12
0
 def test_full_upgrade(self):
     """Enforce that migrations can be run forward to completion."""
     # Upgrading to 'head' applies every revision in order; any failure raises
     # and fails the test.
     with runner(self.default_config()) as r:
         r.migrate_up_to('head')