Example #1
    def test_enums_match_schema(self):
        with runner(self.default_config()) as r:
            r.migrate_up_to('head')

        # Fetch enum values
        migration_enums = self.fetch_all_enums()

        # Doing teardown/setup to generate a new postgres instance
        local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            self.db_dir)

        self.db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())

        local_postgres_helpers.use_on_disk_postgresql_database(
            SQLAlchemyEngineManager.declarative_method_for_schema(
                self.schema_type))

        # Check enum values
        schema_enums = self.fetch_all_enums()

        # Assert that they all match
        self.assertEqual(len(migration_enums), len(schema_enums))
        for enum_name, migration_values in migration_enums.items():
            schema_values = schema_enums[enum_name]
            self.assertEqual(len(migration_values),
                             len(schema_values),
                             msg=f'{enum_name} lengths differ')
            self.assertEqual(len(migration_values),
                             len(migration_values.intersection(schema_values)),
                             msg=f'{enum_name} values differ')
Example #2
    def test_enums_match_schema(self) -> None:
        with runner(self.default_config(), self.engine) as r:
            r.migrate_up_to("head")

        # Fetch enum values
        migration_enums = self.fetch_all_enums()

        # Doing teardown/setup to generate a new postgres instance
        local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            self.db_dir)

        self.db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())

        local_postgres_helpers.use_on_disk_postgresql_database(
            self.database_key)

        # Check enum values
        schema_enums = self.fetch_all_enums()

        # Assert that they all match
        self.assertEqual(len(migration_enums), len(schema_enums))
        for enum_name, migration_values in migration_enums.items():
            schema_values = schema_enums[enum_name]
            self.assertCountEqual(migration_values, schema_values)

        # Cleanup needed for this method.
        local_postgres_helpers.teardown_on_disk_postgresql_database(
            self.database_key)
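Both versions of this test rely on a fetch_all_enums helper that is not included in these excerpts. The sketch below is an assumption about its shape, reconstructed from how the test uses the result (a dict of enum type name to a set of labels) and from the pg_enum query shown in Example #5; in the tests it would be a method on the test class, and the real implementation may differ.

from typing import Dict, Set

from sqlalchemy import create_engine


def fetch_all_enums() -> Dict[str, Set[str]]:
    """Returns a map of Postgres enum type name -> set of enum labels for the
    database that the current SQLAlchemy env vars point to. Sketch only."""
    engine = create_engine(
        local_postgres_helpers.postgres_db_url_from_env_vars())
    conn = engine.connect()
    try:
        rows = conn.execute("""
        SELECT t.typname AS enum_name, e.enumlabel AS enum_value
        FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE n.nspname = 'public';
        """)
        enums: Dict[str, Set[str]] = {}
        for enum_name, enum_value in rows:
            enums.setdefault(enum_name, set()).add(enum_value)
        return enums
    finally:
        conn.close()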
Example #3
def cleanup_run(tmp_postgres_db_dir: str, clean_up_db: bool) -> None:
    if clean_up_db:
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            tmp_postgres_db_dir)
        logging.info("Db automatically cleaned up")
    else:
        # Don't cleanup the database so that user can query the data afterward.
        logging.info(
            "For future cleanup, the postgres data directory is at %s.",
            tmp_postgres_db_dir,
        )
        logging.info("To query the data, connect to the local database with "
                     "`psql --dbname=recidiviz_test_db`")
Example #4
def main(schema_type: SchemaType, message: str, use_local_db: bool) -> None:
    """Runs the script to autogenerate migrations."""
    database_key = SQLAlchemyDatabaseKey.canonical_for_schema(schema_type)
    if use_local_db:
        # TODO(#4619): We should eventually move this from a local postgres instance to running
        # postgres from a docker container.
        if not local_postgres_helpers.can_start_on_disk_postgresql_database():
            logging.error(
                "pg_ctl is not installed, so the script cannot be run locally. "
                "--project-id must be specified to run against staging or production."
            )
            logging.error("Exiting...")
            sys.exit(1)
        logging.info("Starting local postgres database for autogeneration...")
        tmp_db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        original_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
    else:
        # TODO(Recidiviz/zenhub-tasks#134): This code path will throw when pointed at staging
        # because we haven't created valid read-only users there just yet.
        try:
            original_env_vars = SQLAlchemyEngineManager.update_sqlalchemy_env_vars(
                database_key=database_key, readonly_user=True)
        except ValueError as e:
            logging.warning("Error fetching SQLAlchemy credentials: %s", e)
            logging.warning(
                "Until readonly users are created, we cannot autogenerate migrations against staging."
            )
            logging.warning(
                "See https://github.com/Recidiviz/zenhub-tasks/issues/134")
            sys.exit(1)

    try:
        config = alembic.config.Config(database_key.alembic_file)
        if use_local_db:
            upgrade(config, "head")
        revision(config, autogenerate=True, message=message)
    except Exception as e:
        logging.error("Automigration generation failed: %s", e)

    local_postgres_helpers.restore_local_env_vars(original_env_vars)
    if use_local_db:
        logging.info("Stopping local postgres database...")
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            tmp_db_dir)
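A sketch of how this main could be wired to a command line. Only --project-id is referenced in the error message above; the --schema-type and --message flag names, and deriving use_local_db from the absence of --project-id, are assumptions made for illustration.

import argparse
import logging

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(
        description="Autogenerate alembic migrations for a schema.")
    # Flag names below are assumptions; only --project-id appears in the source.
    parser.add_argument("--schema-type", required=True,
                        choices=[s.name for s in SchemaType])
    parser.add_argument("--message", required=True,
                        help="Revision message for the autogenerated migration.")
    parser.add_argument("--project-id",
                        help="If set, run against this GCP project instead of a "
                             "local postgres instance.")
    args = parser.parse_args()
    main(
        schema_type=SchemaType[args.schema_type],
        message=args.message,
        use_local_db=args.project_id is None,
    )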
Example #5
def _get_old_enum_values(schema_type: SchemaType, enum_name: str) -> List[str]:
    """Fetches the current enum values for the given schema and enum name."""
    # Setup temp pg database
    db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
    database_key = SQLAlchemyDatabaseKey.canonical_for_schema(schema_type)
    overridden_env_vars = (
        local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
    engine = create_engine(
        local_postgres_helpers.postgres_db_url_from_env_vars())

    try:
        # Fetch enums
        default_config = {
            "file": database_key.alembic_file,
            "script_location": database_key.migrations_location,
        }
        with runner(default_config, engine) as r:
            r.migrate_up_to("head")
        conn = engine.connect()
        rows = conn.execute(f"""
        SELECT e.enumlabel as enum_value
        FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE
            n.nspname = 'public'
            AND t.typname = '{enum_name}';
        """)
        enums = [row[0] for row in rows]
    finally:
        # Teardown temp pg database
        local_postgres_helpers.restore_local_env_vars(overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            db_dir)

    return enums
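The query above builds the SQL with an f-string. Since enum_name comes from internal tooling that is workable, but the same lookup can also be written with a bound parameter via sqlalchemy.text. A sketch, with the surrounding setup and teardown unchanged:

from sqlalchemy import text

query = text("""
SELECT e.enumlabel AS enum_value
FROM pg_type t
    JOIN pg_enum e ON t.oid = e.enumtypid
    JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
WHERE
    n.nspname = 'public'
    AND t.typname = :enum_name;
""")
rows = conn.execute(query, {"enum_name": enum_name})
enums = [row[0] for row in rows]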
Example #6
    def tearDownClass(cls) -> None:
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            cls.temp_db_dir)
Example #7
    def tearDown(self) -> None:
        local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            self.db_dir)
Example #8
def main(
    schema_type: SchemaType,
    repo_root: str,
    ssl_cert_path: str,
    dry_run: bool,
    skip_db_name_check: bool,
    confirm_hash: Optional[str],
) -> None:
    """
    Invokes the main code path for running migrations.

    This checks for user validations that the database and branches are correct and then runs existing pending
    migrations.
    """
    if dry_run:
        if not local_postgres_helpers.can_start_on_disk_postgresql_database():
            logging.error("pg_ctl is not installed. Cannot perform a dry-run.")
            logging.error("Exiting...")
            sys.exit(1)
        logging.info("Creating a dry-run...\n")
    else:
        if not ssl_cert_path:
            logging.error(
                "SSL certificates are required when running against live databases"
            )
            logging.error("Exiting...")
            sys.exit(1)
        logging.info("Using SSL certificate path: %s", ssl_cert_path)

    is_prod = metadata.project_id() == GCP_PROJECT_PRODUCTION
    if is_prod:
        logging.info("RUNNING AGAINST PRODUCTION\n")

    if not skip_db_name_check:
        confirm_correct_db_instance(schema_type)
    confirm_correct_git_branch(repo_root, confirm_hash=confirm_hash)

    if dry_run:
        db_keys = [SQLAlchemyDatabaseKey.canonical_for_schema(schema_type)]
    else:
        db_keys = [
            key for key in SQLAlchemyDatabaseKey.all()
            if key.schema_type == schema_type
        ]

    # Run migrations
    for key in db_keys:
        if dry_run:
            overriden_env_vars = (
                local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
        else:
            overriden_env_vars = SQLAlchemyEngineManager.update_sqlalchemy_env_vars(
                database_key=key,
                ssl_cert_path=ssl_cert_path,
                migration_user=True,
            )
        try:
            logging.info(
                "*** Starting postgres migrations for schema [%s], db_name [%s] ***",
                key.schema_type,
                key.db_name,
            )
            if dry_run:
                db_dir = local_postgres_helpers.start_on_disk_postgresql_database(
                )
            config = alembic.config.Config(key.alembic_file)
            alembic.command.upgrade(config, "head")
        except Exception as e:
            logging.error("Migrations failed to run: %s", e)
            sys.exit(1)
        finally:
            local_postgres_helpers.restore_local_env_vars(overriden_env_vars)
            if dry_run:
                try:
                    logging.info("Stopping local postgres database")
                    local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
                        db_dir)
                except Exception as e2:
                    logging.error("Error cleaning up postgres: %s", e2)
Example #9
def main(database: SchemaType, repo_root: str, ssl_cert_path: str,
         dry_run: bool):
    """
    Invokes the main code path for running migrations.

    This checks for user validations that the database and branches are correct and then runs existing pending
    migrations.
    """

    requires_ssl = SQLAlchemyEngineManager.database_requires_ssl(
        metadata.project_id())

    if requires_ssl and not ssl_cert_path:
        logging.error(
            'Specifying an argument to --ssl-cert-path is required for the specified database.'
        )
        logging.error('Exiting...')
        sys.exit(1)

    if dry_run:
        if not local_postgres_helpers.can_start_on_disk_postgresql_database():
            logging.error('pg_ctl is not installed. Cannot perform a dry-run.')
            logging.error('Exiting...')
            sys.exit(1)
        logging.info('Creating a dry-run...\n')

    is_prod = metadata.project_id() == GCP_PROJECT_PRODUCTION
    dbname = SQLAlchemyEngineManager.get_stripped_cloudsql_instance_id(
        database)
    if is_prod:
        logging.info('RUNNING AGAINST PRODUCTION\n')

    db_check = input(
        f'Running new migrations on {dbname}. Please type "{dbname}" to confirm. (Anything else exits):\n'
    )
    if db_check != dbname:
        logging.warning('\nConfirmation aborted.')
        sys.exit(1)

    # Get user to validate git branch
    try:
        repo = Repository(repo_root)
    except Exception as e:
        logging.error('improper project root provided: %s', e)
        sys.exit(1)

    current_branch = repo.head.shorthand

    if is_prod and not current_branch.startswith('releases/'):
        logging.error(
            'Migrations run against production must be from a release branch. The current branch is %s.',
            current_branch)
        sys.exit(1)

    branch_check = input(
        f'\nThis script will execute all migrations on this branch ({current_branch}) that have not yet been run '
        f'against {dbname}. Please type "{current_branch}" to confirm your branch. (Anything else exits):\n'
    )
    if branch_check != current_branch:
        logging.warning('\nConfirmation aborted.')
        sys.exit(1)

    # Fetch secrets
    if dry_run:
        overriden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())
    else:
        overriden_env_vars = SQLAlchemyEngineManager.update_sqlalchemy_env_vars(
            database, ssl_cert_path=ssl_cert_path)

    # Run migrations
    try:
        if dry_run:
            logging.info(
                'Starting local postgres database for migrations dry run')
            db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        config = alembic.config.Config(
            SQLAlchemyEngineManager.get_alembic_file(database))
        alembic.command.upgrade(config, 'head')
    except Exception as e:
        logging.error('Migrations failed to run: %s', e)
        sys.exit(1)
    finally:
        local_postgres_helpers.restore_local_env_vars(overriden_env_vars)
        if dry_run:
            try:
                logging.info('Stopping local postgres database')
                local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
                    db_dir)
            except Exception as e2:
                logging.error('Error cleaning up postgres: %s', e2)
Example #10
def main(
    database: SchemaType, repo_root: str, ssl_cert_path: str, dry_run: bool
) -> None:
    """
    Invokes the main code path for running migrations.

    This checks for user validations that the database and branches are correct and then runs existing pending
    migrations.
    """
    if dry_run:
        if not local_postgres_helpers.can_start_on_disk_postgresql_database():
            logging.error("pg_ctl is not installed. Cannot perform a dry-run.")
            logging.error("Exiting...")
            sys.exit(1)
        logging.info("Creating a dry-run...\n")
    else:
        if not ssl_cert_path:
            logging.error(
                "SSL certificates are required when running against live databases"
            )
            logging.error("Exiting...")
            sys.exit(1)
        logging.info("Using SSL certificate path: %s", ssl_cert_path)

    is_prod = metadata.project_id() == GCP_PROJECT_PRODUCTION
    if is_prod:
        logging.info("RUNNING AGAINST PRODUCTION\n")

    confirm_correct_db(database)
    confirm_correct_git_branch(repo_root, is_prod=is_prod)

    if dry_run:
        overriden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars()
        )
    else:
        overriden_env_vars = SQLAlchemyEngineManager.update_sqlalchemy_env_vars(
            database,
            ssl_cert_path=ssl_cert_path,
            migration_user=True,
        )

    # Run migrations
    try:
        if dry_run:
            logging.info("Starting local postgres database for migrations dry run")
            db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        config = alembic.config.Config(
            SQLAlchemyEngineManager.get_alembic_file(database)
        )
        alembic.command.upgrade(config, "head")
    except Exception as e:
        logging.error("Migrations failed to run: %s", e)
        sys.exit(1)
    finally:
        local_postgres_helpers.restore_local_env_vars(overriden_env_vars)
        if dry_run:
            try:
                logging.info("Stopping local postgres database")
                local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
                    db_dir
                )
            except Exception as e2:
                logging.error("Error cleaning up postgres: %s", e2)