    @classmethod
    def attempt_init_engines_for_server(cls,
                                        schema_types: Set[SchemaType]) -> None:
        """Attempts to initialize engines for the server for the given schema types.

        Ignores any connections that fail, so that a single down database does not cause
        our server to crash."""
        for database_key in SQLAlchemyDatabaseKey.all():
            if database_key.schema_type in schema_types:
                try:
                    cls.init_engine(database_key)
                except BaseException:
                    pass
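A minimal sketch of how this helper might be wired into server startup, assuming the
method lives on SQLAlchemyEngineManager (as the later snippets suggest) and that the
import paths below match the package layout; both are assumptions, not verified.

# Hypothetical startup hook -- the owning class and import paths are assumptions.
from typing import Set

from recidiviz.persistence.database.schema_utils import SchemaType  # assumed path
from recidiviz.persistence.database.sqlalchemy_engine_manager import (  # assumed path
    SQLAlchemyEngineManager,
)


def initialize_engines_on_startup() -> None:
    """Best-effort engine initialization for the schemas this server serves."""
    schemas: Set[SchemaType] = {SchemaType.STATE, SchemaType.OPERATIONS}
    # Failed connections are swallowed inside attempt_init_engines_for_server, so a
    # single down database does not prevent the server from starting.
    SQLAlchemyEngineManager.attempt_init_engines_for_server(schemas)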
Example #2
    def test_get_all(self, state_codes_fn) -> None:
        all_keys = SQLAlchemyDatabaseKey.all()

        expected_all_keys = [
            SQLAlchemyDatabaseKey(SchemaType.JAILS, db_name="postgres"),
            SQLAlchemyDatabaseKey(SchemaType.STATE, db_name="postgres"),
            SQLAlchemyDatabaseKey(SchemaType.OPERATIONS, db_name="postgres"),
            SQLAlchemyDatabaseKey(SchemaType.JUSTICE_COUNTS,
                                  db_name="postgres"),
            SQLAlchemyDatabaseKey(SchemaType.CASE_TRIAGE, db_name="postgres"),
            SQLAlchemyDatabaseKey(SchemaType.STATE, db_name="us_xx_primary"),
            SQLAlchemyDatabaseKey(SchemaType.STATE, db_name="us_ww_primary"),
            SQLAlchemyDatabaseKey(SchemaType.STATE, db_name="us_xx_secondary"),
            SQLAlchemyDatabaseKey(SchemaType.STATE, db_name="us_ww_secondary"),
        ]

        self.assertCountEqual(expected_all_keys, all_keys)

        state_codes_fn.assert_called()
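A sketch of how the state_codes_fn fixture used by this test might be supplied with
unittest.mock.patch. The patched helper name (get_existing_state_codes) is
hypothetical -- it stands in for whatever function SQLAlchemyDatabaseKey.all() calls
to enumerate states -- and the two state codes are chosen only so the us_xx_* and
us_ww_* keys in the expected list above line up.

# Hypothetical test scaffolding; the patched helper name is illustrative only.
from unittest import TestCase, mock

from recidiviz.persistence.database.sqlalchemy_database_key import (  # assumed path
    SQLAlchemyDatabaseKey,
)


@mock.patch(
    "recidiviz.persistence.database.sqlalchemy_database_key.get_existing_state_codes",
    return_value=["US_XX", "US_WW"],
)
class TestSQLAlchemyDatabaseKey(TestCase):
    def test_get_all(self, state_codes_fn: mock.MagicMock) -> None:
        all_keys = SQLAlchemyDatabaseKey.all()
        # ... assertions as in the example above ...
        state_codes_fn.assert_called()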
Example #3
def main(schema_type: SchemaType, repo_root: str, target_revision: str,
         ssl_cert_path: str) -> None:
    """Invokes the main code path for running a downgrade.

    Prompts the user to confirm that the target database instance and git branch are
    correct, then runs the downgrade migration.
    """

    is_prod = metadata.project_id() == GCP_PROJECT_PRODUCTION
    if is_prod:
        logging.info("RUNNING AGAINST PRODUCTION\n")

    prompt_for_confirmation("This script will run a DOWNGRADE migration.",
                            "DOWNGRADE")
    confirm_correct_db_instance(schema_type)
    confirm_correct_git_branch(repo_root)

    db_keys = [
        key for key in SQLAlchemyDatabaseKey.all()
        if key.schema_type == schema_type
    ]

    for key in db_keys:
        # Set the migration-user connection env vars before entering the try block
        # so they are always defined when the `finally` clause restores them.
        overriden_env_vars = SQLAlchemyEngineManager.update_sqlalchemy_env_vars(
            database_key=key,
            ssl_cert_path=ssl_cert_path,
            migration_user=True,
        )
        try:
            # Run downgrade
            config = alembic.config.Config(key.alembic_file)
            alembic.command.downgrade(config, target_revision)
        except Exception as e:
            logging.error("Downgrade failed to run: %s", e)
            sys.exit(1)
        finally:
            local_postgres_helpers.restore_local_env_vars(overriden_env_vars)
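A hypothetical command-line entry point for the downgrade script above; the flag names
and the enum-by-name lookup are illustrative, not the script's actual interface.

# Illustrative argparse wrapper -- flag names are assumptions, not the real CLI.
import argparse
import logging

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(description="Run an Alembic downgrade migration.")
    parser.add_argument("--database", choices=[s.name for s in SchemaType], required=True)
    parser.add_argument("--repo-root", required=True)
    parser.add_argument("--target-revision", required=True)
    parser.add_argument("--ssl-cert-path", required=True)
    args = parser.parse_args()
    main(
        schema_type=SchemaType[args.database],
        repo_root=args.repo_root,
        target_revision=args.target_revision,
        ssl_cert_path=args.ssl_cert_path,
    )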
Example #4
def main(
    schema_type: SchemaType,
    repo_root: str,
    ssl_cert_path: str,
    dry_run: bool,
    skip_db_name_check: bool,
    confirm_hash: Optional[str],
) -> None:
    """
    Invokes the main code path for running migrations.

    This checks for user validations that the database and branches are correct and then runs existing pending
    migrations.
    """
    if dry_run:
        if not local_postgres_helpers.can_start_on_disk_postgresql_database():
            logging.error("pg_ctl is not installed. Cannot perform a dry-run.")
            logging.error("Exiting...")
            sys.exit(1)
        logging.info("Creating a dry-run...\n")
    else:
        if not ssl_cert_path:
            logging.error(
                "SSL certificates are required when running against live databases"
            )
            logging.error("Exiting...")
            sys.exit(1)
        logging.info("Using SSL certificate path: %s", ssl_cert_path)

    is_prod = metadata.project_id() == GCP_PROJECT_PRODUCTION
    if is_prod:
        logging.info("RUNNING AGAINST PRODUCTION\n")

    if not skip_db_name_check:
        confirm_correct_db_instance(schema_type)
    confirm_correct_git_branch(repo_root, confirm_hash=confirm_hash)

    if dry_run:
        db_keys = [SQLAlchemyDatabaseKey.canonical_for_schema(schema_type)]
    else:
        db_keys = [
            key for key in SQLAlchemyDatabaseKey.all()
            if key.schema_type == schema_type
        ]

    # Run migrations
    for key in db_keys:
        if dry_run:
            overriden_env_vars = (
                local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars()
            )
        else:
            overriden_env_vars = SQLAlchemyEngineManager.update_sqlalchemy_env_vars(
                database_key=key,
                ssl_cert_path=ssl_cert_path,
                migration_user=True,
            )
        # Track the on-disk postgres directory so cleanup only runs if it was started.
        db_dir = None
        try:
            logging.info(
                "*** Starting postgres migrations for schema [%s], db_name [%s] ***",
                key.schema_type,
                key.db_name,
            )
            if dry_run:
                db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
            config = alembic.config.Config(key.alembic_file)
            alembic.command.upgrade(config, "head")
        except Exception as e:
            logging.error("Migrations failed to run: %s", e)
            sys.exit(1)
        finally:
            local_postgres_helpers.restore_local_env_vars(overriden_env_vars)
            if dry_run and db_dir is not None:
                try:
                    logging.info("Stopping local postgres database")
                    local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
                        db_dir
                    )
                except Exception as e2:
                    logging.error("Error cleaning up postgres: %s", e2)