def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # connectable = engine_from_config(
    #     config.get_section(config.config_ini_section),
    #     prefix='sqlalchemy.',
    #     poolclass=pool.NullPool)
    global config

    script = ScriptDirectory.from_config(config)
    env = EnvironmentContext(config, script=script)

    def do_upgrade(revision, context):
        # Upgrade from the current DB revision up to the script heads.
        return script._upgrade_revs(script.get_heads(), revision)

    url = get_url()
    connectable = create_engine(url)

    with connectable.connect() as connection:
        # BUG FIX: the original assigned env.configure()'s return value
        # (None) back to the global ``config``, clobbering it.  configure()
        # is called for its side effects only.
        # BUG FIX: ``do_upgrade`` was defined but never wired in; without a
        # migration fn, run_migrations() has nothing to execute.
        env.configure(
            connection=connection,
            url=url,
            target_metadata=target_metadata,
            fn=do_upgrade,
        )
        # BUG FIX: use the configured ``env`` rather than the undefined
        # ``context`` name the original referenced.
        with env.begin_transaction():
            env.run_migrations()
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well.  By skipping the Engine
    creation we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    NOTE(review): despite the docstring, this implementation still opens a
    real connection and passes it to configure() — preserved as-is, but
    confirm whether a truly connectionless run was intended.
    """
    print("== offline ==")
    global config
    url = get_url()
    script = ScriptDirectory.from_config(config)

    def do_upgrade(revision, context):
        # Upgrade from the current DB revision up to the script heads.
        return script._upgrade_revs(script.get_heads(), revision)

    env = EnvironmentContext(config, script=script)
    connectable = create_engine(url)
    connection = connectable.connect()
    try:
        # BUG FIX: the original assigned env.configure()'s return value
        # (None) back to the global ``config``, clobbering it.  configure()
        # is called for its side effects only.
        env.configure(
            url=url,
            target_metadata=target_metadata,
            fn=do_upgrade,
            dialect_name="postgresql",
            transactional_ddl=False,
            as_sql=False,
            connection=connection,
        )
        context = env.get_context()
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # BUG FIX: the connection was previously leaked.
        connection.close()
def _run_env_offline(self, e_ctx: EnvironmentContext):
    """Run the equivalent of an env.py script in offline mode.

    No database connection is made: migrations are rendered from the
    configured DSN with literal parameter binding.
    """
    e_ctx.configure(
        url=self.dsn,
        target_metadata=self.metadata,
        literal_binds=True,
    )
    with e_ctx.begin_transaction():
        e_ctx.run_migrations()
def check_schema(config):
    """Verify the temBoard database schema is at the script head.

    Derived from alembic.command.current.  Raises UserError when the
    database is missing, outdated, or unreachable.
    """
    alembic_cfg = build_alembic_config(config)
    script = ScriptDirectory.from_config(alembic_cfg)

    def fn(current, context):
        # Compare the DB revision against the script head; never migrate.
        target = script.get_current_head()
        db_rev = context.get_current_revision()
        if db_rev:
            logger.debug("temBoard database revision is %s.", db_rev)
        else:
            logger.debug("temBoard database is uninitialized.")
        if db_rev != target:
            raise UserError(
                "Database is not up to date. Please use temboard-migratedb.")
        logger.info("temBoard database is up-to-date.")
        return []  # Tells MigrationContext to skip migrations.

    with EnvironmentContext(alembic_cfg, script, fn=fn):
        try:
            script.run_env()
        except sqlalchemy.exc.OperationalError as e:
            raise UserError("Failed to check schema: %s." % e)
def check_migrations(timeout):
    """
    Function to wait for all airflow migrations to complete.

    Polls once per second until the set of revision heads in the database
    matches the heads in the migration scripts.

    :param timeout: Timeout for the migration in seconds
    :return: None
    :raises TimeoutError: if heads still differ after ``timeout`` seconds
    """
    from alembic.runtime.environment import EnvironmentContext
    from alembic.script import ScriptDirectory

    config = _get_alembic_config()
    script_ = ScriptDirectory.from_config(config)
    with EnvironmentContext(
        config,
        script_,
    ) as env, settings.engine.connect() as connection:
        env.configure(connection)
        context = env.get_context()
        ticker = 0
        while True:
            source_heads = set(script_.get_heads())
            db_heads = set(context.get_current_heads())
            if source_heads == db_heads:
                break
            if ticker >= timeout:
                # BUG FIX: the original adjacent f-strings concatenated to
                # "MigrationHead(s)" — a space was missing at the join.
                raise TimeoutError(
                    f"There are still unapplied migrations after {ticker} seconds. "
                    f"Migration Head(s) in DB: {db_heads} | Migration Head(s) in Source Code: {source_heads}"
                )
            ticker += 1
            time.sleep(1)
            log.info('Waiting for migrations... %s second(s)', ticker)
def downgrade_database(
        alembic_config_filename: str,
        destination_revision: str,
        alembic_base_dir: str = None,
        starting_revision: str = None,
        version_table: str = DEFAULT_ALEMBIC_VERSION_TABLE,
        as_sql: bool = False) -> None:
    """
    Use Alembic to downgrade our database. USE WITH EXTREME CAUTION.
    "revision" is the destination revision.

    See http://alembic.readthedocs.org/en/latest/api/runtime.html
    but also, in particular, ``site-packages/alembic/command.py``

    Arguments:
        alembic_config_filename: config filename
        destination_revision: revision to aim for
        alembic_base_dir: directory to start in, so relative paths in the
            config file work
        starting_revision: revision to start at (typically ``None`` to ask
            the database)
        version_table: table name for Alembic versions
        as_sql: run in "offline" mode: print the migration SQL, rather
            than modifying the database. See
            http://alembic.zzzcomputing.com/en/latest/offline.html
    """
    base_dir = (os.path.dirname(alembic_config_filename)
                if alembic_base_dir is None else alembic_base_dir)
    # Change directory so relative paths in the config file resolve.
    os.chdir(base_dir)

    config = Config(alembic_config_filename)
    script = ScriptDirectory.from_config(config)

    # noinspection PyUnusedLocal,PyProtectedMember
    def downgrade(rev, context):
        # Compute the downgrade steps from the current rev to the target.
        return script._downgrade_revs(destination_revision, rev)

    log.info("Downgrading database to revision {!r} using Alembic",
             destination_revision)
    with EnvironmentContext(config,
                            script,
                            fn=downgrade,
                            as_sql=as_sql,
                            starting_rev=starting_revision,
                            destination_rev=destination_revision,
                            tag=None,
                            version_table=version_table):
        script.run_env()
    log.info("Database downgrade completed")
def environment_context(self) -> EnvironmentContext:
    """Return the cached EnvironmentContext, creating it on first use."""
    ctx = self.__environment_context
    if not isinstance(ctx, EnvironmentContext):
        # Lazily build and memoize the context.
        ctx = EnvironmentContext(self.config, self.script_directory)
        self.__environment_context = ctx
    return ctx
def upgrade_database(alembic_config_filename: str,
                     alembic_base_dir: str = None) -> None:
    """
    Use Alembic to upgrade our database to the 'head' revision.

    See http://alembic.readthedocs.org/en/latest/api/runtime.html
    but also, in particular, site-packages/alembic/command.py
    """
    base_dir = (os.path.dirname(alembic_config_filename)
                if alembic_base_dir is None else alembic_base_dir)
    # Change directory so relative paths in the config file resolve.
    os.chdir(base_dir)

    config = Config(alembic_config_filename)
    script = ScriptDirectory.from_config(config)
    revision = 'head'  # where we want to get to

    # noinspection PyUnusedLocal,PyProtectedMember
    def upgrade(rev, context):
        # Compute the upgrade steps from the current rev to 'head'.
        return script._upgrade_revs(revision, rev)

    log.info(
        "Upgrading database to revision '{}' using Alembic".format(revision))
    with EnvironmentContext(config,
                            script,
                            fn=upgrade,
                            as_sql=False,
                            starting_rev=None,
                            destination_rev=revision,
                            tag=None):
        script.run_env()
    log.info("Database upgrade completed")
def _new_migration_context(self, *, message=None):
    """Run autogeneration and return the populated RevisionContext.

    The environment is executed online, but the migration fn only
    collects autogenerated operations — no migrations are applied.
    """
    command_args = {
        'message': message,
        'autogenerate': True,
        'sql': False,
        'head': 'head',
        'splice': False,
        'branch_label': None,
        'version_path': None,
        'rev_id': None,
        'depends_on': None,
    }
    revision_context = RevisionContext(
        self.config,
        self.script_directory,
        command_args=command_args,
    )

    def retrieve_migrations(rev, context):
        # Capture autogenerated ops; return [] so nothing is executed.
        revision_context.run_autogenerate(rev, context)
        return []

    env = EnvironmentContext(
        self.config,
        self.script_directory,
        fn=retrieve_migrations,
        as_sql=False,
        template_args=revision_context.template_args,
        revision_context=revision_context,
    )
    self._run_env_online(env)
    return revision_context
def perform_migratons(config_name):
    """Upgrade the database for *config_name* to the script head.

    If this fails, we should revert to the previous version of the SLUG
    running on Heroku.
    link: http://stackoverflow.com/questions/24622170/using-alembic-api-from-inside-application-code
    """
    db_url = configuration[config_name].SQLALCHEMY_DATABASE_URI

    alembic_config = AlembicConfig('.\\AlertWeb\\alembic.ini')
    alembic_config.set_main_option('sqlalchemy.url', db_url)
    alembic_config.set_main_option('script_location', '.\\AlertWeb\\migrations')

    script_dir = ScriptDirectory.from_config(alembic_config)
    head_revision = script_dir.get_current_head()
    current_revision = get_current_revision(db_url)

    def upgrade(rev, context):
        print(rev)
        return script_dir._upgrade_revs(head_revision, rev)

    # EnvironmentContext is a facade for the migration context.
    with EnvironmentContext(alembic_config,
                            script_dir,
                            as_sql=False,
                            fn=upgrade,
                            starting_rev=current_revision,
                            destination_rev=head_revision,
                            tag=None):
        script_dir.run_env()
def upgrade(self, script_location=None, db_path=None, dest_rev=None):
    """Upgrade the SQLite database at *db_path* to *dest_rev* ('head' by default).

    Missing arguments fall back to instance/module defaults; the database
    directory is created if absent.
    """
    script_location = script_location or self.script_location
    db_path = db_path or const.db_path
    dest_rev = dest_rev or 'head'

    db_dir = dirname(db_path)
    if not exists(db_dir):
        makedirs(db_dir)
    sa_url = 'sqlite:///' + db_path

    config = AlConfig(file_=joinpath(dirname(script_location), 'alembic.ini'))
    config.set_main_option('script_location', script_location)
    config.set_main_option('sqlalchemy.url', sa_url)
    script = ScriptDirectory.from_config(config)

    # Alembic emits a UserWarning we deliberately silence here.
    warnings.filterwarnings('ignore', category=UserWarning,
                            module='.*alembic.*')

    def do_upgrade(rev, context):
        return script._upgrade_revs(dest_rev, rev)

    with EnvironmentContext(config,
                            script,
                            fn=do_upgrade,
                            as_sql=False,
                            starting_rev=None,
                            destination_rev=dest_rev,
                            tag=None):
        script.run_env()
def check_migrations(timeout):
    """
    Function to wait for all airflow migrations to complete.

    Polls once per second, up to ``timeout`` times, until the revision
    heads in the database match those in the migration scripts.

    :param timeout: Timeout for the migration in seconds
    :return: None
    :raises TimeoutError: if heads still differ after ``timeout`` seconds
    """
    from alembic.runtime.environment import EnvironmentContext

    script_, config = _get_script_dir_and_config()
    with EnvironmentContext(
        config,
        script_,
    ) as env, settings.engine.connect() as connection:
        env.configure(connection)
        context = env.get_context()
        source_heads = None
        db_heads = None
        for ticker in range(timeout):
            source_heads = set(script_.get_heads())
            db_heads = set(context.get_current_heads())
            if source_heads == db_heads:
                return
            time.sleep(1)
            log.info('Waiting for migrations... %s second(s)', ticker)
        # BUG FIX: the original adjacent f-strings concatenated to
        # "MigrationHead(s)" — a space was missing at the join.
        raise TimeoutError(
            f"There are still unapplied migrations after {timeout} seconds. "
            f"Migration Head(s) in DB: {db_heads} | Migration Head(s) in Source Code: {source_heads}"
        )
def _init_users(): """Initialize Afterglow user datastore if AUTH_ENABLED = True""" # noinspection PyUnresolvedReferences from .. import oauth2 # register oauth token-related models # All imports put here to avoid unnecessary loading of packages on startup # if user auth is disabled from alembic import (config as alembic_config, context as alembic_context) from alembic.script import ScriptDirectory from alembic.runtime.environment import EnvironmentContext global user_datastore, security user_datastore = SQLAlchemyUserDatastore(db, DbUser, DbRole) security = Security(app, user_datastore, register_blueprint=False) # Make sure that the database directory exists try: os.makedirs(os.path.abspath(app.config['DATA_ROOT'])) except OSError as e: if e.errno != errno.EEXIST: raise # Create/upgrade tables via Alembic cfg = alembic_config.Config() cfg.set_main_option( 'script_location', os.path.abspath( os.path.join(__file__, '../..', 'db_migration', 'users'))) script = ScriptDirectory.from_config(cfg) # noinspection PyProtectedMember with EnvironmentContext( cfg, script, fn=lambda rev, _: script._upgrade_revs('head', rev), as_sql=False, starting_rev=None, destination_rev='head', tag=None, ), db.engine.connect() as connection: alembic_context.configure(connection=connection) with alembic_context.begin_transaction(): alembic_context.run_migrations() # Initialize user roles if missing try: roles_created = False for name, descr in [('admin', 'Afterglow Administrator'), ('user', 'Afterglow User')]: if not user_datastore.find_role(name): user_datastore.create_role(name=name, description=descr) roles_created = True if roles_created: user_datastore.commit() except Exception: db.session.rollback() raise
def __db_alembic_setup(self):
    """Create the Alembic environment.

    Builds the Alembic config, script directory, environment context and
    migration context, wiring ``self.__db_upgrade`` in as the migration
    function.
    """
    migrations_path = Path(Path.home() / ccfg.ALL_CONFIGS_FOLDER)
    if not migrations_path.exists():
        migrations_path.mkdir()

    alembic_config = Config()
    alembic_config.set_main_option("script_location", "migrations")
    db_file = self.config.restore_value(ccfg.DATABASE_FILE_KEY)
    alembic_config.set_main_option("url", 'sqlite:///' + db_file)

    self.alembic_script = ScriptDirectory.from_config(alembic_config)
    self.alembic_env = EnvironmentContext(alembic_config,
                                          self.alembic_script)
    self.alembic_env.configure(
        connection=self.engine.connect(),
        target_metadata=canc.Base.metadata,
        fn=self.__db_upgrade,
    )
    self.alembic_context = self.alembic_env.get_context()
def schema_update(self):
    """Examines the Alembic schema version and performs database
    migration if needed

    Returns:
        result (dict): summary of changes made
            status = ok if no errors
    """
    results = []
    errors = 0
    # Read the stored Alembic version; any of these exceptions means
    # the schema (or the version table) does not exist yet.
    try:
        version = self.session.query(AlembicVersion).one().version_num
    except (sqlalchemy.orm.exc.NoResultFound,
            sqlalchemy.exc.OperationalError,
            sqlalchemy.exc.ProgrammingError) as ex:
        Syslog.logger.warn('DB schema does not yet exist: %s' % str(ex))
        version = None

    # Build an Alembic config pointing at the bundled migration scripts.
    cfg = alembic.config.Config()
    cfg.set_main_option(
        'script_location',
        os.path.join(os.path.abspath(os.path.dirname(__file__)),
                     'alembic'))
    cfg.set_main_option('url', str(self.engine.url))
    script = alembic.script.ScriptDirectory.from_config(cfg)
    env = EnvironmentContext(cfg, script)

    if (version == script.get_heads()[0]):
        # Already at the head revision: nothing to migrate.
        Syslog.logger.info('action=schema-update version=%s is current, '
                           'skipping' % version)
        results.append(dict(name=version, action='skipped'))
    else:
        def _do_upgrade(revision, context):
            # Upgrade from the current revision to the script heads.
            return script._upgrade_revs(script.get_heads(), revision)

        conn = self.engine.connect()
        env.configure(connection=conn, target_metadata=metadata,
                      fn=_do_upgrade)
        with env.begin_transaction():
            env.run_migrations()
        results.append(dict(name=script.get_heads()[0],
                            action='migrated'))
        Syslog.logger.info('action=schema-update finished migration, '
                           'version=%s' % script.get_heads()[0])

    if (version is None):
        # Seed a new db with host and volume records
        record = Host(hostname=self.backup_host)
        self.session.add(record)
        self.session.flush()
        self.session.add(
            Volume(volume=Constants.DEFAULT_VOLUME,
                   path=Constants.SNAPSHOT_ROOT,
                   host_id=record.id))
        self.session.commit()

    # NOTE(review): ``errors`` is never incremented, so status is
    # always 'ok' — confirm whether error accounting was intended.
    return {
        'status': 'ok' if errors == 0 else 'error',
        'schema-update': results
    }
def migrated_db(db_uri):
    """Upgrade the schema to head, yield, then downgrade back to base."""
    config = Config()
    config.set_main_option("script_location", "apd.aggregation:alembic")
    config.set_main_option("sqlalchemy.url", db_uri)
    script = ScriptDirectory.from_config(config)

    with EnvironmentContext(
            config, script,
            fn=lambda rev, ctx: script._upgrade_revs(
                script.get_current_head(), rev)):
        script.run_env()
    yield
    with EnvironmentContext(
            config, script,
            fn=lambda rev, ctx: script._downgrade_revs(None, rev)):
        script.run_env()
def environment_context(self):
    """Get the Alembic
    :class:`~alembic.runtime.environment.EnvironmentContext` for the
    current app, memoized in the app cache."""
    cache = self._get_cache()
    try:
        return cache['env']
    except KeyError:
        env = EnvironmentContext(self.config, self.script_directory)
        cache['env'] = env
        return env
def check_alembic_revision(alembic_config, conn):
    """Return ``(db_revision, head_revision)`` for the given config/connection."""
    db_revision = MigrationContext.configure(conn).get_current_revision()
    script = ScriptDirectory.from_config(alembic_config)
    with EnvironmentContext(alembic_config, script):
        # alembic dynamically populates the contents of alembic.context
        # based on this context manager, so pylint can't figure out what
        # members are available
        head_revision = alembic.context.get_head_revision()  # pylint: disable=no-member
    return (db_revision, head_revision)
def __run_migration_callback(self, callback):
    """Execute *callback* as the Alembic migration fn inside one transaction."""
    from alembic.runtime.environment import EnvironmentContext

    with self.__engine.begin() as connection:
        script = ScriptDirectory.from_config(self.alembic_cfg)
        with EnvironmentContext(self.alembic_cfg, script,
                                fn=callback) as env:
            env.configure(
                version_table=self.config.version_table,
                connection=connection,
            )
            with env.begin_transaction():
                env.run_migrations()
def check_and_run_migrations():
    """Check and run migrations if necessary. Only use in a tty"""
    from alembic.runtime.environment import EnvironmentContext

    script_, config = _get_script_dir_and_config()
    with EnvironmentContext(
        config,
        script_,
    ) as env, settings.engine.connect() as connection:
        env.configure(connection)
        context = env.get_context()
        # Compare revision heads in the scripts vs. the database.
        source_heads = set(script_.get_heads())
        db_heads = set(context.get_current_heads())
        db_command = None
        command_name = None
        verb = None
        if len(db_heads) < 1:
            # No heads recorded in the DB: it has never been initialized.
            db_command = initdb
            command_name = "init"
            verb = "initialization"
        elif source_heads != db_heads:
            # DB exists but lags behind the migration scripts.
            db_command = upgradedb
            command_name = "upgrade"
            verb = "upgrade"

        if sys.stdout.isatty() and verb:
            print()
            question = f"Please confirm database {verb} (or wait 4 seconds to skip it). Are you sure? [y/N]"
            try:
                # Prompt with a 4-second timeout; default is "no".
                answer = helpers.prompt_with_timeout(question, timeout=4, default=False)
                if answer:
                    try:
                        db_command()
                        print(f"DB {verb} done")
                    except Exception as error:
                        # The command failed: tell the user how to run it
                        # manually and exit non-zero.
                        print(error)
                        print(
                            "You still have unapplied migrations. "
                            f"You may need to {verb} the database by running `airflow db {command_name}`. ",
                            f"Make sure the command is run using Airflow version {version}.",
                            file=sys.stderr,
                        )
                        sys.exit(1)
            except AirflowException:
                # Prompt timed out or was interrupted: skip silently.
                pass
        elif source_heads != db_heads:
            # Not a tty, so we cannot prompt: fail with instructions.
            print(
                f"ERROR: You need to {verb} the database. Please run `airflow db {command_name}`. "
                f"Make sure the command is run using Airflow version {version}.",
                file=sys.stderr,
            )
            sys.exit(1)
def _migrate(self, revision, apply_migration):
    """Run an online migration to *revision*, using *apply_migration* as fn."""
    env = EnvironmentContext(
        self.config,
        self.script_directory,
        fn=apply_migration,
        as_sql=False,
        starting_rev=None,
        destination_rev=revision,
        tag=None,
    )
    self._run_env_online(env)
def migrated_db(db_uri, db_session):
    """Upgrade the schema to head, yield, then roll back the session and
    downgrade back to base — even if the body raised."""
    config = Config()
    config.set_main_option("script_location", "apd.aggregation:alembic")
    config.set_main_option("sqlalchemy.url", db_uri)
    script = ScriptDirectory.from_config(config)

    with EnvironmentContext(
            config, script,
            fn=lambda rev, ctx: script._upgrade_revs(
                script.get_current_head(), rev)):
        script.run_env()
    try:
        yield
    finally:
        # Clear any pending work from the db_session connection
        db_session.rollback()
        with EnvironmentContext(
                config, script,
                fn=lambda rev, ctx: script._downgrade_revs(None, rev)):
            script.run_env()
def migration_context(self):
    """Context manager yielding an Alembic migration context.

    The yielded context is configured with the current database
    connection, so it can be used to inspect the database — e.g. the
    current revision.
    """
    from alembic.runtime.environment import EnvironmentContext
    from alembic.script import ScriptDirectory

    with self.alembic_config() as config:
        script = ScriptDirectory.from_config(config)
        with EnvironmentContext(config, script) as env:
            env.configure(env.config.attributes['connection'])
            yield env.get_context()
def _show_migrate_sql(self, starting_revision, revision, apply_migration):
    """Render migration SQL offline from *starting_revision* to *revision*.

    ``None`` or ``'current'`` as the start resolves to the database's
    current revision (or 'base' for an empty database).
    """
    if starting_revision in (None, 'current'):
        current = self.get_current()
        starting_revision = 'base' if current is None else current.revision
    env = EnvironmentContext(
        self.config,
        self.script_directory,
        fn=apply_migration,
        as_sql=True,
        starting_rev=starting_revision,
        destination_rev=revision,
        tag=None,
    )
    self._run_env_offline(env)
def _run_env_online(self, e_ctx: EnvironmentContext):
    """
    Equivalent of running env.py script in online mode
    (with actual database connection)
    """
    engine = sqlalchemy.engine_from_config(
        e_ctx.config.get_section(e_ctx.config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=sqlalchemy.pool.NullPool,
    )
    # BUG FIX: get_main_option returns the raw ini *string*, so a value
    # of "false" was previously truthy.  Interpret it as a boolean.
    raw = e_ctx.config.get_main_option('transaction_per_migration', False)
    transaction_per_migration = (
        str(raw).strip().lower() in ('true', '1', 'yes', 'on')
        if raw else False)
    try:
        with engine.connect() as connection:
            e_ctx.configure(
                connection=connection,
                target_metadata=self.metadata,
                transaction_per_migration=transaction_per_migration,
            )
            with e_ctx.begin_transaction():
                e_ctx.run_migrations()
    finally:
        # Dispose even on failure so the NullPool engine releases
        # everything (the original leaked it on error).
        engine.dispose()
def get_revision(config: Config,
                 engine: Engine,
                 script: ScriptDirectory,
                 revision_type="current") -> str:
    """ Helper to get revision id """
    with engine.connect() as conn:
        with EnvironmentContext(config, script) as env_context:
            env_context.configure(conn, version_table="migrate_version")
            if revision_type == "head":
                return env_context.get_head_revision()
            return env_context.get_context().get_current_revision()
def get_current(self):
    """Return the Script object for the database's current revision."""
    revision = None

    def capture(rev, context):
        # Record the revision Alembic reports; run no migrations.
        nonlocal revision
        revision = rev
        return []

    env = EnvironmentContext(
        self.config,
        self.script_directory,
        fn=capture,
    )
    self._run_env_online(env)
    return self.script_directory.revision_map.get_revision(revision)
def migrate():
    """Upgrade the database to 'head' over the current session connection."""
    connection = session.connection()
    config = _get_alembic_config()
    script_directory = ScriptDirectory.from_config(config)

    def _upgrade(rev, context):
        return script_directory._upgrade_revs('head', rev)

    env = EnvironmentContext(config, script_directory)
    env.configure(
        connection=connection,
        target_metadata=Base.metadata,
        fn=_upgrade,
    )
    env.run_migrations()
    session.commit()
def downgrade(self, revision, sql=False, tag=None):
    """Revert to a previous version.

    :param revision: string revision target or range for --sql mode
    :param sql: if True, use ``--sql`` mode
    :param tag: an arbitrary "tag" that can be intercepted by custom
     ``env.py`` scripts via the
     :meth:`.EnvironmentContext.get_tag_argument` method.
    """
    config = self.config
    script = self.script_directory
    config.attributes["engine"] = self.engine
    output_buffer = io.StringIO()
    config.attributes["output_buffer"] = output_buffer

    # A "from:to" range is only valid (and required) in --sql mode.
    starting_rev = None
    has_range = ":" in revision
    if has_range and not sql:
        raise util.CommandError("Range revision not allowed")
    if sql and not has_range:
        raise util.CommandError(
            "downgrade with --sql requires <fromrev>:<torev>"
        )
    if has_range:
        starting_rev, revision = revision.split(":", 2)

    def do_downgrade(rev, context):
        return script._downgrade_revs(revision, rev)

    with EnvironmentContext(
        config,
        script,
        fn=do_downgrade,
        as_sql=sql,
        starting_rev=starting_rev,
        destination_rev=revision,
        tag=tag,
    ):
        script.run_env()

    output_buffer.seek(0)
    return output_buffer.read()
def _configured_alembic_environment():
    """Yield a configured Alembic EnvironmentContext, temporarily raising
    the 'alembic' logger to WARNING while configure() runs."""
    from alembic.runtime.environment import EnvironmentContext

    config = _get_alembic_config()
    script = _get_script_object(config)

    with EnvironmentContext(
        config,
        script,
    ) as env, settings.engine.connect() as connection:
        alembic_logger = logging.getLogger('alembic')
        saved_level = alembic_logger.level
        # Silence alembic's chatty INFO output during configure().
        alembic_logger.setLevel(logging.WARNING)
        env.configure(connection)
        alembic_logger.setLevel(saved_level)
        yield env