def migrate_db(self, eggs_in_order, explain=False):
    """Run the schema migration for *eggs_in_order* inside an alembic
    operations context.

    The alembic :class:`MigrationContext` is configured against the current
    ``Session`` connection with this control's ``metadata`` as the target,
    and the resulting ``Operations`` proxy is stored on ``self.op`` so the
    superclass machinery (and migration code it invokes) can emit DDL.

    :param eggs_in_order: eggs whose schemas should be migrated, in order.
    :param explain: forwarded to the superclass; when true, explain instead
        of executing (behaviour defined by the superclass).
    """
    migration_context = MigrationContext.configure(
        connection=Session.connection(),
        opts={'target_metadata': metadata})
    with Operations.context(migration_context) as op:
        self.op = op
        return super().migrate_db(eggs_in_order, explain=explain)
def _run_with_alembic_context(func, *args, **kwargs):
    """Call ``func(*args, **kwargs)`` inside a fully configured alembic
    environment: environment context, engine connection, migration
    transaction and operations context are all set up around the call.
    """
    from alembic import context
    from alembic.operations import Operations
    from alembic.runtime.environment import EnvironmentContext
    from alembic.script import ScriptDirectory
    from sqlalchemy import engine_from_config, pool

    cfg = _get_config()
    scripts = ScriptDirectory.from_config(cfg)
    with EnvironmentContext(cfg, scripts):
        # NullPool: one short-lived connection, nothing kept open afterwards.
        engine = engine_from_config(
            cfg.get_section(cfg.config_ini_section),
            prefix='sqlalchemy.',
            poolclass=pool.NullPool)
        with engine.connect() as connection:
            context.configure(connection=connection)
            with context.begin_transaction():
                # Operations.context installs the ``op`` proxy that
                # migration code expects to find at module level.
                with Operations.context(context.get_context()):
                    func(*args, **kwargs)
def run_migrations(self, **kw):
    """Run migrations as determined by the current command line
    configuration as well as versioning information present (or not) in
    the current database connection (if one is present).

    The function accepts optional ``**kw`` arguments. If these are
    passed, they are sent directly to the ``upgrade()`` and
    ``downgrade()`` functions within each target revision file. By
    modifying the ``script.py.mako`` file so that the ``upgrade()`` and
    ``downgrade()`` functions accept arguments, parameters can be passed
    here so that contextual information, usually information to identify
    a particular database in use, can be passed from a custom ``env.py``
    script to the migration functions.

    This function requires that a :class:`.MigrationContext` has first
    been made available via :meth:`.configure`.
    """
    # Operations.context installs the proxy object that revision scripts
    # reference as ``op`` while their upgrade()/downgrade() bodies run.
    with Operations.context(self._migration_context):
        self.get_context().run_migrations(**kw)
def run_migrations(self, **kw):
    """Run the migrations selected by the current command-line
    configuration and by the versioning information present (or absent)
    in the current database connection, if one exists.

    Any keyword arguments are forwarded verbatim to the ``upgrade()`` /
    ``downgrade()`` functions inside each target revision file; combined
    with a modified ``script.py.mako`` this lets a custom ``env.py`` pass
    contextual information (typically which database is in use) down to
    the migration functions.

    Requires that a :class:`.MigrationContext` has first been made
    available via :meth:`.configure`.
    """
    with Operations.context(self._migration_context):
        runtime_context = self.get_context()
        runtime_context.run_migrations(**kw)
def prune_schemas_to_only(self, live_versions):
    """Drop database objects belonging to versions that are no longer
    installed, keeping only those in *live_versions*.

    Uses autogenerate diffs restricted to ``remove_*`` operations:
    tables (with their foreign keys) and indexes are dropped
    automatically; anything else is printed for manual handling.
    Finally, SchemaVersion rows for eggs not in *live_versions* are
    deleted from the session.
    """
    opts = {'target_metadata': metadata}
    with Operations.context(
            MigrationContext.configure(connection=Session.connection(),
                                       opts=opts)) as op:
        # expose the operations proxy to other methods on this object
        self.op = op
        # only the destructive half of the outstanding diff is acted on here
        to_remove = [
            i for i in self.get_outstanding_migrations().upgrade_ops.as_diffs()
            if i[0].startswith('remove_')
        ]
        tables_to_drop = []
        unhandled = []
        for migration in to_remove:
            name = migration[0]
            if name == 'remove_table':
                table = migration[1]
                tables_to_drop.append(table)
                # drop FKs first so the later drop_table does not fail on
                # referential constraints
                for foreign_key in table.foreign_key_constraints:
                    op.drop_constraint(foreign_key.name, table.name)
            elif name == 'remove_index':
                op.drop_index(migration[1].name)
            else:
                # e.g. remove_column — not automated, reported below
                unhandled.append(migration)
        # all FKs are gone by now, tables can be dropped in any order
        for table in tables_to_drop:
            op.drop_table(table.name)
        if unhandled:
            print(
                'These migrations have not been automatically done, please effect them by other means:'
            )
            for migration in unhandled:
                print(migration)
        # NOTE(review): ``version.name`` is compared against ``egg_name``
        # below — presumably these share the same namespace; confirm.
        installed_version_names = [version.name for version in live_versions]
        for created_schema_version in Session.query(SchemaVersion).all():
            if created_schema_version.egg_name not in installed_version_names:
                # session delete only; commit is expected to happen elsewhere
                Session.delete(created_schema_version)
def diff_db(self, output_sql=False):
    """Report the outstanding schema differences between metadata and the
    database.

    :param output_sql: when true, render the upgrade operations to python
        source, ``exec`` it in ``as_sql`` mode (so alembic emits SQL
        instead of executing), and return the rendered source lines.
        When false, pretty-print and return the raw diff tuples.
    """
    migrations = self.get_outstanding_migrations()
    if output_sql:
        # render_python_code emits op2.*/sqlalchemy.* calls, interleaved
        # with '#' comment lines which are stripped out below
        commented_source_code = render_python_code(
            migrations.upgrade_ops,
            alembic_module_prefix='op2.',
            sqlalchemy_module_prefix="sqlalchemy.")
        uncommented_source_code = [
            i.strip() for i in commented_source_code.split('\n')
            if not i.strip().startswith('#')
        ]
        source_code = '\n'.join(['import sqlalchemy'] +
                                uncommented_source_code)
        # as_sql=True makes the migration context print SQL rather than
        # execute it against the connection
        opts = {'as_sql': output_sql, 'target_metadata': metadata}
        with Operations.context(
                MigrationContext.configure(connection=Session.connection(),
                                           opts=opts)) as op2:
            # the rendered code references ``op2``, which is resolved via
            # the locals() mapping passed here — do not rename it
            exec(source_code, globals(), locals())
        return uncommented_source_code
    else:
        migrations_required = migrations.upgrade_ops.as_diffs()
        if migrations_required:
            pprint.pprint(migrations_required, indent=2, width=20)
        return migrations_required
def migrate_db(self, eggs_in_order):
    """Migrate the schemas of *eggs_in_order* within an alembic
    operations context.

    Configures a :class:`MigrationContext` on the current ``Session``
    connection, stores the resulting operations proxy on ``self.op`` for
    use by migration code, then delegates to the superclass.
    """
    migration_context = MigrationContext.configure(Session.connection())
    with Operations.context(migration_context) as op:
        self.op = op
        return super(SqlAlchemyControl, self).migrate_db(eggs_in_order)
def upgrade():
    """One-shot MySQL/MariaDB migration: strips every constraint and index,
    renames all columns, rebuilds keys via alembic, restructures the
    waitlists/waitlist_groups relationship, and finally stamps the
    alembic_version table.

    Relies on many module-level names defined elsewhere in this script:
    ``table_names``, ``database_name``, the ``mysql_*``/``mariadb_*`` SQL
    templates, ``db``, ``op``, ``get_type``, ``get_new_column_name``,
    ``supports_check``, ``add_keys_and_constraits`` and
    ``alembic_version``.
    """
    global addTo  # NOTE(review): declared global but never read or assigned here — confirm it is needed
    global doFk   # flag read by add_keys_and_constraits(): False = skip FKs, True = create FKs
    print(
        'Exception no missing primary keys that we try to drop are expected to happen, please ignore'
    )
    with db.engine.connect() as con:
        # lets see if this is a mariadb that supports check
        check_supported = supports_check(con)
        print('Dropping all Foreign Keys...')
        for table_name in table_names:
            # drop foreign keys
            try:
                fk_dbinfos: ResultProxy = con.execute(
                    mysql_show_fk % (database_name, table_name))
                for fk_dbinfo in fk_dbinfos:
                    key_name = fk_dbinfo['CONSTRAINT_NAME']
                    statement = mysql_drop_fk % (table_name, key_name)
                    #print(f'Executing: {statement.strip()}')
                    con.execute(statement)
            except Exception as e:
                # best-effort: a missing FK is expected, just report and go on
                print(e)
        print('Dropping all Foreign Keys Done')
        # drop checks
        # mysql (as of now) ignores check as well as mariadb < 10.2.1
        if check_supported:
            print('Dropping Check Constraints...')
            for table_name in table_names:
                # drop foreign keys
                try:
                    ck_dbinfos: ResultProxy = con.execute(
                        mariadb_show_check % (database_name, table_name))
                    for ck_dbinfo in ck_dbinfos:
                        key_name = ck_dbinfo['CONSTRAINT_NAME']
                        statement = mariadb_drop_check % (table_name, key_name)
                        #print(f'Executing: {statement.strip()}')
                        con.execute(statement)
                except Exception as e:
                    print(e)
            print('Dropping Check Constraints Done')
        # rename all the columns
        # this also removes auto increment :)
        print('Renaming Columns...')
        # remember [table_name, [[column, type + AUTO_INCREMENT], ...]] so
        # AUTO_INCREMENT can be restored after the primary keys come back
        tables_with_auto_increment = []
        for table_name in table_names:
            columns_with_auto_increment = []
            table_columns: ResultProxy = db.engine.execute(
                sql_get_column_names,
                dbname=database_name,
                tablename=table_name)
            columns: List[List[str, str]] = []
            for column_info in table_columns:
                # NOTE(review): ``type`` shadows the builtin here
                type, auto_increment = get_type(column_info)
                if auto_increment:
                    columns_with_auto_increment.append(
                        [column_info['COLUMN_NAME'], type + " AUTO_INCREMENT"])
                columns.append([column_info['COLUMN_NAME'], type])
            table_columns.close()
            for column_info in columns:
                new_column_name = get_new_column_name(column_info[0])
                # rename via CHANGE, restating the type — this drops
                # AUTO_INCREMENT as a side effect (intended, see above)
                mysql_alter = mysql_rename_column % (
                    table_name, column_info[0],
                    new_column_name, column_info[1])
                #print(f'Executing: {mysql_alter.strip()}')
                con.execute(mysql_alter)
            if len(columns_with_auto_increment) > 0:
                tables_with_auto_increment.append(
                    [table_name, columns_with_auto_increment])
        print("Renaming Columns Done")
        # now that auto_increment is gone, we can drop all indexes
        # drop all the primary keys
        print('Dropping Primary Keys...')
        mysql_drop_pk: str = """
        ALTER TABLE `%s` DROP PRIMARY KEY;
        """
        for table_name in table_names:
            # there might be lots of exceptions where when no primary key exists, just ignore
            try:
                statement = mysql_drop_pk % (table_name, )
                #print(f'Executing: {statement.strip()}')
                con.execute(statement)
            except Exception as e:
                print(e)
        print('Dropping Primary Keys Done')
        # drop indices
        print('Dropping Indices..')
        for table_name in table_names:
            index_dbinfos: ResultProxy = con.execute(mysql_show_index %
                                                     table_name)
            # for some reason PRIMARY is in there multiple times, sometimes
            removed_index: set = set()
            for index_dbinfo in index_dbinfos:
                try:
                    index_name = index_dbinfo['Key_name']
                    if index_name in removed_index:
                        continue
                    statement = mysql_drop_index % (table_name, index_name)
                    #print(f'Executing: {statement.strip()}')
                    con.execute(statement)
                    removed_index.add(index_name)
                except Exception as e:
                    print(e)
        print('Dropping Indices Done')
        # create contexts for alembic
        mig_context = MigrationContext.configure(
            connection=con,
            dialect_name='mysql',
            opts={'target_metadata': db.metadata})
        with Operations.context(mig_context):
            # first pass: PKs / indices / checks only, FKs come last
            doFk = False
            print('Creating Primary Keys and Indices and Checks...')
            add_keys_and_constraits()
            print('Creating Primary Keys and Indices and Checks Done')
            # lets add the new column first
            print('Adding new colum to waitlists table...')
            with op.batch_alter_table('waitlists') as batch:
                batch.add_column(Column('waitlist_type', String(20)))
            print('Adding new colum to waitlists table Done')
            # now get waitlist groups
            mysql_get_wl_groups: str = """
            SELECT xupwl_id, logiwl_id, dpswl_id, sniperwl_id, otherwl_id
            FROM `%s`;
            """
            groups_result: ResultProxy = con.execute(mysql_get_wl_groups %
                                                     ('waitlist_groups'))
            mysql_set_wl_type: str = """
            UPDATE `waitlists` SET waitlist_type = '%s' WHERE id = %s;
            """
            print('Setting new waitlists column from old references...')
            for group_info in groups_result:
                for name in [
                        'xupwl_id', 'logiwl_id', 'dpswl_id', 'sniperwl_id',
                        'otherwl_id'
                ]:
                    if group_info[name] is not None:
                        # we got the id here
                        # 'xupwl_id' -> waitlist_type 'xup', etc.
                        statement = mysql_set_wl_type % (name.replace(
                            'wl_id', ''), group_info[name])
                        #print(statement)
                        con.execute(statement)
            # now all the waitlists should know their type
            print('Setting new waitlists column from old references Done')
            # we can remove the columns 'xupwl_id', 'logiwl_id', 'dpswl_id', 'sniperwl_id', 'otherwl_id' now
            print(
                'Removing old reference columns from waitlist_groups tables...'
            )
            with op.batch_alter_table('waitlist_groups') as batch:
                for column_name in [
                        'xupwl_id', 'logiwl_id', 'dpswl_id', 'sniperwl_id',
                        'otherwl_id'
                ]:
                    batch.drop_column(column_name)
            print(
                'Removing old reference columns from waitlist_groups tables Done'
            )
            # UniqueConstraint('group_id', 'waitlist_type')
            print('Adding Unique constraint for new waitlists column...')
            with op.batch_alter_table('waitlists') as batch:
                batch.create_unique_constraint(
                    constraint_name='uq_waitlists_group_id_waitlist_type',
                    columns=['group_id', 'waitlist_type'])
            print('Adding Unique constraint for new waitlists column Done')
            # lets readd autoincrement columns
            print(
                'Readding AUTO_INCREMENT to columns where it got removed earlier...'
            )
            for table_data in tables_with_auto_increment:
                table_name = table_data[0]
                for column_info in table_data[1]:
                    new_column_name = get_new_column_name(column_info[0])
                    print(f'Working on Table {table_name}.{new_column_name}')
                    # self-rename restating the type, now with AUTO_INCREMENT
                    mysql_alter = mysql_rename_column % (
                        table_name, new_column_name, new_column_name,
                        column_info[1])
                    #print(mysql_alter)
                    con.execute(mysql_alter)
            print(
                'Readding AUTO_INCREMENT to columns where it got removed earlier... Done'
            )
            # second pass: everything is in place, FKs can be created now
            doFk = True
            print('Readding Foreign Keys...')
            add_keys_and_constraits()
            print("Readding Foreign Keys Done")
        # now that we are done, set the alembic version
        print('Setting Alembic version')
        # NOTE(review): two statements in one execute() — requires
        # multi-statement support on this connection; confirm.
        mysql_set_alembic_version: str = """
        DELETE FROM alembic_version;
        INSERT INTO alembic_version VALUES ('%s');
        """
        con.execute(mysql_set_alembic_version % (alembic_version, ))
        print(f"Changed alembic_version to {alembic_version}")
def run_migrations(context):
    """Run all migrations of *context* inside an alembic operations
    context.

    ``Operations.context`` installs the ``op`` proxy that migration
    scripts expect to find while ``context.run_migrations()`` executes
    them.
    """
    with Operations.context(context):
        context.run_migrations()