def bootstrap_db(config_uri=None, with_migration=True):
    """Bring a blank database to a functional state.

    When *with_migration* is true, refuse to touch an already-stamped
    database and stamp the freshly created schema at the migration head.
    Returns the scoped session maker.
    """
    db = get_session_maker()
    if with_migration:
        # An existing alembic revision means the DB was bootstrapped before.
        context = MigrationContext.configure(db().connection())
        db_version = context.get_current_revision()
        if db_version:
            sys.stderr.write('Database already initialized. Bailing out.\n')
            sys.exit(0)
        config = Config(config_uri)
        script_dir = ScriptDirectory.from_config(config)
        heads = script_dir.get_heads()
        # Multiple heads mean the migration history has unresolved branches.
        if len(heads) > 1:
            sys.stderr.write('Error: migration scripts have more than one '
                             'head.\nPlease resolve the situation before '
                             'attempting to bootstrap the database.\n')
            sys.exit(2)
    # Imported for its side effects: presumably registers the model tables
    # on the shared metadata — TODO confirm.
    import assembl.models
    get_metadata().create_all(db().connection())
    # Clean up the scoped session to allow a later app instantiation.
    if with_migration and heads:
        # Stamp the fresh schema at head so later "upgrade" runs start from
        # the right revision.
        context = MigrationContext.configure(db().connection())
        context._ensure_version_table()
        # The latter step seems optional?
        # I am unclear as to why we'd migrate after creating tables
        # on a clean database.
        context.stamp(script_dir, heads[0])
    return db
def test_include_symbol(self):
    """include_symbol limits the diff to the named tables only.

    Fix: the original initialized ``diffs = []`` and then immediately
    rebound the name — the dead assignment is removed.
    """
    def include_symbol(name, schema=None):
        return name in ('address', 'order')

    context = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m2,
            'include_symbol': include_symbol,
        },
    )
    diffs = autogenerate.compare_metadata(
        context, context.opts['target_metadata'])
    # Only 'order' should surface in modify_* entries; 'user' is filtered.
    alter_cols = set([
        d[2] for d in self._flatten_diffs(diffs)
        if d[0].startswith('modify')
    ])
    eq_(alter_cols, set(['order']))
def test_migration(self):
    """Run the full migration chain and verify schema parity with the models."""
    self.setup_base_db()
    # A freshly created database carries no alembic stamp yet.
    self.assertTrue(self.current_db_revision() is None)
    # Migrating must leave the DB stamped at the newest script revision.
    self.run_migration()
    migrated_revision = self.current_db_revision()
    self.assertTrue(migrated_revision is not None)
    self.assertEqual(migrated_revision,
                     self.alembic_script().get_current_head())
    # Diff the migrated schema against one built straight from the models;
    # an empty diff proves migrations and model definitions agree.
    ctx_opts = {
        'compare_type': db_compare_type,
        'compare_server_default': True,
    }
    with self.db.engine.connect() as conn:
        migration_ctx = MigrationContext.configure(connection=conn,
                                                   opts=ctx_opts)
        self.assertEqual(
            compare_metadata(migration_ctx, self.head_metadata), [])
def test_compare_metadata_include_object(self):
    """include_object should filter tables and columns out of the diff."""
    model_metadata = self.m2

    def include_object(obj, name, type_, reflected, compare_to):
        # Keep only two tables and drop the 'amount' column everywhere.
        if type_ == "table":
            return name in ("extra", "order")
        if type_ == "column":
            return name != "amount"
        return True

    context = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'include_object': include_object,
        },
    )
    diffs = autogenerate.compare_metadata(context, model_metadata)
    eq_(diffs[0][0], 'remove_table')
    eq_(diffs[0][1].name, "extra")
    eq_(diffs[1][0], "add_column")
    eq_(diffs[1][1], None)
    eq_(diffs[1][2], "order")
    eq_(diffs[1][3], model_metadata.tables['order'].c.user_id)
def setUp(self):
    """Open a connection and build the autogen context for each test."""
    self.conn = connection = self.bind.connect()
    opts = dict(
        compare_type=True,
        compare_server_default=True,
        target_metadata=self.m2,
        upgrade_token="upgrades",
        downgrade_token="downgrades",
        alembic_module_prefix='op.',
        sqlalchemy_module_prefix='sa.',
    )
    # Subclasses may override or extend the default options.
    if self.configure_opts:
        opts.update(self.configure_opts)
    self.context = MigrationContext.configure(connection=connection,
                                              opts=opts)
    bound = self.context.bind
    self.autogen_context = {
        'imports': set(),
        'connection': bound,
        'dialect': bound.dialect,
        'context': self.context,
    }
def _setup_db_connection(self):
    """Extend the base connection setup with alembic migration helpers."""
    super(SchemaMigration, self)._setup_db_connection()
    # Dialect name is cached so migrations can branch per backend.
    self.dialect_name = self.connection.dialect.name
    # MigrationContext exposes alembic's schema inspection/operations API.
    self.migration_context = MigrationContext.configure(self.connection)
    # NOTE(review): MetaData(bind, reflect=True) eagerly reflects the whole
    # schema and is deprecated in newer SQLAlchemy — confirm the pinned
    # version before upgrading.
    self.metadata = MetaData(self.connection, reflect=True)
    self.op = self._create_operations()
def runserver():
    """This command is meant for development. If no configuration is
    found, we start the app listening from all hosts, from port 9999.

    Fix: the connection used only for the alembic revision check was
    never closed; it is now released before the app starts.
    """
    # Check whether the database carries an alembic revision stamp.
    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    conn = engine.connect()
    try:
        context = MigrationContext.configure(conn)
        current_ver = context.get_current_revision()
    finally:
        conn.close()
    if not current_ver:
        print("Automatic DB Upgrade")
        print("Press Ctrl+C when finished")
        upgrade()
        print("Upgrade completed. Press Ctrl+C and runserver again.")
    try:
        from application import config
        PORT = config.Config.PORT
        DEBUG = config.Config.DEBUG
        HOST = config.Config.HOST
    except ImportError:
        # No config module: fall back to development defaults.
        DEBUG = False
        PORT = 9999
        HOST = '0.0.0.0'
    app.run(
        port=PORT,
        debug=DEBUG,
        host=HOST,
        threaded=True)
def test_include_symbol(self):
    """Symbols excluded via include_symbol must not appear in the scripts."""
    ctx = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m2,
            'include_symbol':
                lambda name, schema=None: name in ('address', 'order'),
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        },
    )
    template_args = {}
    autogenerate._produce_migration_diffs(ctx, template_args, set())
    # Normalize u'' prefixes so the assertions hold on py2 and py3 output.
    for key in ('upgrades', 'downgrades'):
        template_args[key] = template_args[key].replace("u'", "'")
    assert "alter_column('user'" not in template_args['upgrades']
    assert "alter_column('user'" not in template_args['downgrades']
    assert "alter_column('order'" in template_args['upgrades']
    assert "alter_column('order'" in template_args['downgrades']
def setup_database(alembic_config_path=None, destructive=False):
    """
    Setup database tables (if they do not yet exist).

    :arg alembic_config_path: optional path to an Alembic ini file; when
        given, a fresh schema is stamped at the ``head`` revision.
    :kwarg destructive: when True, drop all tables first and recreate.
    :raises UserError: if *alembic_config_path* does not point to a file.
    """
    if alembic_config_path and not os.path.isfile(alembic_config_path):
        raise UserError('Cannot find Alembic configuration: %s'
                        % alembic_config_path)
    bind = db.session.get_bind()
    if destructive:
        db.Base.metadata.drop_all(bind)
    if destructive or not bind.has_table(Assembly.__tablename__):
        # We assume our migrations will take care of everything if at least
        # the Assembly table exists.
        db.Base.metadata.create_all(bind)
    if alembic_config_path:
        context = MigrationContext.configure(db.session.connection())
        # Only stamp databases that were just (re)created; an existing
        # revision means migrations already manage this schema.
        if destructive or context.get_current_revision() is None:
            # We need to close the current session before running Alembic.
            db.session.remove()
            alembic_config = alembic.config.Config(alembic_config_path)
            alembic.command.stamp(alembic_config, 'head')
def is_alembic_head():
    """Return True when the database revision matches the newest script."""
    script = ScriptDirectory.from_config(get_alembic_config())
    context = MigrationContext.configure(db_session.connection())
    return context.get_current_revision() == script.get_current_head()
def test_render_nothing(self):
    """With every symbol filtered out, both scripts render as bare 'pass'."""
    ctx = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m1,
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        },
    )
    template_args = {}
    autogenerate._produce_migration_diffs(
        ctx, template_args, set(),
        include_symbol=lambda name, schema: False,
    )
    empty_body = """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###"""
    eq_(re.sub(r"u'", "'", template_args['upgrades']), empty_body)
    eq_(re.sub(r"u'", "'", template_args['downgrades']), empty_body)
def get_migration_context(connection, table_names):
    """Build a MigrationContext whose diffing is restricted to *table_names*."""
    ctx_opts = {
        'compare_type': True,
        # Bind the allowed table names into the symbol filter up front.
        'include_symbol': partial(include_symbol, table_names),
        # 'compare_server_default' is deliberately left off: server-side
        # defaults are not compared here.
    }
    return MigrationContext.configure(connection, opts=ctx_opts)
def setup_db():
    """Create database and required tables.

    Fix: the connection used for the alembic revision check was never
    closed; it is now managed by a context manager.
    """
    # For server-based backends, try to create the database itself first;
    # SQLite files are created implicitly.
    if not app.config['DATABASE_URI'].startswith('sqlite'):
        try:
            with create_engine(
                app.config['DATABASE_URI'],
            ).connect() as connection:
                connection.execute('CREATE DATABASE {0}'.format(
                    app.config['DATABASE_NAME']))
                print("Database created")
        except sqlalchemy.exc.OperationalError:
            pass
        except sqlalchemy.exc.ProgrammingError:
            # If database already exists
            pass
    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    with engine.connect() as conn:
        context = MigrationContext.configure(conn)
        current_ver = context.get_current_revision()
    if not current_ver:
        print("Automatic DB Upgrade")
        print("Press Ctrl+C when finished")
        upgrade()
        print("Upgrade completed. Press Ctrl+C and runserver again.")
def setup_database(self):
    """Create or upgrade the database schema; return the head revision.

    Handles three states: already at head (no-op), no alembic stamp
    (fresh creation or legacy Storm-managed schema), or behind head
    (plain upgrade).
    """
    context = MigrationContext.configure(self._database.store.connection())
    current_rev = context.get_current_revision()
    head_rev = self._script.get_current_head()
    if current_rev == head_rev:
        # We're already at the latest revision so there's nothing to do.
        return head_rev
    if current_rev is None:
        # No Alembic information is available.
        storm_version = self._get_storm_schema_version()
        if storm_version is None:
            # Initial database creation.
            # NOTE(review): alembic_cfg comes from enclosing module scope
            # — confirm it is configured before this runs.
            Model.metadata.create_all(self._database.engine)
            self._database.commit()
            alembic.command.stamp(alembic_cfg, 'head')
        else:
            # The database was previously managed by Storm.
            if storm_version.version < LAST_STORM_SCHEMA_VERSION:
                raise DatabaseError(
                    'Upgrades skipping beta versions is not supported.')
            # Run migrations to remove the Storm-specific table and upgrade
            # to SQLAlchemy and Alembic.
            alembic.command.upgrade(alembic_cfg, 'head')
    elif current_rev != head_rev:
        alembic.command.upgrade(alembic_cfg, 'head')
    return head_rev
def init_db():
    """Create/upgrade the database schema and seed singleton rows."""
    connection = SQLEngine.connect()
    context = MigrationContext.configure(connection)
    current_revision = context.get_current_revision()
    logger.boot('Database revision: %s', current_revision)
    if current_revision is None:
        # Brand-new database: build every table from the model metadata.
        DataBase.metadata.create_all(SQLEngine)
    config = Config(ALEMBIC_CONFIG)
    script = ScriptDirectory.from_config(config)
    head_revision = script.get_current_head()
    if current_revision is None or current_revision != head_revision:
        logger.boot('Upgrading database to version %s.', head_revision)
        command.upgrade(config, 'head')
    # Seed the singleton Option row, recording the schema version.
    # Imports are deferred: presumably to avoid circular imports — confirm.
    from option import Option
    session = Session()
    options = session.query(Option).first()
    if options is None:
        options = Option()
        options.version = head_revision
        session.add(options)
    # Seed the singleton Pulse row.
    from pulse import Pulse
    pulse = session.query(Pulse).first()
    if pulse is None:
        pulse = Pulse()
        session.add(pulse)
    session.commit()
def setup_class(cls):
    """Bind to the test database and prepare a shared migration context."""
    cls.bind = config.db
    cls.conn = cls.bind.connect()
    staging_env()
    compare_opts = {"compare_type": True, "compare_server_default": True}
    cls.migration_context = MigrationContext.configure(
        connection=cls.conn, opts=compare_opts)
def bootstrap_db(config_uri=None, engine=None, with_migration=True):
    """Bring a blank database to a functional state.

    An *engine* may be supplied to reuse an existing bind; otherwise one
    is created from *config_uri*.
    """
    if engine is None:
        engine = create_engine(config_uri)
    db.configure(bind=engine)
    if with_migration:
        # Refuse to bootstrap a database that already carries a revision.
        context = MigrationContext.configure(engine.connect())
        db_version = context.get_current_revision()
        if db_version:
            sys.stderr.write('Database already initialized. Bailing out.\n')
            sys.exit(2)
        config = Config(config_uri)
        script_dir = ScriptDirectory.from_config(config)
        heads = script_dir.get_heads()
        # Multiple heads mean the migration history has unresolved branches.
        if len(heads) > 1:
            sys.stderr.write('Error: migration scripts have more than one '
                             'head.\nPlease resolve the situation before '
                             'attempting to bootstrap the database.\n')
            sys.exit(2)
    metadata.create_all(engine)
    # Seed an example row inside a managed transaction.
    with transaction.manager:
        model = MyModel(name='one', value=1)
        db.add(model)
    # Clean up the scoped session to allow a later app instantiation.
    db.remove()
    if with_migration and heads:
        # Mark the fresh schema as already being at the migration head.
        command.stamp(config, 'head')
def setup_class(cls):
    """Create the schema for model three and an 'empty' autogen context."""
    staging_env()
    cls.bind = sqlite_db()
    cls.m3 = _model_three()
    cls.m3.create_all(cls.bind)
    cls.m4 = _model_four()
    ctx_opts = {
        'compare_type': True,
        'compare_server_default': True,
        'target_metadata': cls.m3,
        'upgrade_token': "upgrades",
        'downgrade_token': "downgrades",
        'alembic_module_prefix': 'op.',
        'sqlalchemy_module_prefix': 'sa.',
    }
    cls.empty_context = MigrationContext.configure(
        connection=cls.bind.connect(), opts=ctx_opts)
    bound = cls.empty_context.bind
    cls.autogen_empty_context = {
        'imports': set(),
        'connection': bound,
        'dialect': bound.dialect,
        'context': cls.empty_context,
    }
def _fixture(self, m1, m2, include_schemas=False):
    """Create schema *m1*, diff it against model metadata *m2*, return diffs."""
    self.metadata, model_metadata = m1, m2
    self.metadata.create_all(self.bind)
    with self.bind.connect() as conn:
        self.context = MigrationContext.configure(
            connection=conn,
            opts={
                'compare_type': True,
                'compare_server_default': True,
                'target_metadata': model_metadata,
                'upgrade_token': "upgrades",
                'downgrade_token': "downgrades",
                'alembic_module_prefix': 'op.',
                'sqlalchemy_module_prefix': 'sa.',
            },
        )
        bound = self.context.bind
        autogen_context = {
            'imports': set(),
            'connection': bound,
            'dialect': bound.dialect,
            'context': self.context,
        }
        diffs = []
        autogenerate._produce_net_changes(
            bound, model_metadata, diffs, autogen_context,
            object_filters=_default_object_filters,
            include_schemas=include_schemas,
        )
        return diffs
def main():
    """CLI entry point: bootstrap the DB or forward arguments to alembic."""
    if len(sys.argv) < 3:
        sys.stderr.write('Usage: %s CONFIG_URI {bootstrap | ALEMBIC_OPTS}\n'
                         % sys.argv[0])
        sys.exit(1)
    config_uri = sys.argv.pop(1)
    if sys.argv[1] == 'bootstrap':
        bootstrap_db(config_uri)
    else:
        engine = create_engine(config_uri)
        db.configure(bind=engine)
        # Refuse to run alembic against an unstamped database.
        revision = MigrationContext.configure(
            engine.connect()).get_current_revision()
        if not revision:
            sys.stderr.write('Database not initialized.\n'
                             'Try this: "sortie-db-manage %s bootstrap"\n'
                             % config_uri)
            sys.exit(2)
        print(subprocess.check_output(
            ['alembic', '-c', config_uri] + sys.argv[1:]))
def versiondb(self):
    """Return the current alembic revision heads of the database.

    Fixes: the original docstring wrongly said "Downgrade the database",
    and the connection/engine were never released.
    """
    engine = create_engine(self.url)
    conn = engine.connect()
    try:
        context = MigrationContext.configure(conn)
        return context.get_current_heads()
    finally:
        conn.close()
        engine.dispose()
def test_compare_metadata_schema(self):
    """Schema-qualified diffs are reported when include_schemas is on."""
    model_metadata = self.m2
    context = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={"include_schemas": True},
    )
    diffs = autogenerate.compare_metadata(context, model_metadata)
    eq_(diffs[0],
        ('add_table', model_metadata.tables['test_schema.item']))
    eq_(diffs[1][0], 'remove_table')
    eq_(diffs[1][1].name, "extra")
    eq_(diffs[2][0], "add_column")
    eq_(diffs[2][1], "test_schema")
    eq_(diffs[2][2], "address")
    eq_(diffs[2][3], model_metadata.tables['test_schema.address'].c.street)
    eq_(diffs[3][0], "add_column")
    eq_(diffs[3][1], "test_schema")
    eq_(diffs[3][2], "order")
    eq_(diffs[3][3], model_metadata.tables['test_schema.order'].c.user_id)
    eq_(diffs[4][0][0], 'modify_nullable')
    eq_(diffs[4][0][5], False)
    eq_(diffs[4][0][6], True)
def main():
    """CLI entry point: bootstrap the database or run an alembic command.

    Usage: ``PROG CONFIG_URI {bootstrap | ALEMBIC_OPTS}``.
    """
    if len(sys.argv) < 3:
        sys.stderr.write('Usage: %s CONFIG_URI {bootstrap | ALEMBIC_OPTS}\n'
                         % sys.argv[0])
        sys.exit(1)
    config_uri = sys.argv.pop(1)
    settings = get_appsettings(config_uri)
    if sys.argv[1] == 'bootstrap':
        configure_zmq(settings['changes.socket'], False)
        engine = configure_engine(settings, True)
        bootstrap_db(config_uri)
        mark_changed()
        transaction.commit()
    else:
        # BUG FIX: `engine` was previously only defined in the bootstrap
        # branch, so this path raised NameError. Build it here as well.
        engine = configure_engine(settings, True)
        context = MigrationContext.configure(engine.connect())
        db_version = context.get_current_revision()
        if not db_version:
            sys.stderr.write('Database not initialized.\n'
                             'Try this: "assembl-db-manage %s bootstrap"\n'
                             % config_uri)
            sys.exit(2)
        cmd = ['alembic', '-c', config_uri] + sys.argv[1:]
        print(subprocess.check_output(cmd))
def create_session(db_url, debug=False, pool_recycle=3600):
    """Build the scoped Session used to query the database.

    :arg db_url: database URL of the form
        ``<engine>://<user>:<password>@<host>/<dbname>``; carries the
        engine, host, credentials and database name.
    :kwarg debug: when True, enable sqlalchemy's verbose echo output.
    :return: a Session that can be used to query the database.
    :raises DatabaseNeedsUpgrade: when the revision stamped in the
        database lags behind the newest alembic migration script.
    """
    engine = sa.create_engine(
        db_url, echo=debug, pool_recycle=pool_recycle, convert_unicode=True)
    session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=engine))
    # Refuse to hand out sessions against an out-of-date schema.
    script_dir = ScriptDirectory.from_config(get_alembic_config(db_url))
    context = MigrationContext.configure(session.connection())
    if context.get_current_revision() != script_dir.get_current_head():
        raise DatabaseNeedsUpgrade
    return session
def test_migrations(database_uri):
    """
    Run all migrations and assert the result is up to date with the
    model definitions.
    """
    alembic_config = alembic.config.Config('migrations/alembic.ini')
    engine = create_engine(database_uri)
    with engine.begin() as connection:
        # http://alembic.readthedocs.org/en/latest/cookbook.html#sharing-a-connection-with-a-series-of-migration-commands-and-environments
        alembic_config.attributes['connection'] = connection
        # An in-memory SQLite database starts empty; any other backend
        # needs a clean slate first.
        if database_uri != 'sqlite://':
            db.Base.metadata.drop_all(connection)
        # Create initial schema by running the first migration.
        alembic.command.upgrade(alembic_config, 'ea660b66f26')
        # Add some database content to run the migrations on.
        add_database_content(connection)
        # Run the remaining migrations.
        alembic.command.upgrade(alembic_config, 'head')
        # An empty autogenerate diff proves migrations and models agree.
        context = MigrationContext.configure(connection)
        assert not alembic.autogenerate.compare_metadata(
            context, db.Base.metadata)
    engine.dispose()
def check(self):
    """Return True when the database is at the expected head revision.

    Fixes two issues: the raw Engine was handed to
    ``MigrationContext.configure`` (which expects a Connection), and the
    engine/connection were never released.
    """
    engine = create_engine(self.url)
    conn = engine.connect()
    try:
        context = MigrationContext.configure(conn)
        current_rev = context.get_current_revision()
    finally:
        conn.close()
        engine.dispose()
    return self.head == current_rev
def get_database_version():
    """
    Gets the current database revision (partial GUID).

    :rtype: str
    """
    # NOTE(review): meta.engine (an Engine) is passed where alembic expects
    # a Connection; older alembic releases tolerate this — confirm the
    # pinned version before upgrading.
    context = MigrationContext.configure(meta.engine)
    return context.get_current_revision()
def __new__(cls):
    """Lazily create the single instance, upgrading the DB on first use."""
    if cls.__instance is None:
        i = object.__new__(cls)
        # Expose the shared DB handles on the singleton instance.
        i.SQLEngine = SQLEngine
        i.DataBase = DataBase
        i.Session = Session
        i.connection = SQLEngine.connect()
        i.context = MigrationContext.configure(i.connection)
        i.current_revision = i.context.get_current_revision()
        logger.boot('Database revision: %s', i.current_revision)
        i.config = Config(ALEMBIC_CONFIG)
        i.script = ScriptDirectory.from_config(i.config)
        i.head_revision = i.script.get_current_head()
        # Upgrade whenever the stamped revision lags the newest script.
        if i.current_revision is None or i.current_revision != i.head_revision:
            logger.boot('Upgrading database to version %s.', i.head_revision)
            command.upgrade(i.config, 'head')
            # Record the new schema version in the singleton Option row.
            # Deferred imports: presumably avoid circular imports — confirm.
            from option import Option
            from log import Log
            session = Session()
            options = session.query(Option).first()
            if options is None:
                options = Option()
                session.add(options)
            options.version = i.head_revision
            session.commit()
            i.current_revision = i.head_revision
        cls.__instance = i
        # Route application logging into the database from now on.
        h = SQLAlchemyHandler()
        logger.addHandler(h)
    return cls.__instance
def ensure_db_version(config_uri, session_maker):
    """Exit if database is not up-to-date."""
    config = Config(config_uri)
    script_dir = ScriptDirectory.from_config(config)
    heads = script_dir.get_heads()
    # Multiple heads mean the migration history has unresolved branches.
    if len(heads) > 1:
        sys.stderr.write('Error: migration scripts have more than one head.\n'
                         'Please resolve the situation before attempting to '
                         'start the application.\n')
        sys.exit(2)
    else:
        repo_version = heads[0] if heads else None
    # NOTE(review): session_maker()() is called twice and then .connect() —
    # presumably session_maker is a factory returning a callable session;
    # confirm this chain against the caller.
    context = MigrationContext.configure(session_maker()().connect())
    db_version = context.get_current_revision()
    if not db_version:
        sys.stderr.write('Database not initialized.\n'
                         'Try this: "assembl-db-manage %s bootstrap".\n'
                         % config_uri)
        sys.exit(2)
    if db_version != repo_version:
        sys.stderr.write('Stopping: DB version (%s) not up-to-date (%s).\n'
                         % (db_version, repo_version))
        sys.stderr.write('Try this: "assembl-db-manage %s upgrade head".\n'
                         % config_uri)
        sys.exit(2)
def setup_class(cls):
    """Build the db schema, the model schema, and the shared contexts."""
    staging_env()
    cls.bind = cls._get_bind()
    cls.m1 = cls._get_db_schema()
    cls.m1.create_all(cls.bind)
    cls.m2 = cls._get_model_schema()
    connection = cls.bind.connect()
    cls.context = MigrationContext.configure(
        connection=connection,
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': cls.m2,
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        },
    )
    bound = cls.context.bind
    cls.autogen_context = {
        'imports': set(),
        'connection': bound,
        'dialect': bound.dialect,
        'context': cls.context,
    }
def _fixture(self, m1, m2, include_schemas=False, opts=None,
             object_filters=_default_object_filters):
    """Create schema *m1*, diff it against model *m2*, return the raw diffs."""
    self.metadata, model_metadata = m1, m2
    self.metadata.create_all(self.bind)
    with self.bind.connect() as conn:
        base_opts = {
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': model_metadata,
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        }
        # Caller-supplied options override the defaults.
        base_opts.update(opts or {})
        self.context = MigrationContext.configure(
            connection=conn, opts=base_opts)
        bound = self.context.bind
        autogen_context = {
            'imports': set(),
            'connection': bound,
            'dialect': bound.dialect,
            'context': self.context,
        }
        diffs = []
        autogenerate._produce_net_changes(
            bound, model_metadata, diffs, autogen_context,
            object_filters=object_filters,
            include_schemas=include_schemas)
        return diffs
def upgrade_database(tag, sql, revision):
    """Upgrades database schema to newest version.

    Three paths: create-and-stamp a missing database, stamp an existing
    but unstamped one, or run alembic migrations up to *revision*.
    """
    from sqlalchemy_utils import database_exists, create_database
    from alembic.migration import MigrationContext
    alembic_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                "alembic.ini")
    alembic_cfg = AlembicConfig(alembic_path)
    if not database_exists(str(config.SQLALCHEMY_DATABASE_URI)):
        # Fresh install: create the database, build the schema straight
        # from the models and stamp it at head.
        create_database(str(config.SQLALCHEMY_DATABASE_URI))
        Base.metadata.create_all(engine)
        alembic_command.stamp(alembic_cfg, "head")
    else:
        conn = engine.connect()
        context = MigrationContext.configure(conn)
        current_rev = context.get_current_revision()
        if not current_rev:
            # Existing database without alembic info: assume the models
            # already match and stamp rather than migrate.
            Base.metadata.create_all(engine)
            alembic_command.stamp(alembic_cfg, "head")
        else:
            alembic_command.upgrade(alembic_cfg, revision, sql=sql, tag=tag)
            sync_triggers()
    click.secho("Success.", fg="green")
def create_backtest_management_table(self):
    """Create the per-bot backtest management table plus its foreign key."""
    backtest_management_template = BacktestManagement
    table_def = backtest_management_template.__table__
    # add specific params columns
    table_def = self.append_specific_params_column(table_def)
    backtest_summary_id = Column("backtest_summary_id", Integer)
    # NOTE(review): attaching relationship() to a Table object has no ORM
    # effect — relationship() belongs on a mapped class. Confirm intent.
    table_def.relation = relationship("BacktestSummary")
    table_def.append_column(backtest_summary_id)
    # Rename the template table so each bot gets its own copy.
    table_def.name = self.backtest_management_table_name
    table_def.create(bind=self.db_client.connector)
    # add foreign key constraint
    ctx = MigrationContext.configure(self.db_client.connector)
    op = Operations(ctx)
    with op.batch_alter_table(self.bot_name + "_backtest_management") as batch_op:
        batch_op.create_foreign_key("fk_management_summary", "backtest_summary",
                                    ["backtest_summary_id"], ["id"])
def test_include_object(self):
    """include_object filters both reflected and model-side objects."""
    def include_object(obj, name, type_, reflected, compare_to):
        assert obj.name == name
        if type_ == "table":
            # Reflected tables come from the database, not from self.m2.
            if reflected:
                assert obj.metadata is not self.m2
            else:
                assert obj.metadata is self.m2
            return name in ("address", "order", "user")
        if type_ == "column":
            if reflected:
                assert obj.table.metadata is not self.m2
            else:
                assert obj.table.metadata is self.m2
            return name != "street"
        return True

    context = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m2,
            'include_object': include_object,
        },
    )
    diffs = autogenerate.compare_metadata(
        context, context.opts['target_metadata'])
    # Gather every touched name: modified columns, added columns, added
    # tables — exactly the set the filter should have let through.
    flattened = self._flatten_diffs(diffs)
    touched = set(d[2] for d in flattened if d[0].startswith('modify'))
    touched |= set(d[3].name for d in flattened if d[0] == 'add_column')
    touched |= set(d[1].name for d in flattened if d[0] == 'add_table')
    eq_(touched, set(['user_id', 'order', 'user']))
def __init__(self):
    """Open the SQLite database, migrate it to head, and set up state."""
    sql_url = f'sqlite:///{filesystem.database_file()}'
    self.engine = sql.create_engine(sql_url)
    # Find the current alembic revision
    with self.engine.connect() as conn:
        ctx = MigrationContext.configure(conn)
        current_revision = ctx.get_current_revision()
    # Find the head alembic revision
    cfg = AlembicCfg(Path(__file__).parent / 'alembic.ini')
    cfg.set_main_option('script_location',
                        str(Path(__file__).parent / 'migrations'))
    cfg.set_main_option('sqlalchemy.url', sql_url)
    current_head = ScriptDirectory.from_config(cfg).as_revision_number('head')
    # Upgrade to the first revision manually, to accommodate
    # possibly pre-existing databases
    FIRST_REVISION = 'ee8fca468df0'
    if current_revision is None and current_head == FIRST_REVISION:
        DeclarativeBase.metadata.create_all(self.engine)
        alembic_command.stamp(cfg, FIRST_REVISION)
        current_revision = FIRST_REVISION
    elif current_revision is None:
        # Unstamped DB and the head has moved past the first revision:
        # we cannot tell what schema it has, so bail out.
        print('The current database is too old to be automatically updated.',
              file=sys.stderr)
        print(f'Please delete {filesystem.database_file()} before restarting.',
              file=sys.stderr)
        sys.exit(1)
    # Upgrade to the newest revision automatically
    alembic_command.upgrade(cfg, 'head')
    # The session object lasts for the lifetime of one thread
    self._session = orm.scoped_session(orm.sessionmaker(bind=self.engine))
    # A bidirectional dictionary mapping database Polygon IDs to leaflet.js IDs
    self.lfid = bidict()
    # A list of objects to be notified by changes
    self.listeners = []
def _fixture(self, m1, m2, include_schemas=False, opts=None,
             object_filters=_default_object_filters, return_ops=False):
    """Create the db schema(s), diff against the model, return ops or diffs."""
    self.metadata, model_metadata = m1, m2
    # m1 may be a single MetaData or a list of them.
    for each_metadata in util.to_list(self.metadata):
        each_metadata.create_all(self.bind)
    with self.bind.connect() as conn:
        base_opts = {
            "compare_type": True,
            "compare_server_default": True,
            "target_metadata": model_metadata,
            "upgrade_token": "upgrades",
            "downgrade_token": "downgrades",
            "alembic_module_prefix": "op.",
            "sqlalchemy_module_prefix": "sa.",
            "include_object": object_filters,
            "include_schemas": include_schemas,
        }
        # Caller-supplied options override the defaults.
        base_opts.update(opts or {})
        self.context = MigrationContext.configure(
            connection=conn, opts=base_opts)
        autogen_context = api.AutogenContext(self.context, model_metadata)
        upgrade_ops = ops.UpgradeOps(ops=[])
        autogenerate._produce_net_changes(autogen_context, upgrade_ops)
        return upgrade_ops if return_ops else upgrade_ops.as_diffs()
def prune_schemas_to_only(self, live_versions):
    """Drop schema objects no longer present in the model metadata, and
    forget schema versions of eggs that are no longer installed.

    Only ``remove_*`` migrations are executed automatically; anything
    else is printed for manual handling.
    """
    opts = {'target_metadata': metadata}
    with Operations.context(
            MigrationContext.configure(connection=Session.connection(),
                                       opts=opts)) as op:
        self.op = op
        # Only removals are performed automatically.
        to_remove = [
            i for i in
            self.get_outstanding_migrations().upgrade_ops.as_diffs()
            if i[0].startswith('remove_')
        ]
        tables_to_drop = []
        unhandled = []
        for migration in to_remove:
            name = migration[0]
            if name == 'remove_table':
                table = migration[1]
                tables_to_drop.append(table)
                # Drop FKs first so the tables can be dropped in any order.
                for foreign_key in table.foreign_key_constraints:
                    op.drop_constraint(foreign_key.name, table.name)
            elif name == 'remove_index':
                op.drop_index(migration[1].name)
            else:
                unhandled.append(migration)
        for table in tables_to_drop:
            op.drop_table(table.name)
        if unhandled:
            print(
                'These migrations have not been automatically done, please effect them by other means:'
            )
            for migration in unhandled:
                print(migration)
        # Forget schema versions for eggs that are no longer installed.
        installed_version_names = [version.name for version in live_versions]
        for created_schema_version in Session.query(SchemaVersion).all():
            if created_schema_version.egg_name not in installed_version_names:
                Session.delete(created_schema_version)
def create_fk_constraint(engine: object) -> bool:
    """ Get list of foreign keys from static list `fk_constraints` and created it

    Returns True on success, False when any constraint fails.
    Fix: ``conn`` was referenced in ``finally`` even when
    ``engine.connect()`` itself raised, producing a NameError that
    masked the real error; it is now guarded.
    """
    conn = None
    try:
        conn = engine.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        print(Migrate.fk_constraints)
        for constraint in Migrate.fk_constraints:
            # pop() strips the addressing keys so the remainder can be
            # forwarded as keyword options to create_foreign_key().
            dest_table_name = constraint.pop("table_name")
            column_name = constraint.pop("column_name")
            source_table = constraint.pop("source_table")
            dest_column = constraint.pop("dest_column")
            # if not self.check_column(dest_table_name,column_name):
            print(constraint)
            op.create_foreign_key(None, source_table, dest_table_name,
                                  [dest_column], [column_name], **constraint)
        return True
    except Exception as err:
        print(err)
        return False
    finally:
        if conn is not None:
            conn.close()
def setup_class(cls):
    """Prepare a shared autogen context against the test database."""
    cls.bind = config.db
    staging_env()
    context = MigrationContext.configure(
        connection=cls.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
        },
    )
    bound = context.bind
    cls.autogen_context = {
        'imports': set(),
        'connection': bound,
        'dialect': bound.dialect,
        'context': context,
        'opts': {
            'compare_type': True,
            'compare_server_default': True,
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        },
    }
def create_column(engine, table_oid, column_data):
    """Add the column described by *column_data* to the table with *table_oid*,
    returning the reflected column after creation."""
    requested_type = column_data[TYPE]
    nullable = column_data.get(NULLABLE, True)
    supported_types = alteration.get_supported_alter_column_types(
        engine, friendly_names=False,
    )
    sa_type = supported_types.get(requested_type)
    if sa_type is None:
        # Unknown types degrade to VARCHAR instead of failing outright.
        logger.warning("Requested type not supported. falling back to VARCHAR")
        sa_type = supported_types["VARCHAR"]
    table = tables.reflect_table_from_oid(table_oid, engine)
    new_column = MathesarColumn(
        column_data[NAME], sa_type, nullable=nullable,
    )
    with engine.begin() as conn:
        op = Operations(MigrationContext.configure(conn))
        op.add_column(table.name, new_column, schema=table.schema)
    refreshed = tables.reflect_table_from_oid(table_oid, engine)
    return refreshed.columns[column_data[NAME]]
def wrapper(context):
    """Run *fn* against the site's database inside a SERIALIZABLE transaction.

    Commits on success, rolls back and re-raises on any failure, and
    always closes the connection.

    Fix: the locals previously named ``context`` and ``transaction``
    shadowed the wrapper's parameter and (likely) the ``transaction``
    module; they are renamed. The bare ``except:`` is made explicit as
    ``except BaseException:`` with identical semantics.
    """
    util = getUtility(ISessionUtility)
    dsn = util.get_dsn(utils.getSite())
    engine = create_engine(dsn, isolation_level='SERIALIZABLE')
    connection = engine.connect()
    txn = connection.begin()
    try:
        migration_context = MigrationContext.configure(connection)
        operations = Operations(migration_context)
        metadata = MetaData(bind=engine)
        fn(operations, metadata)
        txn.commit()
    except BaseException:
        txn.rollback()
        raise
    finally:
        connection.close()
def db_setup():
    """Wait for the database to come online, then apply pending migrations.

    Blocks until a connection can be established (the operator can abort
    with CTRL+C), then runs ``alembic upgrade head`` when the stamped
    revision is behind the newest migration script.

    Fix: the retry loop previously spun with no delay; a short back-off
    is added so a down server is not hammered in a busy-loop.
    """
    import time  # local: only needed for the retry back-off

    engine = create_engine(config.SQLALCHEMY_URL)
    logging.info("Waiting database to come online. Use CTRL + C to interrupt at any moment.")
    conn = None
    while conn is None:
        try:
            logging.info("Trying connection...")
            conn = establish_connection(engine)
        except OperationalError:
            logging.warning("""Couldn't connect to the server in {time} seconds to the database."""
                            .format(time=config.DATABASE_CONNECTION_MERCY_TIME))
            # Brief pause between attempts instead of a busy-loop.
            time.sleep(1)
    logging.info("Connection with database established")
    context = MigrationContext.configure(conn)
    alembic_script = ScriptDirectory.from_config(Config("./alembic.ini"))
    if context.get_current_revision() != alembic_script.get_current_head():
        logging.info("Applying database evolutions, this might take a while.")
        process = subprocess.Popen("alembic upgrade head", shell=True)
        process.wait()
        logging.info("Databases evolutions applied.")
    conn.close()
def resetdb(rbac):
    """
    Clear out the database
    """
    from airflow import models
    # alembic adds significant import time, so we import it lazily
    from alembic.migration import MigrationContext
    log.info("Dropping tables that exist")
    models.base.Base.metadata.drop_all(settings.engine)
    mc = MigrationContext.configure(settings.engine)
    # Drop alembic's version table too so the DB looks never-migrated.
    # NOTE(review): ``_version`` is a private alembic attribute and may
    # break on alembic upgrades — confirm against the pinned version.
    if mc._version.exists(settings.engine):
        mc._version.drop(settings.engine)
    if rbac:
        # drop rbac security tables
        from flask_appbuilder.security.sqla import models
        from flask_appbuilder.models.sqla import Base
        Base.metadata.drop_all(settings.engine)
    initdb(rbac)
def test_render_nothing(self):
    """Identical metadata on both sides renders empty upgrade/downgrade."""
    ctx = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m1,
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
        },
    )
    template_args = {}
    autogenerate._produce_migration_diffs(ctx, template_args, set())
    expected = """### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###"""
    eq_(re.sub(r"u'", "'", template_args['upgrades']), expected)
    eq_(re.sub(r"u'", "'", template_args['downgrades']), expected)
def setUp(self):
    """Connect and assemble the autogen context used by each test."""
    connection = self.bind.connect()
    self.conn = connection
    ctx_opts = {
        'compare_type': True,
        'compare_server_default': True,
        'target_metadata': self.m2,
        'upgrade_token': "upgrades",
        'downgrade_token': "downgrades",
        'alembic_module_prefix': 'op.',
        'sqlalchemy_module_prefix': 'sa.',
    }
    # Subclasses may override or extend the defaults.
    if self.configure_opts:
        ctx_opts.update(self.configure_opts)
    context = MigrationContext.configure(connection=connection,
                                         opts=ctx_opts)
    self.context = context
    self.autogen_context = {
        'imports': set(),
        'connection': context.bind,
        'dialect': context.bind.dialect,
        'context': context,
    }
def test_database_schema_and_sqlalchemy_model_are_in_sync(self):
    # combine Airflow and Flask-AppBuilder (if rbac enabled) models
    all_meta_data = MetaData()
    model_tables = list(airflow_base.metadata.tables.items())
    if RBAC:
        model_tables += list(fab_base.metadata.tables.items())
    for table_name, table in model_tables:
        all_meta_data._add_table(table_name, table.schema, table)

    # create diff between database schema and SQLAlchemy model
    mc = MigrationContext.configure(engine.connect())
    diff = compare_metadata(mc, all_meta_data)

    # Tables that legitimately exist in the DB but not in the models:
    # leftovers from other tests and tables created by celery.
    ignored_tables = {
        't',
        'test_airflow',
        'test_postgres_to_postgres',
        'test_mysql_to_mysql',
        'celery_taskmeta',
        'celery_tasksetmeta',
    }

    def is_known_diff(entry):
        # users.password is not part of User model,
        # otherwise it would show up in (old) UI
        if (entry[0] == 'remove_column' and entry[2] == 'users'
                and entry[3].name == 'password'):
            return True
        return entry[0] == 'remove_table' and entry[1].name in ignored_tables

    diff = [d for d in diff if not is_known_diff(d)]
    self.assertFalse(
        diff, 'Database schema and SQLAlchemy model are not in sync')
def check_alembic_version():
    """Verify the DB's schema revision matches the newest migration script."""
    config = Config()
    config.set_main_option("script_location", "migrations")
    script = ScriptDirectory.from_config(config)
    head_revision = script.get_current_head()

    with app.app_context():
        try:
            conn = db.session.connection()
        except ImportError:
            if not faraday.server.config.database.connection_string:
                print(
                    "\n\nNo database configuration found. Did you execute \"faraday-manage initdb\"? \n\n"
                )
                sys.exit(1)
        except sqlalchemy.exc.OperationalError:
            print(
                "Bad Credentials, please check the .faraday/config/server.ini")
            sys.exit(1)

        context = MigrationContext.configure(conn)
        current_revision = context.get_current_revision()
        if head_revision == current_revision:
            return

        # A migration script named after the current revision exists locally,
        # so the DB is simply behind: tell the user to migrate.
        pattern = os.path.join(
            FARADAY_BASE, 'migrations', 'versions',
            '{}_*.py'.format(current_revision))
        if glob.glob(pattern):
            print('--' * 20)
            print('Missing migrations, please execute: \n\n')
            print('faraday-manage migrate')
            sys.exit(1)
        else:
            logger.warning(
                "You are using an unknown schema version. If you are a "
                "developer, this probably happened because you used branch "
                "with a schema migration not merged yet. If you are a "
                "normal user, consider reporting this bug back to us")
def setUp(self):
    """Create a small 'foo' table with sample rows and an Operations facade."""
    self.conn = config.db.connect()
    self.metadata = MetaData()
    foo = Table(
        'foo', self.metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String(50)),
        Column('x', Integer),
        mysql_engine='InnoDB',
    )
    foo.create(self.conn)

    sample_rows = [
        (1, "d1", 5),
        (2, "22", 6),
        (3, "8.5", 7),
        (4, "9.46", 8),
        (5, "d5", 9),
    ]
    self.conn.execute(
        foo.insert(),
        [{"id": pk, "data": data, "x": x} for pk, data, x in sample_rows],
    )

    self.op = Operations(MigrationContext.configure(self.conn))
def generate(self, message: str, allow_empty: bool) -> None:
    """ Generate upgrade scripts using alembic. """
    if not self.__exists():
        raise DBCreateException(
            'Tables have not been created yet, use create to create them!')

    # Verify that there are actual changes, and refuse to create empty migration scripts
    connection = self.__config['database']['engine'].connect()
    context = MigrationContext.configure(
        connection, opts={'compare_type': True})
    diff = compare_metadata(context, metadata)
    if not (allow_empty or diff):
        raise DBCreateException(
            'There is nothing different between code and the DB, refusing to create migration!'
        )

    self.__alembic_cmd('revision', '--autogenerate', '-m', message)
def drop_airflow_models(connection):
    """
    Drops all airflow models.

    :param connection: SQLAlchemy Connection
    :return: None
    """
    from airflow.models.base import Base

    # Drop connection and chart - those tables have been deleted and in case you
    # run resetdb on schema with chart or users table will fail
    legacy_names = ['chart', 'user', 'users', 'dag_stats', 'session']
    legacy_tables = [Table(name, Base.metadata) for name in legacy_names]
    for legacy in legacy_tables:
        legacy.drop(settings.engine, checkfirst=True)

    Base.metadata.drop_all(connection)

    # we remove the Tables here so that if resetdb is run metadata does not keep the old tables.
    for legacy in reversed(legacy_tables):
        Base.metadata.remove(legacy)

    # alembic adds significant import time, so we import it lazily
    from alembic.migration import MigrationContext

    version = MigrationContext.configure(connection)._version
    if version.exists(connection):
        version.drop(connection)
def connect(engine: eng.Engine) -> None:
    """Instantiates an sqlalchemy connection shareable across all seeders

    Args:
        engine: SqlAlchemy engine
    """
    # set engine
    # BUGFIX: this was annotated ``engine.Engine`` — an attribute lookup on
    # the *parameter* (an Engine instance), not on the ``eng`` module.
    # PEP 526 evaluates annotations on attribute targets at runtime, so that
    # could raise AttributeError; use the module alias from the signature.
    Seeder._engine: eng.Engine = engine
    # set connection
    Seeder.connection = Seeder._engine.connect()
    # get operations context
    Seeder.operation = Operations(MigrationContext.configure(Seeder.connection))
    # get metadata from current connection
    Seeder.meta = MetaData(bind=Seeder.operation.get_bind())
    Seeder._base = automap_base()
    # reflect database
    Seeder._base.prepare(engine, reflect=True)
    Seeder.meta.reflect()
def setup_db():
    """Create database and required tables.

    Creates the database itself (non-sqlite backends only), then checks the
    Alembic revision of the application database and runs an automatic
    upgrade when the schema has never been stamped.
    """
    if not app.config['DATABASE_URI'].startswith('sqlite'):
        try:
            with create_engine(
                app.config['DATABASE_URI'],
            ).connect() as connection:
                connection.execute('CREATE DATABASE {0}'.format(
                    app.config['DATABASE_NAME']))
                print("Database created")
        except sqlalchemy.exc.OperationalError:
            pass
        except sqlalchemy.exc.ProgrammingError:
            # If database already exists
            pass

    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    # BUGFIX: close the revision-probe connection deterministically
    # (previously it was opened and never closed).
    with engine.connect() as conn:
        context = MigrationContext.configure(conn)
        current_ver = context.get_current_revision()
    if not current_ver:
        print("Automatic DB Upgrade")
        print("Press Ctrl+C when finished")
        upgrade()
        print("Upgrade completed. Press Ctrl+C and runserver again.")
def test_compare_metadata_schema(self):
    """Diffing with include_schemas=True reports schema-qualified changes."""
    metadata = self.m2
    context = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={"include_schemas": True},
    )

    diffs = autogenerate.compare_metadata(context, metadata)

    eq_(diffs[0], ('add_table', metadata.tables['test_schema.item']))

    removed = diffs[1]
    eq_(removed[0], 'remove_table')
    eq_(removed[1].name, "extra")

    street_added = diffs[2]
    eq_(street_added[0], "add_column")
    eq_(street_added[1], "test_schema")
    eq_(street_added[2], "address")
    eq_(street_added[3], metadata.tables['test_schema.address'].c.street)

    constraint_added = diffs[3]
    eq_(constraint_added[0], "add_constraint")
    eq_(constraint_added[1].name, "uq_email")

    user_id_added = diffs[4]
    eq_(user_id_added[0], "add_column")
    eq_(user_id_added[1], "test_schema")
    eq_(user_id_added[2], "order")
    eq_(user_id_added[3], metadata.tables['test_schema.order'].c.user_id)

    nullable_change = diffs[5][0]
    eq_(nullable_change[0], 'modify_nullable')
    eq_(nullable_change[5], False)
    eq_(nullable_change[6], True)
def _schema_init_and_update(self):
    """
    This method tries to create the database table and update the schema.

    :return: None
    """
    try:
        # Try to create the database
        metadata.create_all(self.engine)
    except OperationalError as exx:  # pragma: no cover
        log.info("{0!r}".format(exx))

    # Schema update
    operations = Operations(
        MigrationContext.configure(self.engine.connect()))
    try:
        # Try to add resolver column
        resolver_column = Column(
            'resolver', String(length=column_length.get("resolver")))
        operations.add_column(TABLE_NAME, resolver_column)
    except Exception as exx:  # pragma: no cover
        log.info("{0!r}".format(exx))
def test_compare_metadata_include_symbol(self):
    """include_symbol restricts the diff to the named tables only."""
    metadata = self.m2

    def include_symbol(table_name, schema_name):
        return table_name in ('extra', 'order')

    context = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'include_symbol': include_symbol,
        },
    )

    diffs = autogenerate.compare_metadata(context, metadata)

    # 'extra' exists only in the DB, so it shows up as removed.
    eq_(diffs[0][0], 'remove_table')
    eq_(diffs[0][1].name, "extra")

    # 'order' gained a user_id column in the model.
    eq_(diffs[1][0], "add_column")
    eq_(diffs[1][1], None)
    eq_(diffs[1][2], "order")
    eq_(diffs[1][3], metadata.tables['order'].c.user_id)

    type_change = diffs[2][0]
    eq_(type_change[0], "modify_type")
    eq_(type_change[1], None)
    eq_(type_change[2], "order")
    eq_(type_change[3], "amount")
    eq_(repr(type_change[5]), "NUMERIC(precision=8, scale=2)")
    eq_(repr(type_change[6]), "Numeric(precision=10, scale=2)")

    nullable_change = diffs[2][1]
    eq_(nullable_change[0], 'modify_nullable')
    eq_(nullable_change[2], 'order')
    eq_(nullable_change[5], False)
    eq_(nullable_change[6], True)
def test_include_symbol(self):
    """Filtered-out tables must not appear in rendered migration code."""
    def include_symbol(name, schema=None):
        return name in ('address', 'order')

    context = MigrationContext.configure(
        connection=self.bind.connect(),
        opts={
            'compare_type': True,
            'compare_server_default': True,
            'target_metadata': self.m2,
            'include_symbol': include_symbol,
            'upgrade_token': "upgrades",
            'downgrade_token': "downgrades",
            'alembic_module_prefix': 'op.',
            'sqlalchemy_module_prefix': 'sa.',
        },
    )
    template_args = {}
    autogenerate._produce_migration_diffs(context, template_args, set())

    # Normalize py2 unicode literal prefixes before asserting.
    upgrades = template_args['upgrades'].replace("u'", "'")
    downgrades = template_args['downgrades'].replace("u'", "'")
    template_args['upgrades'] = upgrades
    template_args['downgrades'] = downgrades

    # 'user' was filtered out by include_symbol; 'order' was kept.
    assert "alter_column('user'" not in upgrades
    assert "alter_column('user'" not in downgrades
    assert "alter_column('order'" in upgrades
    assert "alter_column('order'" in downgrades
def diff_db(self, output_sql=False):
    """Report schema differences between the models and the live database.

    :param output_sql: when True, render the outstanding upgrade operations
        as python source and execute it in alembic "as_sql" mode (which
        emits SQL instead of running DDL); when False, just pretty-print
        and return the raw diff tuples.
    :return: the stripped source lines (output_sql=True) or the list of
        alembic diff tuples (output_sql=False; empty list if in sync).
    """
    migrations = self.get_outstanding_migrations()
    if output_sql:
        # Render the upgrade ops as python code, then drop the comment
        # lines alembic's renderer inserts.
        commented_source_code = render_python_code(
            migrations.upgrade_ops,
            alembic_module_prefix='op2.',
            sqlalchemy_module_prefix="sqlalchemy.")
        uncommented_source_code = [
            i.strip() for i in commented_source_code.split('\n')
            if not i.strip().startswith('#')
        ]
        source_code = '\n'.join(['import sqlalchemy'] +
                                uncommented_source_code)
        opts = {'as_sql': output_sql, 'target_metadata': metadata}
        # The rendered code references ``op2``, which Operations.context
        # binds in the local scope for the exec below.
        with Operations.context(
                MigrationContext.configure(connection=Session.connection(),
                                           opts=opts)) as op2:
            # NOTE(review): exec of generated source — acceptable only
            # because it comes from alembic's own renderer, never from
            # user input; confirm no untrusted data reaches this path.
            exec(source_code, globals(), locals())
        return uncommented_source_code
    else:
        migrations_required = migrations.upgrade_ops.as_diffs()
        if migrations_required:
            pprint.pprint(migrations_required, indent=2, width=20)
        return migrations_required
def deleteChannel():
    """ Delete an instrument's data channel along with its data. Only instrument owners and system administrators can delete data channels

    .. :quickref: Delete data channel; Deletes an instrument data channel and its data

    :param: channelid: (Integer) unique instrument data channel identifier
    """
    Session = sessionmaker(bind=current_user.engineObj)
    session = Session()
    if 'channelid' in request.form:
        channelid = request.form['channelid']
    elif 'channelid' in request.args:
        channelid = request.args['channelid']
    else:
        raise InvalidUsage('No channel ID provided', status_code=500)
    try:
        result = session.query(daqbrokerDatabase.channels).filter_by(channelid=channelid).first()
        # Only system administrators (type 1) may delete channels they don't own.
        if current_user.type != 1:
            if result.chann.meta.username != current_user.username:
                raise InvalidUsage("You are not the instrument operator", status_code=400)
        session.delete(result)
        conn = current_user.engineObj.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        # Channel types 1 and 2 store values in the "_data" table, all
        # others in the "_custom" table.
        if result.channeltype == 1 or result.channeltype == 2:
            op.drop_column(result.chann.meta.Name + "_data", result.Name)
        else:
            op.drop_column(result.chann.meta.Name + "_custom", result.Name)
        conn.close()
        session.commit()
        return jsonify('done')
    except InvalidUsage:
        # BUGFIX: previously the broad handler below caught the deliberate
        # 400 "not the instrument operator" error and rewrapped it as a 500.
        # Re-raise InvalidUsage untouched after rolling back.
        session.rollback()
        raise
    except Exception as e:
        session.rollback()
        raise InvalidUsage('Error : ' + str(e), status_code=500)
def create_missing_database_entities(Model, engine):
    """Create any tables/columns present in the model but missing in the DB.

    Modernized from Python-2-only syntax (``print`` statements,
    ``dict.has_key``, ``map``) so it runs on Python 3 like the rest of
    the file.

    :param Model: declarative base whose ``metadata`` describes the target schema
    :param engine: SQLAlchemy engine connected to the database to update
    """
    m = Model.metadata
    current_info = get_current_database_info(engine)
    print(current_info)

    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)

    print("metadata", m)
    for table_name in m.tables:
        table = m.tables[table_name]
        if table_name in current_info:
            # Table exists: add any columns the DB doesn't have yet.
            for col in table.columns:
                print("col", col)
                if col.name not in current_info[table_name]:
                    print(" IN TABLE: %s CREATING COLUMN: %s" % (table_name, col.name))
                    op.add_column(table_name, mimic_column(col))
                    print(" ... done")
        else:
            args = [table_name] + [mimic_column(c) for c in table.columns]
            print("CREATING TABLE: " + repr(args))
            op.create_table(*args)
def op(self):
    """Get an alembic operations context."""
    migration_ctx = MigrationContext.configure(self.executable)
    ops = Operations(migration_ctx)
    return ops