def _prep_testing_database(options, file_config):
    from alembic.testing import config
    from alembic.testing.exclusions import against
    from sqlalchemy import schema
    from alembic import util

    if util.sqla_08:
        from sqlalchemy import inspect
    else:
        from sqlalchemy.engine.reflection import Inspector
        inspect = Inspector.from_engine

    if options.dropfirst:
        for cfg in config.Config.all_configs():
            e = cfg.db
            inspector = inspect(e)
            try:
                view_names = inspector.get_view_names()
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData())
                    ))

            if config.requirements.schemas.enabled_for_config(cfg):
                try:
                    view_names = inspector.get_view_names(
                        schema="test_schema")
                except NotImplementedError:
                    pass
                else:
                    for vname in view_names:
                        e.execute(schema._DropView(
                            schema.Table(vname, schema.MetaData(),
                                         schema="test_schema")
                        ))

            for tname in reversed(inspector.get_table_names(
                    order_by="foreign_key")):
                e.execute(schema.DropTable(
                    schema.Table(tname, schema.MetaData())
                ))

            if config.requirements.schemas.enabled_for_config(cfg):
                for tname in reversed(inspector.get_table_names(
                        order_by="foreign_key", schema="test_schema")):
                    e.execute(schema.DropTable(
                        schema.Table(tname, schema.MetaData(),
                                     schema="test_schema")
                    ))

            if against(cfg, "postgresql") and util.sqla_100:
                from sqlalchemy.dialects import postgresql
                for enum in inspector.get_enums("*"):
                    e.execute(postgresql.DropEnumType(
                        postgresql.ENUM(
                            name=enum['name'], schema=enum['schema'])))

def _prep_testing_database(options, file_config):
    from sqlalchemy.testing import config
    from sqlalchemy import schema, inspect

    if options.dropfirst:
        for cfg in config.Config.all_configs():
            e = cfg.db
            inspector = inspect(e)
            try:
                view_names = inspector.get_view_names()
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData())))

            if config.requirements.schemas.enabled_for_config(cfg):
                try:
                    view_names = inspector.get_view_names(
                        schema="test_schema")
                except NotImplementedError:
                    pass
                else:
                    for vname in view_names:
                        e.execute(schema._DropView(
                            schema.Table(vname, schema.MetaData(),
                                         schema="test_schema")))

            for tname in reversed(inspector.get_table_names(
                    order_by="foreign_key")):
                e.execute(schema.DropTable(
                    schema.Table(tname, schema.MetaData())))

            if config.requirements.schemas.enabled_for_config(cfg):
                for tname in reversed(inspector.get_table_names(
                        order_by="foreign_key", schema="test_schema")):
                    e.execute(schema.DropTable(
                        schema.Table(tname, schema.MetaData(),
                                     schema="test_schema")))

def tearDown(self):
    sqlalchemy.orm.session.Session.close_all()
    meta = sqlalchemy.MetaData()
    meta.bind = self.engine
    meta.reflect(self.engine)

    with self.engine.begin() as conn:
        inspector = reflection.Inspector.from_engine(self.engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name']))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        for fkc in all_fks:
            conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

    sql.cleanup()
    super(SqlMigrateBase, self).tearDown()

def _cleanupDB(self):
    meta = sqlalchemy.MetaData()
    meta.bind = self.engine
    meta.reflect(self.engine)

    with self.engine.begin() as conn:
        inspector = reflection.Inspector.from_engine(self.engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name']))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        for fkc in all_fks:
            if self.engine.name != 'sqlite':
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

def clear_db(db_path):
    db_engine = create_engine(db_path, poolclass=NullPool)
    with db_engine.begin() as conn:
        meta = MetaData()
        meta.reflect(bind=db_engine)
        inspector = reflection.Inspector.from_engine(db_engine)

        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint(tuple(), tuple(),
                                                name=fk['name']))
            t = schema.Table(table_name, meta, *fks, extend_existing=True)
            tbs.append(t)
            all_fks.extend(fks)

        for fkc in all_fks:
            conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        # Custom ENUM types like this exist only in PostgreSQL.
        if db_engine.name == "postgresql":
            for en in _get_enums(conn):
                conn.execute("DROP TYPE {0}".format(en))

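# _get_enums() above is not defined in this snippet. A minimal sketch of
# what it might look like, assuming it lists user-defined ENUM type names
# via the PostgreSQL catalog (the query is an assumption, in the spirit of
# the pg_type query used in dropdb() below):
def _get_enums(conn):
    # typtype = 'e' marks enum types in pg_catalog.pg_type.
    return [row[0] for row in
            conn.execute("SELECT typname FROM pg_type WHERE typtype = 'e'")]
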
def clear(names=None, sql=False, echo=True):
    """Clear the database; drop all specified tables."""
    # TODO: Support multi-db routing.
    engine = application.databases['default']

    # Iterate through each collected package metadata.
    for name, metadata in reversed(list(limit_metadata(names).items())):
        if echo:
            # Log the sequence.
            print_command('alchemist db', 'clear', name)

        # Iterate through all tables.
        for table in metadata.sorted_tables:
            if table.exists(engine):
                if echo:
                    # Log the sequence.
                    print_command('alchemist db', 'drop', table.name,
                                  'default')

                if sql:
                    # Print the drop statement.
                    print(str(schema.DropTable(table)) + ';')
                else:
                    # Drop the table.
                    table.drop(engine)

def test_drop(self):
    conn = self._conn_fixture()
    m = MetaData()
    t = Table('test', m, Column('x', Integer), Column('y', Integer))
    self._exec_stmt(conn, schema.CreateTable(t))
    self._exec_stmt(conn, schema.DropTable(t))
    assert 'test' not in conn._namespace

def _prep_testing_database(options, file_config):
    from sqlalchemy.testing import engines
    from sqlalchemy import schema, inspect

    # also create alt schemas etc. here?

    if options.dropfirst:
        e = engines.utf8_engine()
        inspector = inspect(e)

        try:
            view_names = inspector.get_view_names()
        except NotImplementedError:
            pass
        else:
            for vname in view_names:
                e.execute(
                    schema._DropView(schema.Table(vname, schema.MetaData())))

        try:
            view_names = inspector.get_view_names(schema="test_schema")
        except NotImplementedError:
            pass
        else:
            for vname in view_names:
                e.execute(
                    schema._DropView(
                        schema.Table(vname, schema.MetaData(),
                                     schema="test_schema")))

        for tname in reversed(
                inspector.get_table_names(order_by="foreign_key")):
            e.execute(schema.DropTable(
                schema.Table(tname, schema.MetaData())))

        for tname in reversed(
                inspector.get_table_names(order_by="foreign_key",
                                          schema="test_schema")):
            e.execute(
                schema.DropTable(
                    schema.Table(tname, schema.MetaData(),
                                 schema="test_schema")))

        e.dispose()

def dropdb():
    from nailgun.db import migration

    conn = engine.connect()
    trans = conn.begin()
    meta = MetaData()
    meta.reflect(bind=engine)
    inspector = reflection.Inspector.from_engine(engine)

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(
                schema.ForeignKeyConstraint((), (), name=fk['name'])
            )
        t = schema.Table(table_name, meta, *fks, extend_existing=True)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(schema.DropConstraint(fkc))

    for table in tbs:
        conn.execute(schema.DropTable(table))

    custom_types = conn.execute(
        "SELECT n.nspname as schema, t.typname as type "
        "FROM pg_type t LEFT JOIN pg_catalog.pg_namespace n "
        "ON n.oid = t.typnamespace "
        "WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' "
        "FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) "
        "AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el "
        "WHERE el.oid = t.typelem AND el.typarray = t.oid) "
        "AND n.nspname NOT IN ('pg_catalog', 'information_schema')"
    )

    for tp in custom_types:
        conn.execute("DROP TYPE {0}".format(tp[1]))

    trans.commit()
    migration.drop_migration_meta(engine)
    conn.close()
    engine.dispose()

def visit_table(self, table, drop_ok=False):
    if not drop_ok and not self._can_drop_table(table):
        return

    table.dispatch.before_drop(table, self.connection,
                               checkfirst=self.checkfirst)

    for column in table.columns:
        if column.default is not None:
            self.traverse_single(column.default)

    self.connection.execute(schema.DropTable(table))

    table.dispatch.after_drop(table, self.connection,
                              checkfirst=self.checkfirst)

def visit_table(self, table, drop_ok=False):
    if not drop_ok and not self._can_drop(table):
        return

    for listener in table.ddl_listeners['before-drop']:
        listener('before-drop', table, self.connection)

    for column in table.columns:
        if column.default is not None:
            self.traverse_single(column.default)

    self.connection.execute(schema.DropTable(table))

    for listener in table.ddl_listeners['after-drop']:
        listener('after-drop', table, self.connection)

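# Context sketch: the visit_table() hooks above belong to SQLAlchemy's DDL
# visitors and run indirectly when schema objects are dropped through the
# normal API. A minimal, self-contained example (the table name "example"
# is illustrative):
from sqlalchemy import Column, Integer, MetaData, Table, create_engine

engine = create_engine("sqlite://")
metadata = MetaData()
Table("example", metadata, Column("id", Integer, primary_key=True))
metadata.create_all(engine)
metadata.drop_all(engine, checkfirst=True)  # dispatches to visit_table()
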
def dropAllTables(db):
    """Drop all tables from the database. For testing only!

    Recipe from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything .
    Adapted for Flask-SQLAlchemy by http://www.mbeckler.org/blog/?p=218 .

    :param db: (flask.sqlalchemy.SQLAlchemy) The object representing the DB
    """
    conn = db.engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(db.engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = sqlalchemy.MetaData()

    tables = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(sqlalchemy.ForeignKeyConstraint((), (),
                                                       name=fk['name']))
        table = sqlalchemy.Table(table_name, metadata, *fks)
        tables.append(table)
        all_fks.extend(fks)

    for constraint in all_fks:
        conn.execute(schema.DropConstraint(constraint))

    for table in tables:
        conn.execute(schema.DropTable(table))

    trans.commit()

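# Usage sketch for dropAllTables(), assuming a Flask-SQLAlchemy instance
# named `db` (`myapp` is a hypothetical module path):
import unittest

from myapp import db

class ModelTestCase(unittest.TestCase):
    def tearDown(self):
        # Reset the schema between tests.
        dropAllTables(db)
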
def drop_all_objects(self, engine):
    """Drop all database objects.

    Drops all database objects remaining on the default schema of the
    given engine. Per-db implementations will also need to drop items
    specific to those systems, such as sequences, custom types (e.g.
    pg ENUM), etc.
    """
    with engine.begin() as conn:
        inspector = sqlalchemy.inspect(engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                # note that SQLite reflection does not have names
                # for foreign keys until SQLAlchemy 1.0
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name'])
                )
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        if self.supports_drop_fk:
            for fkc in all_fks:
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        self.drop_additional_objects(conn)

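# drop_additional_objects() above is a per-backend hook. A minimal sketch
# of a PostgreSQL override, modeled on the standalone drop_all_objects()
# variant below (the body here is an assumption, not the library's code):
def drop_additional_objects(self, conn):
    # DROP TABLE does not remove ENUM types, so drop them explicitly.
    for e in [e["name"] for e in sqlalchemy.inspect(conn).get_enums()]:
        conn.execute("DROP TYPE %s" % e)
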
def drop_all_objects(engine):
    """Drop all database objects.

    Drops all database objects remaining on the default schema of the
    given engine. Per-db implementations will also need to drop items
    specific to those systems, such as sequences, custom types (e.g.
    pg ENUM), etc.
    """
    with engine.begin() as conn:
        inspector = sa.inspect(engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk["name"]:
                    continue
                fks.append(schema.ForeignKeyConstraint((), (),
                                                       name=fk["name"]))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        if engine.name != "sqlite":
            for fkc in all_fks:
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        if engine.name == "postgresql":
            if compat_utils.sqla_100:
                enums = [e["name"] for e in sa.inspect(conn).get_enums()]
            else:
                enums = conn.dialect._load_enums(conn).keys()
            for e in enums:
                conn.execute("DROP TYPE %s" % e)

def drop_table(self, table):
    self._exec(schema.DropTable(table))

def drop_table(self, table: "Table") -> None:
    self._exec(schema.DropTable(table))
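
# Usage sketch: in an Alembic migration script, op.drop_table() is routed
# through an impl-level drop_table() like the two above (the table name
# "accounts" is illustrative):
from alembic import op

def downgrade():
    op.drop_table("accounts")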