def open_db(self, engine, mode='open'):
    """mode: create, open, drop"""
    from sqlalchemy.ext.declarative import declarative_base, DeferredReflection
    from sqlalchemy.schema import MetaData, DropConstraint
    from sqlalchemy import event, Table

    if mode != 'open':
        # Clear out any existing tables
        metadata = MetaData(engine)
        metadata.reflect()
        if engine.name != 'sqlite':
            for table in metadata.tables.values():
                for fk in table.foreign_keys:
                    engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all(engine)
        if mode == 'drop':
            return

    self.Base = declarative_base(cls=DeferredReflection)
    self.Base.metadata.bind = engine

    if mode == 'open':
        @event.listens_for(Table, "column_reflect")
        def column_reflect(inspector, table, column_info):
            if table.metadata is self.Base.metadata:
                if self.column_map and table.name in self.column_map:
                    column_info['key'] = self.column_map[table.name][
                        column_info['name']]
                else:
                    column_info['key'] = column_info['name']

    self.declare(reflect=mode == 'open')

    if mode == 'create':
        self.Base.metadata.create_all(engine)

    from sqlalchemy.orm import sessionmaker
    self.Base.prepare(engine)
    self.reflect_classes()
    Session = sessionmaker(bind=engine)
    self.session = Session()
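# A freestanding sketch of the "column_reflect" hook used by open_db above:
# it remaps a reflected column's Python-side key while leaving the SQL name
# untouched. The table name and column_map contents are illustrative
# assumptions, not part of the original code.
from sqlalchemy import create_engine, event, text, MetaData, Table

engine = create_engine('sqlite://')
with engine.begin() as conn:
    conn.execute(text('CREATE TABLE t (legacy_name INTEGER)'))

column_map = {'t': {'legacy_name': 'nice_name'}}
metadata = MetaData()

@event.listens_for(Table, 'column_reflect')
def column_reflect(inspector, table, column_info):
    # only touch tables reflected into our own MetaData, as open_db does
    if table.metadata is metadata:
        mapping = column_map.get(table.name, {})
        column_info['key'] = mapping.get(column_info['name'],
                                         column_info['name'])

t = Table('t', metadata, autoload_with=engine)
print(t.c.nice_name.name)  # -> "legacy_name": key remapped, name preserved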
def test_conditional_constraint_deprecated(self):
    metadata, users, engine = self.metadata, self.users, self.engine
    nonpg_mock = engines.mock_engine(dialect_name='sqlite')
    pg_mock = engines.mock_engine(dialect_name='postgresql')
    constraint = CheckConstraint('a < b', name='my_test_constraint',
                                 table=users)

    # by placing the constraint in an Add/Drop construct, the
    # 'inline_ddl' flag is set to False
    AddConstraint(constraint, on='postgresql').execute_at('after-create', users)
    DropConstraint(constraint, on='postgresql').execute_at('before-drop', users)

    metadata.create_all(bind=nonpg_mock)
    strings = ' '.join(str(x) for x in nonpg_mock.mock)
    assert 'my_test_constraint' not in strings
    metadata.drop_all(bind=nonpg_mock)
    strings = ' '.join(str(x) for x in nonpg_mock.mock)
    assert 'my_test_constraint' not in strings

    metadata.create_all(bind=pg_mock)
    strings = ' '.join(str(x) for x in pg_mock.mock)
    assert 'my_test_constraint' in strings
    metadata.drop_all(bind=pg_mock)
    strings = ' '.join(str(x) for x in pg_mock.mock)
    assert 'my_test_constraint' in strings
def _drop_foreign_key_constraints(instance, engine, table, columns):
    """Drop foreign key constraints for a table on specific columns."""
    inspector = sqlalchemy.inspect(engine)
    drops = []
    for foreign_key in inspector.get_foreign_keys(table):
        if (
            foreign_key["name"]
            and foreign_key.get("options", {}).get("ondelete")
            and foreign_key["constrained_columns"] == columns
        ):
            drops.append(ForeignKeyConstraint((), (), name=foreign_key["name"]))

    # Bind the ForeignKeyConstraints to the table
    old_table = Table(  # noqa: F841 pylint: disable=unused-variable
        table, MetaData(), *drops
    )

    for drop in drops:
        with session_scope(session=instance.get_session()) as session:
            try:
                connection = session.connection()
                connection.execute(DropConstraint(drop))
            except (InternalError, OperationalError):
                _LOGGER.exception(
                    "Could not drop foreign constraints in %s table on %s",
                    TABLE_STATES,
                    columns,
                )
def drop_everything(engine):
    """Drop everything in database.

    Source: https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything
    """
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
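# Hedged usage sketch for the DropEverything recipe above, assuming the
# reflection/schema imports the function relies on are in scope: build two
# related tables in a throwaway SQLite database, then wipe them. SQLite
# reports its reflected foreign keys without names, so only the DropTable
# branch fires there; the engine URL and table layout are illustrative.
from sqlalchemy import (create_engine, inspect, MetaData, Table, Column,
                        Integer, ForeignKey)

engine = create_engine('sqlite://')
metadata = MetaData()
parent = Table('parent', metadata, Column('id', Integer, primary_key=True))
child = Table('child', metadata,
              Column('id', Integer, primary_key=True),
              Column('parent_id', Integer, ForeignKey('parent.id')))
metadata.create_all(engine)

drop_everything(engine)
print(inspect(engine).get_table_names())  # -> []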
def _drop_all_(self, session):
    logging.warning("dropping schema")
    inspector = reflection.Inspector.from_engine(session.bind)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        session.execute(DropConstraint(fkc))

    for table in tbs:
        session.execute(DropTable(table))

    session.commit()
def drop_db():
    if db_engine is None:
        raise Exception
    conn = db_engine.connect()
    trans = conn.begin()
    inspector = engine.reflection.Inspector.from_engine(db_engine)
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
def upgrade_resource_data_pre(migrate_engine):
    meta = sqlalchemy.MetaData(bind=migrate_engine)
    rd_table = sqlalchemy.Table('resource_data', meta, autoload=True)
    res_table = sqlalchemy.Table('resource', meta, autoload=True)

    # remove foreignkey on resource_id
    inspector = sqlalchemy.inspect(migrate_engine)
    fkc_query = inspector.get_foreign_keys('resource_data')
    if fkc_query:
        fkc = ForeignKeyConstraint([rd_table.c.resource_id],
                                   [res_table.c.id],
                                   fkc_query[0]['name'])
        migrate_engine.execute(DropConstraint(fkc))
    # migrate.ForeignKeyConstraint(columns=[rd_table.c.resource_id],
    #                              refcolumns=[res_table.c.id]).drop()

    # rename resource_id -> tmp_res_uuid
    rd_table.c.resource_id.alter('tmp_res_uuid', sqlalchemy.String(36))

    # create the new resource_id column (no foreignkey yet)
    res_id_column_kwargs = {}
    if migrate_engine.name == 'ibm_db_sa':
        # NOTE(mriedem): This is turned into a foreignkey key constraint
        # later so it must be non-nullable.
        res_id_column_kwargs['nullable'] = False
    res_id = sqlalchemy.Column('resource_id', sqlalchemy.Integer,
                               **res_id_column_kwargs)
    rd_table.create_column(res_id)
def drop_everything(db):
    # From: http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    conn = db.engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(db.engine)
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = db.Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
def rebuild_db():
    # See this SQLAlchemy recipe:
    # http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    inspector = reflection.Inspector.from_engine(db.engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        db.engine.execute(DropConstraint(fkc))

    for table in tbs:
        db.engine.execute(DropTable(table))

    db.session.commit()
    db.create_all()
def tearDown(self):
    db.session.remove()

    # Replacement for db.drop_all()
    # http://www.mbeckler.org/blog/?p=218
    engine = db.get_engine(self.app)
    conn = engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((), (), name=fk['name'])
            )
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
    engine.dispose()
def clone_table_approach_1(original_table, connection, metadata):
    new_table = None
    try:
        new_table_name = original_table.name + '_sync'
        columns = [c.copy() for c in original_table.columns]
        new_table = Table(new_table_name, metadata, quote=False, *columns)

        # Create table in database
        if not new_table.exists():
            new_table.create()
        else:
            raise Exception("New table already exists")

        # Remove constraints from new table if any
        for constraint in new_table.constraints:
            connection.execute(DropConstraint(constraint))

        # Return table handle for newly created table
        return new_table
    except Exception:
        # Drop if we did create a new table
        if new_table is not None and new_table.exists():
            new_table.drop()
        raise
def drop_db(db):
    inspector = reflection.Inspector.from_engine(db.engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        db.engine.execute(DropConstraint(fkc))

    for table in tbs:
        db.engine.execute(DropTable(table))
def db_DropEverything():
    _engine = get_engine()
    conn = _engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(_engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk["name"]:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk["name"]))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
def test_conditional_constraint(self):
    metadata, users, engine = self.metadata, self.users, self.engine
    nonpg_mock = engines.mock_engine(dialect_name="sqlite")
    pg_mock = engines.mock_engine(dialect_name="postgresql")
    constraint = CheckConstraint("a < b", name="my_test_constraint", table=users)

    # by placing the constraint in an Add/Drop construct, the
    # 'inline_ddl' flag is set to False
    event.listen(
        users,
        "after_create",
        AddConstraint(constraint).execute_if(dialect="postgresql"),
    )
    event.listen(
        users,
        "before_drop",
        DropConstraint(constraint).execute_if(dialect="postgresql"),
    )

    metadata.create_all(bind=nonpg_mock)
    strings = " ".join(str(x) for x in nonpg_mock.mock)
    assert "my_test_constraint" not in strings
    metadata.drop_all(bind=nonpg_mock)
    strings = " ".join(str(x) for x in nonpg_mock.mock)
    assert "my_test_constraint" not in strings

    metadata.create_all(bind=pg_mock)
    strings = " ".join(str(x) for x in pg_mock.mock)
    assert "my_test_constraint" in strings
    metadata.drop_all(bind=pg_mock)
    strings = " ".join(str(x) for x in pg_mock.mock)
    assert "my_test_constraint" in strings
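# A standalone sketch of the event.listen + execute_if pattern the test above
# exercises: wrapping the constraint in AddConstraint/DropConstraint keeps it
# out of the inline CREATE TABLE, and execute_if() restricts the ALTER
# statements to PostgreSQL. On this SQLite engine the table is created and
# dropped with no constraint DDL at all; table and constraint names are
# illustrative assumptions.
from sqlalchemy import (create_engine, event, MetaData, Table, Column,
                        Integer, CheckConstraint)
from sqlalchemy.schema import AddConstraint, DropConstraint

metadata = MetaData()
t = Table('scores', metadata, Column('a', Integer), Column('b', Integer))
constraint = CheckConstraint('a < b', name='a_lt_b', table=t)

event.listen(t, 'after_create',
             AddConstraint(constraint).execute_if(dialect='postgresql'))
event.listen(t, 'before_drop',
             DropConstraint(constraint).execute_if(dialect='postgresql'))

engine = create_engine('sqlite://')
metadata.create_all(engine)  # no ALTER TABLE ... ADD CONSTRAINT emitted here
metadata.drop_all(engine)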
def drop_db():
    """Workaround for dropping all tables in SQLAlchemy."""
    if db_engine is None:
        raise Exception
    conn = db_engine.connect()
    trans = conn.begin()
    inspector = engine.reflection.Inspector.from_engine(db_engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
def reset_db():
    # https://github.com/pallets/flask-sqlalchemy/issues/722 reference from bryan5989
    con = db.engine.connect()
    trans = con.begin()
    inspector = Inspector.from_engine(db.engine)

    # We need to re-create a minimal metadata with only the required things to
    # successfully emit drop constraints and tables commands for postgres
    # (based on the actual schema of the running instance)
    meta = MetaData()
    tables = []
    all_fkeys = []

    for table_name in inspector.get_table_names():
        fkeys = []
        for fkey in inspector.get_foreign_keys(table_name):
            if not fkey["name"]:
                continue
            fkeys.append(db.ForeignKeyConstraint((), (), name=fkey["name"]))
        tables.append(Table(table_name, meta, *fkeys))
        all_fkeys.extend(fkeys)

    # drop all the constraints in the data before dropping the tables
    for fkey in all_fkeys:
        con.execute(DropConstraint(fkey))

    for table in tables:
        con.execute(DropTable(table))

    trans.commit()
def _update_states_table_with_foreign_key_options(engine):
    """Add the options to foreign key constraints."""
    inspector = reflection.Inspector.from_engine(engine)
    alters = []
    for foreign_key in inspector.get_foreign_keys(TABLE_STATES):
        if foreign_key["name"] and not foreign_key["options"]:
            alters.append({
                "old_fk": ForeignKeyConstraint((), (), name=foreign_key["name"]),
                "columns": foreign_key["constrained_columns"],
            })

    if not alters:
        return

    states_key_constraints = Base.metadata.tables[
        TABLE_STATES].foreign_key_constraints
    old_states_table = Table(  # noqa: F841 pylint: disable=unused-variable
        TABLE_STATES, MetaData(), *[alter["old_fk"] for alter in alters])

    for alter in alters:
        try:
            engine.execute(DropConstraint(alter["old_fk"]))
            for fkc in states_key_constraints:
                if fkc.column_keys == alter["columns"]:
                    engine.execute(AddConstraint(fkc))
        except (InternalError, OperationalError):
            _LOGGER.exception("Could not update foreign options in %s table",
                              TABLE_STATES)
def test_conditional_constraint_deprecated(self):
    metadata, users = self.metadata, self.users
    nonpg_mock = engines.mock_engine(dialect_name="sqlite")
    pg_mock = engines.mock_engine(dialect_name="postgresql")
    constraint = CheckConstraint("a < b", name="my_test_constraint", table=users)

    # by placing the constraint in an Add/Drop construct, the
    # 'inline_ddl' flag is set to False
    AddConstraint(constraint, on="postgresql").execute_at("after-create", users)
    DropConstraint(constraint, on="postgresql").execute_at("before-drop", users)

    metadata.create_all(bind=nonpg_mock)
    strings = " ".join(str(x) for x in nonpg_mock.mock)
    assert "my_test_constraint" not in strings
    metadata.drop_all(bind=nonpg_mock)
    strings = " ".join(str(x) for x in nonpg_mock.mock)
    assert "my_test_constraint" not in strings

    metadata.create_all(bind=pg_mock)
    strings = " ".join(str(x) for x in pg_mock.mock)
    assert "my_test_constraint" in strings
    metadata.drop_all(bind=pg_mock)
    strings = " ".join(str(x) for x in pg_mock.mock)
    assert "my_test_constraint" in strings
def remove_foreign_keys(self, engine):
    inspector = reflection.Inspector.from_engine(engine)
    fake_metadata = MetaData()

    table = 'fact_wbulb_freez_alt'
    fake_tables = []
    all_fks = []

    for table_name in Base.metadata.tables:
        if table_name == table:
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if fk['name']:
                    fks.append(
                        ForeignKeyConstraint((), (), name=fk['name']))
            t = Table(table_name, fake_metadata, *fks)
            fake_tables.append(t)
            all_fks.extend(fks)

    connection = engine.connect()
    transaction = connection.begin()
    for fkc in all_fks:
        print(fkc)
        connection.execute(DropConstraint(fkc))
    transaction.commit()
def drop_all_table():
    from sqlalchemy.engine import reflection
    from sqlalchemy.schema import (MetaData, Table, DropTable,
                                   ForeignKeyConstraint, DropConstraint)

    conn = db.engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(db.engine)
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
def drop_everything():
    """Break all constraints and drop tables.

    Credit to Michael Bayer
    https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything
    """
    engine = create_engine(POSTGRES_URI)
    conn = engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
def drop_tables(engine):
    """
    Drop all the tables in the database attached to by the supplied
    engine.

    As many foreign key constraints as possible will be dropped
    first, making this quite brutal!
    """
    # from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    conn = engine.connect()
    inspector = Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # drop constraints only after everything has been gathered,
    # per the comment above
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc, cascade=True))

    for table in tbs:
        conn.execute(DropTable(table))
def drop_tables(self):
    '''DROP all tables except those for PostGIS.'''
    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    log.debug('drop_tables')
    metadata = MetaData()

    tables = []
    all_foreign_keys = []

    for table_name in self.inspector.get_table_names():
        foreign_keys = []
        for foreign_key in self.inspector.get_foreign_keys(table_name):
            if not foreign_key['name']:
                continue
            foreign_keys.append(
                ForeignKeyConstraint((), (), name=foreign_key['name'])
            )
        table = Table(table_name, metadata, *foreign_keys)
        tables.append(table)
        all_foreign_keys.extend(foreign_keys)

    for foreign_key in all_foreign_keys:
        self.conn.execute(DropConstraint(foreign_key))

    for table in tables:
        # This table is part of PostGIS extension.
        if table.name == 'spatial_ref_sys':
            continue
        self.conn.execute(DropTable(table))

    self.trans.commit()
def drop_all_tables(engine, inspector, schema=None, include_names=None):
    from sqlalchemy import Column, Table, Integer, MetaData, \
        ForeignKeyConstraint
    from sqlalchemy.schema import DropTable, DropConstraint

    if include_names is not None:
        include_names = set(include_names)

    with engine.connect() as conn:
        for tname, fkcs in reversed(
                inspector.get_sorted_table_and_fkc_names(schema=schema)):
            if tname:
                if include_names is not None and tname not in include_names:
                    continue
                conn.execute(DropTable(
                    Table(tname, MetaData(), schema=schema)
                ))
            elif fkcs:
                if not engine.dialect.supports_alter:
                    continue
                for tname, fkc in fkcs:
                    if include_names is not None and \
                            tname not in include_names:
                        continue
                    tb = Table(
                        tname, MetaData(),
                        Column('x', Integer),
                        Column('y', Integer),
                        schema=schema
                    )
                    conn.execute(DropConstraint(
                        ForeignKeyConstraint(
                            [tb.c.x], [tb.c.y], name=fkc)
                    ))
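# Possible invocation of drop_all_tables above, assuming a SQLAlchemy 1.x
# Inspector (which provides get_sorted_table_and_fkc_names); the database URL
# and the 'audit_log' table name are illustrative assumptions.
from sqlalchemy import create_engine, inspect

engine = create_engine('postgresql://localhost/test')
drop_all_tables(engine, inspect(engine))  # drop every table, FKCs first
drop_all_tables(engine, inspect(engine),
                include_names={'audit_log'})  # restrict to one table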
def _update_states_table_with_foreign_key_options(connection, engine):
    """Add the options to foreign key constraints."""
    inspector = sqlalchemy.inspect(engine)
    alters = []
    for foreign_key in inspector.get_foreign_keys(TABLE_STATES):
        if foreign_key["name"] and (
                # MySQL/MariaDB will have empty options
                not foreign_key.get("options")
                or
                # Postgres will have ondelete set to None
                foreign_key.get("options", {}).get("ondelete") is None):
            alters.append({
                "old_fk": ForeignKeyConstraint((), (), name=foreign_key["name"]),
                "columns": foreign_key["constrained_columns"],
            })

    if not alters:
        return

    states_key_constraints = Base.metadata.tables[
        TABLE_STATES].foreign_key_constraints
    old_states_table = Table(  # noqa: F841 pylint: disable=unused-variable
        TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters))

    for alter in alters:
        try:
            connection.execute(DropConstraint(alter["old_fk"]))
            for fkc in states_key_constraints:
                if fkc.column_keys == alter["columns"]:
                    connection.execute(AddConstraint(fkc))
        except (InternalError, OperationalError):
            _LOGGER.exception("Could not update foreign options in %s table",
                              TABLE_STATES)
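# Isolated sketch of the drop-and-recreate trick both versions of this
# migration use: a name-only ForeignKeyConstraint stub is bound to a Table so
# DropConstraint can target it by name, then the fully-specified replacement
# is added back. Table and constraint names are illustrative assumptions; the
# DDL is compiled and printed rather than executed.
from sqlalchemy import MetaData, Table, Column, Integer, ForeignKeyConstraint
from sqlalchemy.schema import AddConstraint, DropConstraint
from sqlalchemy.dialects import postgresql

metadata = MetaData()
events = Table('events', metadata, Column('event_id', Integer, primary_key=True))
states = Table(
    'states', metadata,
    Column('event_id', Integer),
    ForeignKeyConstraint(['event_id'], ['events.event_id'],
                         name='states_event_id_fkey', ondelete='CASCADE'),
)

# stub carrying only the name, bound to a throwaway Table for DDL purposes
old_fk = ForeignKeyConstraint((), (), name='states_event_id_fkey')
Table('states', MetaData(), old_fk)

dialect = postgresql.dialect()
print(DropConstraint(old_fk).compile(dialect=dialect))   # ALTER ... DROP CONSTRAINT
for fkc in states.foreign_key_constraints:
    print(AddConstraint(fkc).compile(dialect=dialect))   # ALTER ... ADD CONSTRAINT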
def setupPackage():
    os.environ['MONGO_URI'] = 'mongodb://localhost'
    os.environ['MONGO_DB_NAME'] = 'royal_example'
    os.environ['MONGO_DB_PREFIX'] = ''

    # sqla extension setup.
    global engine
    alembic_config = Config()
    alembic_config.set_main_option('script_location', 'example/ext/sqla/db')
    alembic_config.set_main_option('sqlalchemy.url', mysql_uri)
    engine = create_engine(mysql_uri)
    try:
        command.downgrade(alembic_config, 'base')
    except Exception:
        log.exception("Migration downgrade failed, clearing all tables")
        metadata = MetaData(engine)
        metadata.reflect()
        for table in metadata.tables.values():
            for fk in table.foreign_keys:
                engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all()
    command.upgrade(alembic_config, 'head')
def db(app, request):
    """Returns session-wide initialised database.

    Drops all existing tables - Meta follows Postgres FKs
    """
    with app.app_context():
        # Clear out any existing tables
        metadata = MetaData(_db.engine)
        metadata.reflect()
        for table in metadata.tables.values():
            for fk in table.foreign_keys:
                _db.engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all()
        _db.drop_all()

        # ############################################
        # There are 2 approaches, an empty database, or the same one that the app will use
        #     create the tables
        #     _db.create_all()
        # or
        # Use Alembic to load all of the DB revisions including supporting lookup data
        # This is the path we'll use in NAMEX!!

        # even though this isn't referenced directly, it sets up the internal
        # configs that upgrade needs
        migrate = Migrate(app, _db)
        upgrade()
def db_drop_everything(self, engine):
    """From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything"""
    conn = None
    try:
        conn = engine.connect()
        transactional = conn.begin()
        inspector = reflection.Inspector.from_engine(engine)
        metadata = MetaData()

        tables = []
        all_foreign_keys = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk["name"]:
                    continue
                fks.append(ForeignKeyConstraint((), (), name=fk["name"]))
            t = Table(table_name, metadata, *fks)
            tables.append(t)
            all_foreign_keys.extend(fks)

        for foreignkey in all_foreign_keys:
            conn.execute(DropConstraint(foreignkey))

        for table in tables:
            conn.execute(DropTable(table))

        transactional.commit()
    except Exception as err:
        print(err)
        return False
    finally:
        # conn may never have been opened if engine.connect() failed
        if conn is not None:
            conn.close()
def destroy_everything(echo=True):
    """Drop every foreign key constraint, then every table.

    Constraints are dropped first so that cyclical dependencies
    between tables cannot block the drops.
    Ref. http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    """
    engine = session.get_engine(echo=echo)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        try:
            print(str(DropConstraint(fkc)) + ';')
            conn.execute(DropConstraint(fkc))
        except Exception:
            print(traceback.format_exc())

    for table in tbs:
        try:
            print(str(DropTable(table)).strip() + ';')
            conn.execute(DropTable(table))
        except Exception:
            print(traceback.format_exc())

    trans.commit()
def db(app):  # pylint: disable=redefined-outer-name, invalid-name
    """Return a session-wide initialised database.

    Drops all existing tables - Meta follows Postgres FKs
    """
    with app.app_context():
        # Clear out any existing tables
        metadata = MetaData(_db.engine)
        metadata.reflect()
        for table in metadata.tables.values():
            for fk in table.foreign_keys:  # pylint: disable=invalid-name
                _db.engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all()
        _db.drop_all()

        sequence_sql = """SELECT sequence_name FROM information_schema.sequences
                          WHERE sequence_schema='public'
                       """

        sess = _db.session()
        for seq in [name for (name, ) in sess.execute(text(sequence_sql))]:
            try:
                sess.execute(text('DROP SEQUENCE public.%s ;' % seq))
                print('DROP SEQUENCE public.%s ' % seq)
            except Exception as err:  # pylint: disable=broad-except
                print(f'Error: {err}')
        sess.commit()

        # ############################################
        # There are 2 approaches, an empty database, or the same one that the app will use
        #     create the tables
        #     _db.create_all()
        # or
        # Use Alembic to load all of the DB revisions including supporting lookup data
        # This is the path we'll use in selfservice_api!!

        # even though this isn't referenced directly,
        # it sets up the internal configs that upgrade needs
        Migrate(app, _db)
        upgrade()

        admin_user = {
            'sub': '65a62428-6713-4e7d-8f12-99e56de58386',
            'given_name': 'admin',
            'family_name': 'ss',
            'email': '*****@*****.**'
        }
        sess.execute(
            text(
                'INSERT INTO public."user"(' +
                'created, modified, email, phone, first_name, last_name, oauth_id)' +
                "VALUES (now(), null, '" + admin_user['email'] + "', " +
                "'123456789', '" + admin_user['given_name'] + "', " +
                " '" + admin_user['family_name'] + "', '" + admin_user['sub'] + "'); "))
        sess.commit()

        return _db