def downgrade():
    """Revert the zoom_meetings migration: drop its indexes, the table, and the schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    schema = 'plugin_vc_zoom'
    for index in ('ix_zoom_meetings_url_zoom',
                  'ix_zoom_meetings_owned_by_id',
                  'ix_zoom_meetings_meeting'):
        op.drop_index(op.f(index), table_name='zoom_meetings', schema=schema)
    op.drop_table('zoom_meetings', schema=schema)
    # Finally remove the now-empty plugin schema.
    op.execute(DropSchema(schema))
def downgrade():
    """Drop the event_abstracts tables and schema.

    The foreign key from ``events.contributions`` must be removed first,
    since it references the abstracts table being dropped.
    """
    op.drop_constraint('fk_contributions_abstract_id_abstracts', 'contributions', schema='events')
    for table in ('abstract_field_values', 'abstracts'):
        op.drop_table(table, schema='event_abstracts')
    op.execute(DropSchema('event_abstracts'))
def delete_all_tables(db):
    """Drop all tables in the database."""
    conn = db.engine.connect()
    transaction = conn.begin()
    inspector = inspect(db.engine)
    # Lightweight metadata used only to build droppable Table/constraint objects;
    # we never reflect full column definitions.
    metadata = MetaData()
    all_schema_tables = get_all_tables(db)
    tables = []
    all_fkeys = []
    for schema, schema_tables in all_schema_tables.items():
        for table_name in schema_tables:
            # Collect only *named* foreign keys; unnamed ones cannot be
            # dropped via DropConstraint.
            fkeys = [ForeignKeyConstraint((), (), name=fk['name'])
                     for fk in inspector.get_foreign_keys(table_name, schema=schema)
                     if fk['name']]
            tables.append(Table(table_name, metadata, *fkeys, schema=schema))
            all_fkeys.extend(fkeys)
    # Drop every FK constraint first so tables can be dropped in any order
    # (handles cyclic dependencies between tables).
    for fkey in all_fkeys:
        conn.execute(DropConstraint(fkey))
    for table in tables:
        conn.execute(DropTable(table))
    for schema in all_schema_tables:
        if schema != 'public':
            # Drop any functions still living in the schema before dropping it;
            # the query builds one DROP FUNCTION statement per function.
            # NOTE(review): schema names come from the DB inspector, but this is
            # still string interpolation into SQL — assumed trusted input.
            row = conn.execute('''
                SELECT 'DROP FUNCTION ' || ns.nspname || '.' || proname || '(' || oidvectortypes(proargtypes) || ')'
                FROM pg_proc INNER JOIN pg_namespace ns ON (pg_proc.pronamespace = ns.oid)
                WHERE ns.nspname = '{}' order by proname;
            '''.format(schema))
            for stmt, in row:
                conn.execute(stmt)
            conn.execute(DropSchema(schema))
    transaction.commit()
def delete_all_tables(db):
    """Drop all tables in the database.

    Named foreign-key constraints are dropped first so that tables with
    cyclic references can then be dropped in any order; finally every
    non-public schema is removed.
    """
    conn = db.engine.connect()
    transaction = conn.begin()
    inspector = Inspector.from_engine(db.engine)
    # Metadata used only to build droppable Table objects; nothing is reflected.
    metadata = MetaData()
    all_schema_tables = get_all_tables(db)
    tables = []
    all_fkeys = []
    # BUG FIX: dict.iteritems() is Python 2 only; .items() behaves
    # identically here and also works on Python 3.
    for schema, schema_tables in all_schema_tables.items():
        for table_name in schema_tables:
            # Only *named* FKs can be targeted by DropConstraint.
            fkeys = [ForeignKeyConstraint((), (), name=fk['name'])
                     for fk in inspector.get_foreign_keys(table_name, schema=schema)
                     if fk['name']]
            tables.append(Table(table_name, metadata, *fkeys, schema=schema))
            all_fkeys.extend(fkeys)
    # Drop constraints first to break dependency cycles between tables.
    for fkey in all_fkeys:
        conn.execute(DropConstraint(fkey))
    for table in tables:
        conn.execute(DropTable(table))
    for schema in all_schema_tables:
        if schema != 'public':
            conn.execute(DropSchema(schema))
    transaction.commit()
def downgrade():
    """Tear down the attachments schema: FK first, then all tables, then the schema."""
    schema = 'attachments'
    op.drop_constraint('fk_attachments_file_id_files', 'attachments', schema=schema)
    for table in ('attachment_principals', 'folder_principals', 'files',
                  'attachments', 'folders'):
        op.drop_table(table, schema=schema)
    op.execute(DropSchema(schema))
def test_dataset(engine):
    """Create a randomly named dataset (schema) for a test and drop it on teardown.

    Generator-style fixture: yields the dataset name, then removes the
    schema once the consuming test finishes.
    """
    # 10 random lowercase letters -- collision-safe enough for test runs.
    dataset = "".join(random.choices(string.ascii_lowercase, k=10))
    with engine.connect() as conn:
        logging.info(f"Creating dataset {dataset}")
        conn.execute(CreateSchema(dataset))
        yield dataset
        # Teardown: runs after the test body completes.
        logging.info(f"Dropping dataset {dataset}")
        conn.execute(DropSchema(dataset))
def downgrade():
    """Remove the checkin webhook settings table, its indexes, and its schema."""
    # ### commands auto generated by Alembic ###
    schema = 'plugin_checkin_webhook'
    for index in ('ix_checkin_webhook_settings_ticket_template_id',
                  'ix_checkin_webhook_settings_event_id'):
        op.drop_index(op.f(index), table_name='checkin_webhook_settings', schema=schema)
    op.drop_table('checkin_webhook_settings', schema=schema)
    # ### end Alembic commands ###
    op.execute(DropSchema(schema))
def downgrade():
    """Drop the zoom_licenses and zoom_extensions tables (indexes first) and the schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    schema = 'plugin_vc_zoom'
    # For each table, drop its indexes and then the table itself, in order.
    for table, indexes in (
        ('zoom_licenses', ('ix_zoom_licenses_license_name',
                           'ix_uq_zoom_licenses_license_id')),
        ('zoom_extensions', ('ix_zoom_extensions_url_zoom',)),
    ):
        for index in indexes:
            op.drop_index(op.f(index), table_name=table, schema=schema)
        op.drop_table(table, schema=schema)
    # ### end Alembic commands ###
    op.execute(DropSchema(schema))
def connection(sync_engine):
    """Yield a connection with the test tables created; drop everything on teardown.

    On non-SQLite backends an extra ``altschema`` schema with one table is
    created as well, since SQLite has no schema support.
    """
    with sync_engine.connect() as conn:
        md = MetaData()
        Table("table", md, Column("column1", Integer, primary_key=True))
        Table("table2", md, Column("fk_column", ForeignKey("table.column1")))
        supports_schemas = conn.dialect.name != "sqlite"
        if supports_schemas:
            conn.execute(CreateSchema("altschema"))
            Table("table3", md, Column("fk_column", Integer), schema="altschema")
        md.create_all(conn)

        yield conn

        if supports_schemas:
            md.drop_all(conn)
            conn.execute(DropSchema("altschema"))
def destroy_everything(echo=True):
    """
    Using metadata.reflect() to get all constraints and tables.
    metadata.drop_all() as it handles cyclical constraints between tables.
    Ref. http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    """
    engine = session.get_engine(echo=echo)
    try:
        # the transaction only applies if the DB supports
        # transactional DDL, i.e. Postgresql, MS SQL Server
        with engine.begin() as conn:
            inspector = inspect(conn)  # type: Union[Inspector, PGInspector]
            # Reversed topological order: children before parents.
            # schema='*' asks the inspector for every schema, not just default.
            for tname, fkcs in reversed(
                    inspector.get_sorted_table_and_fkc_names(schema='*')):
                if tname:
                    # Plain table entry: drop the table itself.
                    drop_table_stmt = DropTable(
                        Table(tname, MetaData(), schema='*'))
                    conn.execute(drop_table_stmt)
                elif fkcs:
                    # FK-constraint-only entry (cycle breaker); skip on
                    # dialects that cannot ALTER TABLE to drop constraints.
                    if not engine.dialect.supports_alter:
                        continue
                    for tname, fkc in fkcs:
                        fk_constraint = ForeignKeyConstraint((), (), name=fkc)
                        # Attaching the constraint to a Table is required
                        # before DropConstraint can render it.
                        Table(tname, MetaData(), fk_constraint)
                        drop_constraint_stmt = DropConstraint(fk_constraint)
                        conn.execute(drop_constraint_stmt)
            if config_has_option('database', 'schema'):
                schema = config_get('database', 'schema')
                if schema:
                    conn.execute(DropSchema(schema, cascade=True))
            if engine.dialect.name == 'postgresql':
                # Postgres enum types survive table drops; remove them explicitly.
                assert isinstance(inspector, PGInspector), 'expected a PGInspector'
                for enum in inspector.get_enums(schema='*'):
                    sqlalchemy.Enum(**enum).drop(bind=conn)
    except Exception as e:
        # Deliberately best-effort: report the failure instead of raising.
        print('Cannot destroy db:', e)
        print(traceback.format_exc())
def database(app, request):
    """Provide a SQLAlchemy database bound to *app* with a fresh ``tests`` schema.

    Any leftover schema from a previous run is dropped first; a finalizer
    tears everything down after the requesting test finishes.
    """
    db.app = app
    db.init_app(app)
    db.drop_all()
    try:
        db.engine.execute(DropSchema('tests', cascade=True))
    except ProgrammingError:
        # Schema did not exist yet -- nothing to clean up.
        pass
    db.engine.execute(CreateSchema('tests'))
    db.create_all()

    def _teardown():
        db.drop_all()
        db.engine.execute(DropSchema('tests', cascade=True))

    request.addfinalizer(_teardown)
    return db
def drop_everything():
    """
    Pre-gather all named constraints and table names, and drop everything.
    This is better than using metadata.reflect(); metadata.drop_all()
    as it handles cyclical constraints between tables.
    Ref. https://github.com/sqlalchemy/sqlalchemy/wiki/DropEverything
    """
    engine = get_engine()
    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    with engine.begin() as conn:
        inspector = inspect(conn)  # type: Union[Inspector, PGInspector]
        # Reversed topological order: drop children before parents.
        # schema='*' covers every schema, not just the default one.
        for tname, fkcs in reversed(
                inspector.get_sorted_table_and_fkc_names(schema='*')):
            if tname:
                # Plain table entry: drop the table itself.
                drop_table_stmt = DropTable(
                    Table(tname, MetaData(), schema='*'))
                conn.execute(drop_table_stmt)
            elif fkcs:
                # FK-constraint-only entry (cycle breaker); skip on dialects
                # that cannot ALTER TABLE to drop constraints.
                if not engine.dialect.supports_alter:
                    continue
                for tname, fkc in fkcs:
                    fk_constraint = ForeignKeyConstraint((), (), name=fkc)
                    # The constraint must be attached to a Table before
                    # DropConstraint can render it.
                    Table(tname, MetaData(), fk_constraint)
                    drop_constraint_stmt = DropConstraint(fk_constraint)
                    conn.execute(drop_constraint_stmt)
        schema = config_get('database', 'schema', raise_exception=False)
        if schema:
            conn.execute(DropSchema(schema, cascade=True))
        if engine.dialect.name == 'postgresql':
            # Postgres enum types survive table drops; remove them explicitly.
            assert isinstance(inspector, PGInspector), 'expected a PGInspector'
            for enum in inspector.get_enums(schema='*'):
                sqlalchemy.Enum(**enum).drop(bind=conn)
def engine(request, tmpdir_factory):
    """Build an engine for the requested backend, create the test tables, and clean up.

    Supported params: ``sqlite-file``, ``sqlite-memory``, ``mysql``,
    ``postgresql``. Non-SQLite backends get an extra ``altschema`` schema.
    """
    backend = request.param
    if backend == 'sqlite-file':
        tmpdir = tmpdir_factory.mktemp('asphalt-sqlalchemy')
        db_path = str(tmpdir.join('test.db'))
        engine = create_engine('sqlite:///' + db_path)
    elif backend == 'sqlite-memory':
        engine = create_engine('sqlite:///:memory:')
    elif backend == 'mysql':
        engine = create_engine(request.getfixturevalue('mysql_url'))
    elif backend == 'postgresql':
        engine = create_engine(request.getfixturevalue('postgresql_url'))
    else:
        engine = None

    is_sqlite = engine.dialect.name == 'sqlite'
    if not is_sqlite:
        engine.execute(CreateSchema('altschema'))

    if backend != 'sqlite-memory':
        metadata = MetaData()
        Table('table', metadata, Column('column1', Integer, primary_key=True))
        Table('table2', metadata, Column('fk_column', ForeignKey('table.column1')))
        if not is_sqlite:
            Table('table3', metadata, Column('fk_column', Integer), schema='altschema')
        metadata.create_all(engine)

    yield engine

    if not is_sqlite:
        metadata.drop_all(engine)
        engine.execute(DropSchema('altschema'))
def downgrade():
    """Drop all survey tables (children first) and the event_surveys schema."""
    for table in ('answers', 'submissions', 'items', 'surveys'):
        op.drop_table(table, schema='event_surveys')
    op.execute(DropSchema('event_surveys'))
def downgrade():
    """Drop the paper reviewing tables and the event_paper_reviewing schema."""
    for table in ('paper_files', 'contribution_roles'):
        op.drop_table(table, schema='event_paper_reviewing')
    op.execute(DropSchema('event_paper_reviewing'))
from flaskdb.database import db

app = Flask(__name__)


@app.route('/')
def new_user():
    """Create a hard-coded user on every request (demo endpoint)."""
    db.session.add(User(username='******'))
    db.session.commit()
    return 'New user created!!'


if __name__ == '__main__':
    # Database connection string must come from the environment.
    DATABASE_URI = os.getenv('DATABASE_URI')
    assert DATABASE_URI, 'Environment variable "DATABASE_URI" required.'
    app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URI
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db.app = app
    db.init_app(app)
    # Start from a clean 'tests' schema; ignore the error if it doesn't exist.
    try:
        db.engine.execute(DropSchema('tests', cascade=True))
    except ProgrammingError:
        pass
    db.engine.execute(CreateSchema('tests'))
    db.create_all()
    app.run()
def downgrade():
    """Drop the livesync tables and their plugin schema."""
    for table in ('queues', 'agents'):
        op.drop_table(table, schema='plugin_livesync')
    op.execute(DropSchema('plugin_livesync'))
def downgrade():
    """Drop the OAuth tables (tokens before applications) and the oauth schema."""
    for table in ('tokens', 'applications'):
        op.drop_table(table, schema='oauth')
    op.execute(DropSchema('oauth'))
def downgrade():
    """Remove the plugin_print_checkin schema (no tables to drop)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # ### end Alembic commands ###
    op.execute(DropSchema('plugin_print_checkin'))
def downgrade():
    """Drop the CERN access request tables and their plugin schema."""
    for table in ('access_request_regforms', 'access_requests'):
        op.drop_table(table, schema='plugin_cern_access')
    op.execute(DropSchema('plugin_cern_access'))
def downgrade():
    """Undo the categories schema split.

    Drops the legacy ID map, moves the index table back to the ``indico``
    schema, and removes the now-empty ``categories`` schema.
    """
    schema = 'categories'
    op.drop_table('legacy_id_map', schema=schema)
    op.execute('ALTER TABLE categories.category_index SET SCHEMA indico')
    op.execute(DropSchema(schema))
def downgrade():
    """Drop the editing file types table and the event_editing schema."""
    schema = 'event_editing'
    op.drop_table('file_types', schema=schema)
    op.execute(DropSchema(schema))
def downgrade():
    """Drop the example plugin's table and schema."""
    schema = 'plugin_example'
    op.drop_table('foo', schema=schema)
    op.execute(DropSchema(schema))
def downgrade():
    """Drop the citadel ID map table and the plugin schema."""
    schema = 'plugin_citadel'
    op.drop_table('id_map', schema=schema)
    op.execute(DropSchema(schema))
def drop(self, cascade=False):
    """Drop this schema.

    :param cascade: also drop all objects contained in the schema
    """
    statement = DropSchema(self.name, cascade=cascade)
    with cnx(self.migration) as conn:
        conn.execute(statement)
def downgrade():
    """Drop the chat tables (events before rooms) and the plugin schema."""
    for table in ('chatroom_events', 'chatrooms'):
        op.drop_table(table, schema='plugin_chat')
    op.execute(DropSchema('plugin_chat'))
def drop_database():
    """Drop all ORM tables, then remove the 'tests' schema itself."""
    db.drop_all()
    engine = db.engine
    engine.execute(DropSchema('tests', cascade=True))
def downgrade():
    """Drop the BlueJeans extensions table and the plugin schema."""
    schema = 'plugin_vc_bluejeans'
    op.drop_table('bluejeans_extensions', schema=schema)
    op.execute(DropSchema(schema))
def downgrade():
    """Drop the Vidyo extensions table and the plugin schema."""
    schema = 'plugin_vc_vidyo'
    op.drop_table('vidyo_extensions', schema=schema)
    op.execute(DropSchema(schema))
def downgrade():
    """Drop the user tables (emails before users) and the users schema."""
    for table in ('emails', 'users'):
        op.drop_table(table, schema='users')
    op.execute(DropSchema('users'))