def upgrade(migrate_engine):
    """Move per-row script data into a shared meas_ScriptTable.

    Drops the script blob/hash/name columns from the measurement and
    extraction tables, creates the new script table, and links both tables
    to it via a ``script_id`` foreign key.
    """
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta = MetaData(bind=migrate_engine)
    measurement = Table('meas_MeasurementTable', meta, autoload=True)
    extraction = Table('meas_ExtractionTable', meta, autoload=True)

    # Remove the duplicated per-row script columns from both tables.
    for tbl in (measurement, extraction):
        tbl.c.script_blob.drop()
        tbl.c.hash.drop()
        tbl.c.script_name.drop()

    # Replace them with a reference into the new shared table.
    for tbl in (measurement, extraction):
        Column('script_id', Integer).create(tbl)

    script = Table('meas_ScriptTable', meta,
                   Column('id', Integer, primary_key=True),
                   Column('name', String(80)),
                   Column('hash', String(32)),
                   Column('blob', BLOB))
    script.create()

    ForeignKeyConstraint([measurement.c.script_id], [script.c.id]).create()
    ForeignKeyConstraint([extraction.c.script_id], [script.c.id]).create()
def upgrade(migrate_engine):
    """Recreate the user foreign key on ``authentication_userrole``.

    Drops any reflected FK whose name contains 'fk_userrole_user', then
    recreates the constraint exactly as declared on the ``UserRole`` model
    (named 'fk_userrole_users').
    """
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    debug = False
    migrate_engine.echo = debug
    meta.bind = migrate_engine  # NOTE(review): `meta` is module-level — assumed intentional
    # Reflect the live schema so we see the FK names the database actually
    # has (auto-generated names differ between MySQL and PostgreSQL).
    real_meta = MetaData()
    real_meta.bind = migrate_engine
    real_meta.reflect()
    rt = real_meta.tables['authentication_userrole']
    # deepcopy: dropping a constraint mutates the reflected collection,
    # which would otherwise change size during iteration.
    for ctraint in deepcopy(rt.foreign_keys):
        if 'fk_userrole_user' in ctraint.name:
            column = ctraint.column
            parent = ctraint.parent
            fk = ForeignKeyConstraint([parent], [column], **{'table': rt})
            fk.name = ctraint.name
            fk.drop()
    # Rebuild the FK from the constraint declared on the model.
    fkp = [a for a in UserRole.__table__.foreign_keys
           if a.name == 'fk_userrole_users'][0]
    fk = ForeignKeyConstraint([fkp.parent], [fkp.column],
                              **{'table': fkp.parent.table})
    fk.name = fkp.name
    fk.use_alter = fkp.use_alter
    fk.ondelete = fkp.ondelete
    fk.onupdate = fkp.onupdate
    fk.create()
    # Re-reflect so any later code sees the recreated constraint.
    new_meta = MetaData(bind=migrate_engine)
    new_meta.reflect()
def upgrade(migrate_engine):
    """Recreate the builder_masters / configured_workers FKs with ON DELETE CASCADE."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    builders = sautils.Table('builders', metadata, autoload=True)
    masters = sautils.Table('masters', metadata, autoload=True)
    workers = sautils.Table('workers', metadata, autoload=True)
    builder_masters = sautils.Table('builder_masters', metadata, autoload=True)
    configured_workers = sautils.Table('configured_workers', metadata, autoload=True)
    fks_to_change = []
    # we need to parse the reflected model in order to find the automatic fk name that was put
    # mysql and pgsql have different naming convention so this is not very easy to have generic code working.
    for table, keys in [(builder_masters, (builders.c.id, masters.c.id)),
                        (configured_workers, (builder_masters.c.id, workers.c.id))]:
        for fk in table.constraints:
            if not isinstance(fk, sa.ForeignKeyConstraint):
                continue
            for c in fk.elements:
                if c.column in keys:
                    # migrate.xx.ForeignKeyConstraint is changing the model so initializing here
                    # would break the iteration (Set changed size during iteration)
                    fks_to_change.append((
                        table, (fk.columns, [c.column]),
                        dict(name=fk.name, ondelete='CASCADE')))
    # Re-create each collected FK: drop the old definition (where supported),
    # then add it back with cascade semantics.
    for table, args, kwargs in fks_to_change:
        fk = ForeignKeyConstraint(*args, **kwargs)
        table.append_constraint(fk)
        try:
            fk.drop()
        except NotSupportedError:
            pass  # some versions of sqlite do not support drop, but will still update the fk
        fk.create()
def upgrade(migrate_engine):
    """Recreate the builder_masters / configured_workers FKs with ON DELETE CASCADE."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    builders = sautils.Table('builders', metadata, autoload=True)
    masters = sautils.Table('masters', metadata, autoload=True)
    workers = sautils.Table('workers', metadata, autoload=True)
    builder_masters = sautils.Table('builder_masters', metadata, autoload=True)
    configured_workers = sautils.Table('configured_workers', metadata, autoload=True)
    fks_to_change = []
    # we need to parse the reflected model in order to find the automatic fk name that was put
    # mysql and pgsql have different naming convention so this is not very easy to have generic code working.
    for table, keys in [(builder_masters, (builders.c.id, masters.c.id)),
                        (configured_workers, (builder_masters.c.id, workers.c.id))]:
        for fk in table.constraints:
            if not isinstance(fk, sa.ForeignKeyConstraint):
                continue
            for c in fk.elements:
                if c.column in keys:
                    # migrate.xx.ForeignKeyConstraint is changing the model so initializing here
                    # would break the iteration (Set changed size during iteration)
                    fks_to_change.append((
                        table, (fk.columns, [c.column]),
                        dict(name=fk.name, ondelete='CASCADE')))
    # Drop (where the backend allows it) and re-add each FK with CASCADE.
    for table, args, kwargs in fks_to_change:
        fk = ForeignKeyConstraint(*args, **kwargs)
        table.append_constraint(fk)
        try:
            fk.drop()
        except NotSupportedError:
            pass  # some versions of sqlite do not support drop, but will still update the fk
        fk.create()
def upgrade(migrate_engine):
    """Recreate the auth_user_log foreign keys with CASCADE semantics.

    Declares the model inline, reflects the live schema, and re-adds every
    ForeignKey declared on the model with ON UPDATE/DELETE CASCADE, dropping
    any pre-existing version of the constraint first.
    """
    class AuthUserLog(Base):
        """event: L - Login, R - Register, P - Password, F - Forgot"""
        __tablename__ = 'auth_user_log'
        __table_args__ = {"sqlite_autoincrement": True}
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer,
                         ForeignKey("auth_users.id", onupdate='CASCADE',
                                    ondelete='CASCADE'),
                         index=True)
        time = Column(DateTime(), default=func.now())
        ip_addr = Column(Unicode(39), nullable=False)
        internal_user = Column(Boolean, nullable=False, default=False)
        external_user = Column(Boolean, nullable=False, default=False)
        event = Column(Enum(u'L', u'R', u'P', u'F', name=u"event"), default=u'L')
    recreate_constraints = [AuthUserLog]
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    debug = True  # NOTE(review): echoes all SQL; presumably deliberate for this migration
    session.configure(bind=migrate_engine)
    migrate_engine.echo = debug
    metadata.bind = migrate_engine
    metadata.reflect(only=['auth_users'])
    r_meta = s.MetaData(migrate_engine, True)  # reflected view of the live schema

    def commit():
        # Force both metadata connections to commit so DDL takes effect
        # before the next statement runs.
        session.commit()
        r_meta.bind.execute('COMMIT;')
        metadata.bind.execute('COMMIT;')

    # create constraints
    fks = []
    for md in recreate_constraints:
        t = md.__table__
        rt = r_meta.tables[t.name]
        rt_constraints = [a for a in rt.foreign_keys]
        # deepcopy: dropping constraints mutates the collection being iterated.
        for cs in deepcopy(t.foreign_keys):
            if cs.__class__.__name__ == 'ForeignKey':
                table, column = cs.target_fullname.split('.')
                target = [r_meta.tables[table].c[column]]
                parent = [r_meta.tables[cs.parent.table.name].c[cs.parent.name]]
                fk = ForeignKeyConstraint(columns=parent, refcolumns=target)
                fk.use_alter = cs.use_alter
                fk.ondelete = 'CASCADE'
                fk.onupdate = 'CASCADE'
                fk.name = cs.name
                fks.append(fk)
                # Drop only if the constraint (by name or by target) already
                # exists in the reflected schema.
                if (cs.name in [a.name for a in rt_constraints] or
                        (cs.target_fullname in
                         [a.target_fullname for a in rt_constraints])):
                    fk.drop(migrate_engine)
                    commit()
    for fk in fks:
        fk.create(migrate_engine)
        commit()
def upgrade(migrate_engine):
    """Link user.site_id to site.id with a foreign key."""
    meta.bind = migrate_engine
    user_table = Table("user", meta, autoload=True)
    site_table = Table("site", meta, autoload=True)
    ForeignKeyConstraint([user_table.c.site_id], [site_table.c.id]).create()
def upgrade(migrate_engine):
    """Add missing foreign key constraint on pci_devices.compute_node_id."""
    meta = MetaData(bind=migrate_engine)
    devices = Table("pci_devices", meta, autoload=True)
    nodes = Table("compute_nodes", meta, autoload=True)
    constraint = ForeignKeyConstraint(columns=[devices.c.compute_node_id],
                                      refcolumns=[nodes.c.id])
    constraint.create()
def upgrade(migrate_engine):
    """Ensure job_metadata.job_id references jobs.id."""
    meta = MetaData()
    meta.bind = migrate_engine
    jobs_table = Table('jobs', meta, autoload=True)
    metadata_table = Table('job_metadata', meta, autoload=True)
    # Skip databases where a foreign key already exists on the table.
    if metadata_table.foreign_keys:
        return
    ForeignKeyConstraint([metadata_table.c.job_id], [jobs_table.c.id]).create()
def upgrade(migrate_engine):
    """Add missing foreign key constraint on pci_devices.compute_node_id."""
    metadata = MetaData(bind=migrate_engine)
    pci = Table('pci_devices', metadata, autoload=True)
    nodes = Table('compute_nodes', metadata, autoload=True)
    ForeignKeyConstraint(columns=[pci.c.compute_node_id],
                         refcolumns=[nodes.c.id]).create()
def upgrade(migrate_engine):
    """Add a 'slot' column to the execution queue and create spider_settings."""
    meta.bind = migrate_engine
    queue_table = Table('spider_execution_queue', meta, autoload=True)
    Column('slot', Integer, default=1).create(queue_table)
    spider_settings.create()
    spiders_table = Table('spiders', meta, autoload=True)
    fk = ForeignKeyConstraint([spider_settings.c.spider_id],
                              [spiders_table.c.id])
    fk.create()
def upgrade(migrate_engine):
    """Ensure schedule_metadata.schedule_id references schedules.id."""
    meta = MetaData()
    meta.bind = migrate_engine
    schedules_table = Table('schedules', meta, autoload=True)
    metadata_table = Table('schedule_metadata', meta, autoload=True)
    # Constraint already present: nothing to do.
    if metadata_table.foreign_keys:
        return
    ForeignKeyConstraint([metadata_table.c.schedule_id],
                         [schedules_table.c.id]).create()
def upgrade(migrate_engine):
    """Migrate ACL permissions into a separate authentication_permission table.

    For the users and projects ACL tables: drop the old permission FK and
    column (when it is the legacy CHAR/INTEGER form), add the new column,
    create the FK to the permission table, and rebuild the primary key.
    """
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    debug = False
    migrate_engine.echo = debug
    meta.bind = migrate_engine  # NOTE(review): module-level `meta`
    # Reflect the live schema to discover backend-specific constraint names.
    real_meta = MetaData()
    real_meta.bind = migrate_engine
    real_meta.reflect()
    # finally i decided to go to a separate permission table
    if 'authentication_permission' not in real_meta.tables:
        permission.create()
    for acl, item in ((aclusers, 'users'), (aclprojects, 'projects')):
        rt = real_meta.tables[acl.name]
        # Drop any existing permission-related FK (deepcopy: drop mutates
        # the collection being iterated).
        for ctraint in deepcopy(rt.foreign_keys):
            if ('perm' in ctraint.name) or ('perm' in ctraint.parent.name):
                column = ctraint.column
                parent = ctraint.parent
                fk = ForeignKeyConstraint([parent], [column], **{'table': rt})
                fk.name = ctraint.name
                fk.drop()
        # Drop the old permission column if it still carries a FK...
        if 'permission' in rt.c:
            if len(rt.c["permission"].foreign_keys) > 0:
                rt.c["permission"].drop()
        # ...or if it has the legacy CHAR/INTEGER type.
        if 'permission' in rt.c:
            ctype = rt.c['permission'].type.__class__.__name__
            drop = False
            if 'CHAR' in ctype:
                drop = True
            if 'INTEGER' in ctype:
                drop = True
            if drop:
                rt.c["permission"].drop()
        # (Re)create the column from the model definition when missing.
        if not ('permission' in rt.c):
            acl.c["permission"].create()
        # refresh metA
        # Template FK carrying the desired name/flags for this ACL table.
        fkp = {"users": ForeignKey("authentication_permission.id",
                                   name="fk_userssacl_permission",
                                   use_alter=True, ondelete="CASCADE",
                                   onupdate="CASCADE"),
               "projects": ForeignKey("authentication_permission.id",
                                      name="fk_projectsacl_permission",
                                      use_alter=True, ondelete="CASCADE",
                                      onupdate="CASCADE"),
               }.get(item)
        fk = ForeignKeyConstraint([acl.c.permission], [permission.c.id],
                                  **{'table': acl})
        fk.name = fkp.name
        fk.use_alter = fkp.use_alter
        fk.ondelete = fkp.ondelete
        fk.onupdate = fkp.onupdate
        fk.create()
        # Re-reflect and rebuild the composite primary key; project ACLs
        # additionally include 'rid'.
        new_meta = MetaData(bind=migrate_engine)
        new_meta.reflect()
        nt = new_meta.tables[acl.name]
        columns = []
        if 'project' in item:
            columns.append(nt.c['rid'])
        columns.extend([nt.c['role'], nt.c['permission']])
        pk = PrimaryKeyConstraint(*columns)
        pk.create()
def upgrade(migrate_engine):
    """Create the room table and link account.rooms to it."""
    meta.bind = migrate_engine
    account_table = Table('account', meta, autoload=True)
    rooms_column = Column('rooms', Integer)
    room.create()
    rooms_column.create(account_table)
    ForeignKeyConstraint([account_table.c.rooms], [room.c.id]).create()
def upgrade(migrate_engine):
    """Add the schedule_metadata -> schedules foreign key if it is missing."""
    meta = MetaData()
    meta.bind = migrate_engine
    parent = Table('schedules', meta, autoload=True)
    child = Table('schedule_metadata', meta, autoload=True)
    # Only add the constraint on databases that do not have one yet.
    if not child.foreign_keys:
        constraint = ForeignKeyConstraint([child.c.schedule_id], [parent.c.id])
        constraint.create()
def create_foreign_key_constraints(constraint_names, columns, ref_columns):
    """Create the foreign key constraints that match the given criteria.

    :param constraint_names: List of foreign key constraint names
    :param columns: List of the foreign key columns.
    :param ref_columns: List of the referenced columns.
    """
    for name in constraint_names:
        constraint = ForeignKeyConstraint(columns=columns,
                                          refcolumns=ref_columns,
                                          name=name)
        constraint.create()
def upgrade(migrate_engine):
    """Add foreign keys from upload to study and user."""
    meta.bind = migrate_engine
    upload_table = Table("upload", meta, autoload=True)
    study_table = Table("study", meta, autoload=True)
    user_table = Table("user", meta, autoload=True)
    # Same creation order as before: study first, then uploader.
    for source, target in ((upload_table.c.study_id, study_table.c.id),
                           (upload_table.c.uploader_id, user_table.c.id)):
        ForeignKeyConstraint([source], [target]).create()
def upgrade(migrate_engine):
    """Switch selected FKs to ON DELETE SET NULL and make selected columns NOT NULL.

    Driven by the module-level TABLES_FKEYS_SET_NULL and
    TABLES_COLUMNS_NOT_NULL mappings.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    # Reflect every table referenced by either mapping.
    table_names = set(TABLES_FKEYS_SET_NULL.keys())
    table_names.update(TABLES_COLUMNS_NOT_NULL.keys())
    tables = {}
    for t in table_names:
        tables[t] = sautils.Table(t, metadata, autoload=True)
    fks_to_change = []
    # We need to parse the reflected model in order to find the automatic
    # fk name that was put.
    # Mysql and postgres have different naming convention so this is not very
    # easy to have generic code working.
    for t, keys in TABLES_FKEYS_SET_NULL.items():
        table = tables[t]
        for fk in table.constraints:
            if not isinstance(fk, sa.ForeignKeyConstraint):
                continue
            for c in fk.elements:
                if str(c.column) in keys:
                    # migrate.xx.ForeignKeyConstraint is changing the model
                    # so initializing here would break the iteration
                    # (Set changed size during iteration)
                    fks_to_change.append((table,
                                          (fk.columns, [c.column]),
                                          dict(name=fk.name,
                                               ondelete='SET NULL')))
    # Drop each old FK (where supported) and re-add it with SET NULL.
    for table, args, kwargs in fks_to_change:
        fk = ForeignKeyConstraint(*args, **kwargs)
        table.append_constraint(fk)
        try:
            fk.drop()
        except NotSupportedError:
            # some versions of sqlite do not support drop,
            # but will still update the fk
            pass
        fk.create()
    # Tighten columns to NOT NULL; InnoDB needs FK checks off while
    # altering columns that participate in foreign keys.
    for t, cols in TABLES_COLUMNS_NOT_NULL.items():
        table = tables[t]
        if table.dialect_options.get('mysql', {}).get('engine') == 'InnoDB':
            migrate_engine.execute('SET FOREIGN_KEY_CHECKS = 0;')
        try:
            for c in table.columns:
                if c.name in cols:
                    c.alter(nullable=False)
        finally:
            if table.dialect_options.get('mysql', {}).get('engine') == 'InnoDB':
                migrate_engine.execute('SET FOREIGN_KEY_CHECKS = 1;')
def upgrade(migrate_engine):
    """Add foreign keys from the roles_users link table to role and user."""
    meta.bind = migrate_engine
    link_table = Table("roles_users", meta, autoload=True)
    role_table = Table("role", meta, autoload=True)
    user_table = Table("user", meta, autoload=True)
    ForeignKeyConstraint([link_table.c.role_id], [role_table.c.id]).create()
    ForeignKeyConstraint([link_table.c.user_id], [user_table.c.id]).create()
def upgrade(migrate_engine):
    """Apply ON DELETE SET NULL to mapped FKs, then enforce NOT NULL columns.

    Configuration comes from the module-level TABLES_FKEYS_SET_NULL and
    TABLES_COLUMNS_NOT_NULL mappings.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    # Reflect all tables touched by either mapping.
    table_names = set(TABLES_FKEYS_SET_NULL.keys())
    table_names.update(TABLES_COLUMNS_NOT_NULL.keys())
    tables = {}
    for t in table_names:
        tables[t] = sautils.Table(t, metadata, autoload=True)
    fks_to_change = []
    # We need to parse the reflected model in order to find the automatic
    # fk name that was put.
    # Mysql and postgres have different naming convention so this is not very
    # easy to have generic code working.
    for t, keys in TABLES_FKEYS_SET_NULL.items():
        table = tables[t]
        for fk in table.constraints:
            if not isinstance(fk, sa.ForeignKeyConstraint):
                continue
            for c in fk.elements:
                if str(c.column) in keys:
                    # migrate.xx.ForeignKeyConstraint is changing the model
                    # so initializing here would break the iteration
                    # (Set changed size during iteration)
                    fks_to_change.append((
                        table, (fk.columns, [c.column]),
                        dict(name=fk.name, ondelete='SET NULL')))
    # Recreate each FK with the new ON DELETE behaviour.
    for table, args, kwargs in fks_to_change:
        fk = ForeignKeyConstraint(*args, **kwargs)
        table.append_constraint(fk)
        try:
            fk.drop()
        except NotSupportedError:
            # some versions of sqlite do not support drop,
            # but will still update the fk
            pass
        fk.create()
    # Make the listed columns NOT NULL; InnoDB requires FK checks to be
    # disabled while altering FK-participating columns.
    for t, cols in TABLES_COLUMNS_NOT_NULL.items():
        table = tables[t]
        if table.dialect_options.get('mysql', {}).get('engine') == 'InnoDB':
            migrate_engine.execute('SET FOREIGN_KEY_CHECKS = 0;')
        try:
            for c in table.columns:
                if c.name in cols:
                    c.alter(nullable=False)
        finally:
            if table.dialect_options.get('mysql', {}).get('engine') == 'InnoDB':
                migrate_engine.execute('SET FOREIGN_KEY_CHECKS = 1;')
def create_foreign_key_constraints(constraint_names, columns, ref_columns):
    """Create the foreign key constraints that match the given criteria.

    :param constraint_names: List of foreign key constraint names
    :param columns: List of the foreign key columns.
    :param ref_columns: List of the referenced columns.
    """
    for constraint_name in constraint_names:
        ForeignKeyConstraint(columns=columns,
                             refcolumns=ref_columns,
                             name=constraint_name).create()
def upgrade(migrate_engine):
    """Add FKs from the studies_collaborators link table to study and user."""
    meta.bind = migrate_engine
    collaborators = Table("studies_collaborators", meta, autoload=True)
    study_table = Table("study", meta, autoload=True)
    user_table = Table("user", meta, autoload=True)
    ForeignKeyConstraint([collaborators.c.study_id],
                         [study_table.c.id]).create()
    ForeignKeyConstraint([collaborators.c.user_id],
                         [user_table.c.id]).create()
def downgrade(migrate_engine):
    """Reverse the spectrometer-parameters foreign key migration."""
    # Operations to reverse the above upgrade go here.
    meta = MetaData(bind=migrate_engine)
    measurement = Table('meas_MeasurementTable', meta, autoload=True)
    params = Table('meas_SpectrometerParametersTable', meta, autoload=True)
    # Restore the back-reference column and its foreign key.
    Column('measurement_id', Integer).create(params)
    ForeignKeyConstraint([params.c.measurement_id],
                         [measurement.c.id]).create()
    # Remove the forward reference added by the upgrade.
    ForeignKeyConstraint([measurement.c.spectrometer_parameters_id],
                         [params.c.id]).drop()
    measurement.c.spectrometer_parameters_id.drop()
    params.c.hash.drop()
def upgrade(migrate_engine):
    """Create the field table plus its FKs and uniqueness constraint."""
    meta.bind = migrate_engine
    field.create()
    field_type_table = Table("field_type", meta, autoload=True)
    study_table = Table("study", meta, autoload=True)
    ForeignKeyConstraint([field.c.field_type_id],
                         [field_type_table.c.id]).create()
    ForeignKeyConstraint([field.c.study_id], [study_table.c.id]).create()
    # Each ordering position may be used only once per study.
    UniqueConstraint(field.c.study_id, field.c.order, table=field).create()
def upgrade(migrate_engine):
    """Point measurements at spectrometer parameters instead of vice versa."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta = MetaData(bind=migrate_engine)
    measurement = Table('meas_MeasurementTable', meta, autoload=True)
    Column('spectrometer_parameters_id', Integer).create(measurement)
    params = Table('meas_SpectrometerParametersTable', meta, autoload=True)
    ForeignKeyConstraint([measurement.c.spectrometer_parameters_id],
                         [params.c.id]).create()
    # Drop the old back-reference from parameters to measurements.
    ForeignKeyConstraint([params.c.measurement_id],
                         [measurement.c.id]).drop()
    params.c.measurement_id.drop()
    Column('hash', String(32)).create(params)
def upgrade(migrate_engine):
    """Replace proc_FitTable.isotope with an isotope_id FK to meas_IsotopeTable.

    The column drop/create steps are best-effort so the migration can be
    re-run against a database that was already partially converted.
    """
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta = MetaData(bind=migrate_engine)
    t = Table('proc_FitTable', meta, autoload=True)
    try:
        t.c.isotope.drop()
    except Exception:
        # Column already removed by an earlier (partial) run; keep going.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        pass
    c = Column('isotope_id', Integer)
    try:
        c.create(t)
    except Exception:
        # Column already exists; keep going.
        pass
    tt = Table('meas_IsotopeTable', meta, autoload=True)
    cons = ForeignKeyConstraint([t.c.isotope_id], [tt.c.id])
    cons.create()
def upgrade(migrate_engine):
    """Create the sensitivity table and wire it into extraction records."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta.bind = migrate_engine
    t.create()
    extraction = Table('meas_ExtractionTable', meta, autoload=True)
    # Backfill existing rows with the column default.
    Column('sensitivity_multiplier', Float).create(extraction,
                                                   populate_default=True)
    Column('sensitivity_id', Integer).create(extraction)
    ForeignKeyConstraint([extraction.c.sensitivity_id], [t.c.id]).create()
    spectrometer = Table('gen_MassSpectrometerTable', meta, autoload=True)
    ForeignKeyConstraint([t.c.mass_spectrometer_id],
                         [spectrometer.c.id]).create()
def upgrade(migrate_engine):
    """Add ON DELETE CASCADE FKs from records and domainmetadata to domains."""
    meta.bind = migrate_engine
    domains = Table('domains', meta, autoload=True)
    domainmetadata = Table('domainmetadata', meta, autoload=True)
    records = Table('records', meta, autoload=True)
    # Same creation order as before: records first, then domainmetadata.
    for child in (records, domainmetadata):
        fk = ForeignKeyConstraint([child.c.domain_id], [domains.c.id],
                                  ondelete="CASCADE")
        fk.create()
def upgrade(migrate_engine):
    """Cascade-delete records and domain metadata with their parent domain."""
    meta.bind = migrate_engine
    domains_table = Table('domains', meta, autoload=True)
    metadata_table = Table('domainmetadata', meta, autoload=True)
    records_table = Table('records', meta, autoload=True)
    ForeignKeyConstraint([records_table.c.domain_id],
                         [domains_table.c.id],
                         ondelete="CASCADE").create()
    ForeignKeyConstraint([metadata_table.c.domain_id],
                         [domains_table.c.id],
                         ondelete="CASCADE").create()
def downgrade(migrate_engine):
    """Drop the unique_pool_attribute constraint, cycling the FK around it."""
    meta.bind = migrate_engine
    attrs_table = Table('pool_attributes', meta, autoload=True)
    unique = UniqueConstraint('pool_id', 'key', 'value',
                              name='unique_pool_attribute',
                              table=attrs_table)
    fk = ForeignKeyConstraint(columns=['pool_id'],
                              refcolumns=['pools.id'],
                              ondelete='CASCADE',
                              table=attrs_table)
    # First have to drop the ForeignKeyConstraint
    fk.drop()
    # Then drop the UniqueConstraint
    unique.drop()
    # Then recreate the ForeignKeyConstraint
    fk.create()
def upgrade(migrate_engine):
    """Create the shelter_puppies join table with FKs to shelter and puppy."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta.bind = migrate_engine
    shelter_puppies = Table('shelter_puppies', meta,
                            Column('id', INTEGER, primary_key=True,
                                   nullable=False),
                            Column('shelter_id', INTEGER),
                            Column('puppy_id', INTEGER))
    shelter_puppies.create()
    shelter = Table('shelter', meta, autoload=True)
    puppy = Table('puppy', meta, autoload=True)
    # NOTE(review): the table is re-declared with autoload here, presumably
    # to refresh the definition from the database before adding constraints
    # — confirm this is required by the sqlalchemy version in use.
    shelter_puppies = Table('shelter_puppies', meta, autoload=True)
    cons1 = ForeignKeyConstraint([shelter_puppies.c.shelter_id],
                                 [shelter.c.id])
    cons1.create()
    cons2 = ForeignKeyConstraint([shelter_puppies.c.puppy_id], [puppy.c.id])
    cons2.create()
    # (dead trailing `pass` removed)
def upgrade(migrate_engine):
    """Create the shelter_puppies association table and its foreign keys."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta.bind = migrate_engine
    shelter_puppies = Table(
        'shelter_puppies', meta,
        Column('id', INTEGER, primary_key=True, nullable=False),
        Column('shelter_id', INTEGER),
        Column('puppy_id', INTEGER))
    shelter_puppies.create()
    shelter = Table('shelter', meta, autoload=True)
    puppy = Table('puppy', meta, autoload=True)
    # NOTE(review): re-declared with autoload, presumably to refresh the
    # definition from the live database before constraint DDL — confirm.
    shelter_puppies = Table('shelter_puppies', meta, autoload=True)
    cons1 = ForeignKeyConstraint([shelter_puppies.c.shelter_id],
                                 [shelter.c.id])
    cons1.create()
    cons2 = ForeignKeyConstraint([shelter_puppies.c.puppy_id], [puppy.c.id])
    cons2.create()
    # (dead trailing `pass` removed)
def recreate_table_fkeys(table, session,):
    """Recreate all foreign keys in the table or declarative object.

    Drops every FK (by name or target) that already exists in the reflected
    schema, then recreates each FK declared on the model with
    ON UPDATE/DELETE CASCADE.

    :param table: a Table or a declarative class exposing ``__table__``.
    :param session: session bound to the target engine.
    """
    if not isinstance(table, Table):
        table = table.__table__
    migrate_engine = session.bind
    metadata = table.metadata
    r_meta = s.MetaData(migrate_engine, True)  # reflected live schema

    def commit():
        # Force both connections to commit so DDL takes effect immediately.
        session.commit()
        r_meta.bind.execute('COMMIT;')
        metadata.bind.execute('COMMIT;')

    fks = []
    commit()
    t = table
    rt = r_meta.tables[t.name]
    rt_constraints = [a for a in rt.foreign_keys]
    # deepcopy: dropping constraints mutates the collection being iterated.
    for cs in deepcopy(t.foreign_keys):
        if cs.__class__.__name__ == 'ForeignKey':
            table, column = cs.target_fullname.split('.')
            target = [r_meta.tables[table].c[column]]
            parent_table = r_meta.tables[cs.parent.table.name]
            parent = [parent_table.c[cs.parent.name]]
            fk = ForeignKeyConstraint(columns=parent, refcolumns=target)
            fk.use_alter = cs.use_alter
            fk.ondelete = 'CASCADE'
            fk.onupdate = 'CASCADE'
            fk.name = cs.name
            fks.append(fk)
            if (cs.name in [a.name for a in rt_constraints] or
                    (cs.target_fullname in
                     [a.target_fullname for a in rt_constraints])):
                try:
                    fk.drop(migrate_engine)
                    commit()
                except Exception:
                    # Best-effort drop of a pre-existing constraint.
                    # Narrowed from a bare `except:` so KeyboardInterrupt
                    # and SystemExit still propagate.
                    pass
    for fk in fks:
        fk.create(migrate_engine)
        commit()
def upgrade(migrate_engine):
    """Recreate the foreign keys listed in TABLES_FKEYS with ON DELETE CASCADE."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    # Reflect every table named in the mapping.
    tables = {}
    for t in TABLES_FKEYS:
        tables[t] = sautils.Table(t, metadata, autoload=True)
    fks_to_change = []
    # We need to parse the reflected model in order to find the automatic
    # fk name that was put.
    # Mysql and postgres have different naming convention so this is not very
    # easy to have generic code working.
    for t, keys in iteritems(TABLES_FKEYS):
        table = tables[t]
        for fk in table.constraints:
            if not isinstance(fk, sa.ForeignKeyConstraint):
                continue
            for c in fk.elements:
                if str(c.column) in keys:
                    # migrate.xx.ForeignKeyConstraint is changing the model
                    # so initializing here would break the iteration
                    # (Set changed size during iteration)
                    fks_to_change.append((
                        table, (fk.columns, [c.column]),
                        dict(name=fk.name, ondelete='CASCADE')))
    # Drop each old FK (where supported) and recreate it with CASCADE.
    for table, args, kwargs in fks_to_change:
        fk = ForeignKeyConstraint(*args, **kwargs)
        table.append_constraint(fk)
        try:
            fk.drop()
        except NotSupportedError:
            # some versions of sqlite do not support drop,
            # but will still update the fk
            pass
        fk.create()
def upgrade(migrate_engine):
    """Create the new tables and their FKs to the project/analysis tables."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta.bind = migrate_engine
    t.create()
    t2.create()
    project_table = Table('gen_ProjectTable', meta, autoload=True)
    ForeignKeyConstraint([t.c.project_id], [project_table.c.id]).create()
    analysis_table = Table('meas_AnalysisTable', meta, autoload=True)
    ForeignKeyConstraint([t2.c.analysis_id], [analysis_table.c.id]).create()
    ForeignKeyConstraint([t2.c.figure_id], [analysis_table.c.id]).create()
def upgrade(migrate_engine):
    """
    Alter existing table ALGORITHMS, by moving columns from the old
    ALGORITHM_GROUPS table.
    """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    for col in ADD_COLUMNS:
        create_column(col, table_algo)
    session = SA_SESSIONMAKER()
    try:
        # SQLite syntax first; fall back to the PostgreSQL type name below.
        session.execute(
            text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                 "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(
            text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                 "ADD COLUMN _current_state character varying;"))
        session.commit()
    finally:
        session.close()
    session = SA_SESSIONMAKER()
    try:
        # Copy the group-level attributes onto each algorithm row.
        session.execute(
            text("""UPDATE "ALGORITHMS" SET
            module = (select G.module FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            classname = (select G.classname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);"""))
        session.commit()
        # Delete old columns, no longer needed
        for col in DEL_COLUMNS:
            drop_column(col, table_algo)
        # Create constraint only after rows are being populated
        table_algo = meta.tables["ALGORITHMS"]
        fk_constraint = ForeignKeyConstraint(["fk_category"],
                                             ["ALGORITHM_CATEGORIES.id"],
                                             ondelete="CASCADE",
                                             table=table_algo)
        fk_constraint.create()
        # Drop old table
        session = SA_SESSIONMAKER()
        session.execute(text("""DROP TABLE "ALGORITHM_GROUPS";"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
    session = SA_SESSIONMAKER()
    try:
        # Rename the mapped datatype tables to their new *_DATA names.
        session.execute(
            text("""ALTER TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS"
                    RENAME TO "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA"; """))
        session.execute(
            text("""ALTER TABLE "MAPPED_DATATYPE_MEASURE"
                    RENAME TO "MAPPED_DATATYPE_MEASURE_DATA"; """))
        session.execute(
            text("""ALTER TABLE "MAPPED_SIMULATION_STATE"
                    RENAME TO "MAPPED_SIMULATION_STATE_DATA"; """))
        session.execute(
            text("""ALTER TABLE "MAPPED_VALUE_WRAPPER"
                    RENAME TO "MAPPED_VALUE_WRAPPER_DATA"; """))
        session.execute(
            text("""ALTER TABLE "MAPPED_PROJECTION_DATA"
                    RENAME TO "MAPPED_PROJECTION_MATRIX_DATA"; """))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
def upgrade(migrate_engine):
    """Add versioning to groups, group_extras and package_groups.

    Creates the *_revision tables, adds state/revision_id columns to the
    base tables, stamps all existing rows with a new system revision, and
    copies each existing row into its revision table.

    Bug fix: the group_extra revision rows previously had their
    ``continuity_id`` taken from the stale ``group_rev`` variable left over
    from the groups loop, so every group_extra_revision pointed at the wrong
    record; it is now taken from the group_extra row itself.
    """
    metadata = MetaData()
    metadata.bind = migrate_engine
    group_table = Table('group', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('name', UnicodeText, unique=True, nullable=False),
        Column('title', UnicodeText),
        Column('description', UnicodeText),
        Column('created', DateTime, default=datetime.now),
    )
    group_revision_table = Table('group_revision', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('name', UnicodeText, nullable=False),
        Column('title', UnicodeText),
        Column('description', UnicodeText),
        Column('created', DateTime, default=datetime.now),
        Column('state', UnicodeText),
        Column('revision_id', UnicodeText, ForeignKey('revision.id'),
               primary_key=True),
        Column('continuity_id', UnicodeText, ForeignKey('group.id'))
    )
    package_group_table = Table('package_group', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('package_id', UnicodeText, ForeignKey('package.id')),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
    )
    package_group_revision_table = Table('package_group_revision', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('package_id', UnicodeText, ForeignKey('package.id')),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
        Column('state', UnicodeText),
        Column('revision_id', UnicodeText, ForeignKey('revision.id'),
               primary_key=True),
        Column('continuity_id', UnicodeText, ForeignKey('package_group.id'))
    )
    group_extra_table = Table('group_extra', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
        Column('key', UnicodeText),
        Column('value', JsonType),
    )
    group_extra_revision_table = Table('group_extra_revision', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
        Column('key', UnicodeText),
        Column('value', JsonType),
        Column('state', UnicodeText),
        Column('revision_id', UnicodeText, ForeignKey('revision.id'),
               primary_key=True),
        Column('continuity_id', UnicodeText, ForeignKey('group_extra.id'))
    )
    revision_table = Table('revision', metadata, autoload=True)
    package_table = Table('package', metadata, autoload=True)

    # One system revision stamps every migrated row.
    rev_id = make_uuid()
    q = revision_table.insert(values={'id': rev_id,
        'author': u'system',
        'message': u"Add versioning to groups, group_extras and package_groups",
        'timestamp': datetime.utcnow(),
        'state': u'active'})
    r = migrate_engine.execute(q)

    # handle groups:
    # BUG in sqlalchemy-migrate 0.4/0.5.4: "group" isn't escaped properly when sent to
    # postgres.
    # cf http://code.google.com/p/sqlalchemy-migrate/issues/detail?id=32
    # so the columns/constraint are added with raw, quoted DDL instead of
    # migrate's Column.create()/ForeignKeyConstraint.create().
    migrate_engine.execute('ALTER TABLE "group" ADD COLUMN state TEXT')
    migrate_engine.execute('ALTER TABLE "group" ADD COLUMN revision_id TEXT')
    migrate_engine.execute(
        'UPDATE "group" SET revision_id = \'%s\', state=\'active\'' % rev_id)
    migrate_engine.execute('ALTER TABLE "group" ADD CONSTRAINT "group_revision_id_fkey" ' + \
                           'FOREIGN KEY (revision_id) REFERENCES revision(id)')
    group_revision_table.create()
    for row in migrate_engine.execute(group_table.select()):
        group_rev = dict(row.items())
        group_rev['continuity_id'] = group_rev['id']
        # otherwise, this doesn't get mapped due to the bug above:
        group_rev['state'] = u'active'
        group_rev['revision_id'] = rev_id
        q = group_revision_table.insert(values=group_rev)
        migrate_engine.execute(q)

    # handle package_groups:
    state = Column('state', UnicodeText)
    revision_id = Column('revision_id', UnicodeText)
    state.create(package_group_table)
    revision_id.create(package_group_table)
    q = package_group_table.update(values={'state': u'active',
                                           'revision_id': rev_id})
    migrate_engine.execute(q)
    fk = ForeignKeyConstraint(['revision_id'], [revision_table.c.id],
                              table=package_group_table,
                              name='package_group_revision_id_fkey')
    fk.create(migrate_engine)
    package_group_revision_table.create()
    for row in migrate_engine.execute(package_group_table.select()):
        pkg_group_rev = dict(row.items())
        pkg_group_rev['continuity_id'] = pkg_group_rev['id']
        q = package_group_revision_table.insert(values=pkg_group_rev)
        migrate_engine.execute(q)

    # handle group_extras:
    state = Column('state', UnicodeText)
    revision_id = Column('revision_id', UnicodeText)
    state.create(group_extra_table)
    revision_id.create(group_extra_table)
    q = group_extra_table.update(values={'state': u'active',
                                         'revision_id': rev_id})
    migrate_engine.execute(q)
    fk = ForeignKeyConstraint(['revision_id'], [revision_table.c.id],
                              table=group_extra_table,
                              name='group_extra_revision_id_fkey')
    fk.create(migrate_engine)
    group_extra_revision_table.create()
    for row in migrate_engine.execute(group_extra_table.select()):
        group_extra_rev = dict(row.items())
        # FIX: was group_rev['id'] (stale variable from the groups loop).
        group_extra_rev['continuity_id'] = group_extra_rev['id']
        q = group_extra_revision_table.insert(values=group_extra_rev)
        migrate_engine.execute(q)
pass commit() r_meta = s.MetaData(migrate_engine, True) # create constraints fks = [] for md in recreate_constraints: t = md.__table__ rt = r_meta.tables[t.name] rt_constraints = [a for a in rt.foreign_keys] for cs in deepcopy(t.foreign_keys): if cs.__class__.__name__ == "ForeignKey": if not cs.name in [a.name for a in rt_constraints]: table, column = cs.target_fullname.split(".") target = [r_meta.tables[table].c[column]] parent = [r_meta.tables[cs.parent.table.name].c[cs.parent.name]] fk = ForeignKeyConstraint(columns=parent, refcolumns=target) fk.use_alter = cs.use_alter fk.ondelete = "CASCADE" fk.onupdate = "CASCADE" fk.name = cs.name fks.append(fk) for fk in fks: fk.create(migrate_engine) commit() def downgrade(migrate_engine): # Operations to reverse the above upgrade go here. pass
def upgrade(migrate_engine):
    """Add versioning to groups, package_groups and group_extras.

    Creates the group_revision / package_group_revision /
    group_extra_revision tables, adds ``state`` and ``revision_id``
    columns to the live tables, and back-fills one revision row per
    existing record, all under a single new "system" revision.
    """
    metadata = MetaData()
    metadata.bind = migrate_engine
    group_table = Table(
        'group', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('name', UnicodeText, unique=True, nullable=False),
        Column('title', UnicodeText),
        Column('description', UnicodeText),
        Column('created', DateTime, default=datetime.now),
    )
    group_revision_table = Table(
        'group_revision', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('name', UnicodeText, nullable=False),
        Column('title', UnicodeText),
        Column('description', UnicodeText),
        Column('created', DateTime, default=datetime.now),
        Column('state', UnicodeText),
        Column('revision_id', UnicodeText, ForeignKey('revision.id'),
               primary_key=True),
        Column('continuity_id', UnicodeText, ForeignKey('group.id')))
    package_group_table = Table(
        'package_group', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('package_id', UnicodeText, ForeignKey('package.id')),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
    )
    package_group_revision_table = Table(
        'package_group_revision', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('package_id', UnicodeText, ForeignKey('package.id')),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
        Column('state', UnicodeText),
        Column('revision_id', UnicodeText, ForeignKey('revision.id'),
               primary_key=True),
        Column('continuity_id', UnicodeText, ForeignKey('package_group.id')))
    group_extra_table = Table(
        'group_extra', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
        Column('key', UnicodeText),
        Column('value', JsonType),
    )
    group_extra_revision_table = Table(
        'group_extra_revision', metadata,
        Column('id', UnicodeText, primary_key=True, default=make_uuid),
        Column('group_id', UnicodeText, ForeignKey('group.id')),
        Column('key', UnicodeText),
        Column('value', JsonType),
        Column('state', UnicodeText),
        Column('revision_id', UnicodeText, ForeignKey('revision.id'),
               primary_key=True),
        Column('continuity_id', UnicodeText, ForeignKey('group_extra.id')))

    revision_table = Table('revision', metadata, autoload=True)
    package_table = Table('package', metadata, autoload=True)

    # A single revision record covers the whole back-fill below.
    rev_id = make_uuid()
    q = revision_table.insert(
        values={'id': rev_id,
                'author': u'system',
                'message': u"Add versioning to groups, group_extras and package_groups",
                'timestamp': datetime.utcnow(),
                'state': u'active'})
    migrate_engine.execute(q)

    # handle groups:
    # BUG in sqlalchemy-migrate 0.4/0.5.4: "group" isn't escaped properly when
    # sent to postgres, so the DDL for the "group" table is issued by hand
    # instead of through sqlalchemy-migrate's Column/ForeignKeyConstraint API.
    # cf http://code.google.com/p/sqlalchemy-migrate/issues/detail?id=32
    migrate_engine.execute('ALTER TABLE "group" ADD COLUMN state TEXT')
    migrate_engine.execute('ALTER TABLE "group" ADD COLUMN revision_id TEXT')
    # rev_id is a freshly generated uuid, so string interpolation is safe here.
    migrate_engine.execute(
        'UPDATE "group" SET revision_id = \'%s\', state=\'active\'' % rev_id)
    migrate_engine.execute(
        'ALTER TABLE "group" ADD CONSTRAINT "group_revision_id_fkey" ' +
        'FOREIGN KEY (revision_id) REFERENCES revision(id)')
    group_revision_table.create()
    for row in migrate_engine.execute(group_table.select()):
        group_rev = dict(row.items())
        group_rev['continuity_id'] = group_rev['id']
        # otherwise, this doesn't get mapped due to the bug above:
        group_rev['state'] = u'active'
        group_rev['revision_id'] = rev_id
        q = group_revision_table.insert(values=group_rev)
        migrate_engine.execute(q)

    # handle package_group:
    state = Column('state', UnicodeText)
    revision_id = Column('revision_id', UnicodeText)
    state.create(package_group_table)
    revision_id.create(package_group_table)
    q = package_group_table.update(values={'state': u'active',
                                           'revision_id': rev_id})
    migrate_engine.execute(q)
    fk = ForeignKeyConstraint(['revision_id'], [revision_table.c.id],
                              table=package_group_table,
                              name='package_group_revision_id_fkey')
    fk.create(migrate_engine)
    package_group_revision_table.create()
    for row in migrate_engine.execute(package_group_table.select()):
        pkg_group_rev = dict(row.items())
        pkg_group_rev['continuity_id'] = pkg_group_rev['id']
        q = package_group_revision_table.insert(values=pkg_group_rev)
        migrate_engine.execute(q)

    # handle group_extra:
    state = Column('state', UnicodeText)
    revision_id = Column('revision_id', UnicodeText)
    state.create(group_extra_table)
    revision_id.create(group_extra_table)
    q = group_extra_table.update(values={'state': u'active',
                                         'revision_id': rev_id})
    migrate_engine.execute(q)
    fk = ForeignKeyConstraint(['revision_id'], [revision_table.c.id],
                              table=group_extra_table,
                              name='group_extra_revision_id_fkey')
    fk.create(migrate_engine)
    group_extra_revision_table.create()
    for row in migrate_engine.execute(group_extra_table.select()):
        group_extra_rev = dict(row.items())
        # BUG FIX: was group_rev['id'] -- the stale loop variable from the
        # group loop above, which pointed every group_extra revision at the
        # last group processed.  Each revision must reference its own row.
        group_extra_rev['continuity_id'] = group_extra_rev['id']
        q = group_extra_revision_table.insert(values=group_extra_rev)
        migrate_engine.execute(q)
"""UPDATE "ALGORITHMS" SET module = (select G.module FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id), classname = (select G.classname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id), displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id), fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);""")) session.commit() # Delete old columns, no longer needed for col in DEL_COLUMNS: drop_column(col, table_algo) # Create constraint only after rows are being populated table_algo = meta.tables["ALGORITHMS"] fk_constraint = ForeignKeyConstraint(["fk_category"], ["ALGORITHM_CATEGORIES.id"], ondelete="CASCADE", table=table_algo) fk_constraint.create() # Drop old table session = SA_SESSIONMAKER() session.execute(text("""DROP TABLE "ALGORITHM_GROUPS";""")) session.commit() except Exception, excep: LOGGER.exception(excep) finally: session.close() session = SA_SESSIONMAKER() try: session.execute(text("""ALTER TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS" RENAME TO "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA"; """))
def upgrade(migrate_engine):
    """Add versioning to groups, package_groups and group_extras.

    Creates the group_revision / package_group_revision /
    group_extra_revision tables, adds ``state`` and ``revision_id``
    columns to the live tables, and back-fills one revision row per
    existing record, all under a single new "system" revision.
    """
    metadata = MetaData()
    metadata.bind = migrate_engine
    group_table = Table(
        "group",
        metadata,
        Column("id", UnicodeText, primary_key=True, default=make_uuid),
        Column("name", UnicodeText, unique=True, nullable=False),
        Column("title", UnicodeText),
        Column("description", UnicodeText),
        Column("created", DateTime, default=datetime.now),
    )
    group_revision_table = Table(
        "group_revision",
        metadata,
        Column("id", UnicodeText, primary_key=True, default=make_uuid),
        Column("name", UnicodeText, nullable=False),
        Column("title", UnicodeText),
        Column("description", UnicodeText),
        Column("created", DateTime, default=datetime.now),
        Column("state", UnicodeText),
        Column("revision_id", UnicodeText, ForeignKey("revision.id"), primary_key=True),
        Column("continuity_id", UnicodeText, ForeignKey("group.id")),
    )
    package_group_table = Table(
        "package_group",
        metadata,
        Column("id", UnicodeText, primary_key=True, default=make_uuid),
        Column("package_id", UnicodeText, ForeignKey("package.id")),
        Column("group_id", UnicodeText, ForeignKey("group.id")),
    )
    package_group_revision_table = Table(
        "package_group_revision",
        metadata,
        Column("id", UnicodeText, primary_key=True, default=make_uuid),
        Column("package_id", UnicodeText, ForeignKey("package.id")),
        Column("group_id", UnicodeText, ForeignKey("group.id")),
        Column("state", UnicodeText),
        Column("revision_id", UnicodeText, ForeignKey("revision.id"), primary_key=True),
        Column("continuity_id", UnicodeText, ForeignKey("package_group.id")),
    )
    group_extra_table = Table(
        "group_extra",
        metadata,
        Column("id", UnicodeText, primary_key=True, default=make_uuid),
        Column("group_id", UnicodeText, ForeignKey("group.id")),
        Column("key", UnicodeText),
        Column("value", JsonType),
    )
    group_extra_revision_table = Table(
        "group_extra_revision",
        metadata,
        Column("id", UnicodeText, primary_key=True, default=make_uuid),
        Column("group_id", UnicodeText, ForeignKey("group.id")),
        Column("key", UnicodeText),
        Column("value", JsonType),
        Column("state", UnicodeText),
        Column("revision_id", UnicodeText, ForeignKey("revision.id"), primary_key=True),
        Column("continuity_id", UnicodeText, ForeignKey("group_extra.id")),
    )
    revision_table = Table("revision", metadata, autoload=True)
    package_table = Table("package", metadata, autoload=True)

    # A single revision record covers the whole back-fill below.
    # NOTE(review): this variant stamps the revision with local time
    # (datetime.now()) while the sibling migration uses datetime.utcnow()
    # -- confirm which is intended before relying on the timestamp.
    rev_id = make_uuid()
    q = revision_table.insert(
        values={
            "id": rev_id,
            "author": u"system",
            "message": u"Add versioning to groups, group_extras and package_groups",
            "timestamp": datetime.now(),
            "state": u"active",
        }
    )
    migrate_engine.execute(q)

    # handle groups:
    # BUG in sqlalchemy-migrate 0.4/0.5.4: "group" isn't escaped properly when
    # sent to postgres, so the DDL for the "group" table is issued by hand.
    # cf http://code.google.com/p/sqlalchemy-migrate/issues/detail?id=32
    migrate_engine.execute('ALTER TABLE "group" ADD COLUMN state TEXT')
    migrate_engine.execute('ALTER TABLE "group" ADD COLUMN revision_id TEXT')
    # rev_id is a freshly generated uuid, so string interpolation is safe here.
    migrate_engine.execute("UPDATE \"group\" SET revision_id = '%s', state='active'" % rev_id)
    migrate_engine.execute(
        'ALTER TABLE "group" ADD CONSTRAINT "group_revision_id_fkey" '
        + "FOREIGN KEY (revision_id) REFERENCES revision(id)"
    )
    group_revision_table.create()
    for row in migrate_engine.execute(group_table.select()):
        group_rev = dict(row.items())
        group_rev["continuity_id"] = group_rev["id"]
        # otherwise, this doesn't get mapped due to the bug above:
        group_rev["state"] = u"active"
        group_rev["revision_id"] = rev_id
        q = group_revision_table.insert(values=group_rev)
        migrate_engine.execute(q)

    # handle package_group:
    state = Column("state", UnicodeText)
    revision_id = Column("revision_id", UnicodeText)
    state.create(package_group_table)
    revision_id.create(package_group_table)
    q = package_group_table.update(values={"state": u"active", "revision_id": rev_id})
    migrate_engine.execute(q)
    fk = ForeignKeyConstraint(
        ["revision_id"], [revision_table.c.id], table=package_group_table, name="package_group_revision_id_fkey"
    )
    fk.create(migrate_engine)
    package_group_revision_table.create()
    for row in migrate_engine.execute(package_group_table.select()):
        pkg_group_rev = dict(row.items())
        # BUG FIX: was group_rev["id"] -- the stale loop variable from the
        # group loop above, which pointed every package_group revision at the
        # last group processed.  Each revision must reference its own row.
        pkg_group_rev["continuity_id"] = pkg_group_rev["id"]
        q = package_group_revision_table.insert(values=pkg_group_rev)
        migrate_engine.execute(q)

    # handle group_extra:
    state = Column("state", UnicodeText)
    revision_id = Column("revision_id", UnicodeText)
    state.create(group_extra_table)
    revision_id.create(group_extra_table)
    q = group_extra_table.update(values={"state": u"active", "revision_id": rev_id})
    migrate_engine.execute(q)
    fk = ForeignKeyConstraint(
        ["revision_id"], [revision_table.c.id], table=group_extra_table, name="group_extra_revision_id_fkey"
    )
    fk.create(migrate_engine)
    group_extra_revision_table.create()
    for row in migrate_engine.execute(group_extra_table.select()):
        group_extra_rev = dict(row.items())
        # BUG FIX: was group_rev["id"]; see the package_group loop above.
        group_extra_rev["continuity_id"] = group_extra_rev["id"]
        q = group_extra_revision_table.insert(values=group_extra_rev)
        migrate_engine.execute(q)
def upgrade(migrate_engine):
    """ Alter existing table ALGORITHMS, by moving columns from the old ALGORITHM_GROUPS table. """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    # Add the destination columns that previously lived on ALGORITHM_GROUPS.
    for col in ADD_COLUMNS:
        create_column(col, table_algo)
    session = SA_SESSIONMAKER()
    try:
        # First attempt uses the "VARYING CHARACTER(255)" spelling; if the
        # dialect rejects it, retry on a fresh session with plain
        # "character varying".
        # NOTE(review): presumably the two spellings target different DB
        # backends (e.g. SQLite vs PostgreSQL) -- confirm.
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        # Fallback path: the first session is closed and replaced before the
        # alternative DDL is issued.
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state character varying;"))
        session.commit()
    finally:
        session.close()
    session = SA_SESSIONMAKER()
    try:
        # Copy the group-level attributes into each ALGORITHMS row via
        # correlated sub-selects against ALGORITHM_GROUPS.
        session.execute(text(
            """UPDATE "ALGORITHMS" SET module = (select G.module FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id), classname = (select G.classname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id), displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id), fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);"""))
        session.commit()
        # Delete old columns, no longer needed
        for col in DEL_COLUMNS:
            drop_column(col, table_algo)
        # Create constraint only after rows are being populated
        table_algo = meta.tables["ALGORITHMS"]
        fk_constraint = ForeignKeyConstraint(["fk_category"], ["ALGORITHM_CATEGORIES.id"],
                                             ondelete="CASCADE", table=table_algo)
        fk_constraint.create()
        # Drop old table
        session = SA_SESSIONMAKER()
        session.execute(text("""DROP TABLE "ALGORITHM_GROUPS";"""))
        session.commit()
    except Exception as excep:
        # Best-effort migration step: log and continue so the table renames
        # below still get a chance to run.
        LOGGER.exception(excep)
    finally:
        session.close()
    session = SA_SESSIONMAKER()
    try:
        # Rename the MAPPED_* tables to the new *_DATA naming scheme.
        session.execute(text("""ALTER TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS" RENAME TO "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_DATATYPE_MEASURE" RENAME TO "MAPPED_DATATYPE_MEASURE_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_SIMULATION_STATE" RENAME TO "MAPPED_SIMULATION_STATE_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_VALUE_WRAPPER" RENAME TO "MAPPED_VALUE_WRAPPER_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_PROJECTION_DATA" RENAME TO "MAPPED_PROJECTION_MATRIX_DATA"; """))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
def main():
    """Create the nca47 database schema.

    Creates all service tables, the ``vres_agent_view`` view, the vFW
    cascade soft-delete trigger, and the foreign keys between the tables.
    """
    nca47_service.prepare_service(sys.argv)
    engine = db_api.get_engine()
    meta = sa.MetaData()
    meta.bind = engine

    # NOTE(review): dns_servers is defined but never added to the `tables`
    # list below, so it is never created -- confirm whether that is intended.
    dns_servers = sa.Table(
        'dns_servers', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    nca_agent_info = sa.Table(
        'nca_agent_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('agent_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('agent_ip', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('agent_nat_ip', sa.String(attr.IP_LEN), nullable=True),
        sa.Column('dc_name', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('network_zone', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('agent_type', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('availiable', sa.Boolean(), nullable=False),
        sa.Column('status', sa.Boolean(), nullable=True),
        sa.Column('update_time', sa.DateTime(), nullable=True),
        sa.Column('operation_fro', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    nca_vres_info = sa.Table(
        'nca_vres_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('vres_id', sa.String(attr.UUID_LEN),
                  unique=True, nullable=False),
        sa.Column('vres_name', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('tenant_id', sa.String(attr.TENANT_ID_MAX_LEN),
                  nullable=True),
        sa.Column('agent_id', sa.String(attr.UUID_LEN), nullable=True),
        sa.Column('operation_fro', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    nca_operation_history = sa.Table(
        'nca_operation_history', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('config_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('input', sa.String(attr.INPUT_MAX_LEN), nullable=False),
        sa.Column('operation_type', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('operation_time', sa.DateTime(), nullable=True),
        sa.Column('operation_status', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('operation_fro', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    dns_zone_info = sa.Table(
        'dns_zone_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('zone_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('tenant_id', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vres_id', sa.String(attr.UUID_LEN), nullable=True),
        sa.Column('zone_name', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('masters', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('slaves', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('default_ttl', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('ad_controller', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('renewal', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('owners', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('comment', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('operation_fro', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    dns_rrs_info = sa.Table(
        'dns_rrs_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('rrs_id', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('zone_id', sa.String(attr.UUID_LEN), nullable=True),
        sa.Column('rrs_name', sa.String(attr.NAME_MAX_LEN),
                  nullable=False, primary_key=True),
        sa.Column('type', sa.String(attr.TYPE_LEN),
                  nullable=False, primary_key=True),
        sa.Column('ttl', sa.String(attr.TTL_LEN), nullable=False),
        sa.Column('klass', sa.String(attr.TENANT_ID_MAX_LEN), nullable=True),
        sa.Column('rdata', sa.String(attr.TENANT_ID_MAX_LEN),
                  nullable=False),
        sa.Column('operation_fro', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_vlan_info = sa.Table(
        'fw_vlan_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('vlan_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('ipaddr', sa.String(attr.INPUT_MAX_LEN), nullable=False),
        sa.Column('ifnames', sa.String(attr.INPUT_MAX_LEN), nullable=False),
        sa.Column('vres_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_vrf_info = sa.Table(
        'fw_vrf_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('vrfInterface', sa.String(attr.INPUT_MAX_LEN),
                  nullable=False),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=True),
        sa.Column('vres_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_vfw_info = sa.Table(
        'fw_vfw_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('vfw_name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_type', sa.String(attr.STATUS_LEN), nullable=False),
        sa.Column('vfw_info', sa.String(attr.INPUT_MAX_LEN),
                  nullable=False),
        sa.Column('dc_name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('network_zone_name', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('network_zone_class', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('protection_class', sa.String(attr.STATUS_LEN),
                  nullable=True),
        sa.Column('vres_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_security_zone_info = sa.Table(
        'fw_security_zone_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('ifnames', sa.String(attr.INPUT_MAX_LEN), nullable=True),
        sa.Column('priority', sa.String(attr.TTL_LEN), nullable=False),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_addrobj_info = sa.Table(
        'fw_addrobj_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('ip', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('expip', sa.String(attr.UUID_LEN), nullable=True),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_netservices_info = sa.Table(
        'fw_netservices_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('proto', sa.String(attr.NAME_MAX_LEN), nullable=True),
        sa.Column('port', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_snataddrpool_info = sa.Table(
        'fw_snataddrpool_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('ipstart', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('ipend', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('slotip', sa.String(attr.STATUS_LEN), nullable=False),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_snat_info = sa.Table(
        'fw_snat_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('outifname', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('srcipobjname', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('dstipobjname', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('wanippoolname', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_dnat_info = sa.Table(
        'fw_dnat_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('inifname', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('wanip', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('wantcpports', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('wanudpports', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('lanipstart', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('lanipend', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('lanport', sa.String(attr.TTL_LEN), nullable=False),
        sa.Column('slot', sa.String(attr.STATUS_LEN), nullable=False),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_packetfilter_info = sa.Table(
        'fw_packetfilter_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('srczonename', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('dstzonename', sa.String(attr.NAME_MAX_LEN),
                  nullable=False),
        sa.Column('srcipobjnames', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('dstipobjnames', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('servicenames', sa.String(attr.NAME_MAX_LEN),
                  nullable=True),
        sa.Column('action', sa.String(attr.STATUS_LEN), nullable=True),
        sa.Column('log', sa.String(attr.STATUS_LEN), nullable=True),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')
    fw_staticnat_info = sa.Table(
        'fw_staticnat_info', meta,
        sa.Column('id', sa.String(attr.UUID_LEN),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('ifname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('lanip', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('wanip', sa.String(attr.IP_LEN), nullable=False),
        sa.Column('slot', sa.String(attr.STATUS_LEN), nullable=False),
        sa.Column('vfwname', sa.String(attr.NAME_MAX_LEN), nullable=False),
        sa.Column('vfw_id', sa.String(attr.UUID_LEN), nullable=False),
        sa.Column('operation_fro', sa.String(attr.UUID_LEN),
                  nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        mysql_engine='InnoDB', mysql_charset='utf8')

    tables = [dns_zone_info, nca_agent_info, nca_vres_info,
              nca_operation_history, dns_rrs_info, fw_vlan_info, fw_vfw_info,
              fw_security_zone_info, fw_addrobj_info, fw_netservices_info,
              fw_snataddrpool_info, fw_snat_info, fw_dnat_info,
              fw_packetfilter_info, fw_staticnat_info, fw_vrf_info]
    for table in tables:
        try:
            if not table.exists():
                table.create()
        except Exception:
            LOG.info(repr(table))
            LOG.exception(_LE('Exception while creating table.'))
            raise

    # View joining vres and agent info for available agents / live vres rows.
    columns = [nca_vres_info.c.id.label('id'),
               nca_agent_info.c.id.label('agent_id'),
               nca_agent_info.c.agent_ip.label('agent_ip'),
               nca_agent_info.c.agent_nat_ip.label('agent_nat_ip'),
               nca_vres_info.c.id.label('vres_id'),
               nca_vres_info.c.tenant_id.label('tenant_id'),
               nca_agent_info.c.dc_name.label('dc_name'),
               nca_agent_info.c.network_zone.label('network_zone'),
               nca_vres_info.c.vres_name.label('vres_name'),
               nca_agent_info.c.agent_type.label('agent_type'),
               nca_agent_info.c.deleted_at.label('deleted_at'),
               nca_agent_info.c.deleted.label('deleted')]
    # BUG FIX: the original used `column is True` / `column is False`
    # (identity tests against Column objects, which are always False) and
    # chained the clauses with Python `and`, which returns a single operand
    # instead of building a SQL conjunction.  Use proper column comparisons
    # combined with sa.and_() so the view's WHERE clause is what was intended.
    condition1 = (nca_agent_info.c.id == nca_vres_info.c.agent_id)
    condition2 = (nca_agent_info.c.availiable == True)  # noqa: E712
    condition3 = (nca_vres_info.c.deleted == False)  # noqa: E712
    vres_agent_view = CreateView(
        'vres_agent_view',
        select(columns).where(sa.and_(condition1, condition2, condition3)))
    engine.execute(vres_agent_view)

    # Cascade the soft delete of a vFW row to every dependent fw_* table.
    vfw_trigger = sa.DDL('''\
CREATE TRIGGER cascadeDel_on_vfw AFTER UPDATE ON fw_vfw_info
FOR EACH ROW
BEGIN
UPDATE fw_dnat_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
UPDATE fw_staticnat_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
UPDATE fw_snat_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
UPDATE fw_addrobj_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
UPDATE fw_snataddrpool_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
UPDATE fw_netservices_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
UPDATE fw_security_zone_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
UPDATE fw_packetfilter_info set deleted_at=now(), deleted=1 WHERE (vfw_id = old.id) and (old.deleted = 0) and (new.deleted = 1);
END;''')
    engine.execute(vfw_trigger)

    # Re-reflect the freshly created tables before wiring up foreign keys.
    nca_agent_info_table = sa.Table('nca_agent_info', meta, autoload=True)
    nca_vres_info_table = sa.Table('nca_vres_info', meta, autoload=True)
    nca_operation_history_table = sa.Table('nca_operation_history', meta,
                                           autoload=True)
    dns_zone_info_table = sa.Table('dns_zone_info', meta, autoload=True)
    dns_rrs_info_table = sa.Table('dns_rrs_info', meta, autoload=True)
    fw_vlan_info_table = sa.Table('fw_vlan_info', meta, autoload=True)
    fw_vfw_info_table = sa.Table('fw_vfw_info', meta, autoload=True)
    fw_security_zone_info_table = sa.Table('fw_security_zone_info', meta,
                                           autoload=True)
    fw_addrobj_info_table = sa.Table('fw_addrobj_info', meta, autoload=True)
    fw_netservices_info_table = sa.Table('fw_netservices_info', meta,
                                         autoload=True)
    fw_snataddrpool_info_table = sa.Table('fw_snataddrpool_info', meta,
                                          autoload=True)
    fw_snat_info_table = sa.Table('fw_snat_info', meta, autoload=True)
    fw_dnat_info_table = sa.Table('fw_dnat_info', meta, autoload=True)
    fw_packetfilter_info_table = sa.Table('fw_packetfilter_info', meta,
                                          autoload=True)
    fw_staticnat_info_table = sa.Table('fw_staticnat_info', meta,
                                       autoload=True)
    fw_vrf_info_table = sa.Table('fw_vrf_info', meta, autoload=True)

    # (child columns, referenced columns) pairs, created in the same order
    # as the original one-constraint-at-a-time code.
    fk_pairs = [
        ([nca_vres_info_table.c.agent_id], [nca_agent_info_table.c.id]),
        ([nca_operation_history_table.c.config_id],
         [nca_vres_info_table.c.id]),
        ([dns_rrs_info_table.c.zone_id], [dns_zone_info_table.c.id]),
        ([dns_zone_info_table.c.vres_id], [nca_vres_info_table.c.vres_id]),
        ([fw_vlan_info_table.c.vres_id], [nca_vres_info_table.c.id]),
        ([fw_vrf_info_table.c.vres_id], [nca_vres_info_table.c.id]),
        ([fw_vfw_info_table.c.vres_id], [nca_vres_info_table.c.id]),
        ([fw_security_zone_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
        ([fw_addrobj_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
        ([fw_netservices_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
        ([fw_snataddrpool_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
        ([fw_snat_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
        ([fw_dnat_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
        ([fw_packetfilter_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
        ([fw_staticnat_info_table.c.vfw_id], [fw_vfw_info_table.c.id]),
    ]
    for child_cols, ref_cols in fk_pairs:
        ForeignKeyConstraint(child_cols, ref_cols).create()
def upgrade(migrate_engine):
    """Migrate the authentication schema to the default permission/ACL set.

    ``r_meta`` must be reflected from the current database whereas
    ``metadata`` is the currently constructed metadata for the migration.

    Steps, in order:
      1. delete permissions not in ``default_permissions`` and create the
         missing ones;
      2. re-apply ``default_acls`` (grant/revoke role permissions);
      3. create the ``servers`` table if it does not exist yet;
      4. drop and recreate the ``authentication_acl`` foreign keys with
         CASCADE semantics.

    :param migrate_engine: engine supplied by sqlalchemy-migrate; bind it,
        never create your own.
    """

    # Declarative models are defined locally so the migration is frozen in
    # time and does not depend on the (possibly newer) application models.
    class Server(Base):
        __tablename__ = 'servers'
        id = Column(Integer, primary_key=True)
        name = Column(Unicode(50), unique=True)
        url = Column(Unicode(255))
        help_mail = Column(Unicode(255))

        def __init__(self, name=None, url=None, help_mail=None):
            self.name = name
            self.url = url
            self.help_mail = help_mail

    class Acl(Base):
        __tablename__ = 'authentication_acl'
        # use_alter=True: FKs are added via ALTER TABLE after both endpoint
        # tables exist, avoiding creation-order problems.
        role = Column(Integer,
                      ForeignKey("authentication_role.id",
                                 name="fk_acl_role",
                                 use_alter=True,
                                 ondelete="CASCADE",
                                 onupdate="CASCADE"),
                      primary_key=True)
        permission = Column(Integer,
                            ForeignKey("authentication_permission.id",
                                       name="fk_acl_permission",
                                       use_alter=True,
                                       ondelete="CASCADE",
                                       onupdate="CASCADE"),
                            primary_key=True)

    class Role(Base):
        __tablename__ = 'authentication_role'
        id = Column(Integer, primary_key=True)
        name = Column(Unicode(50), unique=True)
        description = Column(Unicode(2500))
        global_permissions = relationship(
            "Permission", uselist=True,
            secondary="authentication_acl",
            secondaryjoin="Acl.permission==Permission.id")

        def __init__(self, id=None, name=None, description=None,
                     global_permissions=None):
            self.id = id
            self.description = description
            self.name = name
            if global_permissions is not None:
                self.global_permissions.extend(global_permissions)

    class Permission(Base):
        __tablename__ = 'authentication_permission'
        id = Column(Integer, primary_key=True)
        name = Column(Unicode(50), unique=True)
        description = Column(Unicode(2500))
        # NOTE(review): secondaryjoin compares Acl.permission to Role.id;
        # this looks like it should be "Acl.role==Role.id" — left as-is to
        # preserve behavior, but worth confirming against the app models.
        roles = relationship(
            "Role", uselist=True,
            secondary="authentication_acl",
            secondaryjoin="Acl.permission==Role.id")

        def __init__(self, id=None, name=None, description=None, roles=None):
            self.id = id
            self.name = name
            self.description = description
            if roles is not None:
                # BUGFIX: was self.roles.extends(roles) -> AttributeError.
                self.roles.extend(roles)

    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata.
    debug = True
    session.configure(bind=migrate_engine)
    migrate_engine.echo = debug
    metadata.bind = migrate_engine

    def commit():
        """Flush the ORM session and force-commit on both engines.

        ``r_meta`` is resolved lazily from the enclosing scope: it is only
        assigned below, before the first call to this helper.
        """
        session.commit()
        r_meta.bind.execute('COMMIT;')
        metadata.bind.execute('COMMIT;')

    # --- Reload all permissions -------------------------------------------
    real_perms = Permission.all()
    # Iterate over a copy: deleting while iterating the live list is unsafe.
    for p in real_perms[:]:
        if p.name not in default_permissions:
            session.delete(p)
            session.commit()
    nreal_perms = [p.name for p in real_perms]
    for p in default_permissions:
        if p not in nreal_perms:
            perm = Permission(name=p, description=default_permissions[p])
            session.add(perm)
            session.commit()

    # --- Reload all ACLs --------------------------------------------------
    for p in default_acls:
        try:
            perm = Permission.by_name(p)
        except Exception:
            # BUGFIX: previously `pass`, which then used an unbound or stale
            # `perm` below; skip unknown permissions instead.
            continue
        roles = default_acls[p]
        for role in roles:
            access = roles[role]
            orole = Role.by_name(role)
            if access:
                if perm not in orole.global_permissions:
                    orole.global_permissions.append(perm)
                    session.add(orole)
                    session.commit()
            else:
                if perm in orole.global_permissions:
                    orole.global_permissions.remove(perm)
                    session.add(orole)
                    session.commit()
    session.flush()

    # --- Create remote servers table --------------------------------------
    # Reflect the live database so we only create tables that are missing.
    r_meta = s.MetaData(migrate_engine, True)
    Base.metadata.bind = migrate_engine
    for t in [Server.__table__]:
        if t.name not in r_meta.tables:
            t.create(migrate_engine)

    # --- Recreate all authentication_acl foreign keys ---------------------
    fks = []
    commit()
    for md in [Acl]:
        t = md.__table__
        rt = r_meta.tables[t.name]
        rt_constraints = [a for a in rt.foreign_keys]
        # deepcopy: building migrate ForeignKeyConstraints mutates the model,
        # which would break iteration over the live constraint set.
        for cs in deepcopy(t.foreign_keys):
            if cs.__class__.__name__ == 'ForeignKey':
                table, column = cs.target_fullname.split('.')
                target = [r_meta.tables[table].c[column]]
                parent = [r_meta.tables[cs.parent.table.name].c[cs.parent.name]]
                fk = ForeignKeyConstraint(columns=parent, refcolumns=target)
                fk.use_alter = cs.use_alter
                fk.ondelete = 'CASCADE'
                fk.onupdate = 'CASCADE'
                fk.name = cs.name
                fks.append(fk)
                # Drop only constraints that actually exist in the reflected
                # schema (matched by name or by target).
                if (cs.name in [a.name for a in rt_constraints]
                        or cs.target_fullname in
                        [a.target_fullname for a in rt_constraints]):
                    fk.drop(migrate_engine)
    commit()
    for fk in fks:
        fk.create(migrate_engine)
    commit()
displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id), fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);""" )) session.commit() # Delete old columns, no longer needed for col in DEL_COLUMNS: drop_column(col, table_algo) # Create constraint only after rows are being populated table_algo = meta.tables["ALGORITHMS"] fk_constraint = ForeignKeyConstraint(["fk_category"], ["ALGORITHM_CATEGORIES.id"], ondelete="CASCADE", table=table_algo) fk_constraint.create() # Drop old table session = SA_SESSIONMAKER() session.execute(text("""DROP TABLE "ALGORITHM_GROUPS";""")) session.commit() except Exception, excep: LOGGER.exception(excep) finally: session.close() session = SA_SESSIONMAKER() try: session.execute( text("""ALTER TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS"