def upgrade(migrate_engine):
    """Purge FK-violating rows, then add the foreign keys listed in TABLES.

    SQLite cannot ALTER TABLE ... ADD CONSTRAINT, so the whole migration
    is skipped there.
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    for table_name, ref, child in TABLES:
        table = Table(table_name, meta, autoload=True)
        column_name, ref_table_name, ref_column_name = ref
        column = table.c[column_name]
        ref_table = Table(ref_table_name, meta, autoload=True)
        ref_column = ref_table.c[ref_column_name]
        # Legitimate FK target values: non-NULL entries in the ref column.
        valid_refs = select([ref_column]).where(ref_column != None)
        if child:
            # Dump and cleanup rows in child table first, so the parent
            # delete does not leave the child dangling.
            child_table_name, child_column_name, child_ref_column_name = child
            child_table = Table(child_table_name, meta, autoload=True)
            orphan_parents = select([table.c[child_ref_column_name]]).where(
                ~column.in_(valid_refs))
            dump_cleanup_rows(
                migrate_engine, meta, child_table,
                child_table.c[child_column_name].in_(orphan_parents))
        dump_cleanup_rows(migrate_engine, meta, table, ~column.in_(valid_refs))
        params = {'columns': [column], 'refcolumns': [ref_column]}
        if migrate_engine.name == 'mysql':
            # MySQL needs a stable, explicit constraint name.
            params['name'] = "_".join(('fk', table_name, column_name))
        ForeignKeyConstraint(**params).create()
def upgrade(migrate_engine):
    """Add a folder_id column to the request table plus an FK constraint
    to library_folder.id (the FK is skipped on SQLite).
    """
    metadata.bind = migrate_engine
    display_migration_details()
    # Load existing tables
    metadata.reflect()
    # Create the folder_id column
    try:
        Request_table = Table("request", metadata, autoload=True)
    except NoSuchTableError:
        Request_table = None
        log.debug("Failed loading table request")
    if Request_table is not None:
        try:
            col = Column("folder_id", Integer, index=True)
            col.create(Request_table, index_name='ix_request_folder_id')
            assert col is Request_table.c.folder_id
        except Exception as e:
            # BUG FIX: "except Exception, e" is Python-2-only syntax; the
            # "as e" form works on both Python 2.6+ and Python 3.
            log.debug("Adding column 'folder_id' to request table failed: %s" % (str(e)))
    try:
        LibraryFolder_table = Table("library_folder", metadata, autoload=True)
    except NoSuchTableError:
        LibraryFolder_table = None
        log.debug("Failed loading table library_folder")
    # Add 1 foreign key constraint to the library_folder table
    # (SQLite cannot add an FK to an existing table).
    if migrate_engine.name != 'sqlite' and Request_table is not None and LibraryFolder_table is not None:
        try:
            cons = ForeignKeyConstraint([Request_table.c.folder_id],
                                        [LibraryFolder_table.c.id],
                                        name='request_folder_id_fk')
            # Create the constraint
            cons.create()
        except Exception as e:
            log.debug("Adding foreign key constraint 'request_folder_id_fk' to table 'library_folder' failed: %s" % (str(e)))
def downgrade(migrate_engine):
    """Drop the quality_of_service_specs table and the
    volume_types.qos_specs_id column that references it.

    (The previous docstring mentioned an unrelated volume_type_rate_limit
    table; the body only touches qos specs.)
    """
    meta = MetaData()
    meta.bind = migrate_engine
    qos_specs = Table('quality_of_service_specs', meta, autoload=True)
    if migrate_engine.name == 'mysql':
        # NOTE(alanmeadows): MySQL cannot drop column qos_specs_id
        # until the foreign key volume_types_ibfk_1 is removed. We
        # remove the foreign key first, and then we drop the column.
        table = Table('volume_types', meta, autoload=True)
        # BUG FIX: the FK target is quality_of_service_specs.id, not
        # volume_types.id — the original reflected 'volume_types' twice.
        params = {'columns': [table.c['qos_specs_id']],
                  'refcolumns': [qos_specs.c['id']],
                  'name': 'volume_types_ibfk_1'}
        fkey = ForeignKeyConstraint(**params)
        fkey.drop()
    volume_types = Table('volume_types', meta, autoload=True)
    qos_specs_id = Column('qos_specs_id', String(36))
    volume_types.drop_column(qos_specs_id)
    qos_specs.drop()
def _uc_rename(migrate_engine, upgrade=True):
    """Swap unique-constraint names between their legacy form and the
    standardized "uniq_<table>0<col>0..." form.

    On MySQL, tables listed in constraint_names carry a foreign key on
    instance_uuid that must be dropped before the unique constraint can
    change and recreated afterwards.
    """
    UC_DATA.update(UC_SPEC_DB_DATA[migrate_engine.name])
    meta = MetaData(bind=migrate_engine)
    for table_name in UC_DATA:
        t = Table(table_name, meta, autoload=True)
        for columns, old_uc_name in UC_DATA[table_name]:
            new_uc_name = "uniq_{0}0{1}".format(table_name, "0".join(columns))
            needs_fk_cycle = (table_name in constraint_names
                              and migrate_engine.name == "mysql")
            if needs_fk_cycle:
                # MySQL will not touch the UC while an FK depends on it.
                instances = Table("instances", meta, autoload=True)
                ForeignKeyConstraint(
                    columns=[t.c.instance_uuid],
                    refcolumns=[instances.c.uuid],
                    name=constraint_names[table_name]
                ).drop(engine=migrate_engine)
            if upgrade:
                src_name, dst_name = old_uc_name, new_uc_name
            else:
                src_name, dst_name = new_uc_name, old_uc_name
            utils.drop_unique_constraint(migrate_engine, table_name,
                                         src_name, *columns)
            UniqueConstraint(*columns, table=t, name=dst_name).create()
            if needs_fk_cycle:
                # Restore the FK that was dropped above.
                ForeignKeyConstraint(
                    columns=[t.c.instance_uuid],
                    refcolumns=[instances.c.uuid],
                    name=constraint_names[table_name]
                ).create(engine=migrate_engine)
def upgrade(migrate_engine):
    """Make compute_nodes.service_id nullable and remove its FK (and the
    FK's implicit index) pointing at services.id.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)
    services = Table('services', meta, autoload=True)
    _correct_sqlite_unique_constraints(migrate_engine, compute_nodes)
    # Relax NOT NULL on both the live and the shadow table.
    compute_nodes.c.service_id.alter(nullable=True)
    shadow_compute_nodes.c.service_id.alter(nullable=True)
    for fk in compute_nodes.foreign_keys:
        if fk.column == services.c.id:
            # Delete the FK
            ForeignKeyConstraint(columns=[compute_nodes.c.service_id],
                                 refcolumns=[services.c.id],
                                 name=fk.name).drop()
            break
    for index in compute_nodes.indexes:
        if 'service_id' in index.columns:
            # Delete the nested index which was created by the FK
            index.drop()
            break
def upgrade(migrate_engine):
    """Add port.subnet_id, backfill it by matching each port's IP against
    the subnet CIDRs, then enforce NOT NULL plus an FK to subnet.id.
    """
    meta = MetaData(bind=migrate_engine)
    port_table = Table('port', meta, autoload=True)
    subnet_table = Table('subnet', meta, autoload=True)
    port_table.create_column(Column('subnet_id', Integer))
    ports = port_table.select().execute()
    # Map each subnet's IPv4 network to its id for containment tests.
    cidr_to_id = {}
    for net in subnet_table.select().execute():
        network = netaddr.IPNetwork('%s/%s' % (net.ip, net.mask), version=4)
        cidr_to_id[network] = net.id
    for port in ports:
        hits = [subnet_id for network, subnet_id in cidr_to_id.items()
                if netaddr.IPAddress(port.ip) in network]
        if len(hits) != 1:
            # NOTE: also raised when zero subnets match.
            raise RuntimeError('More than one subnet matches %s' % port.ip)
        port_table.update().where(port_table.c.id == port.id).\
            values(subnet_id=hits[0]).execute()
    port_table.c.subnet_id.alter(nullable=False)
    ForeignKeyConstraint(columns=[port_table.c.subnet_id],
                         refcolumns=[subnet_table.c.id]).create()
def downgrade(migrate_engine):
    """Stash FK-violating rows into dump027_<table> side tables, then
    recreate the foreign keys listed in INDEXES (no-op on SQLite).
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    load_tables = dict((name, Table(name, meta, autoload=True))
                       for name in TABLES)
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        # Save data that conflicted with FK into a dump table.
        dump_columns = [column.copy() for column in table.columns]
        table_dump = Table('dump027_' + table_name, meta, *dump_columns)
        table_dump.create()
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            valid = select([getattr(ref_table.c, ref_column_name)])
            orphaned = ~getattr(table.c, column).in_(valid)
            # Copy orphans aside, then delete them from the live table.
            migrate_engine.execute(utils.InsertFromSelect(
                table_dump, table.select().where(orphaned)))
            migrate_engine.execute(table.delete().where(orphaned))
            params = {'columns': [table.c[column]],
                      'refcolumns': [ref_table.c[ref_column_name]]}
            if migrate_engine.name == 'mysql':
                params['name'] = "_".join(('fk', table_name, column))
            ForeignKeyConstraint(**params).create()
def downgrade(migrate_engine):
    """Drop the foreign keys added by the upgrade and restore rows that
    were stashed in the dump_<table> side tables (no-op on SQLite).
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    load_tables = dict((name, Table(name, meta, autoload=True))
                       for name in TABLES)
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            params = {'columns': [table.c[column]],
                      'refcolumns': [ref_table.c[ref_column_name]]}
            if migrate_engine.name == 'mysql':
                # MySQL FKs were created with an explicit name.
                params['name'] = "_".join(('fk', table_name, column))
            with migrate_engine.begin():
                ForeignKeyConstraint(**params).drop()
        with migrate_engine.begin():
            # Restore data that had been dropped.
            table_dump = Table('dump_' + table_name, meta, autoload=True)
            migrate_engine.execute(
                utils.InsertFromSelect(table, table_dump.select()))
            table_dump.drop()
def upgrade(migrate_engine):
    """Move FK-violating rows into dump_<table> side tables, then create
    the foreign keys listed in INDEXES (no-op on SQLite).
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    load_tables = dict((name, Table(name, meta, autoload=True))
                       for name in TABLES)
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        # Save data that conflicted with FK.
        table_dump = Table('dump_' + table_name, meta,
                           *[col.copy() for col in table.columns])
        table_dump.create()
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            valid = select([getattr(ref_table.c, ref_column_name)])
            orphaned = ~getattr(table.c, column).in_(valid)
            # Copy orphans into the dump table before deleting them.
            migrate_engine.execute(utils.InsertFromSelect(
                table_dump, table.select().where(orphaned)))
            migrate_engine.execute(table.delete().where(orphaned))
            params = {'columns': [table.c[column]],
                      'refcolumns': [ref_table.c[ref_column_name]]}
            if migrate_engine.name == 'mysql':
                params['name'] = "_".join(('fk', table_name, column))
            ForeignKeyConstraint(**params).create()
def downgrade(migrate_engine):
    """Re-add the fixed_ips/floating_ips foreign keys removed on upgrade."""
    # Operations to reverse the above upgrade go here.
    meta.bind = migrate_engine
    dialect = migrate_engine.url.get_dialect().name
    # SQLite cannot add FKs to existing tables; nothing to reverse there.
    if dialect.startswith('sqlite'):
        return
    instances = Table('instances', meta, autoload=True)
    networks = Table('networks', meta, autoload=True)
    vifs = Table('virtual_interfaces', meta, autoload=True)
    fixed_ips = Table('fixed_ips', meta, autoload=True)
    floating_ips = Table('floating_ips', meta, autoload=True)
    fk_pairs = (
        (fixed_ips.c.network_id, networks.c.id),
        (fixed_ips.c.virtual_interface_id, vifs.c.id),
        (fixed_ips.c.instance_id, instances.c.id),
        (floating_ips.c.fixed_ip_id, fixed_ips.c.id),
    )
    try:
        for column, ref_column in fk_pairs:
            ForeignKeyConstraint(columns=[column],
                                 refcolumns=[ref_column]).create()
    except Exception:
        logging.error(_("foreign key constraint couldn't be added"))
        raise
def upgrade(migrate_engine):
    """Replace virtual_interfaces.instance_id with instance_uuid, keyed to
    instances.uuid instead of instances.id.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    virtual_interfaces = Table('virtual_interfaces', meta, autoload=True)
    instances = Table('instances', meta, autoload=True)
    uuid_column = Column('instance_uuid', String(36))
    uuid_column.create(virtual_interfaces)
    # Backfill the new column; undo the column add if the copy fails.
    try:
        virtual_interfaces.update().values(
            instance_uuid=select(
                [instances.c.uuid],
                instances.c.id == virtual_interfaces.c.instance_id)).execute()
    except Exception:
        uuid_column.drop()
        raise
    # Drop the old FK on instance_id (if any) before removing the column.
    fkeys = list(virtual_interfaces.c.instance_id.foreign_keys)
    if fkeys:
        try:
            ForeignKeyConstraint(
                columns=[virtual_interfaces.c.instance_id],
                refcolumns=[instances.c.id],
                name=fkeys[0].constraint.name).drop()
        except Exception:
            LOG.error(_("foreign key constraint couldn't be removed"))
            raise
    virtual_interfaces.c.instance_id.drop()
    try:
        ForeignKeyConstraint(
            columns=[virtual_interfaces.c.instance_uuid],
            refcolumns=[instances.c.uuid]).create()
    except Exception:
        LOG.error(_("foreign key constraint couldn't be created"))
        raise
def downgrade(migrate_engine):
    """Drop the FKs added in upgrade and restore the dumped orphan rows."""
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    for table_name, ref, child in TABLES:
        table = Table(table_name, meta, autoload=True)
        column_name, ref_table_name, ref_column_name = ref
        ref_table = Table(ref_table_name, meta, autoload=True)
        params = {'columns': [table.c[column_name]],
                  'refcolumns': [ref_table.c[ref_column_name]]}
        if migrate_engine.name == 'mysql':
            params['name'] = "_".join(('fk', table_name, column_name))
        with migrate_engine.begin():
            ForeignKeyConstraint(**params).drop()
        with migrate_engine.begin():
            restore_rows(migrate_engine, meta, table_name)
        # compute_node_stats has a missing foreign key and is a child of
        # of compute_nodes. Don't bother processing it as a child since
        # only want to restore the dump once
        if child and table_name != 'compute_nodes':
            child_table_name, _child_column, _child_ref_column = child
            with migrate_engine.begin():
                restore_rows(migrate_engine, meta, child_table_name)
def downgrade(migrate_engine):
    """Restore fixed_ips.instance_id (FK to instances.id) and drop the
    instance_uuid column introduced by the upgrade.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    fixed_ips = Table('fixed_ips', meta, autoload=True)
    instances = Table('instances', meta, autoload=True)
    id_column = Column('instance_id', Integer, ForeignKey('instances.id'))
    id_column.create(fixed_ips)
    # Remove the FK on instance_uuid before that column is dropped.
    fkeys = list(fixed_ips.c.instance_uuid.foreign_keys)
    if fkeys:
        try:
            ForeignKeyConstraint(columns=[fixed_ips.c.instance_uuid],
                                 refcolumns=[instances.c.uuid],
                                 name=fkeys[0].constraint.name).drop()
        except Exception:
            LOG.error(_("foreign key constraint couldn't be removed"))
            raise
    # Backfill instance_id from the uuid mapping; undo the add on failure.
    try:
        fixed_ips.update().values(
            instance_id=select(
                [instances.c.id],
                instances.c.uuid == fixed_ips.c.instance_uuid)).execute()
    except Exception:
        id_column.drop()
        raise
    fixed_ips.c.instance_uuid.drop()
    try:
        ForeignKeyConstraint(columns=[fixed_ips.c.instance_id],
                             refcolumns=[instances.c.id]).create()
    except Exception:
        LOG.error(_("foreign key constraint couldn't be created"))
        raise
def upgrade(migrate_engine):
    """Clean out FK-violating rows (dumping them first), then add the
    foreign keys described by TABLES.  SQLite is skipped — it cannot add
    constraints to existing tables.
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    for table_name, ref, child in TABLES:
        table = Table(table_name, meta, autoload=True)
        column_name, ref_table_name, ref_column_name = ref
        column = table.c[column_name]
        ref_table = Table(ref_table_name, meta, autoload=True)
        ref_column = ref_table.c[ref_column_name]
        # Non-NULL values of the referenced column are the valid targets.
        valid = select([ref_column]).where(ref_column != None)
        if child:
            # Dump and cleanup rows in child table first
            child_table_name, child_column_name, child_ref_column_name = child
            child_table = Table(child_table_name, meta, autoload=True)
            parent_orphans = select([table.c[child_ref_column_name]]).where(
                ~column.in_(valid))
            dump_cleanup_rows(
                migrate_engine, meta, child_table,
                child_table.c[child_column_name].in_(parent_orphans))
        dump_cleanup_rows(migrate_engine, meta, table, ~column.in_(valid))
        params = {'columns': [column], 'refcolumns': [ref_column]}
        if migrate_engine.name == 'mysql':
            params['name'] = "_".join(('fk', table_name, column_name))
        ForeignKeyConstraint(**params).create()
def upgrade(migrate_engine):
    """Add a mandatory subnet_id FK column to port, derived by matching
    each port's IP address against the subnet CIDRs.
    """
    meta = MetaData(bind=migrate_engine)
    port_table = Table('port', meta, autoload=True)
    subnet_table = Table('subnet', meta, autoload=True)
    port_table.create_column(Column('subnet_id', Integer))
    ports = port_table.select().execute()
    nets = {}
    for net in subnet_table.select().execute():
        cidr = netaddr.IPNetwork('%s/%s' % (net.ip, net.mask), version=4)
        nets[cidr] = net.id
    for port in ports:
        hits = [sid for cidr, sid in nets.items()
                if netaddr.IPAddress(port.ip) in cidr]
        if len(hits) != 1:
            # Raised for zero matches as well as ambiguous ones.
            raise RuntimeError('More than one subnet matches %s' % port.ip)
        port_table.update().where(port_table.c.id == port.id).\
            values(subnet_id=hits[0]).execute()
    port_table.c.subnet_id.alter(nullable=False)
    ForeignKeyConstraint(columns=[port_table.c.subnet_id],
                         refcolumns=[subnet_table.c.id]).create()
def upgrade(migrate_engine):
    """Add galaxy_user.form_values_id and an FK to form_values.id
    (FK skipped on SQLite).
    """
    metadata.bind = migrate_engine
    display_migration_details()
    # Load existing tables
    metadata.reflect()
    try:
        User_table = Table("galaxy_user", metadata, autoload=True)
    except NoSuchTableError:
        User_table = None
        log.debug("Failed loading table galaxy_user")
    if User_table is not None:
        try:
            col = Column("form_values_id", Integer, index=True)
            col.create(User_table, index_name='ix_user_form_values_id')
            assert col is User_table.c.form_values_id
        except Exception as e:
            # BUG FIX: "except Exception, e" is Python-2-only syntax; the
            # "as e" form works on both Python 2.6+ and Python 3.
            log.debug("Adding column 'form_values_id' to galaxy_user table failed: %s" % (str(e)))
    try:
        FormValues_table = Table("form_values", metadata, autoload=True)
    except NoSuchTableError:
        FormValues_table = None
        log.debug("Failed loading table form_values")
    if migrate_engine.name != 'sqlite':
        # Add 1 foreign key constraint to the form_values table
        if User_table is not None and FormValues_table is not None:
            try:
                cons = ForeignKeyConstraint([User_table.c.form_values_id],
                                            [FormValues_table.c.id],
                                            name='user_form_values_id_fk')
                # Create the constraint
                cons.create()
            except Exception as e:
                log.debug("Adding foreign key constraint 'user_form_values_id_fk' to table 'galaxy_user' failed: %s" % (str(e)))
def downgrade(migrate_engine):
    """Drop the quota_classes, quota_usages and reservations tables.

    MySQL and PostgreSQL cannot drop quota_usages while reservations
    still holds a foreign key to it, so that FK is removed first.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    fk_name = None
    if migrate_engine.name == 'mysql':
        fk_name = 'reservations_ibfk_1'
    elif migrate_engine.name == 'postgresql':
        fk_name = 'reservations_usage_id_fkey'
    if fk_name:
        table = Table('reservations', meta, autoload=True)
        # BUG FIX: the FK goes from reservations.usage_id to
        # quota_usages.id; the original reflected 'reservations' again
        # as the referenced table.
        ref_table = Table('quota_usages', meta, autoload=True)
        params = {'columns': [table.c['usage_id']],
                  'refcolumns': [ref_table.c['id']],
                  'name': fk_name}
        fkey = ForeignKeyConstraint(**params)
        fkey.drop()
    quota_classes = Table('quota_classes', meta, autoload=True)
    quota_classes.drop()
    quota_usages = Table('quota_usages', meta, autoload=True)
    quota_usages.drop()
    reservations = Table('reservations', meta, autoload=True)
    reservations.drop()
def upgrade(migrate_engine):
    """Replace the direct call->tariff FK with an applied_config join table
    that records per-interval charging details.
    """
    metadata = MetaData(bind=migrate_engine)
    call_records = Table('call_records', metadata, autoload=True)
    tariff_configuration = Table('tariff_configuration', metadata,
                                 autoload=True)
    # The single applied_tariff_config FK is superseded by the join table.
    ForeignKeyConstraint([call_records.c.applied_tariff_config],
                         [tariff_configuration.c.id]).drop()
    applied_config = Table(
        'applied_config', metadata,
        Column('call_id', BigInteger, ForeignKey(call_records.c.id),
               primary_key=True),
        Column('config_id', BigInteger, ForeignKey(tariff_configuration.c.id),
               primary_key=True),
        Column('order', Integer, nullable=False, primary_key=True),
        Column('start_at', DateTime, nullable=False),
        Column('end_at', DateTime, nullable=False),
        Column('standard_charge', DECIMAL(8, 3), nullable=False),
        Column('call_time_charge', DECIMAL(8, 3), nullable=False))
    applied_config.create()
def downgrade(migrate_engine):
    """Drop the metadata_file -> library_dataset_dataset_association FK."""
    metadata.bind = migrate_engine
    # Load existing tables
    metadata.reflect()
    # NOTE: all new data added in the upgrade method is eliminated here via table drops
    # Drop 1 foreign key constraint from the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    try:
        LibraryDatasetDatasetAssociation_table = Table("library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint([MetadataFile_table.c.lda_id],
                                        [LibraryDatasetDatasetAssociation_table.c.id],
                                        name='metadata_file_lda_id_fkey')
            # Drop the constraint
            cons.drop()
        except Exception as e:
            # BUG FIX: "except Exception, e" is Python-2-only syntax; the
            # "as e" form works on both Python 2.6+ and Python 3.
            log.debug("Dropping foreign key constraint 'metadata_file_lda_id_fkey' from table 'metadata_file' failed: %s" % (str(e)))
def downgrade(migrate_engine):
    """Rebuild dns_domains as a latin1/InnoDB table, preserving all rows.

    The table is renamed aside, recreated with the old definition, the
    data copied row by row, and the renamed original dropped.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    dns_domains_old = Table('dns_domains', meta, autoload=True)
    dns_domains_old.rename(name='dns_domains_old')
    # NOTE(dprince): manually remove pkey/fkey for postgres
    if migrate_engine.name == "postgresql":
        sql = """ALTER TABLE ONLY dns_domains_old DROP CONSTRAINT dns_domains_pkey; ALTER TABLE ONLY dns_domains_old DROP CONSTRAINT dns_domains_project_id_fkey;"""
        migrate_engine.execute(sql)
    # Bind new metadata to avoid issues after the rename
    meta = MetaData()
    meta.bind = migrate_engine
    dns_domains_new = Table(
        'dns_domains', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('deleted', Boolean),
        Column('domain', String(length=512), primary_key=True,
               nullable=False),
        Column('scope', String(length=255)),
        Column('availability_zone', String(length=255)),
        Column('project_id', String(length=255)),
        mysql_engine='InnoDB',
        mysql_charset='latin1',
    )
    dns_domains_new.create()
    dns_domains_old = Table('dns_domains_old', meta, autoload=True)
    # Copy every row into the freshly created table.
    for rec in list(dns_domains_old.select().execute()):
        dns_domains_new.insert().execute({
            'created_at': rec['created_at'],
            'updated_at': rec['updated_at'],
            'deleted_at': rec['deleted_at'],
            'deleted': rec['deleted'],
            'domain': rec['domain'],
            'scope': rec['scope'],
            'availability_zone': rec['availability_zone'],
            'project_id': rec['project_id'],
        })
    dns_domains_old.drop()
    # NOTE(dprince): We can't easily add the MySQL Fkey on the downgrade
    # because projects is 'utf8' where dns_domains is 'latin1'.
    if migrate_engine.name != "mysql":
        projects = Table('projects', meta, autoload=True)
        ForeignKeyConstraint(columns=[dns_domains_new.c.project_id],
                             refcolumns=[projects.c.id]).create()
def upgrade(migrate_engine):
    """Create job_to_output_library_dataset, add job.library_folder_id with
    an FK to library_folder (skipped on SQLite), and index dataset.state.
    """
    metadata.bind = migrate_engine
    print(__doc__)
    # Load existing tables
    metadata.reflect()
    # Create the job_to_output_library_dataset table
    try:
        JobToOutputLibraryDatasetAssociation_table.create()
    except Exception:
        log.exception("Creating job_to_output_library_dataset table failed.")
    # Create the library_folder_id column
    try:
        Job_table = Table("job", metadata, autoload=True)
    except NoSuchTableError:
        Job_table = None
        log.debug("Failed loading table job")
    if Job_table is not None:
        try:
            col = Column("library_folder_id", Integer, index=True)
            col.create(Job_table, index_name='ix_job_library_folder_id')
            assert col is Job_table.c.library_folder_id
        except Exception:
            log.exception(
                "Adding column 'library_folder_id' to job table failed.")
    try:
        LibraryFolder_table = Table("library_folder", metadata, autoload=True)
    except NoSuchTableError:
        LibraryFolder_table = None
        log.debug("Failed loading table library_folder")
    # Add 1 foreign key constraint to the job table
    if migrate_engine.name != 'sqlite':
        # Sqlite can't alter-table-add-foreign-key
        if Job_table is not None and LibraryFolder_table is not None:
            try:
                cons = ForeignKeyConstraint(
                    [Job_table.c.library_folder_id],
                    [LibraryFolder_table.c.id],
                    name='job_library_folder_id_fk')
                # Create the constraint
                cons.create()
            except Exception:
                log.exception(
                    "Adding foreign key constraint 'job_library_folder_id_fk' to table 'library_folder' failed."
                )
    # Create the ix_dataset_state index
    try:
        Dataset_table = Table("dataset", metadata, autoload=True)
    except NoSuchTableError:
        Dataset_table = None
        log.debug("Failed loading table dataset")
    if Dataset_table is not None:
        # BUG FIX: previously Dataset_table.c.state was dereferenced even
        # when reflection failed and Dataset_table was None, raising
        # AttributeError instead of the intended graceful skip.
        i = Index("ix_dataset_state", Dataset_table.c.state)
        try:
            i.create()
        except Exception:
            log.exception(
                "Adding index 'ix_dataset_state' to dataset table failed.")
def downgrade(migrate_engine):
    """Drop the storage_pools -> storage_groups FK (no-op on SQLite)."""
    if migrate_engine.name == "sqlite":
        return
    meta = MetaData(bind=migrate_engine)
    storage_pools = Table("storage_pools", meta, autoload=True)
    # BUG FIX: "autolaod" was a typo for "autoload"; SQLAlchemy rejects
    # unknown Table keyword arguments, so the table was never reflected.
    storage_groups = Table("storage_groups", meta, autoload=True)
    params = {"columns": [storage_pools.c.primary_storage_group_id],
              "refcolumns": [storage_groups.c.id]}
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()
def downgrade(migrate_engine):
    """Drop the storage_pools -> storage_groups FK (no-op on SQLite)."""
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    storage_pools = Table('storage_pools', meta, autoload=True)
    # BUG FIX: 'autolaod' was a typo for 'autoload'; SQLAlchemy rejects
    # unknown Table keyword arguments, so the table was never reflected.
    storage_groups = Table('storage_groups', meta, autoload=True)
    params = {'columns': [storage_pools.c.primary_storage_group_id],
              'refcolumns': [storage_groups.c.id]}
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()
def downgrade(migrate_engine):
    """Recreate dns_domains with the latin1/InnoDB definition, copying all
    existing rows through a temporary dns_domains_old table.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    dns_domains_old = Table('dns_domains', meta, autoload=True)
    dns_domains_old.rename(name='dns_domains_old')
    # NOTE(dprince): manually remove pkey/fkey for postgres
    if migrate_engine.name == "postgresql":
        sql = """ALTER TABLE ONLY dns_domains_old DROP CONSTRAINT dns_domains_pkey; ALTER TABLE ONLY dns_domains_old DROP CONSTRAINT dns_domains_project_id_fkey;"""
        migrate_engine.execute(sql)
    # Bind new metadata to avoid issues after the rename
    meta = MetaData()
    meta.bind = migrate_engine
    dns_domains_new = Table(
        'dns_domains', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('deleted', Boolean),
        Column('domain', String(length=512), primary_key=True,
               nullable=False),
        Column('scope', String(length=255)),
        Column('availability_zone', String(length=255)),
        Column('project_id', String(length=255)),
        mysql_engine='InnoDB',
        mysql_charset='latin1',
    )
    dns_domains_new.create()
    dns_domains_old = Table('dns_domains_old', meta, autoload=True)
    # Move every record across to the recreated table.
    for rec in list(dns_domains_old.select().execute()):
        dns_domains_new.insert().execute({
            'created_at': rec['created_at'],
            'updated_at': rec['updated_at'],
            'deleted_at': rec['deleted_at'],
            'deleted': rec['deleted'],
            'domain': rec['domain'],
            'scope': rec['scope'],
            'availability_zone': rec['availability_zone'],
            'project_id': rec['project_id'],
        })
    dns_domains_old.drop()
    # NOTE(dprince): We can't easily add the MySQL Fkey on the downgrade
    # because projects is 'utf8' where dns_domains is 'latin1'.
    if migrate_engine.name != "mysql":
        projects = Table('projects', meta, autoload=True)
        ForeignKeyConstraint(columns=[dns_domains_new.c.project_id],
                             refcolumns=[projects.c.id]).create()
def upgrade(migrate_engine):
    """Drop the user/project FKs and indexes from sourceassoc, switch its
    uniqueness to sample_id alone, drop the user_id/project_id columns,
    and finally drop the now-unused tables.
    """
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((name, sa.Table(name, meta, autoload=True))
                       for name in TABLES)
    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                if (migrate_engine.name == "mysql" and
                        table_name != 'alarm_history'):
                    params['name'] = "_".join(('fk', table_name, column))
                elif (migrate_engine.name == "postgresql" and
                        table_name == "sample"):
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))
                ForeignKeyConstraint(**params).drop()
    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        # Drop the composite indexes that included the user/project columns.
        for idx_name, extra_col in (('idx_su', sourceassoc.c.user_id),
                                    ('idx_sp', sourceassoc.c.project_id)):
            sa.Index(idx_name, sourceassoc.c.source_id,
                     extra_col).drop(bind=migrate_engine)
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        UniqueConstraint('sample_id', table=sourceassoc, **params).create()
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        UniqueConstraint('sample_id', 'user_id', table=sourceassoc,
                         **params).drop()
    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()
    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
def upgrade(migrate_engine):
    """Create the storage_pools -> storage_groups FK (skipped on SQLite)."""
    if migrate_engine.name == "sqlite":
        return
    meta = MetaData(bind=migrate_engine)
    storage_pools = Table("storage_pools", meta, autoload=True)
    storage_groups = Table("storage_groups", meta, autoload=True)
    params = {
        "columns": [storage_pools.c.primary_storage_group_id],
        "refcolumns": [storage_groups.c.id],
    }
    if migrate_engine.name == "mysql":
        # MySQL needs a stable, explicit constraint name.
        params["name"] = "_".join(("storage_pool",
                                   "primary_storage_group_id",
                                   "fkey"))
    ForeignKeyConstraint(**params).create()
def _remove_foreign_key_constraints(engine, meta, table_name):
    """Drop every foreign key constraint declared on table_name."""
    inspector = reflection.Inspector.from_engine(engine)
    for fk_info in inspector.get_foreign_keys(table_name):
        src = Table(table_name, meta, autoload=True)
        dst = Table(fk_info['referred_table'], meta, autoload=True)
        constraint = ForeignKeyConstraint(
            columns=_get_columns(src, fk_info['constrained_columns']),
            refcolumns=_get_columns(dst, fk_info['referred_columns']),
            name=fk_info['name'])
        constraint.drop()
def downgrade(migrate_engine):
    """Reverse the upgrade: drop the selected-arar FK, the module-level
    tables `t` and `th`, and the selected_arar_id column.
    """
    # Operations to reverse the above upgrade go here.
    meta.bind = migrate_engine
    selected = Table('proc_SelectedHistoriesTable', meta, autoload=True)
    # The FK must be removed before the referenced table can be dropped.
    ForeignKeyConstraint([selected.c.selected_arar_id], [t.c.id]).drop()
    t.drop()
    th.drop()
    selected.c.selected_arar_id.drop()
def upgrade(migrate_engine):
    """Create job_to_output_library_dataset, add job.library_folder_id with
    an FK to library_folder (skipped on SQLite), and index dataset.state.
    """
    metadata.bind = migrate_engine
    display_migration_details()
    # Load existing tables
    metadata.reflect()
    # Create the job_to_output_library_dataset table
    try:
        JobToOutputLibraryDatasetAssociation_table.create()
    except Exception as e:
        print("Creating job_to_output_library_dataset table failed: %s" % str(e))
        log.debug("Creating job_to_output_library_dataset table failed: %s" % str(e))
    # Create the library_folder_id column
    try:
        Job_table = Table("job", metadata, autoload=True)
    except NoSuchTableError:
        Job_table = None
        log.debug("Failed loading table job")
    if Job_table is not None:
        try:
            col = Column("library_folder_id", Integer, index=True)
            col.create(Job_table, index_name='ix_job_library_folder_id')
            assert col is Job_table.c.library_folder_id
        except Exception as e:
            log.debug("Adding column 'library_folder_id' to job table failed: %s" % (str(e)))
    try:
        LibraryFolder_table = Table("library_folder", metadata, autoload=True)
    except NoSuchTableError:
        LibraryFolder_table = None
        log.debug("Failed loading table library_folder")
    # Add 1 foreign key constraint to the job table
    # (SQLite can't alter-table-add-foreign-key).
    if migrate_engine.name != 'sqlite':
        if Job_table is not None and LibraryFolder_table is not None:
            try:
                cons = ForeignKeyConstraint(
                    [Job_table.c.library_folder_id],
                    [LibraryFolder_table.c.id],
                    name='job_library_folder_id_fk')
                # Create the constraint
                cons.create()
            except Exception as e:
                log.debug("Adding foreign key constraint 'job_library_folder_id_fk' to table 'library_folder' failed: %s" % (str(e)))
    # Create the ix_dataset_state index
    try:
        Dataset_table = Table("dataset", metadata, autoload=True)
    except NoSuchTableError:
        Dataset_table = None
        log.debug("Failed loading table dataset")
    if Dataset_table is not None:
        # BUG FIX: previously Dataset_table.c.state was dereferenced even
        # when reflection failed and Dataset_table was None, raising
        # AttributeError instead of the intended graceful skip.
        i = Index("ix_dataset_state", Dataset_table.c.state)
        try:
            i.create()
        except Exception as e:
            print(str(e))
            log.debug("Adding index 'ix_dataset_state' to dataset table failed: %s" % str(e))
def add_constraints(migrate_engine):
    """Attach credential FKs to the user and project tables (not on SQLite,
    which cannot add constraints to existing tables).
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine
    user_table = sqlalchemy.Table('user', meta, autoload=True)
    proj_table = sqlalchemy.Table('project', meta, autoload=True)
    cred_table = sqlalchemy.Table('credential', meta, autoload=True)
    for col, ref in ((cred_table.c.user_id, user_table.c.id),
                     (cred_table.c.project_id, proj_table.c.id)):
        ForeignKeyConstraint(columns=[col], refcolumns=[ref]).create()
def upgrade(migrate_engine):
    """Add the storage_pools -> storage_groups foreign key (no-op on SQLite)."""
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    pools = Table('storage_pools', meta, autoload=True)
    groups = Table('storage_groups', meta, autoload=True)
    params = {'columns': [pools.c.primary_storage_group_id],
              'refcolumns': [groups.c.id]}
    if migrate_engine.name == 'mysql':
        # MySQL wants an explicit constraint name.
        params['name'] = "_".join(('storage_pool',
                                   'primary_storage_group_id',
                                   'fkey'))
    ForeignKeyConstraint(**params).create()
def upgrade(migrate_engine):
    """Add a foreign key from storage_pools.primary_storage_group_id to
    storage_groups.id (skipped on SQLite, which cannot ALTER ADD CONSTRAINT).
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    storage_pools = Table('storage_pools', meta, autoload=True)
    storage_groups = Table('storage_groups', meta, autoload=True)
    params = {'columns': [storage_pools.c.primary_storage_group_id],
              'refcolumns': [storage_groups.c.id]}
    if migrate_engine.name == 'mysql':
        # NOTE(review): this name uses plural parts ('storage_pools',
        # 'primary_storage_group_ids') while sibling migrations in this
        # file use 'storage_pool'/'primary_storage_group_id' — confirm
        # which spelling the matching downgrade/DB actually expects.
        params['name'] = "_".join(('storage_pools',
                                   'primary_storage_group_ids', 'fkey'))
    fkey = ForeignKeyConstraint(**params)
    fkey.create()
def upgrade(migrate_engine):
    """Create the sample-tracking tables and link form_definition_current
    to its latest form_definition row.
    """
    metadata.bind = migrate_engine
    display_migration_details()
    # Load existing tables
    metadata.reflect()

    def _create(table, label):
        # Best-effort create: log the failure and carry on, matching the
        # behaviour of the surrounding migrations.
        try:
            table.create()
        except Exception as e:
            log.debug("Creating %s table failed: %s" % (label, str(e)))

    # Add all of the new tables above
    _create(FormDefinitionCurrent_table, "form_definition_current")
    _create(FormDefinition_table, "form_definition")
    # Add 1 foreign key constraint to the form_definition_current table
    if FormDefinitionCurrent_table is not None and FormDefinition_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [FormDefinitionCurrent_table.c.latest_form_id],
                [FormDefinition_table.c.id],
                name='form_definition_current_latest_form_id_fk')
            # Create the constraint
            cons.create()
        except Exception as e:
            log.debug(
                "Adding foreign key constraint 'form_definition_current_latest_form_id_fk' to table 'form_definition_current' failed: %s"
                % (str(e)))
    _create(FormValues_table, "form_values")
    _create(RequestType_table, "request_type")
    _create(Request_table, "request")
    _create(Sample_table, "sample")
    _create(SampleState_table, "sample_state")
    _create(SampleEvent_table, "sample_event")
def upgrade(migrate_engine):
    """Add request.folder_id (FK to library_folder.id) and the
    form_definition.type / form_definition.layout columns.

    All failures are logged rather than raised so the migration keeps
    going (original behavior preserved).
    """
    metadata.bind = migrate_engine
    print(__doc__)
    # Load existing tables
    metadata.reflect()
    Request_table = _load_table("request")
    if Request_table is not None:
        # Create the folder_id column
        try:
            col = Column("folder_id", Integer, index=True)
            col.create(Request_table, index_name='ix_request_folder_id')
            assert col is Request_table.c.folder_id
        except Exception:
            log.exception("Adding column 'folder_id' to request table failed.")
    LibraryFolder_table = _load_table("library_folder")
    # Add 1 foreign key constraint to the library_folder table
    # (skipped on SQLite, which cannot add constraints to existing tables)
    if migrate_engine.name != 'sqlite' and Request_table is not None and LibraryFolder_table is not None:
        try:
            cons = ForeignKeyConstraint([Request_table.c.folder_id],
                                        [LibraryFolder_table.c.id],
                                        name='request_folder_id_fk')
            # Create the constraint
            cons.create()
        except Exception:
            log.exception("Adding foreign key constraint 'request_folder_id_fk' to table 'library_folder' failed.")
    FormDefinition_table = _load_table("form_definition")
    if FormDefinition_table is not None:
        # Create the type column in form_definition
        try:
            col = Column("type", TrimmedString(255), index=True)
            col.create(FormDefinition_table, index_name='ix_form_definition_type')
            assert col is FormDefinition_table.c.type
        except Exception:
            log.exception("Adding column 'type' to form_definition table failed.")
        try:
            col = Column("layout", JSONType())
            col.create(FormDefinition_table)
            assert col is FormDefinition_table.c.layout
        except Exception:
            log.exception("Adding column 'layout' to form_definition table failed.")


def _load_table(name):
    """Reflect table *name*; return None (and log) when it does not exist.

    The log message is identical to the previous hand-rolled blocks.
    """
    try:
        return Table(name, metadata, autoload=True)
    except NoSuchTableError:
        log.debug("Failed loading table %s" % name)
        return None
def downgrade(migrate_engine):
    """Recreate the records.domain_id -> domains.id FK by dropping and
    re-adding it (non-SQLite backends only)."""
    meta.bind = migrate_engine
    dialect = migrate_engine.url.get_dialect().name
    records = Table('records', meta, autoload=True)
    domains = Table('domains', meta, autoload=True)
    # SQLite cannot alter constraints on an existing table.
    if dialect.startswith('sqlite'):
        return
    fk_args = dict(columns=[records.c.domain_id],
                   refcolumns=[domains.c.id])
    ForeignKeyConstraint(**fk_args).drop()
    ForeignKeyConstraint(**fk_args).create()
def upgrade(migrate_engine):
    """Rework sourceassoc: drop the per-table FKs and the user/project
    indexes, make sample_id unique on its own, then drop the
    user_id/project_id columns and the tables listed in TABLES_DROP.
    """
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta, autoload=True))
                       for table_name in TABLES)
    # SQLite cannot drop named constraints/indexes; skip them there.
    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                # MySQL FKs were created with explicit "fk_<table>_<column>"
                # names (alarm_history excepted); match that name to drop.
                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.drop()
    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        # Drop the composite indexes that cover the columns removed below.
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.drop(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.drop(bind=migrate_engine)
        # Replace the (sample_id, user_id) uniqueness with sample_id alone:
        # create the new constraint first, then drop the old one.
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.create()
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id', table=sourceassoc,
                              **params)
        uc.drop()
    # Column drops work on every backend, including SQLite.
    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()
    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
def downgrade(migrate_engine):
    """Drop the applied_config table and restore the FK from
    call_records.applied_tariff_config to tariff_configuration.id."""
    metadata = MetaData(bind=migrate_engine)
    Table('applied_config', metadata, autoload=True).drop()
    calls = Table('call_records', metadata, autoload=True)
    tariffs = Table('tariff_configuration', metadata, autoload=True)
    ForeignKeyConstraint([calls.c.applied_tariff_config],
                         [tariffs.c.id]).create()
def downgrade(migrate_engine):
    """Revert trait_type back to the unique_name table.

    Rebuilds unique_name from trait_type, restores the trait.name_id and
    trait.t_type columns (data and FK included), then drops
    trait.trait_type_id and the trait_type table itself.
    """
    meta = MetaData(migrate_engine)
    unique_name = Table('unique_name', meta,
                        Column('id', Integer, primary_key=True),
                        Column('key', String(255), unique=True))
    trait_type = Table('trait_type', meta, autoload=True)
    trait = Table('trait', meta, autoload=True)
    # Create the UniqueName table, drop the foreign key constraint
    # to trait_type, drop the trait_type table, rename the
    # trait.trait_type column to traitname, re-add the dtype to
    # the trait table, and re-add the old foreign key constraint
    unique_name.create(migrate_engine)
    conn = migrate_engine.connect()
    sql = ("INSERT INTO unique_name "
           "SELECT trait_type.id, trait_type.desc "
           "FROM trait_type")
    conn.execute(sql)
    conn.close()
    params = {
        'columns': [trait.c.trait_type_id],
        'refcolumns': [trait_type.c.id]
    }
    # The upgrade created the MySQL FK with this explicit name.
    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'trait_type', 'id'))
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()
    # Re-create the old columns in trait
    Column("name_id", Integer).create(trait)
    Column("t_type", Integer).create(trait)
    # copy data from trait_type.data_type into trait.t_type
    query = select([trait_type.c.id, trait_type.c.data_type])
    for key, value in migration.paged(query):
        trait.update().where(trait.c.trait_type_id == key)\
            .values({"t_type": value}).execute()
    # Copy data from the trait_type_id column back into name_id
    # (trait_type ids were seeded from unique_name ids on upgrade, so the
    # values map 1:1). The original comment stated the reverse direction.
    query = select([trait.c.id, trait.c.trait_type_id])
    for key, value in migration.paged(query):
        trait.update().where(trait.c.id == key)\
            .values({"name_id": value}).execute()
    # Add a foreign key to the unique_name table
    params = {'columns': [trait.c.name_id],
              'refcolumns': [unique_name.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = 'trait_ibfk_1'
    fkey = ForeignKeyConstraint(**params)
    fkey.create()
    trait.c.trait_type_id.drop()
    # Drop the trait_type table. It isn't needed anymore
    trait_type.drop()
def upgrade(migrate_engine):
    """Merge unique_name and trait.t_type into a new trait_type table and
    repoint trait at it through a trait_type_id column.
    """
    meta = MetaData(migrate_engine)
    trait_type = Table(
        'trait_type', meta,
        Column('id', Integer, primary_key=True),
        Column('desc', String(255)),
        Column('data_type', Integer),
        UniqueConstraint('desc', 'data_type', name="tt_unique")
    )
    trait = Table('trait', meta, autoload=True)
    unique_name = Table('unique_name', meta, autoload=True)
    trait_type.create(migrate_engine)
    # Trait type extracts data from Trait and Unique name.
    # We take all trait names from Unique Name, and data types
    # from Trait. We then remove dtype and name from trait, and
    # remove the name field.
    conn = migrate_engine.connect()
    sql = ("INSERT INTO trait_type "
           "SELECT unique_name.id, unique_name.key, trait.t_type FROM trait "
           "INNER JOIN unique_name "
           "ON trait.name_id = unique_name.id "
           "GROUP BY unique_name.id, unique_name.key, trait.t_type")
    conn.execute(sql)
    conn.close()
    # Now we need to drop the foreign key constraint, rename
    # the trait.name column, and re-add a new foreign
    # key constraint
    params = {'columns': [trait.c.name_id],
              'refcolumns': [unique_name.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "trait_ibfk_1"
    # foreign key to the unique name table
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()
    Column('trait_type_id', Integer).create(trait)
    # Move data from name_id column into trait_type_id column
    # (trait_type rows reuse the unique_name ids, so values map 1:1).
    query = select([trait.c.id, trait.c.name_id])
    for key, value in migration.paged(query):
        trait.update().where(trait.c.id == key)\
            .values({"trait_type_id": value}).execute()
    trait.c.name_id.drop()
    params = {'columns': [trait.c.trait_type_id],
              'refcolumns': [trait_type.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'trait_type', 'id'))
    fkey = ForeignKeyConstraint(**params)
    fkey.create()
    # Drop the old t_type column; its values now live in
    # trait_type.data_type.
    trait.c.t_type.drop()
    # Finally, drop the unique_name table - we don't need it
    # anymore.
    unique_name.drop()
def upgrade(migrate_engine):
    """Drop the foreign keys described by INDEXES from the TABLES tables."""
    # SQLite never got these named constraints; nothing to drop there.
    if migrate_engine.name == "sqlite":
        return
    meta = MetaData(bind=migrate_engine)
    tables = {name: Table(name, meta, autoload=True) for name in TABLES}
    for table_name, fk_specs in INDEXES.items():
        table = tables[table_name]
        for column, ref_table_name, ref_column_name in fk_specs:
            ref_table = tables[ref_table_name]
            kwargs = {"columns": [table.c[column]],
                      "refcolumns": [ref_table.c[ref_column_name]]}
            # MySQL constraints carry explicit "fk_<table>_<column>" names.
            if migrate_engine.name == "mysql":
                kwargs["name"] = "_".join(("fk", table_name, column))
            ForeignKeyConstraint(**kwargs).drop()
def upgrade(migrate_engine):
    """Create the new tables, add selected_arar_id to
    proc_SelectedHistoriesTable, and FK it onto the new table's id."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta.bind = migrate_engine
    t.create()
    th.create()
    histories = Table('proc_SelectedHistoriesTable', meta, autoload=True)
    arar_col = Column('selected_arar_id', Integer)
    arar_col.create(histories)
    ForeignKeyConstraint([arar_col], [t.c.id]).create()
def downgrade(migrate_engine):
    """Make compute_nodes.service_id non-nullable again and restore its
    foreign key onto services.id (skipped if the FK already exists)."""
    meta = MetaData()
    meta.bind = migrate_engine
    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)
    services = Table('services', meta, autoload=True)
    _correct_sqlite_unique_constraints(migrate_engine, compute_nodes)
    # Make the service_id field not nullable
    # NOTE(sbauza): Beyond the point of this commit, service_id will not be
    # updated, but previous commits still do. We can tho safely go back to
    # a state where all the compute nodes are providing this field.
    compute_nodes.c.service_id.alter(nullable=False)
    shadow_compute_nodes.c.service_id.alter(nullable=False)
    # Adding only FK if not existing yet
    fkeys = {fk.parent.name: fk.column for fk in compute_nodes.foreign_keys}
    if 'service_id' in fkeys and fkeys['service_id'] == services.c.id:
        return
    # NOTE(sbauza): See 216_havana.py for the whole logic
    if migrate_engine.name == 'postgresql':
        # PostgreSQL names things like it wants (correct and compatible!)
        fkey = ForeignKeyConstraint(columns=[compute_nodes.c.service_id],
                                    refcolumns=[services.c.id])
    else:
        # For MySQL we name our fkeys explicitly so they match Havana
        fkey = ForeignKeyConstraint(columns=[compute_nodes.c.service_id],
                                    refcolumns=[services.c.id],
                                    name='fk_compute_nodes_service_id')
    fkey.create()
def downgrade(migrate_engine):
    """Drop the reservations -> quota_usages FK, then drop the
    quota_classes, quota_usages and reservations tables."""
    meta = MetaData()
    meta.bind = migrate_engine
    fk_name = None
    if migrate_engine.name == 'mysql':
        fk_name = 'reservations_ibfk_1'
    elif migrate_engine.name == 'postgresql':
        fk_name = 'reservations_usage_id_fkey'
    # NOTE: MySQL and PostgreSQL Cannot drop the quota_usages table
    # until the foreign key is removed. We remove the foreign key first,
    # and then we drop the table.
    table = Table('reservations', meta, autoload=True)
    # BUG FIX: the FK on reservations.usage_id references quota_usages.id
    # (see the NOTE above and the postgres fk name); the referenced table
    # was wrongly reflected as 'reservations' here.
    ref_table = Table('quota_usages', meta, autoload=True)
    params = {
        'columns': [table.c['usage_id']],
        'refcolumns': [ref_table.c['id']],
        'name': fk_name
    }
    if fk_name:
        try:
            fkey = ForeignKeyConstraint(**params)
            fkey.drop()
        except Exception:
            msg = _LE("Dropping foreign key %s failed.")
            LOG.error(msg, fk_name)
    quota_classes = Table('quota_classes', meta, autoload=True)
    try:
        quota_classes.drop()
    except Exception:
        LOG.error(_LE("quota_classes table not dropped"))
        raise
    quota_usages = Table('quota_usages', meta, autoload=True)
    try:
        quota_usages.drop()
    except Exception:
        LOG.error(_LE("quota_usages table not dropped"))
        raise
    reservations = Table('reservations', meta, autoload=True)
    try:
        reservations.drop()
    except Exception:
        LOG.error(_LE("reservations table not dropped"))
        raise
def upgrade(migrate_engine):
    """Split event names into a new event_type table and repoint event at
    it via an event_type_id column, dropping unique_name_id.
    """
    meta = MetaData(bind=migrate_engine)
    event_type = Table(
        'event_type', meta,
        Column('id', Integer, primary_key=True),
        Column('desc', String(255), unique=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    event_type.create()
    event = Table('event', meta, autoload=True)
    unique_name = Table('unique_name', meta, autoload=True)
    # Event type is a specialization of Unique name, so
    # we insert into the event_type table all the distinct
    # unique names from the event.unique_name field along
    # with the key from the unique_name table, and
    # then rename the event.unique_name field to event.event_type
    conn = migrate_engine.connect()
    sql = ("INSERT INTO event_type "
           "SELECT unique_name.id, unique_name.key FROM event "
           "INNER JOIN unique_name "
           "ON event.unique_name_id = unique_name.id "
           "GROUP BY unique_name.id")
    conn.execute(sql)
    conn.close()
    # Now we need to drop the foreign key constraint, rename
    # the event.unique_name column, and re-add a new foreign
    # key constraint
    params = {
        'columns': [event.c.unique_name_id],
        'refcolumns': [unique_name.c.id]
    }
    # MySQL auto-named the original FK "event_ibfk_1"; match it to drop.
    if migrate_engine.name == 'mysql':
        params['name'] = "event_ibfk_1"
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()
    Column('event_type_id', Integer).create(event)
    # Move data from unique_name_id column into event_type_id column
    # and delete the entry from the unique_name table
    # NOTE(review): the delete keys on event.id (key) rather than the
    # unique_name_id value — verify this matches the intended cleanup.
    query = select([event.c.id, event.c.unique_name_id])
    for key, value in migration.paged(query):
        event.update().where(event.c.id == key)\
            .values({"event_type_id": value}).execute()
        unique_name.delete()\
            .where(unique_name.c.id == key).execute()
    params = {
        'columns': [event.c.event_type_id],
        'refcolumns': [event_type.c.id]
    }
    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'event_type', 'id'))
    fkey = ForeignKeyConstraint(**params)
    fkey.create()
    event.c.unique_name_id.drop()
def downgrade(migrate_engine):
    """Drop the INDEXES foreign keys from the TABLES tables."""
    meta = MetaData(bind=migrate_engine)
    tables = {name: Table(name, meta, autoload=True) for name in TABLES}
    for table_name, fk_specs in INDEXES.items():
        table = tables[table_name]
        for column, ref_table_name, ref_column_name in fk_specs:
            ref_table = tables[ref_table_name]
            kwargs = {'columns': [table.c[column]],
                      'refcolumns': [ref_table.c[ref_column_name]]}
            # MySQL constraints carry explicit "fk_<table>_<column>" names.
            if migrate_engine.name == 'mysql':
                kwargs['name'] = "_".join(('fk', table_name, column))
            ForeignKeyConstraint(**kwargs).drop()
def upgrade(migrate_engine):
    """Create the form/request/sample tables and add the FK linking
    form_definition_current.latest_form_id to form_definition.id.

    Table-creation failures are logged, not raised, so one failure does
    not abort the remaining creations (original behavior preserved; the
    emitted log messages are unchanged).
    """
    metadata.bind = migrate_engine
    display_migration_details()
    # Load existing tables
    metadata.reflect()
    # The two form-definition tables come first; the FK below needs both.
    for table, name in ((FormDefinitionCurrent_table, 'form_definition_current'),
                        (FormDefinition_table, 'form_definition')):
        try:
            table.create()
        except Exception as e:
            log.debug("Creating %s table failed: %s" % (name, str(e)))
    # Add 1 foreign key constraint to the form_definition_current table
    if FormDefinitionCurrent_table is not None and FormDefinition_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [FormDefinitionCurrent_table.c.latest_form_id],
                [FormDefinition_table.c.id],
                name='form_definition_current_latest_form_id_fk')
            # Create the constraint
            cons.create()
        except Exception as e:
            log.debug("Adding foreign key constraint 'form_definition_current_latest_form_id_fk' to table 'form_definition_current' failed: %s" % (str(e)))
    # Remaining tables are independent of one another.
    for table, name in ((FormValues_table, 'form_values'),
                        (RequestType_table, 'request_type'),
                        (Request_table, 'request'),
                        (Sample_table, 'sample'),
                        (SampleState_table, 'sample_state'),
                        (SampleEvent_table, 'sample_event')):
        try:
            table.create()
        except Exception as e:
            log.debug("Creating %s table failed: %s" % (name, str(e)))
def downgrade(migrate_engine):
    """Drop the reservations -> quota_usages FK, then drop the
    quota_classes, quota_usages and reservations tables."""
    meta = MetaData()
    meta.bind = migrate_engine
    fk_name = None
    if migrate_engine.name == 'mysql':
        fk_name = 'reservations_ibfk_1'
    elif migrate_engine.name == 'postgresql':
        fk_name = 'reservations_usage_id_fkey'
    # NOTE: MySQL and PostgreSQL Cannot drop the quota_usages table
    # until the foreign key is removed. We remove the foreign key first,
    # and then we drop the table.
    table = Table('reservations', meta, autoload=True)
    # BUG FIX: the FK on reservations.usage_id references quota_usages.id
    # (see the NOTE above and the postgres fk name); the referenced table
    # was wrongly reflected as 'reservations' here.
    ref_table = Table('quota_usages', meta, autoload=True)
    params = {'columns': [table.c['usage_id']],
              'refcolumns': [ref_table.c['id']],
              'name': fk_name}
    if fk_name:
        try:
            fkey = ForeignKeyConstraint(**params)
            fkey.drop()
        except Exception:
            msg = _LE("Dropping foreign key %s failed.")
            LOG.error(msg, fk_name)
    quota_classes = Table('quota_classes', meta, autoload=True)
    try:
        quota_classes.drop()
    except Exception:
        LOG.error(_LE("quota_classes table not dropped"))
        raise
    quota_usages = Table('quota_usages', meta, autoload=True)
    try:
        quota_usages.drop()
    except Exception:
        LOG.error(_LE("quota_usages table not dropped"))
        raise
    reservations = Table('reservations', meta, autoload=True)
    try:
        reservations.drop()
    except Exception:
        LOG.error(_LE("reservations table not dropped"))
        raise
def downgrade(migrate_engine):
    """Drop the port.subnet_id column together with its FK onto subnet.id."""
    # MetaData(bind=...) already binds the engine; the original had a
    # redundant second `meta.bind = migrate_engine` assignment (removed).
    meta = MetaData(bind=migrate_engine)
    port_table = Table('port', meta, autoload=True)
    subnet_table = Table('subnet', meta, autoload=True)
    # Drop the FK (under its reflected name) before dropping the column;
    # a column still referenced by a constraint cannot be dropped.
    for fk in port_table.foreign_keys:
        if fk.column == subnet_table.c.id:
            # Delete the FK
            fkey = ForeignKeyConstraint(columns=[port_table.c.subnet_id],
                                        refcolumns=[subnet_table.c.id],
                                        name=fk.name)
            fkey.drop()
            break
    port_table.c.subnet_id.drop()
def upgrade(migrate_engine):
    """Recreate the DB2_FKEYS foreign keys (DB2 backend only)."""
    if migrate_engine.name != 'ibm_db_sa':
        return
    # create the foreign keys
    metadata = MetaData(bind=migrate_engine)
    for values in DB2_FKEYS:
        # NOTE(mriedem): We have to load all of the tables in the same
        # MetaData object for the ForeignKey object to work, so we just
        # load up the Column objects here as well dynamically.
        table = Table(values['table'], metadata, autoload=True)
        fkey = ForeignKeyConstraint(
            name=values['name'],
            table=table,
            columns=[table.c[col] for col in values['columns']],
            refcolumns=_get_refcolumns(metadata, values['refcolumns']))
        fkey.create()
def upgrade(migrate_engine):
    """Add block_device_mapping.instance_uuid, backfill it from
    instances.uuid, then drop the old instance_id column and its FK.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    # NOTE(review): `dialect` is computed but never used below — verify.
    dialect = migrate_engine.url.get_dialect().name
    block_device_mapping = Table('block_device_mapping', meta, autoload=True)
    instances = Table('instances', meta, autoload=True)
    uuid_column = Column('instance_uuid', String(36))
    uuid_column.create(block_device_mapping)
    # Backfill via a correlated subselect; if the copy fails, remove the
    # freshly-added column so the schema is left unchanged.
    try:
        block_device_mapping.update().values(instance_uuid=select(
            [instances.c.uuid],
            instances.c.id == block_device_mapping.c.instance_id)).execute()
    except Exception:
        uuid_column.drop()
        raise
    # Drop the reflected FK on instance_id (if any) before the column —
    # a referenced column cannot be dropped.
    fkeys = list(block_device_mapping.c.instance_id.foreign_keys)
    if fkeys:
        try:
            fkey_name = fkeys[0].constraint.name
            ForeignKeyConstraint(columns=[block_device_mapping.c.instance_id],
                                 refcolumns=[instances.c.id],
                                 name=fkey_name).drop()
        except Exception:
            LOG.error(_("foreign key constraint couldn't be removed"))
            raise
    block_device_mapping.c.instance_id.drop()
def downgrade(migrate_engine):
    """Remove the convergence columns (prev_raw_template_id,
    current_traversal, current_deps) from the stack table."""
    meta = sqlalchemy.MetaData(bind=migrate_engine)
    stack = sqlalchemy.Table('stack', meta, autoload=True)
    if migrate_engine.name == 'sqlite':
        # SQLite cannot drop constraints; use the table-rebuild path.
        _downgrade_sqlite(migrate_engine)
    else:
        raw_template = sqlalchemy.Table('raw_template', meta, autoload=True)
        ForeignKeyConstraint(columns=[stack.c.prev_raw_template_id],
                             refcolumns=[raw_template.c.id],
                             name='prev_raw_template_ref').drop()
        stack.c.prev_raw_template_id.drop()
        stack.c.current_traversal.drop()
        stack.c.current_deps.drop()
def upgrade(migrate_engine):
    """Add UUID primary key column to encryption."""
    meta = MetaData()
    meta.bind = migrate_engine
    encryptions = Table('encryption', meta, autoload=True)
    encryption_id_column_kwargs = {}
    if migrate_engine.name == 'ibm_db_sa':
        # NOTE(junxiebj): DB2 10.5 doesn't support primary key
        # constraints over nullable columns, so we have to
        # make the column non-nullable in the DB2 case.
        encryption_id_column_kwargs['nullable'] = False
    encryption_id = Column('encryption_id', String(36),
                           **encryption_id_column_kwargs)
    encryptions.create_column(encryption_id)
    # Assign a fresh UUID per row, keyed by volume_type_id (the old
    # de-facto primary key).
    encryption_items = list(encryptions.select().execute())
    for item in encryption_items:
        encryptions.update().\
            where(encryptions.c.volume_type_id == item['volume_type_id']).\
            values(encryption_id=str(uuid.uuid4())).execute()
    # NOTE (e0ne): need to drop FK first for MySQL
    if migrate_engine.name == 'mysql':
        ref_table = Table('volume_types', meta, autoload=True)
        params = {'columns': [encryptions.c['volume_type_id']],
                  'refcolumns': [ref_table.c['id']],
                  'name': 'encryption_ibfk_1'}
        volume_type_fk = ForeignKeyConstraint(**params)
        volume_type_fk.drop()
    # Swap the primary key from volume_type_id to the new encryption_id.
    try:
        volume_type_pk = PrimaryKeyConstraint('volume_type_id',
                                              table=encryptions)
        volume_type_pk.drop()
    except Exception:
        # NOTE (e0ne): SQLite doesn't support 'drop constraint' statament
        if migrate_engine.url.get_dialect().name.startswith('sqlite'):
            pass
        else:
            raise
    pkey = PrimaryKeyConstraint(encryptions.columns.encryption_id)
    pkey.create()
def downgrade(migrate_engine):
    """Drop the versions.target_id FK and column, then the target tables."""
    print("037 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine
    versions = Table('versions', meta, autoload=True)
    targets = Table('targets', meta, autoload=True)
    params = {'columns': [versions.c['target_id']],
              'refcolumns': [targets.c['id']],
              'name': 'versions_ibfk_1'}
    foreign = ForeignKeyConstraint(**params)
    foreign.drop()
    # BUG FIX: the bare name `target_id` was an undefined variable
    # (NameError at runtime); drop_column takes the column name string.
    versions.drop_column('target_id')
    tables = [define_target_status_table(meta),
              define_targets_table(meta),
              define_host_versions_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    """Purge orphaned child rows, then create the INDEXES foreign keys."""
    # SQLite cannot add constraints to an existing table.
    if migrate_engine.name == "sqlite":
        return
    meta = MetaData(bind=migrate_engine)
    tables = {name: Table(name, meta, autoload=True) for name in TABLES}
    for table_name, fk_specs in INDEXES.items():
        table = tables[table_name]
        for column, ref_table_name, ref_column_name in fk_specs:
            ref_table = tables[ref_table_name]
            # Delete rows whose FK value has no parent row; otherwise the
            # constraint creation below would fail.
            valid_ids = select([getattr(ref_table.c, ref_column_name)])
            migrate_engine.execute(
                table.delete().where(~getattr(table.c, column).in_(valid_ids)))
            kwargs = {"columns": [table.c[column]],
                      "refcolumns": [ref_table.c[ref_column_name]]}
            # MySQL needs an explicit, stable constraint name.
            if migrate_engine.name == "mysql":
                kwargs["name"] = "_".join(("fk", table_name, column))
            ForeignKeyConstraint(**kwargs).create()