def upgrade(migrate_engine):
    """Create the flavors, flavor_extra_specs and flavor_projects tables.

    :param migrate_engine: SQLAlchemy engine the migration runs against.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    flavors = Table('flavors', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('name', String(length=255), nullable=False),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('memory_mb', Integer, nullable=False),
        Column('vcpus', Integer, nullable=False),
        Column('swap', Integer, nullable=False),
        Column('vcpu_weight', Integer),
        Column('flavorid', String(length=255), nullable=False),
        Column('rxtx_factor', Float),
        Column('root_gb', Integer),
        Column('ephemeral_gb', Integer),
        Column('disabled', Boolean),
        Column('is_public', Boolean),
        # Both the flavorid (API identifier) and display name must be unique.
        UniqueConstraint("flavorid", name="uniq_flavors0flavorid"),
        UniqueConstraint("name", name="uniq_flavors0name"),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    flavors.create(checkfirst=True)

    # Per-flavor key/value metadata; one row per (flavor, key).
    flavor_extra_specs = Table(
        'flavor_extra_specs', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('flavor_id', Integer, nullable=False),
        Column('key', String(length=255), nullable=False),
        Column('value', String(length=255)),
        UniqueConstraint('flavor_id', 'key',
                         name='uniq_flavor_extra_specs0flavor_id0key'),
        Index('flavor_extra_specs_flavor_id_key_idx', 'flavor_id', 'key'),
        ForeignKeyConstraint(columns=['flavor_id'],
                             refcolumns=[flavors.c.id]),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    flavor_extra_specs.create(checkfirst=True)

    # Project access list for non-public flavors.
    flavor_projects = Table(
        'flavor_projects', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('flavor_id', Integer, nullable=False),
        Column('project_id', String(length=255), nullable=False),
        UniqueConstraint('flavor_id', 'project_id',
                         name='uniq_flavor_projects0flavor_id0project_id'),
        ForeignKeyConstraint(columns=['flavor_id'],
                             refcolumns=[flavors.c.id]),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    flavor_projects.create(checkfirst=True)
def define_hosts_table(meta):
    """Build the 'hosts' table definition bound to *meta* and return it.

    The failover_segments table is reflected first because the foreign key
    below references its ``uuid`` column. ``extend_existing=True`` lets this
    definition merge with a previously reflected/declared 'hosts' table in
    the same MetaData.
    """
    failover_segments = Table('failover_segments', meta, autoload=True)
    hosts = Table('hosts', meta,
        Column('created_at', DateTime, nullable=False),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('deleted', Integer),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), nullable=False),
        Column('name', String(255), nullable=False),
        Column('reserved', Boolean, default=False),
        Column('type', String(255), nullable=False),
        Column('control_attributes', Text, nullable=False),
        Column('failover_segment_id', String(36), nullable=False),
        Column('on_maintenance', Boolean, default=False),
        # NOTE(review): the constraint name mentions only name/deleted but it
        # actually also covers failover_segment_id — kept as-is since the
        # name must match what exists in deployed databases.
        UniqueConstraint('failover_segment_id', 'name', 'deleted',
                         name='uniq_host0name0deleted'),
        UniqueConstraint('uuid', name='uniq_host0uuid'),
        ForeignKeyConstraint(columns=['failover_segment_id'],
                             refcolumns=[failover_segments.c.uuid],
                             name='fk_failover_segments_uuid'),
        Index('hosts_type_idx', 'type'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
        extend_existing=True)
    return hosts
def upgrade(migrate_engine):
    """Create the aggregates, aggregate_hosts and aggregate_metadata tables.

    :param migrate_engine: SQLAlchemy engine the migration runs against.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    aggregates = Table('aggregates', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(length=36)),
        Column('name', String(length=255)),
        Index('aggregate_uuid_idx', 'uuid'),
        UniqueConstraint('name', name='uniq_aggregate0name'),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    aggregates.create(checkfirst=True)

    # Membership table: which hosts belong to which aggregate.
    aggregate_hosts = Table('aggregate_hosts', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('host', String(length=255)),
        Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
               nullable=False),
        UniqueConstraint(
            'host', 'aggregate_id',
            name='uniq_aggregate_hosts0host0aggregate_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    aggregate_hosts.create(checkfirst=True)

    # Per-aggregate key/value metadata; one row per (aggregate, key).
    aggregate_metadata = Table(
        'aggregate_metadata', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
               nullable=False),
        Column('key', String(length=255), nullable=False),
        Column('value', String(length=255), nullable=False),
        UniqueConstraint('aggregate_id', 'key',
                         name='uniq_aggregate_metadata0aggregate_id0key'),
        Index('aggregate_metadata_key_idx', 'key'),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    aggregate_metadata.create(checkfirst=True)
def _pre_upgrade_273(self, engine):
    """Pre-upgrade hook for migration 273 (SQLite only).

    Deliberately removes a set of unique constraints so the migration under
    test can be verified to re-add them. No-op on non-SQLite engines.
    """
    if engine.name != 'sqlite':
        return
    # Drop a variety of unique constraints to ensure that the script
    # properly readds them back
    for table_name, constraint_name in [
            ('compute_nodes', 'uniq_compute_nodes0'
                              'host0hypervisor_hostname'),
            ('fixed_ips', 'uniq_fixed_ips0address0deleted'),
            ('instance_info_caches', 'uniq_instance_info_caches0'
                                     'instance_uuid'),
            ('instance_type_projects', 'uniq_instance_type_projects0'
                                       'instance_type_id0project_id0'
                                       'deleted'),
            ('pci_devices', 'uniq_pci_devices0compute_node_id0'
                            'address0deleted'),
            ('virtual_interfaces', 'uniq_virtual_interfaces0'
                                   'address0deleted')]:
        table = oslodbutils.get_table(engine, table_name)
        constraints = [c for c in table.constraints
                       if c.name == constraint_name]
        for cons in constraints:
            # Need to use sqlalchemy-migrate UniqueConstraint
            # (it carries the .drop() support the reflected one lacks).
            cons = UniqueConstraint(*[c.name for c in cons.columns],
                                    name=cons.name,
                                    table=table)
            cons.drop()
def _pre_upgrade_273(self, engine):
    """Pre-upgrade hook for migration 273 (SQLite only).

    Deliberately removes a set of unique constraints so the migration under
    test can be verified to re-add them. No-op on non-SQLite engines.
    """
    if engine.name != 'sqlite':
        return
    # Drop a variety of unique constraints to ensure that the script
    # properly readds them back
    for table_name, constraint_name in [
            ('compute_nodes', 'uniq_compute_nodes0'
                              'host0hypervisor_hostname'),
            ('fixed_ips', 'uniq_fixed_ips0address0deleted'),
            ('instance_info_caches', 'uniq_instance_info_caches0'
                                     'instance_uuid'),
            ('instance_type_projects', 'uniq_instance_type_projects0'
                                       'instance_type_id0project_id0'
                                       'deleted'),
            ('pci_devices', 'uniq_pci_devices0compute_node_id0'
                            'address0deleted'),
            ('virtual_interfaces', 'uniq_virtual_interfaces0'
                                   'address0deleted')
            ]:
        table = oslodbutils.get_table(engine, table_name)
        constraints = [
            c for c in table.constraints
            if c.name == constraint_name
        ]
        for cons in constraints:
            # Need to use sqlalchemy-migrate UniqueConstraint
            # (it carries the .drop() support the reflected one lacks).
            cons = UniqueConstraint(*[c.name for c in cons.columns],
                                    name=cons.name,
                                    table=table)
            cons.drop()
def upgrade(migrate_engine):
    """Create projects/users lookup tables and convert consumer FK columns.

    Creates the integer-keyed ``projects`` and ``users`` tables, then
    converts ``consumers.project_id`` / ``consumers.user_id`` from string
    columns to integers that reference them.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    projects = Table('projects', meta,
        Column('id', Integer, primary_key=True, nullable=False,
               autoincrement=True),
        Column('external_id', String(length=255), nullable=False),
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        UniqueConstraint('external_id', name='uniq_projects0external_id'),
        mysql_engine='InnoDB',
        mysql_charset='latin1')
    projects.create(checkfirst=True)

    users = Table('users', meta,
        Column('id', Integer, primary_key=True, nullable=False,
               autoincrement=True),
        Column('external_id', String(length=255), nullable=False),
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        UniqueConstraint('external_id', name='uniq_users0external_id'),
        mysql_engine='InnoDB',
        mysql_charset='latin1')
    users.create(checkfirst=True)

    consumers = Table('consumers', meta, autoload=True)
    project_id_col = consumers.c.project_id
    user_id_col = consumers.c.user_id

    # NOTE(jaypipes): For PostgreSQL, we can't do col.alter(type=Integer)
    # because NVARCHAR and INTEGER are not compatible, so we need to do this
    # manual ALTER TABLE ... USING approach.
    if migrate_engine.name == 'postgresql':
        migrate_engine.execute("ALTER TABLE consumers ALTER COLUMN project_id "
                               "TYPE INTEGER USING project_id::integer")
        migrate_engine.execute("ALTER TABLE consumers ALTER COLUMN user_id "
                               "TYPE INTEGER USING user_id::integer")
    else:
        project_id_col.alter(type=Integer)
        user_id_col.alter(type=Integer)
def upgrade(migrate_engine):
    """Replace the hosts unique constraint with one not covering segment id.

    The old 'uniq_host0name0deleted' spanned (failover_segment_id, name,
    deleted); the replacement (same name) spans only (name, deleted).
    """
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine
    hosts_table = Table('hosts', meta, autoload=True)
    failover_segments = Table('failover_segments', meta, autoload=True)

    # NOTE(Dinesh_Bhor) We need to drop foreign keys first because unique
    # constraints that we want to delete depend on them. So drop the fk and
    # recreate it again after unique constraint deletion.
    cons_fk = ForeignKeyConstraint([hosts_table.c.failover_segment_id],
                                   [failover_segments.c.uuid],
                                   name="fk_failover_segments_uuid")
    cons_fk.drop(engine=migrate_engine)

    cons_unique = UniqueConstraint('failover_segment_id', 'name', 'deleted',
                                   name='uniq_host0name0deleted',
                                   table=hosts_table)
    cons_unique.drop(engine=migrate_engine)

    # Create an updated unique constraint
    updated_cons_unique = UniqueConstraint('name', 'deleted',
                                           name='uniq_host0name0deleted',
                                           table=hosts_table)

    # Recreate the FK, then add the replacement unique constraint.
    cons_fk.create()
    updated_cons_unique.create()
def upgrade(migrate_engine):
    """Add a nullable ``uuid`` column to the block_device_mapping tables.

    The column is added to both the live and shadow tables (idempotently);
    the uniqueness guarantee is enforced on the live table only.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    for table_prefix in ('', 'shadow_'):
        bdm = Table(table_prefix + 'block_device_mapping', meta,
                    autoload=True)

        # Skip the column add if a previous run already created it.
        if not hasattr(bdm.c, 'uuid'):
            bdm.create_column(Column('uuid', String(36), nullable=True))

        if table_prefix == '':
            # Only add the constraint on the non-shadow table...
            constraint = UniqueConstraint(
                'uuid', table=bdm, name="uniq_block_device_mapping0uuid")
            constraint.create(migrate_engine)
def downgrade(migrate_engine):
    """Remove the compute_nodes ``host`` column and its unique key."""
    meta = MetaData()
    meta.bind = migrate_engine

    # Remove the new column
    nodes = Table("compute_nodes", meta, autoload=True)
    shadow_nodes = Table("shadow_compute_nodes", meta, autoload=True)

    # The unique key references the column, so it must be dropped first.
    UniqueConstraint(
        "host", "hypervisor_hostname",
        table=nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname",
    ).drop()

    nodes.drop_column("host")
    shadow_nodes.drop_column("host")
def upgrade(migrate_engine):
    """Create the console_auth_tokens table with its indexes and unique key."""
    meta = MetaData()
    meta.bind = migrate_engine

    tokens = Table(
        'console_auth_tokens', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('token_hash', String(255), nullable=False),
        Column('console_type', String(255), nullable=False),
        Column('host', String(255), nullable=False),
        Column('port', Integer, nullable=False),
        Column('internal_access_path', String(255)),
        Column('instance_uuid', String(36), nullable=False),
        Column('expires', Integer, nullable=False),
        # Lookup paths: by instance, by (host, expiry) for cleanup, by hash.
        Index('console_auth_tokens_instance_uuid_idx', 'instance_uuid'),
        Index('console_auth_tokens_host_expires_idx', 'host', 'expires'),
        Index('console_auth_tokens_token_hash_idx', 'token_hash'),
        UniqueConstraint('token_hash',
                         name='uniq_console_auth_tokens0token_hash'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    tokens.create(checkfirst=True)
def upgrade(migrate_engine):
    """Create the cell_mappings table (cell registry for the API database)."""
    meta = MetaData()
    meta.bind = migrate_engine

    cell_mappings = Table('cell_mappings', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(length=36), nullable=False),
        Column('name', String(length=255)),
        Column('transport_url', Text()),
        Column('database_connection', Text()),
        UniqueConstraint('uuid', name='uniq_cell_mappings0uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
    # so trying to add a second index on the uuid column will fail with
    # error SQL0605W, so omit the index in the case of DB2.
    if migrate_engine.name != 'ibm_db_sa':
        # Instantiating the Index against the column attaches it to the
        # table's metadata; it is emitted by the create() call below.
        Index('uuid_idx', cell_mappings.c.uuid)

    cell_mappings.create(checkfirst=True)
def upgrade(migrate_engine):
    """Create the consumers table (placement allocation consumers)."""
    meta = MetaData()
    meta.bind = migrate_engine

    consumers_table = Table(
        'consumers', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False,
               autoincrement=True),
        Column('uuid', String(length=36), nullable=False),
        Column('project_id', String(length=255), nullable=False),
        Column('user_id', String(length=255), nullable=False),
        # Composite indexes to serve project- and project+user-scoped reads.
        Index('consumers_project_id_uuid_idx', 'project_id', 'uuid'),
        Index('consumers_project_id_user_id_uuid_idx', 'project_id',
              'user_id', 'uuid'),
        UniqueConstraint('uuid', name='uniq_consumers0uuid'),
        mysql_engine='InnoDB',
        mysql_charset='latin1',
    )
    consumers_table.create(checkfirst=True)
class Modules(db.Model):
    """ORM model for a module record, unique per (own, platform, feature)."""

    __tablename__ = 'modules'

    id = db.Column(db.Integer, primary_key=True)
    own = db.Column(db.String(16))
    platform = db.Column(db.String(32))
    feature = db.Column(db.String(32))
    package = db.Column(db.String(64))
    mould = db.Column(db.String(32))
    url = db.Column(db.String(1024))
    version = db.Column(db.String(16))
    # MD5 hex digest of the module file (32 chars).
    file_md5 = db.Column(db.String(32))
    valid = db.Column(db.Boolean, default=True)
    timestamp = db.Column(db.Integer)

    # One row per (own, platform, feature) combination.
    __table_args__ = (UniqueConstraint('own', 'platform', 'feature',
                                       name='_own_platform_feature_uc'),)

    def update(self, new_modules):
        """Copy every field from *new_modules* onto this row and commit."""
        self.own = new_modules.own
        self.platform = new_modules.platform
        self.feature = new_modules.feature
        self.package = new_modules.package
        self.mould = new_modules.mould
        self.url = new_modules.url
        self.version = new_modules.version
        self.file_md5 = new_modules.file_md5
        self.valid = new_modules.valid
        self.timestamp = new_modules.timestamp
        db.session.commit()

    def to_dict(self):
        """Return the row's columns as a plain dict.

        Deep-copies ``__dict__`` and strips SQLAlchemy's internal
        instance-state entry.
        """
        kws = copy.deepcopy(self.__dict__)
        del kws['_sa_instance_state']
        return kws
def downgrade(self, migrate_engine):
    """Drop the instances.uuid unique key and make uuid nullable again.

    NOTE(review): the ``self`` parameter suggests this is a method on a
    class (e.g. a migration test helper) — confirm against the enclosing
    scope; plain migration scripts take only ``migrate_engine``.
    """
    UniqueConstraint('uuid',
                     table=db_utils.get_table(migrate_engine, 'instances'),
                     name='uniq_instances0uuid').drop()
    # Relax the NOT NULL on both the live and shadow tables.
    for table_name in ('instances', 'shadow_instances'):
        table = db_utils.get_table(migrate_engine, table_name)
        table.columns.uuid.alter(nullable=True)
def upgrade(migrate_engine):
    """Add instance_uuid/instance columns to build_requests, idempotently.

    Also ensures the instance_uuid index and unique constraint exist; every
    step is guarded so re-running the migration is a no-op.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    build_requests = Table('build_requests', meta, autoload=True)

    columns_to_add = [
        ('instance_uuid', Column('instance_uuid', String(length=36))),
        ('instance', Column('instance', Text())),
    ]
    for (col_name, column) in columns_to_add:
        if not hasattr(build_requests.c, col_name):
            build_requests.create_column(column)

    # for/else: the else only runs if no existing index covers exactly
    # ['instance_uuid'], i.e. the index still needs to be created.
    for index in build_requests.indexes:
        if [c.name for c in index.columns] == ['instance_uuid']:
            break
    else:
        index = Index('build_requests_instance_uuid_idx',
                      build_requests.c.instance_uuid)
        index.create()

    # Only add the unique constraint if reflection shows it is missing.
    inspector = reflection.Inspector.from_engine(migrate_engine)
    constrs = inspector.get_unique_constraints('build_requests')
    constr_names = [constr['name'] for constr in constrs]
    if 'uniq_build_requests0instance_uuid' not in constr_names:
        UniqueConstraint('instance_uuid', table=build_requests,
                         name='uniq_build_requests0instance_uuid').create()
def upgrade(migrate_engine):
    """Create the instance_mappings table (instance -> cell mapping)."""
    meta = MetaData()
    meta.bind = migrate_engine
    cell_mappings = Table('cell_mappings', meta, autoload=True)

    instance_mappings = Table(
        'instance_mappings', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('instance_uuid', String(length=36), nullable=False),
        Column('cell_id', Integer, nullable=False),
        Column('project_id', String(length=255), nullable=False),
        UniqueConstraint('instance_uuid',
                         name='uniq_instance_mappings0instance_uuid'),
        Index('project_id_idx', 'project_id'),
        ForeignKeyConstraint(columns=['cell_id'],
                             refcolumns=[cell_mappings.c.id]),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
    # so trying to add a second index on the instance_uuid column will fail
    # with error SQL0605W, so omit the index in the case of DB2.
    if migrate_engine.name != 'ibm_db_sa':
        # Instantiating the Index attaches it to the table's metadata; it is
        # emitted by the create() call below.
        Index('instance_uuid_idx', instance_mappings.c.instance_uuid)

    instance_mappings.create(checkfirst=True)
def upgrade(migrate_engine):
    """Relax legacy build_requests columns now superseded by 'instance'.

    Drops the request_specs FK, makes the legacy columns nullable (kept for
    a deprecation period), and removes the request_spec_id unique key.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    build_requests = Table('build_requests', meta, autoload=True)
    request_specs = Table('request_specs', meta, autoload=True)

    # Drop the FK to request_specs if it still exists (name is discovered
    # from reflection since it varies by backend).
    for fkey in build_requests.foreign_keys:
        if fkey.target_fullname == 'request_specs.id':
            ForeignKeyConstraint(columns=['request_spec_id'],
                                 refcolumns=[request_specs.c.id],
                                 table=build_requests,
                                 name=fkey.name).drop()
            break

    # These are being made nullable because they are no longer used after the
    # addition of the instance column. However they need a deprecation period
    # before they can be dropped.
    columns_to_nullify = [
        'request_spec_id',
        'user_id',
        'security_groups',
        'config_drive'
    ]
    for column in columns_to_nullify:
        getattr(build_requests.c, column).alter(nullable=True)

    # Drop the unique constraint only if reflection shows it still exists.
    inspector = reflection.Inspector.from_engine(migrate_engine)
    constrs = inspector.get_unique_constraints('build_requests')
    constr_names = [constr['name'] for constr in constrs]
    if 'uniq_build_requests0request_spec_id' in constr_names:
        UniqueConstraint('request_spec_id', table=build_requests,
                         name='uniq_build_requests0request_spec_id').drop()
def upgrade(migrate_engine):
    """Create the host_mappings table (host -> cell mapping)."""
    meta = MetaData()
    meta.bind = migrate_engine

    # The FK below references the already-existing cell_mappings table.
    cell_mappings = Table('cell_mappings', meta, autoload=True)

    mappings = Table(
        'host_mappings', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('cell_id', Integer, nullable=False),
        Column('host', String(length=255), nullable=False),
        # A host may be mapped to at most one cell.
        UniqueConstraint('host', name='uniq_host_mappings0host'),
        Index('host_idx', 'host'),
        ForeignKeyConstraint(columns=['cell_id'],
                             refcolumns=[cell_mappings.c.id]),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    mappings.create(checkfirst=True)
def downgrade(migrate_engine):
    """Revert the compute_nodes 'host' addition (column + unique key)."""
    meta = MetaData()
    meta.bind = migrate_engine

    # Remove the new column
    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)

    # Drop the dependent unique key before dropping the column itself.
    constraint = UniqueConstraint(
        'host', 'hypervisor_hostname',
        table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname")
    constraint.drop()

    for table in (compute_nodes, shadow_compute_nodes):
        table.drop_column('host')
def upgrade(migrate_engine):
    """Create placement tables and add a uuid column to compute_nodes.

    Creates resource_providers, inventories and allocations, then adds a
    nullable uuid column to compute_nodes and shadow_compute_nodes.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    resource_providers = Table(
        'resource_providers', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), nullable=False),
        UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
        mysql_engine='InnoDB',
        mysql_charset='latin1')

    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
    # so trying to add a second index on the name column will fail with error
    # SQL0605W, so omit the index in the case of DB2.
    if migrate_engine.name != 'ibm_db_sa':
        # Instantiating the Index attaches it to the table's metadata;
        # it is emitted by the create() loop below.
        Index('resource_providers_uuid_idx', resource_providers.c.uuid)

    inventories = Table('inventories', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('total', Integer, nullable=False),
        Column('reserved', Integer, nullable=False),
        Column('min_unit', Integer, nullable=False),
        Column('max_unit', Integer, nullable=False),
        Column('step_size', Integer, nullable=False),
        Column('allocation_ratio', Float, nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='latin1')
    Index('inventories_resource_provider_id_idx',
          inventories.c.resource_provider_id)
    Index('inventories_resource_class_id_idx',
          inventories.c.resource_class_id)

    allocations = Table('allocations', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('consumer_id', String(36), nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('used', Integer, nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='latin1')
    Index('allocations_resource_provider_class_id_idx',
          allocations.c.resource_provider_id,
          allocations.c.resource_class_id)
    Index('allocations_consumer_id_idx', allocations.c.consumer_id)
    Index('allocations_resource_class_id_idx',
          allocations.c.resource_class_id)

    for table in [resource_providers, inventories, allocations]:
        table.create(checkfirst=True)

    # Add the (nullable) uuid column to both the live and shadow tables.
    for table_name in ('', 'shadow_'):
        uuid_column = Column('uuid', String(36))
        compute_nodes = Table('%scompute_nodes' % table_name, meta)
        compute_nodes.create_column(uuid_column)
def upgrade(migrate_engine):
    """Create the keypair_types enum and the key_pairs table.

    The enum is created first (needed on backends like PostgreSQL where
    enums are standalone types); the 'type' column defaults to the SSH
    keypair type constant from the keypair module.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    enum = Enum('ssh', 'x509', metadata=meta, name='keypair_types')
    enum.create(checkfirst=True)

    keypairs = Table('key_pairs', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('name', String(255), nullable=False),
        Column('user_id', String(255), nullable=False),
        Column('fingerprint', String(255)),
        Column('public_key', Text()),
        Column('type', enum, nullable=False,
               server_default=keypair.KEYPAIR_TYPE_SSH),
        # Keypair names are unique per user, not globally.
        UniqueConstraint('user_id', 'name',
                         name="uniq_key_pairs0user_id0name"),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    keypairs.create(checkfirst=True)
def upgrade(migrate_engine):
    """Create placement tables and add a uuid column to compute_nodes.

    Variant without the DB2 guard: the uuid index is attached
    unconditionally.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    resource_providers = Table(
        'resource_providers', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), nullable=False),
        UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
        mysql_engine='InnoDB',
        mysql_charset='latin1')
    # Instantiating the Index attaches it to the table's metadata; it is
    # emitted by the create() loop below.
    Index('resource_providers_uuid_idx', resource_providers.c.uuid)

    inventories = Table('inventories', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('total', Integer, nullable=False),
        Column('reserved', Integer, nullable=False),
        Column('min_unit', Integer, nullable=False),
        Column('max_unit', Integer, nullable=False),
        Column('step_size', Integer, nullable=False),
        Column('allocation_ratio', Float, nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='latin1')
    Index('inventories_resource_provider_id_idx',
          inventories.c.resource_provider_id)
    Index('inventories_resource_class_id_idx',
          inventories.c.resource_class_id)

    allocations = Table('allocations', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('consumer_id', String(36), nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('used', Integer, nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='latin1')
    Index('allocations_resource_provider_class_id_idx',
          allocations.c.resource_provider_id,
          allocations.c.resource_class_id)
    Index('allocations_consumer_id_idx', allocations.c.consumer_id)
    Index('allocations_resource_class_id_idx',
          allocations.c.resource_class_id)

    for table in [resource_providers, inventories, allocations]:
        table.create(checkfirst=True)

    # Add the (nullable) uuid column to both the live and shadow tables.
    for table_name in ('', 'shadow_'):
        uuid_column = Column('uuid', String(36))
        compute_nodes = Table('%scompute_nodes' % table_name, meta)
        compute_nodes.create_column(uuid_column)
def downgrade(migrate_engine):
    """Undo the instances.uuid unique-constraint / NOT NULL migration."""
    # drop the unique constraint on instances.uuid
    instances = utils.get_table(migrate_engine, 'instances')
    UniqueConstraint('uuid', table=instances, name=UC_NAME).drop()

    # We can't bring the deleted records back but we can make uuid nullable.
    for name in ('instances', 'shadow_instances'):
        target = utils.get_table(migrate_engine, name)
        target.columns.uuid.alter(nullable=True)
def upgrade(migrate_engine):
    """Drop user/project linkage from sourceassoc and remove legacy tables.

    Drops dependent foreign keys and indexes first (skipped on SQLite,
    which recreates tables instead), rewrites the sample_id uniqueness,
    then drops the user_id/project_id columns and the TABLES_DROP tables.
    """
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta,
                                             autoload=True))
                       for table_name in TABLES)

    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {
                    'columns': [table.c[column]],
                    'refcolumns': [ref_table.c[ref_column_name]]
                }
                # FK naming differs per backend; mirror the names that the
                # original creation migrations used.
                if (migrate_engine.name == "mysql" and
                        table_name != 'alarm_history'):
                    params['name'] = "_".join(('fk', table_name, column))
                elif (migrate_engine.name == "postgresql" and
                        table_name == "sample"):
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.drop()

    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.drop(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.drop(bind=migrate_engine)

        # MySQL requires explicit constraint names; other backends derive
        # them automatically.
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id', table=sourceassoc,
                              **params)
        uc.drop()

    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()

    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
def upgrade(migrate_engine):
    """Create instance_groups and its policy/member child tables."""
    meta = MetaData()
    meta.bind = migrate_engine

    groups = Table(
        'instance_groups', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('user_id', String(length=255)),
        Column('project_id', String(length=255)),
        Column('uuid', String(length=36), nullable=False),
        Column('name', String(length=255)),
        UniqueConstraint('uuid', name='uniq_instance_groups0uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    groups.create(checkfirst=True)

    # Scheduling policies attached to a group (e.g. affinity rules).
    group_policy = Table(
        'instance_group_policy', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('policy', String(length=255)),
        Column('group_id', Integer, ForeignKey('instance_groups.id'),
               nullable=False),
        Index('instance_group_policy_policy_idx', 'policy'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    group_policy.create(checkfirst=True)

    # Instances that belong to a group, referenced by uuid.
    group_member = Table(
        'instance_group_member', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('instance_uuid', String(length=255)),
        Column('group_id', Integer, ForeignKey('instance_groups.id'),
               nullable=False),
        Index('instance_group_member_instance_idx', 'instance_uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    group_member.create(checkfirst=True)
def upgrade(migrate_engine):
    """Add the duplicate-trust unique constraint to the trust table."""
    meta = MetaData(bind=migrate_engine)
    trust_table = Table('trust', meta, autoload=True)

    # A trust is a duplicate when all five of these fields match.
    constraint = UniqueConstraint(
        'trustor_user_id', 'trustee_user_id', 'project_id',
        'impersonation', 'expires_at',
        table=trust_table,
        name='duplicate_trust_constraint')
    constraint.create()
def upgrade(migrate_engine):
    """Add a nullable 'host' column to compute_nodes (and its shadow table).

    Also adds the (host, hypervisor_hostname) unique key on the live table.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    # Add a new column host
    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)

    # NOTE(sbauza) : Old compute nodes can report stats without this field, we
    # need to set it as nullable
    host = Column('host', String(255), nullable=True)
    if not hasattr(compute_nodes.c, 'host'):
        compute_nodes.create_column(host)
    if not hasattr(shadow_compute_nodes.c, 'host'):
        # copy() gives the shadow table its own Column object — a Column
        # cannot belong to two tables.
        shadow_compute_nodes.create_column(host.copy())

    # NOTE(sbauza) : Populate the host field with the value from the services
    # table will be done at the ComputeNode object level when save()
    ukey = UniqueConstraint(
        'host', 'hypervisor_hostname',
        table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.create()
def upgrade(migrate_engine):
    """Re-add potentially missing foreign keys and unique constraints.

    Iterates the module-level FKEYS and UNIQUES definitions and creates any
    that are absent from the database. On SQLite it also restores one known
    missing index on fixed_ips.address.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name == 'sqlite':
        # SQLite is also missing this one index
        if not utils.index_exists(migrate_engine, 'fixed_ips', 'address'):
            utils.add_index(migrate_engine, 'fixed_ips', 'address',
                            ['address'])

    for src_table, src_column, dst_table, dst_column, name in FKEYS:
        src_table = Table(src_table, meta, autoload=True)
        # Skip FKs that already exist (matched by name).
        if name in set(fk.name for fk in src_table.foreign_keys):
            continue

        src_column = src_table.c[src_column]
        dst_table = Table(dst_table, meta, autoload=True)
        dst_column = dst_table.c[dst_column]

        fkey = ForeignKeyConstraint(columns=[src_column],
                                    refcolumns=[dst_column],
                                    name=name)
        fkey.create()

    # SQLAlchemy versions < 1.0.0 don't reflect unique constraints
    # for SQLite correctly causing sqlalchemy-migrate to recreate
    # some tables with missing unique constraints. Re-add some
    # potentially missing unique constraints as a workaround.
    for table_name, name, column_names in UNIQUES:
        table = Table(table_name, meta, autoload=True)
        # BUG FIX: the original tested isinstance(table, ...), which is
        # always False for a Table object, so the existing-constraint set
        # was always empty and constraints were unconditionally recreated.
        # Test each constraint object instead.
        if name in set(c.name for c in table.constraints
                       if isinstance(c, schema.UniqueConstraint)):
            continue

        uc = UniqueConstraint(*column_names, table=table, name=name)
        uc.create()
def upgrade(migrate_engine):
    """Clean NULL instance-uuid rows, then add the instances.uuid unique key."""
    # NOTE(mriedem): We're going to load up all of the tables so we can find
    # any with an instance_uuid column since those may be foreign keys back
    # to the instances table and we want to cleanup those records first. We
    # have to do this explicitly because the foreign keys in compute aren't
    # defined with cascading deletes.
    meta = MetaData(migrate_engine)
    meta.reflect(migrate_engine)

    # First pass scans and fails fast on NULL records; the second pass
    # actually runs the alter statements.
    for scan_only in (True, False):
        process_null_records(meta, scan=scan_only)

    # Create a unique constraint on instances.uuid for foreign keys.
    UniqueConstraint('uuid', table=meta.tables['instances'],
                     name=UC_NAME).create()
def upgrade(migrate_engine):
    """Create the traits and resource_provider_traits tables."""
    meta = MetaData()
    meta.bind = migrate_engine

    # MySQL: force binary collation so trait names compare case-sensitively.
    if migrate_engine.name == 'mysql':
        nameargs = {'collation': 'utf8_bin'}
    else:
        nameargs = {}

    resource_providers = Table('resource_providers', meta, autoload=True)

    traits = Table(
        'traits', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False,
               autoincrement=True),
        Column('name', Unicode(255, **nameargs), nullable=False),
        UniqueConstraint('name', name='uniq_traits0name'),
        mysql_engine='InnoDB',
        mysql_charset='latin1')

    # Association table: which providers expose which traits; composite PK.
    resource_provider_traits = Table(
        'resource_provider_traits', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('trait_id', Integer, ForeignKey('traits.id'),
               primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, primary_key=True,
               nullable=False),
        Index('resource_provider_traits_resource_provider_trait_idx',
              'resource_provider_id', 'trait_id'),
        ForeignKeyConstraint(columns=['resource_provider_id'],
                             refcolumns=[resource_providers.c.id]),
        mysql_engine='InnoDB',
        mysql_charset='latin1')

    for table in [traits, resource_provider_traits]:
        table.create(checkfirst=True)
def upgrade(migrate_engine):
    """Create the resource_classes lookup table."""
    meta = MetaData()
    meta.bind = migrate_engine

    resource_classes_table = Table(
        'resource_classes', meta,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('name', String(length=255), nullable=False),
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        # Class names are globally unique identifiers.
        UniqueConstraint('name', name='uniq_resource_classes0name'),
        mysql_engine='InnoDB',
        mysql_charset='latin1',
    )
    resource_classes_table.create(checkfirst=True)
def downgrade(migrate_engine):
    """Restore the compute_nodes unique key that excluded 'deleted'."""
    meta = MetaData()
    meta.bind = migrate_engine
    nodes = Table('compute_nodes', meta, autoload=True)

    # Drop the three-column constraint added by the upgrade...
    UniqueConstraint(
        'host', 'hypervisor_hostname', 'deleted',
        table=nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname0deleted").drop()

    # ...and re-create the original two-column one.
    UniqueConstraint(
        'host', 'hypervisor_hostname',
        table=nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname").create()
def upgrade(migrate_engine):
    """Drop user/project linkage from sourceassoc and remove legacy tables.

    Drops dependent foreign keys and indexes first (skipped on SQLite,
    which recreates tables instead), rewrites the sample_id uniqueness,
    then drops the user_id/project_id columns and the TABLES_DROP tables.
    """
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta,
                                             autoload=True))
                       for table_name in TABLES)

    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                # FK naming differs per backend; mirror the names that the
                # original creation migrations used.
                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.drop()

    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.drop(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.drop(bind=migrate_engine)

        # MySQL requires explicit constraint names; other backends derive
        # them automatically.
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id', table=sourceassoc,
                              **params)
        uc.drop()

    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()

    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
def upgrade(migrate_engine):
    """Extend the placement schema.

    Adds the name/generation/can_host columns to resource_providers
    (idempotently), creates the resource_provider_aggregates table,
    reworks the allocations indexes, and guarantees one inventory row
    per (resource provider, resource class) pair.
    """
    meta = MetaData(bind=migrate_engine)
    resource_providers = Table('resource_providers', meta, autoload=True)

    name_col = Column('name', Unicode(200), nullable=True)
    generation_col = Column('generation', Integer, default=0)
    can_host_col = Column('can_host', Integer, default=0)

    if not hasattr(resource_providers.c, 'name'):
        # NOTE(cdent): The resource_providers table is defined as
        # latin1 to be more efficient. Now we need the name column
        # to be UTF8. First create the column, then modify it,
        # otherwise the declarative handling in sqlalchemy gets
        # confused.
        resource_providers.create_column(name_col)
        if migrate_engine.name == 'mysql':
            name_col_ddl = DDL(
                "ALTER TABLE resource_providers MODIFY name "
                "VARCHAR(200) CHARACTER SET utf8")
            migrate_engine.connect().execute(name_col_ddl)
        UniqueConstraint('name', table=resource_providers,
                         name='uniq_resource_providers0name').create()
        utils.add_index(migrate_engine, 'resource_providers',
                        'resource_providers_name_idx', ['name'])

    if not hasattr(resource_providers.c, 'generation'):
        resource_providers.create_column(generation_col)
    if not hasattr(resource_providers.c, 'can_host'):
        resource_providers.create_column(can_host_col)

    resource_provider_aggregates = Table(
        'resource_provider_aggregates', meta,
        Column('resource_provider_id', Integer, primary_key=True,
               nullable=False),
        Column('aggregate_id', Integer, primary_key=True, nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='latin1'
    )
    # Attaching the Index to the table definition is enough for
    # create(checkfirst=True) below to emit it.
    Index('resource_provider_aggregates_aggregate_id_idx',
          resource_provider_aggregates.c.aggregate_id)
    resource_provider_aggregates.create(checkfirst=True)

    utils.add_index(migrate_engine, 'allocations',
                    'allocations_resource_provider_class_used_idx',
                    ['resource_provider_id', 'resource_class_id', 'used'])
    utils.drop_index(migrate_engine, 'allocations',
                     'allocations_resource_provider_class_id_idx')

    # Add a unique constraint so that any resource provider can have
    # only one inventory for any given resource class.
    inventories = Table('inventories', meta, autoload=True)
    UniqueConstraint(
        'resource_provider_id', 'resource_class_id', table=inventories,
        name='uniq_inventories0resource_provider_resource_class').create()
    utils.add_index(migrate_engine, 'inventories',
                    'inventories_resource_provider_resource_class_idx',
                    ['resource_provider_id', 'resource_class_id'])
def downgrade(migrate_engine):
    """Recreate the source/resource/sourceassoc tables.

    Rebuilds the tables, repopulates them from the distinct values still
    stored in the sample table, drops sample.source_id, and restores the
    foreign keys removed by the matching upgrade.
    """
    meta = sa.MetaData(bind=migrate_engine)
    sample = sa.Table('sample', meta, autoload=True)

    resource = sa.Table(
        'resource', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        sa.Column('resource_metadata', sa.Text),
        sa.Column('user_id', sa.String(255)),
        sa.Column('project_id', sa.String(255)),
        sa.Index('ix_resource_project_id', 'project_id'),
        sa.Index('ix_resource_user_id', 'user_id'),
        sa.Index('resource_user_id_project_id_key', 'user_id', 'project_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    resource.create()

    source = sa.Table(
        'source', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    source.create()

    sourceassoc = sa.Table(
        'sourceassoc', meta,
        sa.Column('sample_id', sa.Integer),
        sa.Column('resource_id', sa.String(255)),
        sa.Column('source_id', sa.String(255)),
        sa.Index('idx_sr', 'source_id', 'resource_id'),
        sa.Index('idx_ss', 'source_id', 'sample_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    sourceassoc.create()

    uc_params = {}
    if migrate_engine.name == "mysql":
        uc_params = {'name': 'uniq_sourceassoc0sample_id'}
    UniqueConstraint('sample_id', table=sourceassoc, **uc_params).create()

    # reload source/resource tables.
    # NOTE(gordc): fine to skip non-id attributes in table since
    # they're constantly updated and not used by api
    for target, src_col in [(source, 'source_id'), (resource, 'resource_id')]:
        query = sa.select([sample.c[src_col]]).distinct()
        # NOTE(sileht): workaround for
        # https://bitbucket.org/zzzeek/sqlalchemy/
        # issue/3044/insert-from-select-union_all
        query.select = lambda: query
        stmt = target.insert().from_select([target.c.id], query)
        try:
            migrate_engine.execute(stmt)
        except TypeError:
            # from select is empty
            pass

    # reload sourceassoc tables
    for ref_col, dst_col in [('id', 'sample_id'),
                             ('resource_id', 'resource_id')]:
        query = sa.select([sample.c.source_id, sample.c[ref_col]]).distinct()
        query.select = lambda: query
        stmt = sourceassoc.insert().from_select(
            [sourceassoc.c.source_id, sourceassoc.c[dst_col]], query)
        try:
            migrate_engine.execute(stmt)
        except TypeError:
            # from select is empty
            pass

    sample.c.source_id.drop()

    loaded = {name: sa.Table(name, meta, autoload=True) for name in TABLES}
    # add foreign keys
    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = loaded[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = loaded[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                if migrate_engine.name == "mysql":
                    params['name'] = "_".join(('fk', table_name, column))
                elif (migrate_engine.name == "postgresql"
                        and table_name == 'sample'):
                    # fk was not renamed in script 030
                    params['name'] = "_".join(('meter', column, 'fkey'))
                ForeignKeyConstraint(**params).create()
def downgrade(migrate_engine):
    """Restore the user/project tables and related schema.

    Re-creates the user and project tables, adds the user_id/project_id
    columns back to sourceassoc with their constraints and indexes, and
    repopulates the reference tables plus foreign keys from the id
    values still present in the metering tables.
    """
    meta = sa.MetaData(bind=migrate_engine)
    user = sa.Table(
        'user', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    project = sa.Table(
        'project', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    # BUG FIX: this previously iterated ``sorted(tables)``, but
    # sqlalchemy Table objects define no ordering, so sorted() raises
    # TypeError under Python 3. Create them in a fixed explicit order
    # (the same order a by-name sort would have produced).
    for table in (project, user):
        table.create()

    load_tables = {table_name: sa.Table(table_name, meta, autoload=True)
                   for table_name in TABLES}

    # Restore the sourceassoc columns and constraints
    sourceassoc = load_tables['sourceassoc']
    user_id = sa.Column('user_id', sa.String(255))
    project_id = sa.Column('project_id', sa.String(255))
    sourceassoc.create_column(user_id)
    sourceassoc.create_column(project_id)
    if migrate_engine.name != 'sqlite':
        # Swap the plain sample_id uniqueness back to (sample_id, user_id).
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id',
                              table=sourceassoc, **params)
        uc.create()
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.drop()
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.create(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.create(bind=migrate_engine)

    # Restore the user/project columns and constraints in all tables
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            # Select the distinct non-NULL ids present in the data but
            # missing from the reference table.
            c = getattr(Alias(table).c, column)
            except_q = exists([getattr(ref_table.c, ref_column_name)])
            q = select([c]).where(and_(c != sa.null(), not_(except_q)))
            q = q.distinct()
            # NOTE(sileht): workaround for
            # https://bitbucket.org/zzzeek/sqlalchemy/
            # issue/3044/insert-from-select-union_all
            q.select = lambda: q
            sql_ins = ref_table.insert().from_select(
                [getattr(ref_table.c, ref_column_name)], q)
            try:
                migrate_engine.execute(sql_ins)
            except TypeError:
                # from select is empty
                pass
            if migrate_engine.name != 'sqlite':
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.create()