Example #1
0
def downgrade(migrate_engine):
    """Remove volume_type_rate_limit table.

    Drops the quality_of_service_specs table and the qos_specs_id
    column on volume_types that references it.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    qos_specs = Table('quality_of_service_specs', meta, autoload=True)

    if migrate_engine.name == 'mysql':
        # NOTE(alanmeadows): MySQL Cannot drop column qos_specs_id
        # until the foreign key volumes_types_ibfk_1 is removed.  We
        # remove the foreign key first, and then we drop the column.
        table = Table('volume_types', meta, autoload=True)
        # The FK on volume_types.qos_specs_id references
        # quality_of_service_specs.id, so refcolumns must come from the
        # qos_specs table (already reflected above).  The original
        # pointed refcolumns back at volume_types itself; MySQL drops
        # the key purely by name so this was latent, but misleading.
        params = {'columns': [table.c['qos_specs_id']],
                  'refcolumns': [qos_specs.c['id']],
                  'name': 'volume_types_ibfk_1'}

        fkey = ForeignKeyConstraint(**params)
        fkey.drop()

    volume_types = Table('volume_types', meta, autoload=True)
    qos_specs_id = Column('qos_specs_id', String(36))

    volume_types.drop_column(qos_specs_id)
    qos_specs.drop()
Example #2
0
def downgrade(migrate_engine):
    """Drop the quota_classes, quota_usages and reservations tables."""
    meta = MetaData()
    meta.bind = migrate_engine

    fk_name = None

    # Only MySQL and PostgreSQL have a named FK to drop; other backends
    # (e.g. SQLite) skip the constraint drop entirely.
    if migrate_engine.name == 'mysql':
        fk_name = 'reservations_ibfk_1'
    elif migrate_engine.name == 'postgresql':
        fk_name = 'reservations_usage_id_fkey'

    # NOTE: MySQL and PostgreSQL Cannot drop the quota_usages table
    # until the foreign key is removed.  We remove the foreign key first,
    # and then we drop the table.
    table = Table('reservations', meta, autoload=True)
    # reservations.usage_id references quota_usages.id, so refcolumns
    # must come from quota_usages.  The original reflected `reservations`
    # again as the referenced table; the drop is performed by name so
    # this was latent, but misleading.
    ref_table = Table('quota_usages', meta, autoload=True)
    params = {'columns': [table.c['usage_id']],
              'refcolumns': [ref_table.c['id']],
              'name': fk_name}

    if fk_name:
        fkey = ForeignKeyConstraint(**params)
        fkey.drop()

    quota_classes = Table('quota_classes', meta, autoload=True)
    quota_classes.drop()

    quota_usages = Table('quota_usages', meta, autoload=True)
    quota_usages.drop()

    reservations = Table('reservations', meta, autoload=True)
    reservations.drop()
def upgrade(migrate_engine):
    """Replace trait name/type bookkeeping with a trait_type table.

    Builds trait_type from the (unique_name, t_type) pairs referenced by
    trait, repoints trait at trait_type via a new trait_type_id column,
    then drops trait.name_id, trait.t_type and the now-unused
    unique_name table.
    """
    meta = MetaData(migrate_engine)
    trait_type = Table(
        'trait_type', meta,
        Column('id', Integer, primary_key=True),
        Column('desc', String(255)),
        Column('data_type', Integer),
        UniqueConstraint('desc', 'data_type', name="tt_unique")
    )
    trait = Table('trait', meta, autoload=True)
    unique_name = Table('unique_name', meta, autoload=True)
    trait_type.create(migrate_engine)
    # Trait type extracts data from Trait and Unique name.
    # We take all trait names from Unique Name, and data types
    # from Trait. We then remove dtype and name from trait, and
    # remove the name field.

    conn = migrate_engine.connect()
    # NOTE(review): if one unique_name is used with multiple t_types,
    # the GROUP BY emits several rows sharing the same unique_name.id,
    # which would collide on trait_type's primary key -- confirm the
    # data shape guarantees one t_type per name.
    sql = ("INSERT INTO trait_type "
           "SELECT unique_name.id, unique_name.key, trait.t_type FROM trait "
           "INNER JOIN unique_name "
           "ON trait.name_id = unique_name.id "
           "GROUP BY unique_name.id, unique_name.key, trait.t_type")
    conn.execute(sql)
    conn.close()

    # Now we need to drop the foreign key constraint, rename
    # the trait.name column, and re-add a new foreign
    # key constraint
    params = {'columns': [trait.c.name_id],
              'refcolumns': [unique_name.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "trait_ibfk_1"  # foreign key to the unique name table
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()

    Column('trait_type_id', Integer).create(trait)

    # Move data from name_id column into trait_type_id column
    query = select([trait.c.id, trait.c.name_id])
    for key, value in migration.paged(query):
        trait.update().where(trait.c.id == key)\
            .values({"trait_type_id": value}).execute()

    trait.c.name_id.drop()

    # Re-add the FK, now pointing at the new trait_type table.
    params = {'columns': [trait.c.trait_type_id],
              'refcolumns': [trait_type.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'trait_type', 'id'))

    fkey = ForeignKeyConstraint(**params)
    fkey.create()

    # Drop the t_type column to data_type.
    trait.c.t_type.drop()

    # Finally, drop the unique_name table - we don't need it
    # anymore.
    unique_name.drop()
def downgrade(migrate_engine):
    """Drop the metadata_file -> library_dataset_dataset_association FK.

    Both tables are reflected defensively; the constraint drop is
    best-effort and only logs on failure, matching the surrounding
    migrations' tolerant downgrade style.  Relies on module globals
    ``metadata`` and ``log``.
    """
    metadata.bind = migrate_engine
    # Load existing tables
    metadata.reflect()
    # NOTE: all new data added in the upgrade method is eliminated here via table drops
    # Drop 1 foreign key constraint from the metadata_file table
    try:
        MetadataFile_table = Table( "metadata_file", metadata, autoload=True )
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug( "Failed loading table metadata_file" )
    try:
        LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug( "Failed loading table library_dataset_dataset_association" )
    if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint( [MetadataFile_table.c.lda_id],
                                         [LibraryDatasetDatasetAssociation_table.c.id],
                                         name='metadata_file_lda_id_fkey' )
            # Drop the constraint
            cons.drop()
        # Fixed: ``except Exception, e`` is Python-2-only syntax (a
        # SyntaxError on Python 3); ``as e`` works on 2.6+ and 3.x.
        except Exception as e:
            log.debug( "Dropping foreign key constraint 'metadata_file_lda_id_fkey' from table 'metadata_file' failed: %s" % ( str( e ) ) )
def upgrade(migrate_engine):
    """Make compute_nodes.service_id nullable and detach it from the
    services table (drops the FK and the index the FK created)."""
    meta = MetaData()
    meta.bind = migrate_engine

    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)
    services = Table('services', meta, autoload=True)

    _correct_sqlite_unique_constraints(migrate_engine, compute_nodes)

    # Relax NOT NULL on both the live table and its shadow copy.
    for tbl in (compute_nodes, shadow_compute_nodes):
        tbl.c.service_id.alter(nullable=True)

    # Find the reflected FK that targets services.id and drop it by name.
    service_fk = next((fk for fk in compute_nodes.foreign_keys
                       if fk.column == services.c.id), None)
    if service_fk is not None:
        ForeignKeyConstraint(columns=[compute_nodes.c.service_id],
                             refcolumns=[services.c.id],
                             name=service_fk.name).drop()

    # Remove the index that was created implicitly alongside the FK.
    for idx in compute_nodes.indexes:
        if 'service_id' in idx.columns:
            idx.drop()
            break
def cascade_fkeys(metadata, restore=False):
    """ Sets all fkeys to cascade on update """
    # NOTE(review): state is kept on the function object itself --
    # ``cascade_fkeys.fkey_onupdate_restore`` must be initialized (e.g.
    # to {}) elsewhere before the first restore=False call; confirm.
    for table_name, table in metadata.tables.items():
        # list() snapshots the FK collection so the drop/create below
        # cannot disturb the iteration.
        for fkey in list(table.foreign_keys):
            if restore:
                # Restore pass: only touch constraints recorded earlier.
                if fkey.constraint.name in cascade_fkeys.fkey_onupdate_restore:
                    onupdate = cascade_fkeys.fkey_onupdate_restore[
                        fkey.constraint.name]
                else:
                    continue
            else:
                # Forward pass: remember the original onupdate action so
                # a later restore=True call can put it back, then force
                # CASCADE.
                cascade_fkeys.fkey_onupdate_restore[fkey.constraint.name] = \
                    fkey.constraint.onupdate
                onupdate = "CASCADE"

            # Rebuild the constraint definition.  NOTE(review): this
            # iterates FK *elements*, so a composite (multi-column)
            # constraint would be rebuilt with a single refcolumn --
            # confirm the schema only uses single-column FKs.
            params = {
                'columns': fkey.constraint.columns,
                'refcolumns': [fkey.column],
                'name': fkey.constraint.name,
                'onupdate': fkey.constraint.onupdate,
                'ondelete': fkey.constraint.ondelete,
                'deferrable': fkey.constraint.deferrable,
                'initially': fkey.constraint.initially,
                'table': table
            }

            # Drop the existing constraint, then recreate it with the
            # chosen onupdate action.
            fkey_constraint = ForeignKeyConstraint(**params)
            fkey_constraint.drop()

            params['onupdate'] = onupdate
            fkey_constraint = ForeignKeyConstraint(**params)
            fkey_constraint.create()
def downgrade(migrate_engine):
    """Drop the FKs described by TABLES and restore the dumped rows.

    Skipped entirely on SQLite, which cannot drop constraints in place.
    """
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    for name, (col_name, ref_name, ref_col_name), child in TABLES:
        parent = Table(name, meta, autoload=True)
        referenced = Table(ref_name, meta, autoload=True)

        fk_kwargs = {'columns': [parent.c[col_name]],
                     'refcolumns': [referenced.c[ref_col_name]]}
        if migrate_engine.name == 'mysql':
            # MySQL drops foreign keys by explicit name.
            fk_kwargs['name'] = "_".join(('fk', name, col_name))
        with migrate_engine.begin():
            ForeignKeyConstraint(**fk_kwargs).drop()

        with migrate_engine.begin():
            restore_rows(migrate_engine, meta, name)

        # compute_node_stats has a missing foreign key and is a child of
        # of compute_nodes. Don't bother processing it as a child since
        # only want to restore the dump once
        if child and name != 'compute_nodes':
            child_name = child[0]
            with migrate_engine.begin():
                restore_rows(migrate_engine, meta, child_name)
def downgrade(migrate_engine):
    """Drop the FK from storage_pools.primary_storage_group_id to
    storage_groups.id (no-op on SQLite, which cannot drop constraints).
    """
    if migrate_engine.name == "sqlite":
        return
    meta = MetaData(bind=migrate_engine)
    storage_pools = Table("storage_pools", meta, autoload=True)
    # Fixed typo: 'autolaod' -> 'autoload'.  The misspelled keyword
    # meant storage_groups was never reflected, so the subsequent
    # storage_groups.c.id access failed at runtime.
    storage_groups = Table("storage_groups", meta, autoload=True)
    params = {"columns": [storage_pools.c.primary_storage_group_id], "refcolumns": [storage_groups.c.id]}
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()
def upgrade(migrate_engine):
    """Split event names out of unique_name into an event_type table.

    Creates event_type, seeds it from the unique_name rows referenced by
    event, repoints event at event_type via a new event_type_id column,
    and removes the migrated unique_name rows and the old
    unique_name_id column.
    """
    meta = MetaData(bind=migrate_engine)
    event_type = Table(
        'event_type', meta,
        Column('id', Integer, primary_key=True),
        Column('desc', String(255), unique=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    event_type.create()
    event = Table('event', meta, autoload=True)
    unique_name = Table('unique_name', meta, autoload=True)

    # Event type is a specialization of Unique name, so
    # we insert into the event_type table all the distinct
    # unique names from the event.unique_name field along
    # with the key from the unique_name table, and
    # then rename the event.unique_name field to event.event_type
    conn = migrate_engine.connect()
    sql = ("INSERT INTO event_type "
           "SELECT unique_name.id, unique_name.key FROM event "
           "INNER JOIN unique_name "
           "ON event.unique_name_id = unique_name.id "
           "GROUP BY unique_name.id")
    conn.execute(sql)
    conn.close()
    # Now we need to drop the foreign key constraint, rename
    # the event.unique_name column, and re-add a new foreign
    # key constraint
    params = {'columns': [event.c.unique_name_id],
              'refcolumns': [unique_name.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "event_ibfk_1"
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()

    Column('event_type_id', Integer).create(event)

    # Move data from unique_name_id column into event_type_id column
    # and delete the entry from the unique_name table
    query = select([event.c.id, event.c.unique_name_id])
    for key, value in migration.paged(query):
        event.update().where(event.c.id == key)\
            .values({"event_type_id": value}).execute()
        # BUG FIX: the migrated unique_name row is identified by its own
        # id (``value``, the unique_name_id); the original filtered on
        # the event id (``key``), deleting unrelated unique_name rows
        # and leaving the migrated ones behind.
        unique_name.delete()\
            .where(unique_name.c.id == value).execute()

    params = {'columns': [event.c.event_type_id],
              'refcolumns': [event_type.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'event_type', 'id'))
    fkey = ForeignKeyConstraint(**params)
    fkey.create()

    event.c.unique_name_id.drop()
def downgrade(migrate_engine):
    """Convert volume_type from UUID back to int."""
    meta = MetaData()
    meta.bind = migrate_engine

    volumes = Table("volumes", meta, autoload=True)
    volume_types = Table("volume_types", meta, autoload=True)
    extra_specs = Table("volume_type_extra_specs", meta, autoload=True)

    # Columns carrying FKs (all referencing volume_types.id) that must be
    # dropped before ids and column types can be rewritten.
    fkey_remove_list = [volumes.c.volume_type_id, volume_types.c.id, extra_specs.c.volume_type_id]

    for column in fkey_remove_list:
        fkeys = list(column.foreign_keys)
        if fkeys:
            fkey_name = fkeys[0].constraint.name
            fkey = ForeignKeyConstraint(columns=[column], refcolumns=[volume_types.c.id], name=fkey_name)

            try:
                fkey.drop()
            except Exception:
                # SQLite cannot drop constraints; ignore there, re-raise
                # on any other backend.
                if migrate_engine.url.get_dialect().name.startswith("sqlite"):
                    pass
                else:
                    raise

    # Renumber every volume type (and the rows referencing it) with small
    # sequential integers in place of the UUID strings.
    vtype_list = list(volume_types.select().execute())
    new_id = 1

    for t in vtype_list:
        volumes.update().where(volumes.c.volume_type_id == t["id"]).values(volume_type_id=new_id).execute()

        extra_specs.update().where(extra_specs.c.volume_type_id == t["id"]).values(volume_type_id=new_id).execute()

        volume_types.update().where(volume_types.c.id == t["id"]).values(id=new_id).execute()

        new_id += 1

    # Narrow the columns back to Integer now that the data is numeric.
    volumes.c.volume_type_id.alter(Integer)
    volume_types.c.id.alter(Integer)
    extra_specs.c.volume_type_id.alter(Integer)

    # Recreate the FKs dropped above (again tolerated on SQLite).
    # NOTE(review): column.foreign_keys is re-read after the drops; it
    # may be empty here depending on reflection caching -- confirm.
    for column in fkey_remove_list:
        fkeys = list(column.foreign_keys)
        if fkeys:
            fkey_name = fkeys[0].constraint.name
            fkey = ForeignKeyConstraint(columns=[column], refcolumns=[volume_types.c.id], name=fkey_name)
            try:
                fkey.create()
                LOG.info("Created foreign key %s" % fkey_name)
            except Exception:
                if migrate_engine.url.get_dialect().name.startswith("sqlite"):
                    pass
                else:
                    raise
Example #11
0
def downgrade(migrate_engine):
    # Operations to reverse the above upgrade go here.
    # NOTE(review): this function relies on module-level globals --
    # ``meta``, ``t`` and ``th`` are not defined locally and are
    # presumably created at import time elsewhere in this module;
    # confirm before modifying.
    meta.bind = migrate_engine

    tt = Table('proc_SelectedHistoriesTable', meta, autoload=True)

    # Drop the FK from the selected-histories table to ``t`` first so
    # the referenced table and the referencing column can be removed.
    con = ForeignKeyConstraint([tt.c.selected_arar_id], [t.c.id])
    con.drop()

    t.drop()
    th.drop()
    tt.c.selected_arar_id.drop()
    def _remove_foreign_key_constraints(engine, meta, table_name):
        """Drop every foreign key constraint declared on ``table_name``.

        Uses the inspector to enumerate the FKs, then drops each one by
        name via sqlalchemy-migrate's ForeignKeyConstraint.
        """
        inspector = reflection.Inspector.from_engine(engine)

        # Reflect the source table once -- it is the same for every FK,
        # so re-reflecting it inside the loop was redundant work.
        source_table = Table(table_name, meta, autoload=True)
        for fk in inspector.get_foreign_keys(table_name):
            target_table = Table(fk['referred_table'], meta, autoload=True)

            fkey = ForeignKeyConstraint(
                columns=_get_columns(source_table, fk['constrained_columns']),
                refcolumns=_get_columns(target_table, fk['referred_columns']),
                name=fk['name'])
            fkey.drop()
def upgrade(migrate_engine):
    """Fold sourceassoc's user/project linkage into per-sample rows.

    Drops the FKs listed in INDEXES, swaps the old (source_id, user_id)
    and (source_id, project_id) indexes for a per-sample unique
    constraint, removes sourceassoc.user_id/project_id, and drops the
    tables in TABLES_DROP.  Constraint/index DDL is skipped on SQLite,
    which cannot drop them in place.
    """
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta,
                                             autoload=True))
                       for table_name in TABLES)

    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}

                # MySQL drops FKs by name; alarm_history's FK was created
                # without the fk_ naming scheme, so it is left unnamed.
                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))

                fkey = ForeignKeyConstraint(**params)
                fkey.drop()

    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        # Replace the old composite indexes with sample_id uniqueness.
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.drop(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.drop(bind=migrate_engine)

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id',
                              table=sourceassoc, **params)
        uc.drop()
    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()

    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
def upgrade(migrate_engine):
    """Drop the foreign keys listed in INDEXES (skipped on SQLite)."""
    if migrate_engine.name == "sqlite":
        return
    meta = MetaData(bind=migrate_engine)
    tables = dict((name, Table(name, meta, autoload=True))
                  for name in TABLES)
    for table_name, index_specs in INDEXES.items():
        source = tables[table_name]
        for col_name, ref_name, ref_col_name in index_specs:
            target = tables[ref_name]
            fk_kwargs = {"columns": [source.c[col_name]],
                         "refcolumns": [target.c[ref_col_name]]}
            if migrate_engine.name == "mysql":
                # MySQL requires the constraint name to drop a FK.
                fk_kwargs["name"] = "_".join(("fk", table_name, col_name))
            ForeignKeyConstraint(**fk_kwargs).drop()
def downgrade(migrate_engine):
    """Remove the foreign keys listed in INDEXES from their tables."""
    meta = MetaData(bind=migrate_engine)
    reflected = {}
    for name in TABLES:
        reflected[name] = Table(name, meta, autoload=True)
    for table_name, specs in INDEXES.items():
        source_table = reflected[table_name]
        for col_name, ref_name, ref_col in specs:
            fk_kwargs = {'columns': [source_table.c[col_name]],
                         'refcolumns': [reflected[ref_name].c[ref_col]]}
            if migrate_engine.name == 'mysql':
                # MySQL drops foreign keys by explicit name.
                fk_kwargs['name'] = "_".join(('fk', table_name, col_name))
            ForeignKeyConstraint(**fk_kwargs).drop()
Example #16
0
def downgrade(migrate_engine):
    """Drop the quota tables, removing the named usage FK first."""
    meta = MetaData()
    meta.bind = migrate_engine

    fk_name = None

    if migrate_engine.name == 'mysql':
        fk_name = 'reservations_ibfk_1'
    elif migrate_engine.name == 'postgresql':
        fk_name = 'reservations_usage_id_fkey'

    # NOTE: MySQL and PostgreSQL Cannot drop the quota_usages table
    # until the foreign key is removed.  We remove the foreign key first,
    # and then we drop the table.
    table = Table('reservations', meta, autoload=True)
    ref_table = Table('reservations', meta, autoload=True)
    params = {'columns': [table.c['usage_id']],
              'refcolumns': [ref_table.c['id']],
              'name': fk_name}

    if fk_name:
        try:
            ForeignKeyConstraint(**params).drop()
        except Exception:
            msg = _LE("Dropping foreign key %s failed.")
            LOG.error(msg, fk_name)

    # Drop children before parents; log and re-raise on failure so the
    # migration is recorded as failed.
    drop_plan = [
        ('quota_classes', _LE("quota_classes table not dropped")),
        ('quota_usages', _LE("quota_usages table not dropped")),
        ('reservations', _LE("reservations table not dropped")),
    ]
    for table_name, failure_msg in drop_plan:
        try:
            Table(table_name, meta, autoload=True).drop()
        except Exception:
            LOG.error(failure_msg)
            raise
Example #17
0
def downgrade(migrate_engine):
    """Remove the convergence columns from stack (template FK first)."""
    meta = sqlalchemy.MetaData(bind=migrate_engine)

    stack = sqlalchemy.Table('stack', meta, autoload=True)

    if migrate_engine.name == 'sqlite':
        # SQLite cannot drop columns/constraints in place; use the
        # table-rebuild helper instead.
        _downgrade_sqlite(migrate_engine)
        return

    raw_template = sqlalchemy.Table('raw_template', meta, autoload=True)
    # The FK must be removed before its column can be dropped.
    ForeignKeyConstraint(columns=[stack.c.prev_raw_template_id],
                         refcolumns=[raw_template.c.id],
                         name='prev_raw_template_ref').drop()
    for column in (stack.c.prev_raw_template_id,
                   stack.c.current_traversal,
                   stack.c.current_deps):
        column.drop()
def downgrade(migrate_engine):
    """Drop port.subnet_id together with its FK to subnet.id."""
    meta = MetaData(bind=migrate_engine)

    port_table = Table('port', meta, autoload=True)
    subnet_table = Table('subnet', meta, autoload=True)

    # Locate the reflected FK targeting subnet.id so it can be dropped
    # by its backend-assigned name before the column is removed.
    subnet_fk = next((fk for fk in port_table.foreign_keys
                      if fk.column == subnet_table.c.id), None)
    if subnet_fk is not None:
        ForeignKeyConstraint(columns=[port_table.c.subnet_id],
                             refcolumns=[subnet_table.c.id],
                             name=subnet_fk.name).drop()
    port_table.c.subnet_id.drop()
def downgrade(migrate_engine):
    """Drop the versions.target_id FK/column and the target tables."""
    print("037 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine
    versions = Table('versions', meta, autoload=True)
    targets = Table('targets', meta, autoload=True)
    params = {'columns': [versions.c['target_id']],
              'refcolumns': [targets.c['id']],
              'name': 'versions_ibfk_1'}
    foreign = ForeignKeyConstraint(**params)
    foreign.drop()
    # Fixed NameError: the original passed a bare ``target_id`` name
    # that was never defined; pass the reflected column object instead.
    versions.drop_column(versions.c['target_id'])

    tables = [define_target_status_table(meta),
              define_targets_table(meta),
              define_host_versions_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    """Add UUID primary key column to encryption."""
    meta = MetaData()
    meta.bind = migrate_engine

    encryptions = Table('encryption', meta, autoload=True)

    encryption_id_column_kwargs = {}
    if migrate_engine.name == 'ibm_db_sa':
        # NOTE(junxiebj): DB2 10.5 doesn't support primary key
        # constraints over nullable columns, so we have to
        # make the column non-nullable in the DB2 case.
        encryption_id_column_kwargs['nullable'] = False
    encryption_id = Column('encryption_id', String(36),
                           **encryption_id_column_kwargs)
    encryptions.create_column(encryption_id)

    encryption_items = list(encryptions.select().execute())

    # Assign each existing row a fresh UUID.  NOTE(review): rows are
    # matched by volume_type_id, which assumes at most one encryption
    # row per volume type -- confirm against the schema that created
    # the table.
    for item in encryption_items:
        encryptions.update().\
            where(encryptions.c.volume_type_id == item['volume_type_id']).\
            values(encryption_id=str(uuid.uuid4())).execute()

    # NOTE (e0ne): need to drop FK first for MySQL
    if migrate_engine.name == 'mysql':
        ref_table = Table('volume_types', meta, autoload=True)
        params = {'columns': [encryptions.c['volume_type_id']],
                  'refcolumns': [ref_table.c['id']],
                  'name': 'encryption_ibfk_1'}
        volume_type_fk = ForeignKeyConstraint(**params)
        volume_type_fk.drop()

    # Swap the primary key from volume_type_id to the new encryption_id.
    try:
        volume_type_pk = PrimaryKeyConstraint('volume_type_id',
                                              table=encryptions)
        volume_type_pk.drop()
    except Exception:
        # NOTE (e0ne): SQLite doesn't support 'drop constraint' statament
        if migrate_engine.url.get_dialect().name.startswith('sqlite'):
            pass
        else:
            raise

    pkey = PrimaryKeyConstraint(encryptions.columns.encryption_id)
    pkey.create()
def downgrade(migrate_engine):
    """Downgrade the aggregate_metadata unique constraint.

    On MySQL the FK on aggregate_metadata.aggregate_id pins the unique
    index, so the FK is dropped first and recreated afterwards.
    """
    if migrate_engine.name == 'mysql':
        # NOTE(jhesketh): MySQL Cannot drop index
        # 'uniq_aggregate_metadata0aggregate_id0key0deleted': needed in a
        # foreign key constraint. So we'll remove the fkey constraint on the
        # aggregate_metadata table and add it back after the index is
        # downgraded.
        meta = MetaData(bind=migrate_engine)
        table = Table('aggregate_metadata', meta, autoload=True)
        ref_table = Table('aggregates', meta, autoload=True)
        params = {'columns': [table.c['aggregate_id']],
                  'refcolumns': [ref_table.c['id']],
                  'name': 'aggregate_metadata_ibfk_1'}
        fkey = ForeignKeyConstraint(**params)
        fkey.drop()

    utils.drop_unique_constraint(migrate_engine, TABLE_NAME, UC_NAME, *COLUMNS)

    if migrate_engine.name == 'mysql':
        # ``fkey`` was built in the MySQL branch above; recreate it now
        # that the unique constraint has been downgraded.
        fkey.create()
def upgrade(migrate_engine):
    """Detach registered_limit from the service and region tables."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    registered_limit_table = sql.Table('registered_limit', meta, autoload=True)
    service_table = sql.Table('service', meta, autoload=True)
    region_table = sql.Table('region', meta, autoload=True)

    inspector = sql.engine.reflection.Inspector.from_engine(migrate_engine)
    for fk in inspector.get_foreign_keys('registered_limit'):
        # Each reflected FK points either at service.id or at region.id.
        if fk['referred_table'] == 'service':
            source_col = registered_limit_table.c.service_id
            target_col = service_table.c.id
        else:
            source_col = registered_limit_table.c.region_id
            target_col = region_table.c.id
        ForeignKeyConstraint([source_col], [target_col],
                             name=fk['name']).drop()
def downgrade(migrate_engine):
    """Downgrade the migrations-table indexes.

    On MySQL the instance_uuid FK pins the index being modified, so the
    FK is dropped up front and recreated after utils.modify_indexes.
    """
    if migrate_engine.name == 'mysql':
        # NOTE(jhesketh): MySQL Cannot drop index
        # migrations_instance_uuid_and_status_idx needed in a foreign
        # key constraint. So we'll remove the fkey constraint on the
        # aggregate_metadata table and add it back after the indexes are
        # downgraded.
        meta = MetaData(bind=migrate_engine)
        table = Table('migrations', meta, autoload=True)
        ref_table = Table('instances', meta, autoload=True)
        params = {'columns': [table.c['instance_uuid']],
                  'refcolumns': [ref_table.c['uuid']]}
        # The inner check below is redundant (we are already inside the
        # mysql branch); kept as-is to avoid any behavior change.
        if migrate_engine.name == 'mysql':
            params['name'] = 'fk_migrations_instance_uuid'
        fkey = ForeignKeyConstraint(**params)
        fkey.drop()

    utils.modify_indexes(migrate_engine, data, upgrade=False)

    if migrate_engine.name == 'mysql':
        # ``fkey`` was created in the first MySQL branch above.
        fkey.create()
def upgrade(migrate_engine):
    """Recreate the resource_data -> resource FK with ON DELETE CASCADE
    and a stable, deterministic name."""
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    resource_data = sqlalchemy.Table('resource_data', meta, autoload=True)
    resource = sqlalchemy.Table('resource', meta, autoload=True)

    for fk in resource_data.foreign_keys:
        if fk.column == resource.c.id:
            # Drop the reflected FK under its backend-assigned name,
            # then re-add it with a cascade delete and a proper name.
            ForeignKeyConstraint(
                columns=[resource_data.c.resource_id],
                refcolumns=[resource.c.id],
                name=fk.name).drop()
            ForeignKeyConstraint(
                columns=[resource_data.c.resource_id],
                refcolumns=[resource.c.id],
                name="fk_resource_id", ondelete='CASCADE').create()
            break
def downgrade(migrate_engine):
    """Fold the event_type table back into unique_name.

    Copies event_type rows into unique_name, repoints event at
    unique_name via a restored unique_name_id column, and drops the
    event_type table and the event_type_id column.
    """
    meta = MetaData(bind=migrate_engine)
    event_type = Table('event_type', meta, autoload=True)
    event = Table('event', meta, autoload=True)
    unique_name = Table('unique_name', meta, autoload=True)
    # Re-insert the event type table records into the old
    # unique_name table.
    conn = migrate_engine.connect()
    sql = ("INSERT INTO unique_name "
           "SELECT event_type.id, event_type.desc FROM event_type")
    conn.execute(sql)
    conn.close()
    # Drop the foreign key constraint to event_type, drop the
    # event_type table, rename the event.event_type column to
    # event.unique_name, and re-add the old foreign
    # key constraint
    params = {'columns': [event.c.event_type_id],
              'refcolumns': [event_type.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'event_type', 'id'))
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()

    event_type.drop()

    Column('unique_name_id', Integer).create(event)

    # Move data from event_type_id column to unique_name_id column
    query = select([event.c.id, event.c.event_type_id])
    for key, value in migration.paged(query):
        event.update().where(event.c.id == key)\
            .values({"unique_name_id": value}).execute()

    event.c.event_type_id.drop()
    # Restore the original event -> unique_name foreign key.
    params = {'columns': [event.c.unique_name_id],
              'refcolumns': [unique_name.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = 'event_ibfk_1'
    fkey = ForeignKeyConstraint(**params)
    fkey.create()
Example #26
0
def downgrade(migrate_engine):
    """Drop the quota_classes, quota_usages and reservations tables."""
    meta = MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name == 'mysql':
        # NOTE(jsbryant): MySQL Cannot drop the quota_usages table
        # until the foreign key reservations_ibfk_1 is removed.  We
        # remove the foreign key first, and then we drop the table.
        reservations_tbl = Table('reservations', meta, autoload=True)
        ref_tbl = Table('reservations', meta, autoload=True)
        fk_kwargs = {'columns': [reservations_tbl.c['usage_id']],
                     'refcolumns': [ref_tbl.c['id']],
                     'name': 'reservations_ibfk_1'}

        try:
            ForeignKeyConstraint(**fk_kwargs).drop()
        except Exception:
            LOG.error(_("Dropping foreign key reservations_ibfk_1 failed."))

    # Drop children before parents; log and re-raise on failure so the
    # migration is recorded as failed.
    drop_plan = (
        ('quota_classes', _("quota_classes table not dropped")),
        ('quota_usages', _("quota_usages table not dropped")),
        ('reservations', _("reservations table not dropped")),
    )
    for table_name, failure_msg in drop_plan:
        try:
            Table(table_name, meta, autoload=True).drop()
        except Exception:
            LOG.error(failure_msg)
            raise
def upgrade(migrate_engine):
    """Move the per-sample source into sample.source_id.

    Drops the FKs named in INDEXES (skipped on SQLite), adds a
    source_id column to sample, copies values over from sourceassoc,
    then drops the tables in DROP_TABLES.
    """
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta,
                                             autoload=True))
                       for table_name in TABLES)

    # drop foreign keys
    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                fk_table_name = table_name
                # MySQL drops FKs by name; PostgreSQL's sample FK still
                # carries the table's pre-rename name.
                if migrate_engine.name == "mysql":
                    params['name'] = "_".join(('fk', fk_table_name, column))
                elif (migrate_engine.name == "postgresql" and
                      table_name == 'sample'):
                    # fk was not renamed in script 030
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.drop()

    # create source field in sample
    sample = load_tables['sample']
    sample.create_column(sa.Column('source_id', sa.String(255)))

    # move source values to samples
    sourceassoc = load_tables['sourceassoc']
    query = (sa.select([sourceassoc.c.sample_id, sourceassoc.c.source_id]).
             where(sourceassoc.c.sample_id.isnot(None)))
    for sample_id, source_id in migration.paged(query):
        # ``sample_id == sample.c.id`` compares value-vs-column;
        # SQLAlchemy reflects it into the same SQL as column == value.
        (sample.update().where(sample_id == sample.c.id).
         values({'source_id': source_id}).execute())

    # drop tables
    for table_name in DROP_TABLES:
        sa.Table(table_name, meta, autoload=True).drop()
def downgrade(migrate_engine):
    """Restore the pre-145 ``workflow_step_connection`` schema.

    Renames the current table aside, recreates the old layout (a direct
    ``input_step_id`` column instead of the ``workflow_step_input``
    indirection), copies the connection rows back via SQL, then drops the
    renamed table and ``WorkflowStepInput_table``.
    """
    metadata.bind = migrate_engine

    NewWorkflowStepConnection_table = Table("workflow_step_connection", metadata, autoload=True)
    # Drop every reflected FK constraint; sqlalchemy-migrate needs its own
    # constraint object rebuilt from the reflected elements to do the drop.
    for fkc in NewWorkflowStepConnection_table.foreign_key_constraints:
        mfkc = MigrateForeignKeyConstraint([_.parent for _ in fkc.elements], [_.column for _ in fkc.elements], name=fkc.name)
        try:
            mfkc.drop()
        except Exception:
            log.exception("Dropping foreign key constraint '%s' from table '%s' failed", mfkc.name, NewWorkflowStepConnection_table)

    for index in NewWorkflowStepConnection_table.indexes:
        drop_index(index, NewWorkflowStepConnection_table)
    # Move the new table out of the way instead of dropping it, so its rows
    # stay available for the INSERT ... SELECT copy below.
    NewWorkflowStepConnection_table.rename("workflow_step_connection_predowngrade145")
    # Try to deregister that table to work around some caching problems it seems.
    NewWorkflowStepConnection_table.deregister()
    metadata._remove_table("workflow_step_connection", metadata.schema)
    metadata.reflect()

    # Recreate the pre-145 layout with input_step_id pointing straight at
    # workflow_step.
    OldWorkflowStepConnection_table = Table(
        "workflow_step_connection", metadata,
        Column("id", Integer, primary_key=True),
        Column("output_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("input_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("output_name", TEXT),
        Column("input_name", TEXT),
        Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
    )
    create_table(OldWorkflowStepConnection_table)

    # Fold the workflow_step_input indirection back into direct
    # (input_step_id, input_name) values on the recreated table.
    insert_step_connections_cmd = \
        "INSERT INTO workflow_step_connection (output_step_id, input_step_id, output_name, input_name, input_subworkflow_step_id) " + \
        "SELECT wsc.output_step_id, wsi.workflow_step_id, wsc.output_name, wsi.name, wsc.input_subworkflow_step_id " + \
        "FROM workflow_step_connection_predowngrade145 AS wsc JOIN workflow_step_input AS wsi ON wsc.input_step_input_id = wsi.id ORDER BY wsc.id"
    migrate_engine.execute(insert_step_connections_cmd)

    for table in (NewWorkflowStepConnection_table, WorkflowStepInput_table):
        drop_table(table)
# Example #29
def downgrade(migrate_engine):
    """Drop the quality_of_service_specs table and the qos_specs_id column
    it hangs off volume_types."""
    meta = MetaData()
    meta.bind = migrate_engine

    # Reflect the table to be removed up front.
    qos_table = Table('quality_of_service_specs', meta, autoload=True)

    if migrate_engine.name == 'mysql':
        # NOTE(alanmeadows): MySQL cannot drop column qos_specs_id while
        # the foreign key volume_types_ibfk_1 still references it, so the
        # key has to go first.
        vtypes = Table('volume_types', meta, autoload=True)
        referenced = Table('volume_types', meta, autoload=True)
        try:
            ForeignKeyConstraint(columns=[vtypes.c['qos_specs_id']],
                                 refcolumns=[referenced.c['id']],
                                 name='volume_types_ibfk_1').drop()
        except Exception:
            # Best effort only; the column drop below surfaces real trouble.
            LOG.error(_LE("Dropping foreign key volume_types_ibfk_1 failed"))

    vtypes = Table('volume_types', meta, autoload=True)
    try:
        vtypes.drop_column(Column('qos_specs_id', String(36)))
    except Exception:
        LOG.error(_LE("Dropping qos_specs_id column failed."))
        raise

    try:
        qos_table.drop()
    except Exception:
        LOG.error(_LE("Dropping quality_of_service_specs table failed."))
        raise
def downgrade(migrate_engine):
    """Downgrade the indexes via utils.modify_indexes.

    NOTE(jhesketh): MySQL cannot drop index
    migrations_instance_uuid_and_status_idx while a foreign key constraint
    still needs it, so on MySQL that FK is dropped first and re-created
    once the indexes have been downgraded.
    """
    fkey = None
    if migrate_engine.name == 'mysql':
        meta = MetaData(bind=migrate_engine)
        migrations = Table('migrations', meta, autoload=True)
        instances = Table('instances', meta, autoload=True)
        fkey = ForeignKeyConstraint(
            columns=[migrations.c['instance_uuid']],
            refcolumns=[instances.c['uuid']],
            name='fk_migrations_instance_uuid')
        fkey.drop()

    utils.modify_indexes(migrate_engine, data, upgrade=False)

    # Restore the constraint removed above (MySQL only).
    if fkey is not None:
        fkey.create()
def downgrade(migrate_engine):
    """Drop the FKs added on upgrade and restore rows from the dump tables."""
    # Guard clause: SQLite cannot drop constraints in place.
    if migrate_engine.name == 'sqlite':
        return
    meta = MetaData(bind=migrate_engine)
    tables = {name: Table(name, meta, autoload=True) for name in TABLES}
    for table_name, fk_specs in INDEXES.items():
        table = tables[table_name]
        for column, ref_table_name, ref_column_name in fk_specs:
            params = {
                'columns': [table.c[column]],
                'refcolumns': [tables[ref_table_name].c[ref_column_name]],
            }
            if migrate_engine.name == 'mysql':
                # MySQL requires the explicit constraint name to drop it.
                params['name'] = "_".join(('fk', table_name, column))
            with migrate_engine.begin():
                ForeignKeyConstraint(**params).drop()
        with migrate_engine.begin():
            # Restore data that had been dropped into 'dump_<table>'.
            dump = Table('dump_' + table_name, meta, autoload=True)
            migrate_engine.execute(utils.InsertFromSelect(table, dump.select()))
            dump.drop()
def upgrade(migrate_engine):
    """Relax hosts' unique constraint from (failover_segment_id, name,
    deleted) to (name, deleted), temporarily dropping the segment FK that
    depends on the old constraint's index.
    """
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    hosts_table = Table('hosts', meta, autoload=True)
    failover_segments = Table('failover_segments', meta, autoload=True)
    # NOTE(Dinesh_Bhor) We need to drop foreign keys first because unique
    # constraints that we want to delete depend on them. So drop the fk and
    # recreate it again after unique constraint deletion.
    cons_fk = ForeignKeyConstraint([hosts_table.c.failover_segment_id],
                                   [failover_segments.c.uuid],
                                   name="fk_failover_segments_uuid")
    cons_fk.drop(engine=migrate_engine)

    # The old constraint includes failover_segment_id; it must be removed
    # before the narrower replacement can reuse its name.
    cons_unique = UniqueConstraint('failover_segment_id', 'name', 'deleted',
                                   name='uniq_host0name0deleted',
                                   table=hosts_table)
    cons_unique.drop(engine=migrate_engine)
    # Create an updated unique constraint
    updated_cons_unique = UniqueConstraint('name', 'deleted',
                                           name='uniq_host0name0deleted',
                                           table=hosts_table)
    # NOTE(review): the FK is re-added *before* the new unique constraint is
    # created -- confirm this ordering is intended.
    cons_fk.create()
    updated_cons_unique.create()
def downgrade(migrate_engine):
    """Convert volume_type from UUID back to int."""
    meta = MetaData()
    meta.bind = migrate_engine

    volumes = Table('volumes', meta, autoload=True)
    volume_types = Table('volume_types', meta, autoload=True)
    extra_specs = Table('volume_type_extra_specs', meta, autoload=True)

    # Columns carrying FK relationships onto volume_types.id; their
    # constraints must be dropped before the column types can be altered.
    fkey_remove_list = [volumes.c.volume_type_id,
                        volume_types.c.id,
                        extra_specs.c.volume_type_id]

    # Drop each reflected FK under its original name.
    for column in fkey_remove_list:
        fkeys = list(column.foreign_keys)
        if fkeys:
            fkey_name = fkeys[0].constraint.name
            fkey = ForeignKeyConstraint(columns=[column],
                                        refcolumns=[volume_types.c.id],
                                        name=fkey_name)

            try:
                fkey.drop()
            except Exception:
                # SQLite cannot drop constraints in place; tolerate the
                # failure there, re-raise on every other backend.
                if migrate_engine.url.get_dialect().name.startswith('sqlite'):
                    pass
                else:
                    raise

    # Re-number the types sequentially (1, 2, ...).  Referencing rows are
    # updated before the volume_types row itself so the join keys match.
    vtype_list = list(volume_types.select().execute())
    new_id = 1

    for t in vtype_list:
        volumes.update().\
            where(volumes.c.volume_type_id == t['id']).\
            values(volume_type_id=new_id).execute()

        extra_specs.update().\
            where(extra_specs.c.volume_type_id == t['id']).\
            values(volume_type_id=new_id).execute()

        volume_types.update().\
            where(volume_types.c.id == t['id']).\
            values(id=new_id).execute()

        new_id += 1

    if migrate_engine.name == 'postgresql':
        # NOTE(e0ne): PostgreSQL can't cast string to int automatically
        table_column_pairs = [('volumes', 'volume_type_id'),
                              ('volume_types', 'id'),
                              ('volume_type_extra_specs', 'volume_type_id')]
        sql = 'ALTER TABLE {0} ALTER COLUMN {1} ' + \
            'TYPE INTEGER USING {1}::numeric'

        for table, column in table_column_pairs:
            migrate_engine.execute(sql.format(table, column))
    else:
        volumes.c.volume_type_id.alter(Integer)
        volume_types.c.id.alter(Integer)
        extra_specs.c.volume_type_id.alter(Integer)

    # Re-create the FKs dropped above.
    # NOTE(review): column.foreign_keys still holds the pre-drop reflection,
    # so the original constraint names are reused -- presumably intentional;
    # confirm before relying on it.
    for column in fkey_remove_list:
        fkeys = list(column.foreign_keys)
        if fkeys:
            fkey_name = fkeys[0].constraint.name
            fkey = ForeignKeyConstraint(columns=[column],
                                        refcolumns=[volume_types.c.id],
                                        name=fkey_name)
            try:
                fkey.create()
            except Exception:
                if migrate_engine.url.get_dialect().name.startswith('sqlite'):
                    pass
                else:
                    raise
def upgrade(migrate_engine):
    """Convert volume_type_id to UUID."""
    meta = MetaData()
    meta.bind = migrate_engine

    volumes = Table('volumes', meta, autoload=True)
    volume_types = Table('volume_types', meta, autoload=True)
    extra_specs = Table('volume_type_extra_specs', meta, autoload=True)

    # Columns whose reflected FK constraints must be removed while the
    # underlying column types change.
    fkey_columns = [volumes.c.volume_type_id,
                    volume_types.c.id,
                    extra_specs.c.volume_type_id]

    def _apply_to_fkeys(action):
        # Run 'drop' or 'create' on the reflected FK of each tracked column.
        # SQLite cannot alter constraints in place, so failures are ignored
        # there and re-raised on every other backend.
        for column in fkey_columns:
            reflected = list(column.foreign_keys)
            if not reflected:
                continue
            constraint = ForeignKeyConstraint(
                columns=[column],
                refcolumns=[volume_types.c.id],
                name=reflected[0].constraint.name)
            try:
                getattr(constraint, action)()
            except Exception:
                if not migrate_engine.url.get_dialect().name.startswith(
                        'sqlite'):
                    raise

    _apply_to_fkeys('drop')

    # Widen the key columns so they can hold UUID strings.
    volumes.c.volume_type_id.alter(String(36))
    volume_types.c.id.alter(String(36))
    extra_specs.c.volume_type_id.alter(String(36))

    # Re-key every volume type with a fresh UUID, updating referencing rows
    # first so they keep pointing at the same type.
    for row in list(volume_types.select().execute()):
        new_id = str(uuid.uuid4())
        for table, column in ((volumes, volumes.c.volume_type_id),
                              (extra_specs, extra_specs.c.volume_type_id),
                              (volume_types, volume_types.c.id)):
            table.update().\
                where(column == row['id']).\
                values(**{column.name: new_id}).execute()

    _apply_to_fkeys('create')
def upgrade(migrate_engine):
    """Split trait names/types out of trait and unique_name into a new
    trait_type lookup table, then rewire trait to reference it.
    """
    meta = MetaData(migrate_engine)
    trait_type = Table(
        'trait_type',
        meta,
        Column('id', Integer, primary_key=True),
        Column('desc', String(255)),
        Column('data_type', Integer),
        UniqueConstraint('desc', 'data_type', name="tt_unique"),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    trait = Table('trait', meta, autoload=True)
    unique_name = Table('unique_name', meta, autoload=True)
    trait_type.create(migrate_engine)
    # Trait type extracts data from Trait and Unique name.
    # We take all trait names from Unique Name, and data types
    # from Trait. We then remove dtype and name from trait, and
    # remove the name field.

    conn = migrate_engine.connect()
    # Seed trait_type keyed by the existing unique_name ids so the name_id
    # values already stored in trait remain valid references.
    sql = ("INSERT INTO trait_type "
           "SELECT unique_name.id, unique_name.key, trait.t_type FROM trait "
           "INNER JOIN unique_name "
           "ON trait.name_id = unique_name.id "
           "GROUP BY unique_name.id, unique_name.key, trait.t_type")
    conn.execute(sql)
    conn.close()

    # Now we need to drop the foreign key constraint, rename
    # the trait.name column, and re-add a new foreign
    # key constraint
    params = {'columns': [trait.c.name_id], 'refcolumns': [unique_name.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = "trait_ibfk_1"  # foreign key to the unique name table
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()

    Column('trait_type_id', Integer).create(trait)

    # Move data from name_id column into trait_type_id column
    query = select([trait.c.id, trait.c.name_id])
    for key, value in migration.paged(query):
        (trait.update().where(trait.c.id == key).values({
            "trait_type_id": value
        }).execute())

    trait.c.name_id.drop()

    # Attach trait to the new lookup table.
    params = {
        'columns': [trait.c.trait_type_id],
        'refcolumns': [trait_type.c.id]
    }
    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'trait_type', 'id'))

    fkey = ForeignKeyConstraint(**params)
    fkey.create()

    # Drop the t_type column to data_type.
    trait.c.t_type.drop()

    # Finally, drop the unique_name table - we don't need it
    # anymore.
    unique_name.drop()
# Example #36
def downgrade(migrate_engine):
    """Reverse the trait_type split: recreate unique_name, move names and
    data types back onto trait, and drop trait_type.
    """
    meta = MetaData(migrate_engine)
    unique_name = Table(
        'unique_name',
        meta,
        Column('id', Integer, primary_key=True),
        Column('key', String(255), unique=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    trait_type = Table('trait_type', meta, autoload=True)
    trait = Table('trait', meta, autoload=True)

    # Create the UniqueName table, drop the foreign key constraint
    # to trait_type, drop the trait_type table, rename the
    # trait.trait_type column to traitname, re-add the dtype to
    # the trait table, and re-add the old foreign key constraint

    unique_name.create(migrate_engine)

    conn = migrate_engine.connect()
    # Reuse trait_type ids as unique_name ids so existing references stay
    # valid after the move back.
    sql = ("INSERT INTO unique_name "
           "SELECT trait_type.id, trait_type.desc "
           "FROM trait_type")

    conn.execute(sql)
    conn.close()
    # Detach trait from trait_type before restructuring.
    params = {
        'columns': [trait.c.trait_type_id],
        'refcolumns': [trait_type.c.id]
    }

    if migrate_engine.name == 'mysql':
        params['name'] = "_".join(('fk', 'trait_type', 'id'))
    fkey = ForeignKeyConstraint(**params)
    fkey.drop()

    # Re-create the old columns in trait
    Column("name_id", Integer).create(trait)
    Column("t_type", Integer).create(trait)

    # copy data from trait_type.data_type into trait.t_type
    query = select([trait_type.c.id, trait_type.c.data_type])
    for key, value in migration.paged(query):
        trait.update().where(trait.c.trait_type_id == key)\
            .values({"t_type": value}).execute()

    # Move data from name_id column into trait_type_id column
    query = select([trait.c.id, trait.c.trait_type_id])
    for key, value in migration.paged(query):
        trait.update().where(trait.c.id == key)\
            .values({"name_id": value}).execute()

    # Add a foreign key to the unique_name table
    params = {'columns': [trait.c.name_id], 'refcolumns': [unique_name.c.id]}
    if migrate_engine.name == 'mysql':
        params['name'] = 'trait_ibfk_1'
    fkey = ForeignKeyConstraint(**params)
    fkey.create()

    trait.c.trait_type_id.drop()

    # Drop the trait_type table. It isn't needed anymore
    trait_type.drop()
# Example #37
def downgrade(migrate_engine):
    """Remove everything the upgrade added: two FK constraints, the
    permission/library tables, one index and four columns."""
    metadata.bind = migrate_engine
    metadata.reflect()

    # NOTE: all new data added in the upgrade method is eliminated here via
    # table drops.
    metadata_file = Table("metadata_file", metadata, autoload=True)
    hda_table = Table("history_dataset_association", metadata, autoload=True)
    ldda_table = Table(
        "library_dataset_dataset_association", metadata, autoload=True)

    # Drop the two FK constraints pointing at
    # library_dataset_dataset_association; failures are logged, not fatal.
    constraint_specs = [
        ([metadata_file.c.lda_id],
         'metadata_file_lda_id_fkey',
         'metadata_file'),
        ([hda_table.c.copied_from_library_dataset_dataset_association_id],
         'history_dataset_association_copied_from_library_dataset_da_fkey',
         'history_dataset_association'),
    ]
    for columns, cons_name, table_name in constraint_specs:
        try:
            constraint = ForeignKeyConstraint(columns,
                                              [ldda_table.c.id],
                                              name=cons_name)
            constraint.drop()
        except Exception:
            log.exception(
                "Dropping foreign key constraint '%s' from table '%s' failed.",
                cons_name, table_name)

    # Drop all of the new tables above
    new_tables = [
        UserGroupAssociation_table,
        UserRoleAssociation_table,
        GroupRoleAssociation_table,
        Group_table,
        DatasetPermissions_table,
        LibraryPermissions_table,
        LibraryFolderPermissions_table,
        LibraryDatasetPermissions_table,
        LibraryDatasetDatasetAssociationPermissions_table,
        LibraryItemInfoPermissions_table,
        LibraryItemInfoTemplatePermissions_table,
        DefaultUserPermissions_table,
        DefaultHistoryPermissions_table,
        Role_table,
        LibraryDatasetDatasetInfoAssociation_table,
        LibraryDataset_table,
        LibraryDatasetDatasetAssociation_table,
        LibraryDatasetDatasetInfoTemplateAssociation_table,
        JobExternalOutputMetadata_table,
        Library_table,
        LibraryFolder_table,
        LibraryItemInfoTemplateElement_table,
        LibraryInfoTemplateAssociation_table,
        LibraryFolderInfoTemplateAssociation_table,
        LibraryDatasetInfoTemplateAssociation_table,
        LibraryInfoAssociation_table,
        LibraryFolderInfoAssociation_table,
        LibraryDatasetInfoAssociation_table,
        LibraryItemInfoElement_table,
        LibraryItemInfo_table,
        LibraryItemInfoTemplate_table,
    ]
    for table in new_tables:
        drop_table(table)
    # Drop the index on the Job.state column - changeset 2192
    drop_index('ix_job_state', 'job', 'state', metadata)
    # Drop 1 column from the stored_workflow table - changeset 2328
    drop_column('importable', 'stored_workflow', metadata)
    # Drop 1 column from the metadata_file table
    drop_column('lda_id', 'metadata_file', metadata)
    # Drop 1 column from the history_dataset_association table
    drop_column('copied_from_library_dataset_dataset_association_id',
                hda_table)
    # Drop 2 columns from the galaxy_user table
    user_table = Table("galaxy_user", metadata, autoload=True)
    drop_column('deleted', user_table)
    drop_column('purged', user_table)
# Example #38
def upgrade(migrate_engine):
    """Convert volume and snapshot id columns from int to varchar.

    Drops the FKs onto volumes.id, rewrites volume and snapshot ids (and
    every referencing column) to the UUIDs recorded in the *_id_mappings
    tables, then re-creates the FKs.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    volumes = Table('volumes', meta, autoload=True)
    snapshots = Table('snapshots', meta, autoload=True)
    iscsi_targets = Table('iscsi_targets', meta, autoload=True)
    volume_metadata = Table('volume_metadata', meta, autoload=True)
    block_device_mapping = Table('block_device_mapping', meta, autoload=True)
    sm_volumes = Table('sm_volume', meta, autoload=True)

    volume_mappings = Table('volume_id_mappings', meta, autoload=True)
    snapshot_mappings = Table('snapshot_id_mappings', meta, autoload=True)

    # Columns with FKs onto volumes.id that must be dropped while the id
    # columns are rewritten, and re-created afterwards.
    fkey_columns = [
        iscsi_targets.c.volume_id,
        volume_metadata.c.volume_id,
        sm_volumes.c.id,
    ]
    for column in fkey_columns:
        fkeys = list(column.foreign_keys)
        if fkeys:
            fkey_name = fkeys[0].constraint.name
            # Lazy %-args: the message is only built when actually emitted.
            LOG.info('Dropping foreign key %s', fkey_name)
            fkey = ForeignKeyConstraint(columns=[column],
                                        refcolumns=[volumes.c.id],
                                        name=fkey_name)
            try:
                fkey.drop()
            except Exception:
                # SQLite cannot drop constraints in place.
                if migrate_engine.url.get_dialect().name.startswith('sqlite'):
                    pass
                else:
                    raise

    # Map every volume's integer id to its UUID, updating all referencing
    # tables so their volume_id values follow.
    volume_list = list(volumes.select().execute())
    for v in volume_list:
        new_id = select(
            [volume_mappings.c.uuid],
            volume_mappings.c.id == v['id']).execute().fetchone()[0]

        volumes.update().\
            where(volumes.c.id == v['id']).\
            values(id=new_id).execute()

        sm_volumes.update().\
            where(sm_volumes.c.id == v['id']).\
            values(id=new_id).execute()

        snapshots.update().\
            where(snapshots.c.volume_id == v['id']).\
            values(volume_id=new_id).execute()

        iscsi_targets.update().\
            where(iscsi_targets.c.volume_id == v['id']).\
            values(volume_id=new_id).execute()

        volume_metadata.update().\
            where(volume_metadata.c.volume_id == v['id']).\
            values(volume_id=new_id).execute()

        block_device_mapping.update().\
            where(block_device_mapping.c.volume_id == v['id']).\
            values(volume_id=new_id).execute()

    # Same conversion for snapshots and the tables that reference them.
    snapshot_list = list(snapshots.select().execute())
    for s in snapshot_list:
        new_id = select(
            [snapshot_mappings.c.uuid],
            snapshot_mappings.c.id == s['id']).execute().fetchone()[0]

        volumes.update().\
            where(volumes.c.snapshot_id == s['id']).\
            values(snapshot_id=new_id).execute()

        snapshots.update().\
            where(snapshots.c.id == s['id']).\
            values(id=new_id).execute()

        block_device_mapping.update().\
            where(block_device_mapping.c.snapshot_id == s['id']).\
            values(snapshot_id=new_id).execute()

    # Re-create the foreign keys dropped above.
    for column in fkey_columns:
        fkeys = list(column.foreign_keys)
        if fkeys:
            # BUG FIX: the original logged a stale (or undefined) fkey_name
            # left over from the drop loop; report this column's own
            # reflected constraint name instead.
            fkey_name = fkeys[0].constraint.name
            fkey = ForeignKeyConstraint(columns=[column],
                                        refcolumns=[volumes.c.id])
            try:
                fkey.create()
                LOG.info('Created foreign key %s', fkey_name)
            except Exception:
                if migrate_engine.url.get_dialect().name.startswith('sqlite'):
                    pass
                else:
                    raise
# Example #39
     HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
 except NoSuchTableError:
     HistoryDatasetAssociation_table = None
     log.debug( "Failed loading table history_dataset_association" )
 try:
     LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
 except NoSuchTableError:
     LibraryDatasetDatasetAssociation_table = None
     log.debug( "Failed loading table library_dataset_dataset_association" )
 if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
     try:
         cons = ForeignKeyConstraint( [HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id],
                                      [LibraryDatasetDatasetAssociation_table.c.id],
                                      name='history_dataset_association_copied_from_library_dataset_da_fkey' )
         # Drop the constraint
         cons.drop()
     except Exception, e:
         log.debug( "Dropping foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' from table 'history_dataset_association' failed: %s" % ( str( e ) ) )
 # Drop all of the new tables above
 try:
     UserGroupAssociation_table.drop()
 except Exception, e:
     log.debug( "Dropping user_group_association table failed: %s" % str( e ) )
 try:
     UserRoleAssociation_table.drop()
 except Exception, e:
     log.debug( "Dropping user_role_association table failed: %s" % str( e ) )
 try:
     GroupRoleAssociation_table.drop()
 except Exception, e:
     log.debug( "Dropping group_role_association table failed: %s" % str( e ) )
def _reflect_table(name):
    """Autoload *name* from the bound metadata; log and return None if the
    table does not exist."""
    try:
        return Table(name, metadata, autoload=True)
    except NoSuchTableError:
        log.debug("Failed loading table %s", name)
        return None


def _drop_table_quietly(table, name):
    """Drop *table* (labelled *name* in logs), logging instead of raising
    on failure so the downgrade proceeds best-effort."""
    try:
        table.drop()
    except Exception:
        log.exception("Dropping %s table failed.", name)


def _drop_column_quietly(table, table_name, column_name):
    """Drop *column_name* from *table*, logging instead of raising on
    failure (including when the column is already gone)."""
    try:
        getattr(table.c, column_name).drop()
    except Exception:
        log.exception("Dropping column '%s' from %s table failed.",
                      column_name, table_name)


def downgrade(migrate_engine):
    """Undo the security/permissions upgrade.

    Drops the two FK constraints onto library_dataset_dataset_association,
    all of the tables the upgrade created, the ix_job_state index, and the
    columns added to stored_workflow, metadata_file,
    history_dataset_association and galaxy_user.  Every step is
    best-effort: failures are logged and the downgrade continues.
    """
    metadata.bind = migrate_engine
    metadata.reflect()

    # NOTE: all new data added in the upgrade method is eliminated here via
    # table drops.
    # Drop 1 foreign key constraint from the metadata_file table.
    MetadataFile_table = _reflect_table("metadata_file")
    LibraryDatasetDatasetAssociation_table = _reflect_table(
        "library_dataset_dataset_association")
    if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [MetadataFile_table.c.lda_id],
                [LibraryDatasetDatasetAssociation_table.c.id],
                name='metadata_file_lda_id_fkey')
            # Drop the constraint
            cons.drop()
        except Exception:
            log.exception(
                "Dropping foreign key constraint 'metadata_file_lda_id_fkey' from table 'metadata_file' failed."
            )
    # Drop 1 foreign key constraint from the history_dataset_association
    # table.
    HistoryDatasetAssociation_table = _reflect_table(
        "history_dataset_association")
    LibraryDatasetDatasetAssociation_table = _reflect_table(
        "library_dataset_dataset_association")
    if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [
                    HistoryDatasetAssociation_table.c.
                    copied_from_library_dataset_dataset_association_id
                ], [LibraryDatasetDatasetAssociation_table.c.id],
                name=
                'history_dataset_association_copied_from_library_dataset_da_fkey'
            )
            # Drop the constraint
            cons.drop()
        except Exception:
            log.exception(
                "Dropping foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' from table 'history_dataset_association' failed."
            )
    # Drop all of the new tables above.  The (table, log label) pairs keep
    # the original drop order; each drop is delegated to the best-effort
    # helper instead of thirty copy-pasted try/except blocks.
    for table, name in (
        (UserGroupAssociation_table, "user_group_association"),
        (UserRoleAssociation_table, "user_role_association"),
        (GroupRoleAssociation_table, "group_role_association"),
        (Group_table, "galaxy_group"),
        (DatasetPermissions_table, "dataset_permissions"),
        (LibraryPermissions_table, "library_permissions"),
        (LibraryFolderPermissions_table, "library_folder_permissions"),
        (LibraryDatasetPermissions_table, "library_dataset_permissions"),
        (LibraryDatasetDatasetAssociationPermissions_table,
         "library_dataset_dataset_association_permissions"),
        (LibraryItemInfoPermissions_table, "library_item_info_permissions"),
        (LibraryItemInfoTemplatePermissions_table,
         "library_item_info_template_permissions"),
        (DefaultUserPermissions_table, "default_user_permissions"),
        (DefaultHistoryPermissions_table, "default_history_permissions"),
        (Role_table, "role"),
        (LibraryDatasetDatasetInfoAssociation_table,
         "library_dataset_dataset_info_association"),
        (LibraryDataset_table, "library_dataset"),
        (LibraryDatasetDatasetAssociation_table,
         "library_dataset_dataset_association"),
        (LibraryDatasetDatasetInfoTemplateAssociation_table,
         "library_dataset_dataset_info_template_association"),
        (JobExternalOutputMetadata_table, "job_external_output_metadata"),
        (Library_table, "library"),
        (LibraryFolder_table, "library_folder"),
        (LibraryItemInfoTemplateElement_table,
         "library_item_info_template_element"),
        (LibraryInfoTemplateAssociation_table,
         "library_info_template_association"),
        (LibraryFolderInfoTemplateAssociation_table,
         "library_folder_info_template_association"),
        (LibraryDatasetInfoTemplateAssociation_table,
         "library_dataset_info_template_association"),
        (LibraryInfoAssociation_table, "library_info_association"),
        (LibraryFolderInfoAssociation_table,
         "library_folder_info_association"),
        (LibraryDatasetInfoAssociation_table,
         "library_dataset_info_association"),
        (LibraryItemInfoElement_table, "library_item_info_element"),
        (LibraryItemInfo_table, "library_item_info"),
        (LibraryItemInfoTemplate_table, "library_item_info_template"),
    ):
        _drop_table_quietly(table, name)
    # Drop the index on the Job.state column - changeset 2192
    drop_index('ix_job_state', 'job', 'state', metadata)
    # Drop 1 column from the stored_workflow table - changeset 2328
    StoredWorkflow_table = _reflect_table("stored_workflow")
    if StoredWorkflow_table is not None:
        _drop_column_quietly(StoredWorkflow_table, "stored_workflow",
                             "importable")
    # Drop 1 column from the metadata_file table
    MetadataFile_table = _reflect_table("metadata_file")
    if MetadataFile_table is not None:
        _drop_column_quietly(MetadataFile_table, "metadata_file", "lda_id")
    # Drop 1 column from the history_dataset_association table
    HistoryDatasetAssociation_table = _reflect_table(
        "history_dataset_association")
    if HistoryDatasetAssociation_table is not None:
        _drop_column_quietly(
            HistoryDatasetAssociation_table, "history_dataset_association",
            "copied_from_library_dataset_dataset_association_id")
    # Drop 2 columns from the galaxy_user table
    User_table = _reflect_table("galaxy_user")
    if User_table is not None:
        _drop_column_quietly(User_table, "galaxy_user", "deleted")
        _drop_column_quietly(User_table, "galaxy_user", "purged")
def downgrade(migrate_engine):
    """Remove the consistency-group schema artifacts.

    Drops the ``cgsnapshot_id`` column from ``snapshots``, the
    ``consistencygroup_id`` column from ``volumes``, and then the
    ``cgsnapshots`` and ``consistencygroups`` tables themselves.

    :param migrate_engine: SQLAlchemy engine bound to the database
                           being downgraded.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    # Drop column from snapshots table.  Reflect the table once and
    # reuse it for both the FK removal and the column drop (the
    # original code reflected the same table three times).
    snapshots = Table('snapshots', meta, autoload=True)
    if migrate_engine.name == 'mysql':
        # MySQL cannot drop column cgsnapshot_id until the foreign key
        # constraint is removed. So remove the foreign key first, and
        # then drop the column.  Only the constraint *name* drives the
        # generated DROP; columns/refcolumns are constructor
        # requirements.
        params = {
            'columns': [snapshots.c['cgsnapshot_id']],
            'refcolumns': [snapshots.c['id']],
            'name': 'snapshots_ibfk_1'
        }

        try:
            fkey = ForeignKeyConstraint(**params)
            fkey.drop()
        except Exception:
            # Best effort: if the FK is genuinely still in the way the
            # drop_column() below will raise a hard failure.
            LOG.error(
                _("Dropping foreign key 'cgsnapshot_id' in "
                  "the 'snapshots' table failed."))

    snapshots.drop_column(snapshots.columns.cgsnapshot_id)

    # Drop column from volumes table (same pattern as above).
    volumes = Table('volumes', meta, autoload=True)
    if migrate_engine.name == 'mysql':
        # MySQL cannot drop column consistencygroup_id until the foreign
        # key constraint is removed. So remove the foreign key first,
        # and then drop the column.
        params = {
            'columns': [volumes.c['consistencygroup_id']],
            'refcolumns': [volumes.c['id']],
            'name': 'volumes_ibfk_1'
        }

        try:
            fkey = ForeignKeyConstraint(**params)
            fkey.drop()
        except Exception:
            LOG.error(
                _("Dropping foreign key 'consistencygroup_id' in "
                  "the 'volumes' table failed."))

    volumes.drop_column(volumes.columns.consistencygroup_id)

    # Drop table; must happen before consistencygroups since
    # cgsnapshots references it.
    cgsnapshots = Table('cgsnapshots', meta, autoload=True)
    try:
        cgsnapshots.drop()
    except Exception:
        LOG.error(_("cgsnapshots table not dropped"))
        raise

    # Drop table
    consistencygroups = Table('consistencygroups', meta, autoload=True)
    try:
        consistencygroups.drop()
    except Exception:
        LOG.error(_("consistencygroups table not dropped"))
        raise
def downgrade(migrate_engine):
    """Convert volume_type from UUID back to int.

    Drops the foreign keys pointing at ``volume_types.id``, rewrites
    every type id (and all references to it) to a small sequential
    integer, ALTERs the columns back to ``Integer``, and finally
    re-creates the foreign keys.

    :param migrate_engine: SQLAlchemy engine bound to the database
                           being downgraded.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    volumes = Table('volumes', meta, autoload=True)
    volume_types = Table('volume_types', meta, autoload=True)
    extra_specs = Table('volume_type_extra_specs', meta, autoload=True)

    # SQLite cannot ALTER constraints in place, so FK drop/create
    # failures are ignored on that dialect.  Compute the check once
    # instead of re-deriving it on every loop iteration.
    is_sqlite = migrate_engine.url.get_dialect().name.startswith('sqlite')

    # Columns whose FK to volume_types.id must be removed before the id
    # columns can be ALTERed back to Integer.
    fkey_remove_list = [
        volumes.c.volume_type_id, volume_types.c.id,
        extra_specs.c.volume_type_id
    ]

    for column in fkey_remove_list:
        fkeys = list(column.foreign_keys)
        if fkeys:
            fkey_name = fkeys[0].constraint.name
            fkey = ForeignKeyConstraint(columns=[column],
                                        refcolumns=[volume_types.c.id],
                                        name=fkey_name)

            try:
                fkey.drop()
            except Exception:
                if not is_sqlite:
                    raise

    # Renumber the types sequentially from 1, rewriting every reference
    # before the type row itself so the join keys stay consistent.
    vtype_list = list(volume_types.select().execute())

    for new_id, t in enumerate(vtype_list, start=1):
        volumes.update().\
            where(volumes.c.volume_type_id == t['id']).\
            values(volume_type_id=new_id).execute()

        extra_specs.update().\
            where(extra_specs.c.volume_type_id == t['id']).\
            values(volume_type_id=new_id).execute()

        volume_types.update().\
            where(volume_types.c.id == t['id']).\
            values(id=new_id).execute()

    volumes.c.volume_type_id.alter(Integer)
    volume_types.c.id.alter(Integer)
    extra_specs.c.volume_type_id.alter(Integer)

    # Re-create the FKs dropped above.  The reflected metadata still
    # holds the original constraint objects, so the names remain
    # available even though the database-side constraints are gone.
    for column in fkey_remove_list:
        fkeys = list(column.foreign_keys)
        if fkeys:
            fkey_name = fkeys[0].constraint.name
            fkey = ForeignKeyConstraint(columns=[column],
                                        refcolumns=[volume_types.c.id],
                                        name=fkey_name)
            try:
                fkey.create()
                # Lazy %-style logging args: the string is only
                # formatted if the record is actually emitted.
                LOG.info('Created foreign key %s', fkey_name)
            except Exception:
                if not is_sqlite:
                    raise