Example #1
    def _pre_upgrade_273(self, engine):
        if engine.name != 'sqlite':
            return

        # Drop a variety of unique constraints to ensure that the script
        # properly re-adds them
        for table_name, constraint_name in [
            ('compute_nodes', 'uniq_compute_nodes0'
             'host0hypervisor_hostname'),
            ('fixed_ips', 'uniq_fixed_ips0address0deleted'),
            ('instance_info_caches', 'uniq_instance_info_caches0'
             'instance_uuid'),
            ('instance_type_projects', 'uniq_instance_type_projects0'
             'instance_type_id0project_id0'
             'deleted'),
            ('pci_devices', 'uniq_pci_devices0compute_node_id0'
             'address0deleted'),
            ('virtual_interfaces', 'uniq_virtual_interfaces0'
             'address0deleted')
        ]:
            table = oslodbutils.get_table(engine, table_name)
            constraints = [
                c for c in table.constraints if c.name == constraint_name
            ]
            for cons in constraints:
                # Need to use sqlalchemy-migrate UniqueConstraint
                cons = UniqueConstraint(*[c.name for c in cons.columns],
                                        name=cons.name,
                                        table=table)
                cons.drop()
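
The pre-upgrade hook above deliberately drops these constraints so that migration 273 has to recreate them. A matching post-upgrade check could then assert that they are back; the method below is a hypothetical _check_273 sketch (not part of the original test class) that uses SQLAlchemy's inspector for the verification.

    # Hypothetical companion check: after the migration has run, every
    # constraint dropped above should exist again.
    def _check_273(self, engine, data):
        import sqlalchemy as sa

        inspector = sa.inspect(engine)
        for table_name, constraint_name in [
            ('fixed_ips', 'uniq_fixed_ips0address0deleted'),
            ('pci_devices', 'uniq_pci_devices0compute_node_id0'
             'address0deleted'),
        ]:
            names = [uc['name']
                     for uc in inspector.get_unique_constraints(table_name)]
            self.assertIn(constraint_name, names)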
Example #2
    def _pre_upgrade_273(self, engine):
        if engine.name != 'sqlite':
            return

        # Drop a variety of unique constraints to ensure that the script
        # properly re-adds them
        for table_name, constraint_name in [
                ('compute_nodes', 'uniq_compute_nodes0'
                                  'host0hypervisor_hostname'),
                ('fixed_ips', 'uniq_fixed_ips0address0deleted'),
                ('instance_info_caches', 'uniq_instance_info_caches0'
                                         'instance_uuid'),
                ('instance_type_projects', 'uniq_instance_type_projects0'
                                           'instance_type_id0project_id0'
                                           'deleted'),
                ('pci_devices', 'uniq_pci_devices0compute_node_id0'
                                'address0deleted'),
                ('virtual_interfaces', 'uniq_virtual_interfaces0'
                                       'address0deleted')]:
            table = oslodbutils.get_table(engine, table_name)
            constraints = [c for c in table.constraints
                           if c.name == constraint_name]
            for cons in constraints:
                # Need to use sqlalchemy-migrate UniqueConstraint
                cons = UniqueConstraint(*[c.name for c in cons.columns],
                                        name=cons.name,
                                        table=table)
                cons.drop()
Example #3
def upgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    hosts_table = Table('hosts', meta, autoload=True)
    failover_segments = Table('failover_segments', meta, autoload=True)
    # NOTE(Dinesh_Bhor) We need to drop foreign keys first because unique
    # constraints that we want to delete depend on them. So drop the fk and
    # recreate it after the unique constraint deletion.
    cons_fk = ForeignKeyConstraint([hosts_table.c.failover_segment_id],
                                   [failover_segments.c.uuid],
                                   name="fk_failover_segments_uuid")
    cons_fk.drop(engine=migrate_engine)

    cons_unique = UniqueConstraint('failover_segment_id',
                                   'name',
                                   'deleted',
                                   name='uniq_host0name0deleted',
                                   table=hosts_table)
    cons_unique.drop(engine=migrate_engine)
    # Create an updated unique constraint
    updated_cons_unique = UniqueConstraint('name',
                                           'deleted',
                                           name='uniq_host0name0deleted',
                                           table=hosts_table)
    cons_fk.create()
    updated_cons_unique.create()
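
The upgrade above narrows uniq_host0name0deleted from three columns to two. A reverse migration would mirror the same dance in the opposite direction; the following is only a sketch of what such a downgrade could look like, reusing the names from the snippet above rather than reproducing the project's actual script.

def downgrade(migrate_engine):
    # Sketch of a hypothetical reverse migration: drop the FK, swap the
    # two-column unique constraint back for the original three-column one,
    # then restore the FK.
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    hosts_table = Table('hosts', meta, autoload=True)
    failover_segments = Table('failover_segments', meta, autoload=True)

    cons_fk = ForeignKeyConstraint([hosts_table.c.failover_segment_id],
                                   [failover_segments.c.uuid],
                                   name="fk_failover_segments_uuid")
    cons_fk.drop(engine=migrate_engine)

    cons_unique = UniqueConstraint('name', 'deleted',
                                   name='uniq_host0name0deleted',
                                   table=hosts_table)
    cons_unique.drop(engine=migrate_engine)

    restored_cons_unique = UniqueConstraint('failover_segment_id', 'name',
                                            'deleted',
                                            name='uniq_host0name0deleted',
                                            table=hosts_table)
    cons_fk.create()
    restored_cons_unique.create()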
Example #4
def upgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta, autoload=True))
                       for table_name in TABLES)

    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {
                    'columns': [table.c[column]],
                    'refcolumns': [ref_table.c[ref_column_name]]
                }

                if (migrate_engine.name == "mysql"
                        and table_name != 'alarm_history'):
                    params['name'] = "_".join(('fk', table_name, column))
                elif (migrate_engine.name == "postgresql"
                      and table_name == "sample"):
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))

                fkey = ForeignKeyConstraint(**params)
                fkey.drop()

    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.drop(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.drop(bind=migrate_engine)

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id',
                              'user_id',
                              table=sourceassoc,
                              **params)
        uc.drop()
    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()

    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
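
The upgrade above relies on module-level TABLES, INDEXES and TABLES_DROP definitions that are not shown on this page. The values below are only an illustration of their expected shape, not the project's actual definitions: each INDEXES entry maps a table name to (column, referenced_table, referenced_column) triples describing the foreign keys being dropped.

# Illustrative shapes only (assumed, not the real module-level values):
TABLES = ['sample', 'sourceassoc', 'user', 'project']
TABLES_DROP = ['user', 'project']
INDEXES = {
    # table_name: [(column, referenced_table, referenced_column), ...]
    'sample': [('user_id', 'user', 'id'),
               ('project_id', 'project', 'id')],
    'sourceassoc': [('user_id', 'user', 'id'),
                    ('project_id', 'project', 'id')],
}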
Example #5
def upgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta,
                                             autoload=True))
                       for table_name in TABLES)

    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}

                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))

                fkey = ForeignKeyConstraint(**params)
                fkey.drop()

    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.drop(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.drop(bind=migrate_engine)

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id',
                              table=sourceassoc, **params)
        uc.drop()
    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()

    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
Example #6
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    compute_nodes = Table('compute_nodes', meta, autoload=True)

    ukey = UniqueConstraint(
        'host', 'hypervisor_hostname', 'deleted',
        table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname0deleted")
    ukey.drop()

    ukey = UniqueConstraint('host', 'hypervisor_hostname', table=compute_nodes,
                            name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.create()
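
This downgrade swaps the three-column constraint back for the two-column one. For reference, the upgrade it reverses would do the opposite swap; a minimal sketch using the same API, not the actual nova migration:

def upgrade(migrate_engine):
    # Sketch of the inverse operation: replace the two-column unique
    # constraint with one that also covers 'deleted'.
    meta = MetaData()
    meta.bind = migrate_engine
    compute_nodes = Table('compute_nodes', meta, autoload=True)

    ukey = UniqueConstraint(
        'host', 'hypervisor_hostname', table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.drop()

    ukey = UniqueConstraint(
        'host', 'hypervisor_hostname', 'deleted', table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname0deleted")
    ukey.create()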
Example #7
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # Remove the new column
    compute_nodes = Table("compute_nodes", meta, autoload=True)
    shadow_compute_nodes = Table("shadow_compute_nodes", meta, autoload=True)

    ukey = UniqueConstraint(
        "host", "hypervisor_hostname", table=compute_nodes, name="uniq_compute_nodes0host0hypervisor_hostname"
    )
    ukey.drop()

    compute_nodes.drop_column("host")
    shadow_compute_nodes.drop_column("host")
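
The downgrade above removes the host column and its unique constraint; its upgrade counterpart would add them back. The sketch below assumes a 255-character string column, mirroring similar nova columns, and is not the project's actual migration script.

def upgrade(migrate_engine):
    # Sketch only: add the 'host' column to both tables and create the
    # unique constraint dropped by the downgrade above.
    from sqlalchemy import Column, String   # other names as in the snippets

    meta = MetaData()
    meta.bind = migrate_engine

    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)

    # Column type is an assumption (nova commonly uses String(255) here).
    compute_nodes.create_column(Column('host', String(255), nullable=True))
    shadow_compute_nodes.create_column(Column('host', String(255),
                                              nullable=True))

    ukey = UniqueConstraint(
        'host', 'hypervisor_hostname', table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.create()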
Example #8
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # Remove the new column
    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)

    ukey = UniqueConstraint('host',
                            'hypervisor_hostname',
                            table=compute_nodes,
                            name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.drop()

    compute_nodes.drop_column('host')
    shadow_compute_nodes.drop_column('host')
Example #9
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    compute_nodes = Table('compute_nodes', meta, autoload=True)

    ukey = UniqueConstraint(
        'host',
        'hypervisor_hostname',
        'deleted',
        table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname0deleted")
    ukey.drop()

    ukey = UniqueConstraint('host',
                            'hypervisor_hostname',
                            table=compute_nodes,
                            name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.create()
Example #10
def upgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    hosts_table = Table('hosts', meta, autoload=True)
    failover_segments = Table('failover_segments', meta, autoload=True)
    # NOTE(Dinesh_Bhor) We need to drop foreign keys first because unique
    # constraints that we want to delete depend on them. So drop the fk and
    # recreate it after the unique constraint deletion.
    cons_fk = ForeignKeyConstraint([hosts_table.c.failover_segment_id],
                                   [failover_segments.c.uuid],
                                   name="fk_failover_segments_uuid")
    cons_fk.drop(engine=migrate_engine)

    cons_unique = UniqueConstraint('failover_segment_id', 'name', 'deleted',
                                   name='uniq_host0name0deleted',
                                   table=hosts_table)
    cons_unique.drop(engine=migrate_engine)
    # Create an updated unique constraint
    updated_cons_unique = UniqueConstraint('name', 'deleted',
                                           name='uniq_host0name0deleted',
                                           table=hosts_table)
    cons_fk.create()
    updated_cons_unique.create()
Example #11
def downgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)
    user = sa.Table(
        'user', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    project = sa.Table(
        'project', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    tables = [project, user]
    # Sort by table name; sorting Table objects directly fails on Python 3.
    for i in sorted(tables, key=lambda table: table.name):
        i.create()

    load_tables = dict((table_name, sa.Table(table_name, meta, autoload=True))
                       for table_name in TABLES)

    # Restore the sourceassoc columns and constraints
    sourceassoc = load_tables['sourceassoc']
    user_id = sa.Column('user_id', sa.String(255))
    project_id = sa.Column('project_id', sa.String(255))
    sourceassoc.create_column(user_id)
    sourceassoc.create_column(project_id)

    if migrate_engine.name != 'sqlite':
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id',
                              table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.drop()

        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.create(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.create(bind=migrate_engine)

    # Restore the user/project columns and constraints in all tables
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            c = getattr(Alias(table).c, column)
            except_q = exists([getattr(ref_table.c, ref_column_name)])
            q = select([c]).where(and_(c != sa.null(), not_(except_q)))
            q = q.distinct()

            # NOTE(sileht): workaround for
            # https://bitbucket.org/zzzeek/sqlalchemy/
            # issue/3044/insert-from-select-union_all
            q.select = lambda: q

            sql_ins = ref_table.insert().from_select(
                [getattr(ref_table.c, ref_column_name)], q)
            try:
                migrate_engine.execute(sql_ins)
            except TypeError:
                # from select is empty
                pass

            if migrate_engine.name != 'sqlite':
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}

                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))

                fkey = ForeignKeyConstraint(**params)
                fkey.create()
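
The inner loop above backfills the user/project tables from the still-populated columns before recreating the foreign keys, using insert ... from_select plus the q.select workaround for the linked SQLAlchemy issue. A compact, standalone version of that backfill pattern, with a correlated EXISTS and illustrative names rather than the project's code, looks like this:

import sqlalchemy as sa
from sqlalchemy.sql import and_, exists, not_, select


def backfill_reference(engine, src_table, src_col, ref_table, ref_col):
    # Copy distinct, non-null values of src_table.src_col into
    # ref_table.ref_col, skipping values that are already present.
    # Illustrative helper, assuming the same SQLAlchemy 1.x-style API as
    # the migrations on this page.
    c = src_table.c[src_col]
    already_there = exists([ref_table.c[ref_col]]).where(
        ref_table.c[ref_col] == c)
    q = select([c]).where(
        and_(c != sa.null(), not_(already_there))).distinct()
    engine.execute(
        ref_table.insert().from_select([ref_table.c[ref_col]], q))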
Example #12
def downgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)
    user = sa.Table(
        'user',
        meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    project = sa.Table(
        'project',
        meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    tables = [project, user]
    # Sort by table name; sorting Table objects directly fails on Python 3.
    for i in sorted(tables, key=lambda table: table.name):
        i.create()

    load_tables = dict((table_name, sa.Table(table_name, meta, autoload=True))
                       for table_name in TABLES)

    # Restore the sourceassoc columns and constraints
    sourceassoc = load_tables['sourceassoc']
    user_id = sa.Column('user_id', sa.String(255))
    project_id = sa.Column('project_id', sa.String(255))
    sourceassoc.create_column(user_id)
    sourceassoc.create_column(project_id)

    if migrate_engine.name != 'sqlite':
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id',
                              'user_id',
                              table=sourceassoc,
                              **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.drop()

        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.create(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.create(bind=migrate_engine)

    # Restore the user/project columns and constraints in all tables
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            c = getattr(Alias(table).c, column)
            except_q = exists([getattr(ref_table.c, ref_column_name)])
            q = select([c]).where(and_(c != sa.null(), not_(except_q)))
            q = q.distinct()

            # NOTE(sileht): workaround for
            # https://bitbucket.org/zzzeek/sqlalchemy/
            # issue/3044/insert-from-select-union_all
            q.select = lambda: q

            sql_ins = ref_table.insert().from_select(
                [getattr(ref_table.c, ref_column_name)], q)
            try:
                migrate_engine.execute(sql_ins)
            except TypeError:
                # from select is empty
                pass

            if migrate_engine.name != 'sqlite':
                params = {
                    'columns': [table.c[column]],
                    'refcolumns': [ref_table.c[ref_column_name]]
                }

                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))

                fkey = ForeignKeyConstraint(**params)
                fkey.create()