def downgrade():
    bind = op.get_bind()
    insp = engine.reflection.Inspector.from_engine(bind)

    # Remove the uniqueness constraint that the upgrade added to the
    # sql_metrics table.
    with op.batch_alter_table("sql_metrics", naming_convention=conv) as batch_op:
        batch_op.drop_constraint(
            generic_find_uq_constraint_name(
                "sql_metrics", {"metric_name", "table_id"}, insp
            )
            or "uq_sql_metrics_table_id",
            type_="unique",
        )

    # Restore the size of the sql_metrics.metric_name column and allow it to
    # be nullable again.
    with op.batch_alter_table("sql_metrics") as batch_op:
        batch_op.alter_column(
            "metric_name", existing_type=String(255), nullable=True, type_=String(512)
        )

    # Allow the sql_metrics.expression column to be nullable again.
    with op.batch_alter_table("sql_metrics") as batch_op:
        batch_op.alter_column("expression", existing_type=Text, nullable=True)

    # Allow the metrics.metric_name column to be nullable again.
    with op.batch_alter_table("metrics") as batch_op:
        batch_op.alter_column("metric_name", existing_type=String(255), nullable=True)

    # Allow the metrics.json column to be nullable again.
    with op.batch_alter_table("metrics") as batch_op:
        batch_op.alter_column("json", existing_type=Text, nullable=True)
Example #2
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Restore the size of the metric_name column.
    with op.batch_alter_table('metrics', naming_convention=conv) as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=sa.String(length=255),
            type_=sa.String(length=512),
            existing_nullable=True,
        )

    # Remove the uniqueness constraints that the upgrade added.
    for table, column in names.items():
        with op.batch_alter_table(table, naming_convention=conv) as batch_op:
            batch_op.drop_constraint(
                generic_find_uq_constraint_name(
                    table,
                    {column, 'datasource_id'},
                    insp,
                ) or 'uq_{}_{}'.format(table, column),
                type_='unique',
            )
Example #3
def downgrade():
    bind = op.get_bind()
    insp = engine.reflection.Inspector.from_engine(bind)

    # Remove the uniqueness constraint that the upgrade added to the
    # table_columns table.
    with op.batch_alter_table('table_columns',
                              naming_convention=conv) as batch_op:
        batch_op.drop_constraint(
            generic_find_uq_constraint_name(
                'table_columns',
                {'column_name', 'table_id'},
                insp,
            ) or 'uq_table_columns_column_name',
            type_='unique',
        )

    # Restore the size of the table_columns.column_name column and allow it
    # to be nullable again.
    with op.batch_alter_table('table_columns') as batch_op:
        batch_op.alter_column(
            'column_name',
            existing_type=String(255),
            nullable=True,
            type_=String(256),
        )

    # Allow the columns.column_name column to be nullable again.
    with op.batch_alter_table('columns') as batch_op:
        batch_op.alter_column(
            'column_name',
            existing_type=String(255),
            nullable=True,
        )
Example #4
def upgrade():
    bind = op.get_bind()

    # The uniqueness constraint, if present, only exists on MySQL.
    if isinstance(bind.dialect, MySQLDialect):
        constraint_name = generic_find_uq_constraint_name(
            "tables", {"table_name"}, Inspector.from_engine(bind))

        if constraint_name:
            op.drop_constraint(constraint_name, "tables", type_="unique")
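Example #4 references `MySQLDialect` and `Inspector` directly; presumably the module imports them along these lines:

# Assumed imports for the dialect check above.
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.engine.reflection import Inspector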
Example #5
def upgrade():
    bind = op.get_bind()
    insp = engine.reflection.Inspector.from_engine(bind)

    # Drop the uniqueness constraint if it exists.
    constraint = generic_find_uq_constraint_name("tables", {"table_name"},
                                                 insp)

    if constraint:
        with op.batch_alter_table("tables",
                                  naming_convention=conv) as batch_op:
            batch_op.drop_constraint(constraint, type_="unique")
Example #6
def downgrade():
    bind = op.get_bind()
    insp = engine.reflection.Inspector.from_engine(bind)

    # Remove the uniqueness constraint that the upgrade added to the
    # sql_metrics table.
    with op.batch_alter_table('sql_metrics',
                              naming_convention=conv) as batch_op:
        batch_op.drop_constraint(
            generic_find_uq_constraint_name(
                'sql_metrics',
                {'metric_name', 'table_id'},
                insp,
            ) or 'uq_sql_metrics_table_id',
            type_='unique',
        )

    # Restore the size of the sql_metrics.metric_name column and allow it to
    # be nullable again.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(255),
            nullable=True,
            type_=String(512),
        )

    # Allow the sql_metrics.expression column to be nullable again.
    with op.batch_alter_table('sql_metrics') as batch_op:
        batch_op.alter_column(
            'expression',
            existing_type=Text,
            nullable=True,
        )

    # Allow the metrics.metric_name column to be nullable again.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'metric_name',
            existing_type=String(255),
            nullable=True,
        )

    # Allow the metrics.json column to be nullable again.
    with op.batch_alter_table('metrics') as batch_op:
        batch_op.alter_column(
            'json',
            existing_type=Text,
            nullable=True,
        )
Example #7
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Add cluster_name column
    with op.batch_alter_table("datasources") as batch_op:
        batch_op.add_column(sa.Column("cluster_name", sa.String(250)))

    # Update cluster_name values
    metadata = sa.MetaData(bind=bind)
    datasources = sa.Table("datasources", metadata, autoload=True)
    clusters = sa.Table("clusters", metadata, autoload=True)

    statement = datasources.update().values(
        cluster_name=sa.select([clusters.c.cluster_name]).where(
            datasources.c.cluster_id == clusters.c.id).as_scalar())
    bind.execute(statement)

    with op.batch_alter_table("datasources") as batch_op:
        # Drop cluster_id column
        fk_constraint_name = generic_find_fk_constraint_name(
            "datasources", {"id"}, "clusters", insp)
        uq_constraint_name = generic_find_uq_constraint_name(
            "datasources", {"cluster_id", "datasource_name"}, insp)
        batch_op.drop_constraint(fk_constraint_name, type_="foreignkey")
        batch_op.drop_constraint(uq_constraint_name, type_="unique")
        batch_op.drop_column("cluster_id")

        # Add constraints to cluster_name column
        batch_op.alter_column("cluster_name",
                              existing_type=sa.String(250),
                              nullable=False)
        batch_op.create_unique_constraint("uq_datasources_cluster_name",
                                          ["cluster_name", "datasource_name"])
        batch_op.create_foreign_key(
            "fk_datasources_cluster_name_clusters",
            "clusters",
            ["cluster_name"],
            ["cluster_name"],
        )
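Example #7 also calls `generic_find_fk_constraint_name`, another reflection helper from the surrounding module. Here is a sketch consistent with how it is called, built on `Inspector.get_foreign_keys`; the project's real helper may differ.

def generic_find_fk_constraint_name(table, columns, referenced, insp):
    # Sketch: return the name of the foreign key on `table` that references
    # `referenced` through exactly `columns`, or None if none matches.
    for fk in insp.get_foreign_keys(table):
        if (fk["referred_table"] == referenced
                and set(fk["referred_columns"]) == columns):
            return fk["name"]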
Example #8
def upgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Add the new less restrictive uniqueness constraint.
    with op.batch_alter_table("datasources",
                              naming_convention=conv) as batch_op:
        batch_op.create_unique_constraint("uq_datasources_cluster_name",
                                          ["cluster_name", "datasource_name"])

    # Augment the tables which have a foreign key constraint related to the
    # datasources.datasource_name column.
    for foreign in ["columns", "metrics"]:
        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Add the datasource_id column with the relevant constraints.
            batch_op.add_column(sa.Column("datasource_id", sa.Integer))

            batch_op.create_foreign_key(
                "fk_{}_datasource_id_datasources".format(foreign),
                "datasources",
                ["datasource_id"],
                ["id"],
            )

        # Helper table for the data migration, using a minimal schema.
        table = sa.Table(
            foreign,
            sa.MetaData(),
            sa.Column("id", sa.Integer, primary_key=True),
            sa.Column("datasource_name", sa.String(255)),
            sa.Column("datasource_id", sa.Integer),
        )

        # Migrate the existing data.
        for datasource in bind.execute(datasources.select()):
            bind.execute(table.update().where(
                table.c.datasource_name == datasource.datasource_name).values(
                    datasource_id=datasource.id))

        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Drop the datasource_name column and associated constraints. Note
            # that due to prior revisions (1226819ee0e3, 3b626e2a6783) there
            # may incorrectly be multiple duplicate constraints.
            names = generic_find_fk_constraint_names(foreign,
                                                     {"datasource_name"},
                                                     "datasources", insp)

            for name in names:
                batch_op.drop_constraint(
                    name
                    or "fk_{}_datasource_name_datasources".format(foreign),
                    type_="foreignkey",
                )

            batch_op.drop_column("datasource_name")

    try:
        # Drop the old more restrictive uniqueness constraint.
        with op.batch_alter_table("datasources",
                                  naming_convention=conv) as batch_op:
            batch_op.drop_constraint(
                generic_find_uq_constraint_name("datasources",
                                                {"datasource_name"}, insp)
                or "uq_datasources_datasource_name",
                type_="unique",
            )
    except Exception as ex:
        logging.warning(
            "Constraint drop failed, you may want to do this "
            "manually on your database. For context, this is a known "
            "issue around undeterministic contraint names on Postgres "
            "and perhaps more databases through SQLAlchemy.")
        logging.exception(ex)
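Example #8 additionally relies on two module-level names that are not shown: `generic_find_fk_constraint_names` (a plural variant that returns every matching name, since prior revisions may have left duplicates behind) and a `datasources` table object. Plausible sketches, inferred from how they are used:

def generic_find_fk_constraint_names(table, columns, referenced, insp):
    # Sketch of the plural variant: collect all matching constraint names.
    return {
        fk["name"]
        for fk in insp.get_foreign_keys(table)
        if fk["referred_table"] == referenced
        and set(fk["referred_columns"]) == columns
    }


# Assumed module-level helper table; only the columns the loops read.
datasources = sa.Table(
    "datasources",
    sa.MetaData(),
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("datasource_name", sa.String(255)),
)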
Example #9
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Add the new more restrictive uniqueness constraint which is required by
    # the foreign key constraints. Note this operation will fail if the
    # datasources.datasource_name column is no longer unique.
    with op.batch_alter_table("datasources",
                              naming_convention=conv) as batch_op:
        batch_op.create_unique_constraint("uq_datasources_datasource_name",
                                          ["datasource_name"])

    # Augment the tables which have a foreign key constraint related to the
    # datasources.datasource_id column.
    for foreign in ["columns", "metrics"]:
        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Add the datasource_name column with the relevant constraints.
            batch_op.add_column(sa.Column("datasource_name", sa.String(255)))

            batch_op.create_foreign_key(
                "fk_{}_datasource_name_datasources".format(foreign),
                "datasources",
                ["datasource_name"],
                ["datasource_name"],
            )

        # Helper table for the data migration, using a minimal schema.
        table = sa.Table(
            foreign,
            sa.MetaData(),
            sa.Column("id", sa.Integer, primary_key=True),
            sa.Column("datasource_name", sa.String(255)),
            sa.Column("datasource_id", sa.Integer),
        )

        # Migrate the existing data.
        for datasource in bind.execute(datasources.select()):
            bind.execute(table.update().where(
                table.c.datasource_id == datasource.id).values(
                    datasource_name=datasource.datasource_name))

        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Drop the datasource_id column and associated constraint.
            batch_op.drop_constraint(
                "fk_{}_datasource_id_datasources".format(foreign),
                type_="foreignkey")

            batch_op.drop_column("datasource_id")

    with op.batch_alter_table("datasources",
                              naming_convention=conv) as batch_op:

        # Prior to dropping the uniqueness constraint, the foreign key
        # associated with the cluster_name column needs to be dropped.
        batch_op.drop_constraint(
            generic_find_fk_constraint_name("datasources", {"cluster_name"},
                                            "clusters", insp)
            or "fk_datasources_cluster_name_clusters",
            type_="foreignkey",
        )

        # Drop the old less restrictive uniqueness constraint.
        batch_op.drop_constraint(
            generic_find_uq_constraint_name(
                "datasources", {"cluster_name", "datasource_name"}, insp)
            or "uq_datasources_cluster_name",
            type_="unique",
        )

        # Re-create the foreign key associated with the cluster_name column.
        batch_op.create_foreign_key(
            "fk_{}_datasource_id_datasources".format(foreign),
            "clusters",
            ["cluster_name"],
            ["cluster_name"],
        )
Example #10
def upgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Add the new less restrictive uniqueness constraint.
    with op.batch_alter_table('datasources', naming_convention=conv) as batch_op:
        batch_op.create_unique_constraint(
            'uq_datasources_cluster_name',
            ['cluster_name', 'datasource_name'],
        )

    # Augment the tables which have a foreign key constraint related to the
    # datasources.datasource_name column.
    for foreign in ['columns', 'metrics']:
        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Add the datasource_id column with the relevant constraints.
            batch_op.add_column(sa.Column('datasource_id', sa.Integer))

            batch_op.create_foreign_key(
                'fk_{}_datasource_id_datasources'.format(foreign),
                'datasources',
                ['datasource_id'],
                ['id'],
            )

        # Helper table for the data migration, using a minimal schema.
        table = sa.Table(
            foreign,
            sa.MetaData(),
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('datasource_name', sa.String(255)),
            sa.Column('datasource_id', sa.Integer),
        )

        # Migrate the existing data.
        for datasource in bind.execute(datasources.select()):
            bind.execute(
                table.update().where(
                    table.c.datasource_name == datasource.datasource_name,
                ).values(
                    datasource_id=datasource.id,
                ),
            )

        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Drop the datasource_name column and associated constraints. Note
            # that due to prior revisions (1226819ee0e3, 3b626e2a6783) there
            # may incorrectly be multiple duplicate constraints.
            names = generic_find_fk_constraint_names(
                foreign,
                {'datasource_name'},
                'datasources',
                insp,
            )

            for name in names:
                batch_op.drop_constraint(
                    name or 'fk_{}_datasource_name_datasources'.format(foreign),
                    type_='foreignkey',
                )

            batch_op.drop_column('datasource_name')

    try:
        # Drop the old more restrictive uniqueness constraint.
        with op.batch_alter_table('datasources', naming_convention=conv) as batch_op:
            batch_op.drop_constraint(
                generic_find_uq_constraint_name(
                    'datasources',
                    {'datasource_name'},
                    insp,
                ) or 'uq_datasources_datasource_name',
                type_='unique',
            )
    except Exception as e:
        logging.warning(
            'Constraint drop failed, you may want to do this '
            'manually on your database. For context, this is a known '
            'issue around non-deterministic constraint names on Postgres '
            'and perhaps more databases through SQLAlchemy.')
        logging.exception(e)
Example #11
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Add the new more restrictive uniqueness constraint which is required by
    # the foreign key constraints. Note this operation will fail if the
    # datasources.datasource_name column is no longer unique.
    with op.batch_alter_table('datasources', naming_convention=conv) as batch_op:
        batch_op.create_unique_constraint(
            'uq_datasources_datasource_name',
            ['datasource_name'],
        )

    # Augment the tables which have a foreign key constraint related to the
    # datasources.datasource_id column.
    for foreign in ['columns', 'metrics']:
        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Add the datasource_name column with the relevant constraints.
            batch_op.add_column(sa.Column('datasource_name', sa.String(255)))

            batch_op.create_foreign_key(
                'fk_{}_datasource_name_datasources'.format(foreign),
                'datasources',
                ['datasource_name'],
                ['datasource_name'],
            )

        # Helper table for the data migration, using a minimal schema.
        table = sa.Table(
            foreign,
            sa.MetaData(),
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('datasource_name', sa.String(255)),
            sa.Column('datasource_id', sa.Integer),
        )

        # Migrate the existing data.
        for datasource in bind.execute(datasources.select()):
            bind.execute(
                table.update().where(
                    table.c.datasource_id == datasource.id,
                ).values(
                    datasource_name=datasource.datasource_name,
                ),
            )

        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Drop the datasource_id column and associated constraint.
            batch_op.drop_constraint(
                'fk_{}_datasource_id_datasources'.format(foreign),
                type_='foreignkey',
            )

            batch_op.drop_column('datasource_id')

    with op.batch_alter_table('datasources', naming_convention=conv) as batch_op:

        # Prior to dropping the uniqueness constraint, the foreign key
        # associated with the cluster_name column needs to be dropped.
        batch_op.drop_constraint(
            generic_find_fk_constraint_name(
                'datasources',
                {'cluster_name'},
                'clusters',
                insp,
            ) or 'fk_datasources_cluster_name_clusters',
            type_='foreignkey',
        )

        # Drop the old less restrictive uniqueness constraint.
        batch_op.drop_constraint(
            generic_find_uq_constraint_name(
                'datasources',
                {'cluster_name', 'datasource_name'},
                insp,
            ) or 'uq_datasources_cluster_name',
            type_='unique',
        )

        # Re-create the foreign key associated with the cluster_name column.
        batch_op.create_foreign_key(
            'fk_datasources_cluster_name_clusters',
            'clusters',
            ['cluster_name'],
            ['cluster_name'],
        )