def upgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    with op.batch_alter_table("tab_state") as batch_op:
        fk_database_id_constraint = generic_find_fk_constraint_name(
            "tab_state", {"id"}, "dbs", insp)
        if fk_database_id_constraint:
            batch_op.drop_constraint(
                fk_database_id_constraint,
                type_="foreignkey",
            )

        fk_latest_query_id_constraint = generic_find_fk_constraint_name(
            "tab_state", {"client_id"}, "query", insp)
        if fk_latest_query_id_constraint:
            batch_op.drop_constraint(
                fk_latest_query_id_constraint,
                type_="foreignkey",
            )

        batch_op.create_foreign_key(
            "tab_state_database_id_fkey",
            "dbs",
            ["database_id"],
            ["id"],
            ondelete="CASCADE",
        )

        batch_op.create_foreign_key(
            "tab_state_latest_query_id_fkey",
            "query",
            ["latest_query_id"],
            ["client_id"],
            ondelete="SET NULL",
        )

    with op.batch_alter_table("table_schema") as batch_op:
        table_schema_id_constraint = generic_find_fk_constraint_name(
            "table_schema", {"id"}, "dbs", insp)
        if table_schema_id_constraint:
            batch_op.drop_constraint(
                table_schema_id_constraint,
                type_="foreignkey",
            )

        batch_op.create_foreign_key(
            "table_schema_database_id_fkey",
            "dbs",
            ["database_id"],
            ["id"],
            ondelete="CASCADE",
        )
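# NOTE: These migration snippets assume the usual Alembic preamble
# (`from alembic import op`, `import sqlalchemy as sa`) plus a project
# helper, `generic_find_fk_constraint_name`, that resolves dialect-specific
# foreign-key names before dropping them. A minimal sketch of such a helper,
# consistent with how it is called here and built on SQLAlchemy's
# `Inspector.get_foreign_keys` (not necessarily the project's exact code):
from typing import Optional, Set

from sqlalchemy.engine.reflection import Inspector


def generic_find_fk_constraint_name(
    table: str, columns: Set[str], referenced: str, insp: Inspector
) -> Optional[str]:
    # Return the name of the foreign key on `table` that references
    # `referenced` via exactly the given referred columns, else None.
    for fk in insp.get_foreign_keys(table):
        if (
            fk["referred_table"] == referenced
            and set(fk["referred_columns"]) == columns
        ):
            return fk["name"]
    return None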
def upgrade():
    bind = op.get_bind()
    metadata = sa.MetaData(bind=bind)
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    rls_filter_tables = op.create_table(
        "rls_filter_tables",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("table_id", sa.Integer(), nullable=True),
        sa.Column("rls_filter_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["rls_filter_id"],
                                ["row_level_security_filters.id"]),
        sa.ForeignKeyConstraint(["table_id"], ["tables.id"]),
        sa.PrimaryKeyConstraint("id"),
    )

    rlsf = sa.Table("row_level_security_filters", metadata, autoload=True)
    filter_ids = sa.select([rlsf.c.id, rlsf.c.table_id])

    for row in bind.execute(filter_ids):
        move_table_id = rls_filter_tables.insert().values(
            rls_filter_id=row["id"], table_id=row["table_id"])
        bind.execute(move_table_id)

    with op.batch_alter_table("row_level_security_filters") as batch_op:
        fk_constraint_name = generic_find_fk_constraint_name(
            "row_level_security_filters", {"id"}, "tables", insp)
        if fk_constraint_name:
            batch_op.drop_constraint(fk_constraint_name, type_="foreignkey")
        batch_op.drop_column("table_id")
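# The listing above shows only the upgrade. A hypothetical inverse, assuming
# the same table names and SQLAlchemy 1.3-style reflection used above (the
# constraint name below is made up, not from the source):
def downgrade():
    bind = op.get_bind()
    metadata = sa.MetaData(bind=bind)

    # Restore the single-table column on the filters table.
    with op.batch_alter_table("row_level_security_filters") as batch_op:
        batch_op.add_column(sa.Column("table_id", sa.Integer(), nullable=True))
        batch_op.create_foreign_key(
            "rls_filter_table_id_fkey",  # hypothetical constraint name
            "tables",
            ["table_id"],
            ["id"],
        )

    # Copy associations back into the restored column; if a filter has
    # several tables, the last row processed wins.
    assoc = sa.Table("rls_filter_tables", metadata, autoload=True)
    rlsf = sa.Table("row_level_security_filters", metadata, autoload=True)
    for row in bind.execute(sa.select([assoc.c.rls_filter_id, assoc.c.table_id])):
        bind.execute(
            rlsf.update()
            .where(rlsf.c.id == row["rls_filter_id"])
            .values(table_id=row["table_id"])
        )

    op.drop_table("rls_filter_tables")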
def upgrade():
    # op.create_table returns the new Table object; capture it so the data
    # migration below can build INSERT statements against it.
    sqlatable_user = op.create_table(
        "sqlatable_user",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("table_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["table_id"], ["tables.id"]),
        sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    druiddatasource_user = op.create_table(
        "druiddatasource_user",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("datasource_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["datasource_id"], ["datasources.id"]),
        sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"]),
        sa.PrimaryKeyConstraint("id"),
    )

    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)
    session = db.Session(bind=bind)

    tables = session.query(SqlaTable).all()
    for table in tables:
        if table.user_id is not None:
            session.execute(
                sqlatable_user.insert().values(user_id=table.user_id, table_id=table.id)
            )

    druiddatasources = session.query(DruidDatasource).all()
    for druiddatasource in druiddatasources:
        if druiddatasource.user_id is not None:
            session.execute(
                druiddatasource_user.insert().values(
                    user_id=druiddatasource.user_id, datasource_id=druiddatasource.id
                )
            )

    session.close()
    with op.batch_alter_table("tables") as batch_op:
        batch_op.drop_constraint("user_id", type_="foreignkey")
        batch_op.drop_column("user_id")
    with op.batch_alter_table("datasources") as batch_op:
        batch_op.drop_constraint(
            generic_find_fk_constraint_name("datasources", {"id"}, "ab_user", insp),
            type_="foreignkey",
        )
        batch_op.drop_column("user_id")
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    with op.batch_alter_table("tab_state") as batch_op:
        batch_op.drop_constraint(
            generic_find_fk_constraint_name("tab_state", {"id"}, "saved_query",
                                            insp),
            type_="foreignkey",
        )

        batch_op.create_foreign_key(
            "saved_query_id",
            "saved_query",
            ["saved_query_id"],
            ["id"],
        )
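# The matching upgrade is not part of this listing; a plausible counterpart,
# assuming it swapped the plain foreign key for one that cascades deletes
# (the ondelete behavior here is an assumption, not from the source):
def upgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    with op.batch_alter_table("tab_state") as batch_op:
        fk_name = generic_find_fk_constraint_name(
            "tab_state", {"id"}, "saved_query", insp)
        if fk_name:
            batch_op.drop_constraint(fk_name, type_="foreignkey")

        batch_op.create_foreign_key(
            "saved_query_id",
            "saved_query",
            ["saved_query_id"],
            ["id"],
            ondelete="CASCADE",
        )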
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # First, drop the foreign key constraint prior to altering columns.
    fk_datasources_cluster_name_clusters = (
        generic_find_fk_constraint_name(
            "datasources", {"cluster_name"}, "clusters", insp
        )
        or "fk_datasources_cluster_name_clusters"
    )
    with op.batch_alter_table("datasources", naming_convention=conv) as batch_op:
        batch_op.drop_constraint(
            fk_datasources_cluster_name_clusters, type_="foreignkey"
        )

    # Second, make the columns nullable.
    with op.batch_alter_table("datasources") as batch_op:
        batch_op.alter_column("cluster_name",
                              existing_type=sa.String(250),
                              nullable=True)

    with op.batch_alter_table("clusters") as batch_op:
        batch_op.alter_column("cluster_name",
                              existing_type=sa.String(250),
                              nullable=True)
    with op.batch_alter_table("dbs") as batch_op:
        batch_op.alter_column("database_name",
                              existing_type=sa.String(250),
                              nullable=True)
    with op.batch_alter_table("tables") as batch_op:
        batch_op.alter_column("table_name",
                              existing_type=sa.String(250),
                              nullable=True)

    # Finally, re-add the foreign key constraint.
    with op.batch_alter_table("datasources") as batch_op:
        batch_op.create_foreign_key(
            fk_datasources_cluster_name_clusters,
            "clusters",
            ["cluster_name"],
            ["cluster_name"],
        )
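# Several of these functions pass a module-level naming convention named
# `conv` to batch_alter_table so constraints get deterministic names under
# SQLite's batch (table-rebuild) mode. A typical definition, inferred from
# the fallback names used in these migrations (the source module's exact
# mapping is not shown here):
conv = {
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
}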
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Add cluster_name column
    with op.batch_alter_table("datasources") as batch_op:
        batch_op.add_column(sa.Column("cluster_name", sa.String(250)))

    # Update cluster_name values
    metadata = sa.MetaData(bind=bind)
    datasources = sa.Table("datasources", metadata, autoload=True)
    clusters = sa.Table("clusters", metadata, autoload=True)

    statement = datasources.update().values(
        cluster_name=sa.select([clusters.c.cluster_name]).where(
            datasources.c.cluster_id == clusters.c.id).as_scalar())
    bind.execute(statement)

    with op.batch_alter_table("datasources") as batch_op:
        # Drop cluster_id column
        fk_constraint_name = generic_find_fk_constraint_name(
            "datasources", {"id"}, "clusters", insp)
        uq_constraint_name = generic_find_uq_constraint_name(
            "datasources", {"cluster_id", "datasource_name"}, insp)
        batch_op.drop_constraint(fk_constraint_name, type_="foreignkey")
        batch_op.drop_constraint(uq_constraint_name, type_="unique")
        batch_op.drop_column("cluster_id")

        # Add constraints to cluster_name column
        batch_op.alter_column("cluster_name",
                              existing_type=sa.String(250),
                              nullable=False)
        batch_op.create_unique_constraint("uq_datasources_cluster_name",
                                          ["cluster_name", "datasource_name"])
        batch_op.create_foreign_key(
            "fk_datasources_cluster_name_clusters",
            "clusters",
            ["cluster_name"],
            ["cluster_name"],
        )
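# The downgrade above also relies on `generic_find_uq_constraint_name`.
# A matching sketch using `Inspector.get_unique_constraints`, in the same
# spirit as the foreign-key helper sketched earlier:
from typing import Optional, Set

from sqlalchemy.engine.reflection import Inspector


def generic_find_uq_constraint_name(
    table: str, columns: Set[str], insp: Inspector
) -> Optional[str]:
    # Return the name of the unique constraint on `table` covering exactly
    # `columns`, or None if no such constraint exists.
    for uq in insp.get_unique_constraints(table):
        if columns == set(uq["column_names"]):
            return uq["name"]
    return None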
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Add the new more restrictive uniqueness constraint which is required by
    # the foreign key constraints. Note this operation will fail if the
    # datasources.datasource_name column is no longer unique.
    with op.batch_alter_table("datasources",
                              naming_convention=conv) as batch_op:
        batch_op.create_unique_constraint("uq_datasources_datasource_name",
                                          ["datasource_name"])

    # Augment the tables which have a foreign key constraint related to the
    # datasources.datasource_id column.
    for foreign in ["columns", "metrics"]:
        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Add the datasource_name column with the relevant constraints.
            batch_op.add_column(sa.Column("datasource_name", sa.String(255)))

            batch_op.create_foreign_key(
                "fk_{}_datasource_name_datasources".format(foreign),
                "datasources",
                ["datasource_name"],
                ["datasource_name"],
            )

        # Helper table for database migration using minimal schema.
        table = sa.Table(
            foreign,
            sa.MetaData(),
            sa.Column("id", sa.Integer, primary_key=True),
            sa.Column("datasource_name", sa.String(255)),
            sa.Column("datasource_id", sa.Integer),
        )

        # Migrate the existing data. (`datasources` is a module-level helper
        # table declared alongside this migration; see the sketch after this
        # function.)
        for datasource in bind.execute(datasources.select()):
            bind.execute(table.update().where(
                table.c.datasource_id == datasource.id).values(
                    datasource_name=datasource.datasource_name))

        with op.batch_alter_table(foreign, naming_convention=conv) as batch_op:

            # Drop the datasource_id column and associated constraint.
            batch_op.drop_constraint(
                "fk_{}_datasource_id_datasources".format(foreign),
                type_="foreignkey")

            batch_op.drop_column("datasource_id")

    with op.batch_alter_table("datasources",
                              naming_convention=conv) as batch_op:

        # Prior to dropping the uniqueness constraint, the foreign key
        # associated with the cluster_name column needs to be dropped.
        batch_op.drop_constraint(
            generic_find_fk_constraint_name("datasources", {"cluster_name"},
                                            "clusters", insp)
            or "fk_datasources_cluster_name_clusters",
            type_="foreignkey",
        )

        # Drop the old less restrictive uniqueness constraint.
        batch_op.drop_constraint(
            generic_find_uq_constraint_name(
                "datasources", {"cluster_name", "datasource_name"}, insp)
            or "uq_datasources_cluster_name",
            type_="unique",
        )

        # Re-create the foreign key associated with the cluster_name column.
        batch_op.create_foreign_key(
            "fk_datasources_cluster_name_clusters",
            "clusters",
            ["cluster_name"],
            ["cluster_name"],
        )
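# The data-migration loop above reads from a `datasources` table object that
# is not defined inside the function; in migrations like this it is usually
# a module-level "minimal schema" helper, mirroring the pattern used for the
# `columns`/`metrics` helper table. A plausible declaration, inferred from
# the attributes the loop accesses:
datasources = sa.Table(
    "datasources",
    sa.MetaData(),
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("datasource_name", sa.String(255)),
)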
def upgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    if isinstance(bind.dialect, SQLiteDialect):
        op.add_column(
            "alerts",
            sa.Column("validator_config", sa.Text(), server_default="", nullable=True),
        )
        op.add_column(
            "alerts",
            sa.Column("database_id", sa.Integer(), server_default="0", nullable=False),
        )
        op.add_column(
            "alerts", sa.Column("sql", sa.Text(), server_default="", nullable=False)
        )
        op.add_column(
            "alerts",
            sa.Column(
                "validator_type",
                sa.String(length=100),
                server_default="",
                nullable=False,
            ),
        )
    else:  # mysql does not support server_default for text fields
        op.add_column(
            "alerts",
            sa.Column("validator_config", sa.Text(), default="", nullable=True),
        )
        op.add_column(
            "alerts", sa.Column("database_id", sa.Integer(), default=0, nullable=False),
        )
        op.add_column("alerts", sa.Column("sql", sa.Text(), default="", nullable=False))
        op.add_column(
            "alerts",
            sa.Column(
                "validator_type", sa.String(length=100), default="", nullable=False
            ),
        )
    # Migrate data
    session = db.Session(bind=bind)
    alerts = session.query(Alert).all()
    for a in alerts:
        if a.sql_observer:
            a.sql = a.sql_observer[0].sql
            a.database_id = a.sql_observer[0].database_id
        if a.validators:
            a.validator_type = a.validators[0].validator_type
            a.validator_config = a.validators[0].config
    session.commit()

    if not isinstance(bind.dialect, SQLiteDialect):
        constraint = generic_find_fk_constraint_name(
            "sql_observations", {"id"}, "sql_observers", insp
        )
        op.drop_constraint(constraint, "sql_observations", type_="foreignkey")
        op.drop_column("sql_observations", "observer_id")

    op.drop_table("alert_validators")
    op.drop_table("sql_observers")

    # sqlite does not support column and fk deletion
    if isinstance(bind.dialect, SQLiteDialect):
        op.drop_table("sql_observations")
        op.create_table(
            "sql_observations",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("dttm", sa.DateTime(), nullable=True),
            sa.Column("alert_id", sa.Integer(), nullable=True),
            sa.Column("value", sa.Float(), nullable=True),
            sa.Column("error_msg", sa.String(length=500), nullable=True),
            sa.ForeignKeyConstraint(["alert_id"], ["alerts.id"],),
            sa.PrimaryKeyConstraint("id"),
        )
    else:
        op.create_foreign_key(None, "alerts", "dbs", ["database_id"], ["id"])
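# The dialect checks above assume this import alongside the usual Alembic
# preamble. Note the deliberate asymmetry between the branches:
# `server_default` is baked into the DDL (required to add a NOT NULL column
# to a table with existing rows), whereas `default` is only a client-side
# Python default.
from sqlalchemy.dialects.sqlite.base import SQLiteDialect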
def downgrade():
    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    op.create_table(
        "sql_observers",
        sa.Column("created_on", sa.DateTime(), nullable=True),
        sa.Column("changed_on", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("sql", sa.Text(), nullable=False),
        sa.Column("created_by_fk", sa.Integer(), autoincrement=False, nullable=True),
        sa.Column("changed_by_fk", sa.Integer(), autoincrement=False, nullable=True),
        sa.Column("alert_id", sa.Integer(), autoincrement=False, nullable=False),
        sa.Column("database_id", sa.Integer(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(["alert_id"], ["alerts.id"]),
        sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]),
        sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]),
        sa.ForeignKeyConstraint(["database_id"], ["dbs.id"]),
        sa.PrimaryKeyConstraint("id"),
    )

    op.create_table(
        "alert_validators",
        sa.Column("created_on", sa.DateTime(), nullable=True),
        sa.Column("changed_on", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("validator_type", sa.String(length=100), nullable=False,),
        sa.Column("config", sa.Text(), nullable=True),
        sa.Column("created_by_fk", sa.Integer(), autoincrement=False, nullable=True),
        sa.Column("changed_by_fk", sa.Integer(), autoincrement=False, nullable=True),
        sa.Column("alert_id", sa.Integer(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["alert_id"], ["alerts.id"], name="alert_validators_ibfk_1"
        ),
        sa.ForeignKeyConstraint(
            ["changed_by_fk"], ["ab_user.id"], name="alert_validators_ibfk_2"
        ),
        sa.ForeignKeyConstraint(
            ["created_by_fk"], ["ab_user.id"], name="alert_validators_ibfk_3"
        ),
        sa.PrimaryKeyConstraint("id"),
    )

    # Migrate data
    session = db.Session(bind=bind)
    alerts = session.query(Alert).all()
    for a in alerts:
        if a.sql:
            ob = SQLObserver(sql=a.sql, database_id=a.database_id)
            a.sql_observer.append(ob)
            session.add(ob)
        if a.validator_type:
            val = Validator(
                validator_type=a.validator_type,
                config=a.validator_config,
                alert_id=a.id,
            )
            a.validators.append(val)
            session.add(val)
    session.commit()

    # sqlite does not support dropping columns
    if isinstance(bind.dialect, SQLiteDialect):
        op.add_column(
            "sql_observations",
            sa.Column(
                "observer_id",
                sa.Integer(),
                autoincrement=False,
                nullable=False,
                server_default="0",
            ),
        )
        op.drop_table("alerts")
        op.create_table(
            "alerts",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("label", sa.String(length=150), nullable=False),
            sa.Column("active", sa.Boolean(), nullable=True),
            sa.Column("crontab", sa.String(length=50), nullable=False),
            sa.Column("alert_type", sa.String(length=50), nullable=True),
            sa.Column("log_retention", sa.Integer(), nullable=False, default=90),
            sa.Column(
                "grace_period", sa.Integer(), nullable=False, default=60 * 60 * 24
            ),
            sa.Column("recipients", sa.Text(), nullable=True),
            sa.Column("slice_id", sa.Integer(), nullable=True),
            sa.Column("dashboard_id", sa.Integer(), nullable=True),
            sa.Column("last_eval_dttm", sa.DateTime(), nullable=True),
            sa.Column("last_state", sa.String(length=10), nullable=True),
            sa.Column("changed_by_fk", sa.Integer(), nullable=True),
            sa.Column("changed_on", sa.DateTime(), nullable=True),
            sa.Column("created_by_fk", sa.Integer(), nullable=True),
            sa.Column("created_on", sa.DateTime(), nullable=True),
            sa.Column("slack_channel", sa.Text(), nullable=True),
            sa.ForeignKeyConstraint(["dashboard_id"], ["dashboards.id"],),
            sa.ForeignKeyConstraint(["slice_id"], ["slices.id"],),
            sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"],),
            sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"],),
            sa.PrimaryKeyConstraint("id"),
        )
    else:
        op.add_column(
            "sql_observations",
            sa.Column(
                "observer_id",
                sa.Integer(),
                autoincrement=False,
                nullable=False,
                default=0,
            ),
        )
        constraint = generic_find_fk_constraint_name("alerts", {"id"}, "dbs", insp)
        op.drop_constraint(constraint, "alerts", type_="foreignkey")
        op.drop_column("alerts", "validator_type")
        op.drop_column("alerts", "sql")
        op.drop_column("alerts", "database_id")
        op.drop_column("alerts", "validator_config")

        op.create_foreign_key(
            "sql_observations_ibfk_2",
            "sql_observations",
            "sql_observers",
            ["observer_id"],
            ["id"],
        )
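# After running either direction, the same Inspector API the helpers rely on
# gives a quick sanity check that the expected foreign keys exist, e.g. from
# within a migration (or an ad-hoc script with a bind in hand):
insp = sa.engine.reflection.Inspector.from_engine(bind)
for fk in insp.get_foreign_keys("sql_observations"):
    print(fk["name"], fk["constrained_columns"], "->", fk["referred_table"])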