# Imports assumed for a standalone Alembic migration module; the import path
# for generic_find_constraint_name differs across Superset versions
# (superset.utils vs superset.utils.core), so adjust to match your tree.
import logging

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql

from superset import db
from superset.utils.core import generic_find_constraint_name


def upgrade():
    # cleanup after: https://github.com/airbnb/superset/pull/1078
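    # Drop the legacy druid_datasource_id / table_id foreign keys and columns
    # from slices. Auto-generated constraint names vary by backend, so they
    # are looked up at runtime rather than hard-coded.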
    try:
        slices_ibfk_1 = generic_find_constraint_name(
            table="slices",
            columns={"druid_datasource_id"},
            referenced="datasources",
            db=db,
        )
        slices_ibfk_2 = generic_find_constraint_name(
            table="slices",
            columns={"table_id"},
            referenced="tables",
            db=db,
        )

        with op.batch_alter_table("slices") as batch_op:
            if slices_ibfk_1:
                batch_op.drop_constraint(slices_ibfk_1, type_="foreignkey")
            if slices_ibfk_2:
                batch_op.drop_constraint(slices_ibfk_2, type_="foreignkey")
            batch_op.drop_column("druid_datasource_id")
            batch_op.drop_column("table_id")
    except Exception as ex:
        logging.warning(str(ex))

    # fixed issue: https://github.com/airbnb/superset/issues/466
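    # Add the missing foreign key from columns.datasource_name to
    # datasources.datasource_name.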
    try:
        with op.batch_alter_table("columns") as batch_op:
            batch_op.create_foreign_key(
                None, "datasources", ["datasource_name"], ["datasource_name"]
            )
    except Exception as ex:
        logging.warning(str(ex))
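    # Enforce uniqueness of query.client_id.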
    try:
        with op.batch_alter_table("query") as batch_op:
            batch_op.create_unique_constraint("client_id", ["client_id"])
    except Exception as ex:
        logging.warning(str(ex))

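    # Drop the no-longer-used query.name column.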
    try:
        with op.batch_alter_table("query") as batch_op:
            batch_op.drop_column("name")
    except Exception as ex:
        logging.warning(str(ex))


def downgrade():
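    # Re-create the unique index on tables.table_name.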
    try:
        with op.batch_alter_table("tables") as batch_op:
            batch_op.create_index("table_name", ["table_name"], unique=True)
    except Exception as e:
        logging.warning(str(e))

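    # Re-add the legacy druid_datasource_id and table_id columns on slices and
    # restore their foreign keys to datasources and tables.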
    try:
        with op.batch_alter_table("slices") as batch_op:
            batch_op.add_column(
                sa.Column(
                    "table_id",
                    mysql.INTEGER(display_width=11),
                    autoincrement=False,
                    nullable=True,
                )
            )
            batch_op.add_column(
                sa.Column(
                    "druid_datasource_id",
                    sa.Integer(),
                    autoincrement=False,
                    nullable=True,
                )
            )
            batch_op.create_foreign_key(
                "slices_ibfk_1", "datasources", ["druid_datasource_id"], ["id"]
            )
            batch_op.create_foreign_key("slices_ibfk_2", "tables", ["table_id"], ["id"])
    except Exception as e:
        logging.warning(str(e))

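    # Look up and drop the foreign key on columns.datasource_name.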
    try:
        fk_columns = generic_find_constraint_name(
            table="columns",
            columns={"datasource_name"},
            referenced="datasources",
            db=db,
        )
        with op.batch_alter_table("columns") as batch_op:
            batch_op.drop_constraint(fk_columns, type_="foreignkey")
    except Exception as e:
        logging.warning(str(e))

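    # Restore the query.name column and drop the unique constraint on
    # query.client_id.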
    op.add_column("query", sa.Column("name", sa.String(length=256), nullable=True))
    try:
        with op.batch_alter_table("query") as batch_op:
            batch_op.drop_constraint("client_id", type_="unique")
    except Exception as e:
        logging.warning(str(e))


def find_constraint_name(upgrade=True):
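    # Find the backend-specific name of the foreign key from the columns table
    # to datasources; the constrained column is column_name during upgrade and
    # datasource_name during downgrade.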
    cols = {"column_name"} if upgrade else {"datasource_name"}
    return generic_find_constraint_name(
        table="columns", columns=cols, referenced="datasources", db=db
    )