Example 1
    @contextmanager  # assumed: the fixture is consumed as "with self._fixture() as batch:"
    def _fixture(self):
        migration_context = mock.Mock(opts={})
        op = Operations(migration_context)
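        # recreate='never' keeps the batch context in plain ALTER mode, so no
        # copy-and-rename of the table happens while sa_schema is mocked out.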
        batch = op.batch_alter_table('tname', recreate='never').__enter__()

        with mock.patch("alembic.operations.sa_schema") as mock_schema:
            yield batch
        batch.impl.flush()
        self.mock_schema = mock_schema
Example 2
    def alter_add_foreign_key_to_backtest_summary(self):
        ctx = MigrationContext.configure(self.db_client.connector)
        op = Operations(ctx)
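        # On backends such as SQLite that cannot ADD CONSTRAINT in place, the
        # batch block rebuilds the table to attach the foreign key.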

        with op.batch_alter_table(
                self.backtest_management_table_name) as batch_op:
            batch_op.create_foreign_key("fk_management_summary",
                                        "backtest_summary",
                                        ["backtest_summary_id"], ["id"])
Example 3
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_3_7_0_0

    init_model_encryption(db_3_7_0_0)

    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    repository = db_3_7_0_0.Repository.__table__
    repo_name_column = repository.columns.repo_name
    clone_uri_column = repository.columns.clone_uri

    indexes = _get_indexes_list(migrate_engine, repository.name)
    repo_name_indexes = [
        i['name'] for i in indexes if 'repo_name' in i['column_names']
    ]
    constraints = _get_unique_constraint_list(migrate_engine, repository.name)
    repo_name_constraints = [
        c['name'] for c in constraints if 'repo_name' in c['column_names']
    ]

    with op.batch_alter_table(repository.name) as batch_op:
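        # Drop the existing repo_name index and unique constraints first so the
        # column type can be widened to Text; the index is recreated below.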
        repo_name_idx = 'r_repo_name_idx'
        if repo_name_idx in repo_name_indexes:
            batch_op.drop_index(repo_name_idx)
        for name in repo_name_constraints:
            batch_op.drop_constraint(name, type_='unique')

        batch_op.alter_column(repo_name_column.name, type_=Text)
        batch_op.alter_column(clone_uri_column.name, type_=Text)
        batch_op.create_index('r_repo_name_idx', ['repo_name'],
                              mysql_length=255)
        batch_op.add_column(Column('repo_name_hash', String(40), unique=False))

    _generate_repo_name_hashes(db_3_7_0_0, op, meta.Session)

    with op.batch_alter_table(repository.name) as batch_op:
        batch_op.create_unique_constraint('uq_repo_name_hash',
                                          ['repo_name_hash'])
Example 4
File: sanity.py Project: kozec/cum
 def fix(self):
     context = MigrationContext.configure(self.parent.engine.connect())
     op = Operations(context)
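     # Walk the declared metadata for the target column and add it back to the
     # live table through a batch ALTER.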
     for table in self.parent.base.metadata.sorted_tables:
         if table.name == self.table:
             for column in table.columns:
                 if column.name == self.name:
                     with op.batch_alter_table(table.name) as batch_op:
                         batch_op.add_column(column.copy())
                     return
Example 5
    @contextmanager  # assumed: the fixture is consumed as "with self._fixture() as batch:"
    def _fixture(self, schema=None):
        migration_context = mock.Mock(
            opts={}, impl=mock.MagicMock(__dialect__='sqlite'))
        op = Operations(migration_context)
        batch = op.batch_alter_table('tname', recreate='never',
                                     schema=schema).__enter__()

        mock_schema = mock.MagicMock()
        with mock.patch("alembic.operations.schemaobj.sa_schema", mock_schema):
            yield batch
        batch.impl.flush()
        self.mock_schema = mock_schema
Example 6
File: sanity.py Project: kozec/cum
 def fix(self):
     """Uses Alembic batch operations to alter the column datatype in the table.
     """
     context = MigrationContext.configure(self.parent.engine.connect())
     op = Operations(context)
     for table in self.parent.base.metadata.sorted_tables:
         if table.name == self.table:
             for column in table.columns:
                 if column.name == self.column['name']:
                     with op.batch_alter_table(table.name) as batch_op:
                         batch_op.alter_column(column.name,
                                               type_=column.type)
                     return
Example 7
    def create_backtest_management_table(self):
        backtest_management_template = BacktestManagement
        table_def = backtest_management_template.__table__

        # add specific params columns
        table_def = self.append_specific_params_column(table_def)

        backtest_summary_id = Column("backtest_summary_id", Integer)
        table_def.relation = relationship("BacktestSummary")
        table_def.append_column(backtest_summary_id)

        table_def.name = self.backtest_management_table_name

        table_def.create(bind=self.db_client.connector)
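        # The table has to exist before batch_alter_table below can reflect it
        # and attach the foreign key constraint.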
        
        # add foreign key constraint

        ctx = MigrationContext.configure(self.db_client.connector)
        op = Operations(ctx)

        with op.batch_alter_table(self.bot_name + "_backtest_management") as batch_op:
            batch_op.create_foreign_key("fk_management_summary", "backtest_summary", ["backtest_summary_id"], ["id"])
Example 8
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
            mysql_engine='InnoDB'
        )
        t1.create(self.conn)

        self.conn.execute(
            t1.insert(),
            [
                {"id": 1, "data": "d1", "x": 5},
                {"id": 2, "data": "22", "x": 6},
                {"id": 3, "data": "8.5", "x": 7},
                {"id": 4, "data": "9.46", "x": 8},
                {"id": 5, "data": "d5", "x": 9}
            ]
        )
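        # A MigrationContext configured directly on the connection lets the test
        # drive Operations itself, without an env.py migration script.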
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    def _no_pk_fixture(self):
        nopk = Table(
            'nopk', self.metadata,
            Column('a', Integer),
            Column('b', Integer),
            Column('c', Integer),
            mysql_engine='InnoDB'
        )
        nopk.create(self.conn)
        self.conn.execute(
            nopk.insert(),
            [
                {"a": 1, "b": 2, "c": 3},
                {"a": 2, "b": 4, "c": 5},
            ]
        )
        return nopk

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data, tablename='foo'):
        eq_(
            [dict(row) for row
             in self.conn.execute("select * from %s" % tablename)],
            data
        )

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([
            {"id": 1, "data": 0, "x": 5},
            {"id": 2, "data": 22, "x": 6},
            {"id": 3, "data": 8, "x": 7},
            {"id": 4, "data": 9, "x": 8},
            {"id": 5, "data": 0, "x": 9}
        ])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_add_pk_constraint(self):
        self._no_pk_fixture()
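        # A primary key cannot be added by ALTER TABLE on SQLite;
        # recreate="always" rebuilds the table instead.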
        with self.op.batch_alter_table("nopk", recreate="always") as batch_op:
            batch_op.create_primary_key('newpk', ['a', 'b'])

        pk_const = Inspector.from_engine(self.conn).get_pk_constraint('nopk')
        with config.requirements.reflects_pk_names.fail_if():
            eq_(pk_const['name'], 'newpk')
        eq_(pk_const['constrained_columns'], ['a', 'b'])

    @config.requirements.check_constraints_w_enforcement
    def test_add_ck_constraint(self):
        with self.op.batch_alter_table("foo", recreate="always") as batch_op:
            batch_op.create_check_constraint("newck", text("x > 0"))

        # we don't support reflection of CHECK constraints
        # so test this by just running invalid data in
        foo = self.metadata.tables['foo']

        assert_raises_message(
            exc.IntegrityError,
            "newck",
            self.conn.execute,
            foo.insert(), {"id": 6, "data": 5, "x": -2}
        )

    @config.requirements.sqlalchemy_094
    @config.requirements.unnamed_constraints
    def test_drop_foreign_key(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        naming_convention = {
            "fk":
            "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        }
        with self.op.batch_alter_table(
                "bar", naming_convention=naming_convention) as batch_op:
            batch_op.drop_constraint(
                "fk_bar_foo_id_foo", type_="foreignkey")
        eq_(
            Inspector.from_engine(self.conn).get_foreign_keys('bar'),
            []
        )

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([
            {"id": 1, "data": "d1", "y": 5},
            {"id": 2, "data": "22", "y": 6},
            {"id": 3, "data": "8.5", "y": 7},
            {"id": 4, "data": "9.46", "y": 8},
            {"id": 5, "data": "d5", "y": 9}
        ])

    def test_rename_column_boolean(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean()),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})

        with self.op.batch_alter_table(
            "bar"
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
        ], 'bar')

    @config.requirements.non_native_boolean
    def test_rename_column_non_native_boolean_no_ck(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean(create_constraint=False)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})
        self.conn.execute(bar.insert(), {'id': 3, 'flag': 5})

        with self.op.batch_alter_table(
            "bar",
            reflect_args=[Column('flag', Boolean(create_constraint=False))]
        ) as batch_op:
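            # reflect_args overrides the reflected 'flag' column so the rebuilt
            # table gets no CHECK constraint for the non-native boolean.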
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
            {'id': 3, 'bflag': 5}
        ], 'bar')

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([
            {"data": "d1", "x": 5},
            {"data": "22", "x": 6},
            {"data": "8.5", "x": 7},
            {"data": "9.46", "x": 8},
            {"data": "d5", "x": 9}
        ])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([
            {"ident": 1, "data": "d1", "x": 5},
            {"ident": 2, "data": "22", "x": 6},
            {"ident": 3, "data": "8.5", "x": 7},
            {"ident": 4, "data": "9.46", "x": 8},
            {"ident": 5, "data": "d5", "x": 9}
        ])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_create_drop_index(self):
        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5},
            {"id": 2, "data": "22", "x": 6},
            {"id": 3, "data": "8.5", "x": 7},
            {"id": 4, "data": "9.46", "x": 8},
            {"id": 5, "data": "d5", "x": 9}
        ])

        insp = Inspector.from_engine(config.db)
        eq_(
            [
                dict(unique=ix['unique'],
                     name=ix['name'],
                     column_names=ix['column_names'])
                for ix in insp.get_indexes('foo')
            ],
            [{'unique': True, 'name': 'ix_data', 'column_names': ['data']}]
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )
Example 9
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
            mysql_engine='InnoDB'
        )
        t1.create(self.conn)

        self.conn.execute(
            t1.insert(),
            [
                {"id": 1, "data": "d1", "x": 5},
                {"id": 2, "data": "22", "x": 6},
                {"id": 3, "data": "8.5", "x": 7},
                {"id": 4, "data": "9.46", "x": 8},
                {"id": 5, "data": "d5", "x": 9}
            ]
        )
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    @contextmanager
    def _sqlite_referential_integrity(self):
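        # Turn on SQLite foreign key enforcement only for the enclosed block;
        # the *_refinteg tests use it to assert that recreating a referenced
        # table fails once FKs are enforced.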
        self.conn.execute("PRAGMA foreign_keys=ON")
        try:
            yield
        finally:
            self.conn.execute("PRAGMA foreign_keys=OFF")

    def _no_pk_fixture(self):
        nopk = Table(
            'nopk', self.metadata,
            Column('a', Integer),
            Column('b', Integer),
            Column('c', Integer),
            mysql_engine='InnoDB'
        )
        nopk.create(self.conn)
        self.conn.execute(
            nopk.insert(),
            [
                {"a": 1, "b": 2, "c": 3},
                {"a": 2, "b": 4, "c": 5},
            ]
        )
        return nopk

    def _table_w_index_fixture(self):
        t = Table(
            't_w_ix', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('thing', Integer),
            Column('data', String(20)),
        )
        Index('ix_thing', t.c.thing)
        t.create(self.conn)
        return t

    def _boolean_fixture(self):
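        # Boolean(create_constraint=True, name='ck1') emits a named CHECK
        # constraint; the type-change and drop tests below exercise batch mode
        # against that schema-level constraint.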
        t = Table(
            'hasbool', self.metadata,
            Column('x', Boolean(create_constraint=True, name='ck1')),
            Column('y', Integer)
        )
        t.create(self.conn)

    def _timestamp_fixture(self):
        t = Table(
            'hasts', self.metadata,
            Column('x', DateTime()),
        )
        t.create(self.conn)
        return t

    def _int_to_boolean_fixture(self):
        t = Table(
            'hasbool', self.metadata,
            Column('x', Integer)
        )
        t.create(self.conn)

    def test_change_type_boolean_to_int(self):
        self._boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.alter_column(
                'x', type_=Integer, existing_type=Boolean(
                    create_constraint=True, name='ck1'))
        insp = Inspector.from_engine(config.db)

        eq_(
            [c['type']._type_affinity for c in insp.get_columns('hasbool')
             if c['name'] == 'x'],
            [Integer]
        )

    def test_no_net_change_timestamp(self):
        t = self._timestamp_fixture()

        import datetime
        self.conn.execute(
            t.insert(),
            {"x": datetime.datetime(2012, 5, 18, 15, 32, 5)}
        )

        with self.op.batch_alter_table("hasts") as batch_op:
            batch_op.alter_column("x", type_=DateTime())

        eq_(
            self.conn.execute(select([t.c.x])).fetchall(),
            [(datetime.datetime(2012, 5, 18, 15, 32, 5),)]
        )

    def test_drop_col_schematype(self):
        self._boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.drop_column('x')
        insp = Inspector.from_engine(config.db)

        assert 'x' not in (c['name'] for c in insp.get_columns('hasbool'))

    def test_change_type_int_to_boolean(self):
        self._int_to_boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.alter_column(
                'x', type_=Boolean(create_constraint=True, name='ck1'))
        insp = Inspector.from_engine(config.db)

        if exclusions.against(config, "sqlite"):
            eq_(
                [c['type']._type_affinity for
                 c in insp.get_columns('hasbool') if c['name'] == 'x'],
                [Boolean]
            )
        elif exclusions.against(config, "mysql"):
            eq_(
                [c['type']._type_affinity for
                 c in insp.get_columns('hasbool') if c['name'] == 'x'],
                [Integer]
            )

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data, tablename='foo'):
        eq_(
            [dict(row) for row
             in self.conn.execute("select * from %s" % tablename)],
            data
        )

    def test_ix_existing(self):
        self._table_w_index_fixture()

        with self.op.batch_alter_table("t_w_ix") as batch_op:
            batch_op.alter_column('data', type_=String(30))
            batch_op.create_index("ix_data", ["data"])

        insp = Inspector.from_engine(config.db)
        eq_(
            set(
                (ix['name'], tuple(ix['column_names'])) for ix in
                insp.get_indexes('t_w_ix')
            ),
            set([
                ('ix_data', ('data',)),
                ('ix_thing', ('thing', ))
            ])
        )

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    @exclusions.only_on("sqlite")
    @exclusions.fails(
        "intentionally asserting that this "
        "doesn't work w/ pragma foreign keys")
    def test_fk_points_to_me_sqlite_refinteg(self):
        with self._sqlite_referential_integrity():
            self._test_fk_points_to_me("auto")

    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_(
            [(key['referred_table'],
             key['referred_columns'], key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('foo', ['id'], ['foo_id'])]
        )

    def test_selfref_fk_auto(self):
        self._test_selfref_fk("auto")

    @config.requirements.no_referential_integrity
    def test_selfref_fk_recreate(self):
        self._test_selfref_fk("always")

    @exclusions.only_on("sqlite")
    @exclusions.fails(
        "intentionally asserting that this "
        "doesn't work w/ pragma foreign keys")
    def test_selfref_fk_sqlite_refinteg(self):
        with self._sqlite_referential_integrity():
            self._test_selfref_fk("auto")

    def _test_selfref_fk(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('bar_id', Integer, ForeignKey('bar.id')),
            Column('data', String(50)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'data': 'x', 'bar_id': None})
        self.conn.execute(bar.insert(), {'id': 2, 'data': 'y', 'bar_id': 1})

        with self.op.batch_alter_table("bar", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_(
            [(key['referred_table'],
             key['referred_columns'], key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('bar', ['id'], ['bar_id'])]
        )

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([
            {"id": 1, "data": 0, "x": 5},
            {"id": 2, "data": 22, "x": 6},
            {"id": 3, "data": 8, "x": 7},
            {"id": 4, "data": 9, "x": 8},
            {"id": 5, "data": 0, "x": 9}
        ])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_add_pk_constraint(self):
        self._no_pk_fixture()
        with self.op.batch_alter_table("nopk", recreate="always") as batch_op:
            batch_op.create_primary_key('newpk', ['a', 'b'])

        pk_const = Inspector.from_engine(self.conn).get_pk_constraint('nopk')
        with config.requirements.reflects_pk_names.fail_if():
            eq_(pk_const['name'], 'newpk')
        eq_(pk_const['constrained_columns'], ['a', 'b'])

    @config.requirements.check_constraints_w_enforcement
    def test_add_ck_constraint(self):
        with self.op.batch_alter_table("foo", recreate="always") as batch_op:
            batch_op.create_check_constraint("newck", text("x > 0"))

        # we don't support reflection of CHECK constraints
        # so test this by just running invalid data in
        foo = self.metadata.tables['foo']

        assert_raises_message(
            exc.IntegrityError,
            "newck",
            self.conn.execute,
            foo.insert(), {"id": 6, "data": 5, "x": -2}
        )

    @config.requirements.sqlalchemy_094
    @config.requirements.unnamed_constraints
    def test_drop_foreign_key(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        naming_convention = {
            "fk":
            "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        }
        with self.op.batch_alter_table(
                "bar", naming_convention=naming_convention) as batch_op:
            batch_op.drop_constraint(
                "fk_bar_foo_id_foo", type_="foreignkey")
        eq_(
            Inspector.from_engine(self.conn).get_foreign_keys('bar'),
            []
        )

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([
            {"id": 1, "data": "d1", "y": 5},
            {"id": 2, "data": "22", "y": 6},
            {"id": 3, "data": "8.5", "y": 7},
            {"id": 4, "data": "9.46", "y": 8},
            {"id": 5, "data": "d5", "y": 9}
        ])

    def test_rename_column_boolean(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean()),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})

        with self.op.batch_alter_table(
            "bar"
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
        ], 'bar')

    @config.requirements.non_native_boolean
    def test_rename_column_non_native_boolean_no_ck(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean(create_constraint=False)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})
        self.conn.execute(
            # override Boolean type which as of 1.1 coerces numerics
            # to 1/0
            text("insert into bar (id, flag) values (:id, :flag)"),
            {'id': 3, 'flag': 5})

        with self.op.batch_alter_table(
            "bar",
            reflect_args=[Column('flag', Boolean(create_constraint=False))]
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
            {'id': 3, 'bflag': 5}
        ], 'bar')

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([
            {"data": "d1", "x": 5},
            {"data": "22", "x": 6},
            {"data": "8.5", "x": 7},
            {"data": "9.46", "x": 8},
            {"data": "d5", "x": 9}
        ])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([
            {"ident": 1, "data": "d1", "x": 5},
            {"ident": 2, "data": "22", "x": 6},
            {"ident": 3, "data": "8.5", "x": 7},
            {"ident": 4, "data": "9.46", "x": 8},
            {"ident": 5, "data": "d5", "x": 9}
        ])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_create_drop_index(self):
        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5},
            {"id": 2, "data": "22", "x": 6},
            {"id": 3, "data": "8.5", "x": 7},
            {"id": 4, "data": "9.46", "x": 8},
            {"id": 5, "data": "d5", "x": 9}
        ])

        insp = Inspector.from_engine(config.db)
        eq_(
            [
                dict(unique=ix['unique'],
                     name=ix['name'],
                     column_names=ix['column_names'])
                for ix in insp.get_indexes('foo')
            ],
            [{'unique': True, 'name': 'ix_data', 'column_names': ['data']}]
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )
Example 10
class CopyFromTest(TestBase):
    __requires__ = ('sqlalchemy_08', )

    def _fixture(self):
        self.metadata = MetaData()
        self.table = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
        )

        context = op_fixture(dialect="sqlite", as_sql=True)
        self.op = Operations(context)
        return context

    def test_change_type(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.alter_column('data', type_=Integer)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data INTEGER, x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS INTEGER) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

    def test_create_drop_index_w_always(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table, recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), '
            'x INTEGER, PRIMARY KEY (id))',
            'CREATE UNIQUE INDEX ix_data ON _alembic_batch_temp (data)',
            'INSERT INTO _alembic_batch_temp (id, data, x) '
            'SELECT foo.id, foo.data, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table(
                "foo", copy_from=self.table, recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) '
            'SELECT foo.id, foo.data, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

    def test_create_drop_index_wo_always(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE UNIQUE INDEX ix_data ON foo (data)'
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.drop_index('ix_data')

        context.assert_(
            'DROP INDEX ix_data'
        )

    def test_create_drop_index_w_other_ops(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.alter_column('data', type_=Integer)
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data INTEGER, x INTEGER, PRIMARY KEY (id))',
            'CREATE UNIQUE INDEX ix_data ON _alembic_batch_temp (data)',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS INTEGER) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.drop_index('ix_data')
            batch_op.alter_column('data', type_=String)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR, x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS VARCHAR) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )
Example 11
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
            mysql_engine='InnoDB'
        )
        t1.create(self.conn)

        self.conn.execute(
            t1.insert(),
            [
                {"id": 1, "data": "d1", "x": 5},
                {"id": 2, "data": "22", "x": 6},
                {"id": 3, "data": "8.5", "x": 7},
                {"id": 4, "data": "9.46", "x": 8},
                {"id": 5, "data": "d5", "x": 9}
            ]
        )
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data):
        eq_(
            [dict(row) for row in self.conn.execute("select * from foo")],
            data
        )

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([
            {"id": 1, "data": 0, "x": 5},
            {"id": 2, "data": 22, "x": 6},
            {"id": 3, "data": 8, "x": 7},
            {"id": 4, "data": 9, "x": 8},
            {"id": 5, "data": 0, "x": 9}
        ])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([
            {"id": 1, "data": "d1", "y": 5},
            {"id": 2, "data": "22", "y": 6},
            {"id": 3, "data": "8.5", "y": 7},
            {"id": 4, "data": "9.46", "y": 8},
            {"id": 5, "data": "d5", "y": 9}
        ])

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([
            {"data": "d1", "x": 5},
            {"data": "22", "x": 6},
            {"data": "8.5", "x": 7},
            {"data": "9.46", "x": 8},
            {"data": "d5", "x": 9}
        ])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([
            {"ident": 1, "data": "d1", "x": 5},
            {"ident": 2, "data": "22", "x": 6},
            {"ident": 3, "data": "8.5", "x": 7},
            {"ident": 4, "data": "9.46", "x": 8},
            {"ident": 5, "data": "d5", "x": 9}
        ])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])
Example 12
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
            mysql_engine='InnoDB'
        )
        t1.create(self.conn)

        self.conn.execute(
            t1.insert(),
            [
                {"id": 1, "data": "d1", "x": 5},
                {"id": 2, "data": "22", "x": 6},
                {"id": 3, "data": "8.5", "x": 7},
                {"id": 4, "data": "9.46", "x": 8},
                {"id": 5, "data": "d5", "x": 9}
            ]
        )
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data, tablename='foo'):
        eq_(
            [dict(row) for row
             in self.conn.execute("select * from %s" % tablename)],
            data
        )

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([
            {"id": 1, "data": 0, "x": 5},
            {"id": 2, "data": 22, "x": 6},
            {"id": 3, "data": 8, "x": 7},
            {"id": 4, "data": 9, "x": 8},
            {"id": 5, "data": 0, "x": 9}
        ])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    @config.requirements.sqlalchemy_094
    @config.requirements.unnamed_constraints
    def test_drop_foreign_key(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        naming_convention = {
            "fk":
            "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        }
        with self.op.batch_alter_table(
                "bar", naming_convention=naming_convention) as batch_op:
            batch_op.drop_constraint(
                "fk_bar_foo_id_foo", type_="foreignkey")
        eq_(
            Inspector.from_engine(self.conn).get_foreign_keys('bar'),
            []
        )

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([
            {"id": 1, "data": "d1", "y": 5},
            {"id": 2, "data": "22", "y": 6},
            {"id": 3, "data": "8.5", "y": 7},
            {"id": 4, "data": "9.46", "y": 8},
            {"id": 5, "data": "d5", "y": 9}
        ])

    def test_rename_column_boolean(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean()),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})

        with self.op.batch_alter_table(
            "bar"
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
        ], 'bar')

    @config.requirements.non_native_boolean
    def test_rename_column_non_native_boolean_no_ck(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean(create_constraint=False)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})
        self.conn.execute(bar.insert(), {'id': 3, 'flag': 5})

        with self.op.batch_alter_table(
            "bar",
            reflect_args=[Column('flag', Boolean(create_constraint=False))]
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
            {'id': 3, 'bflag': 5}
        ], 'bar')

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([
            {"data": "d1", "x": 5},
            {"data": "22", "x": 6},
            {"data": "8.5", "x": 7},
            {"data": "9.46", "x": 8},
            {"data": "d5", "x": 9}
        ])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([
            {"ident": 1, "data": "d1", "x": 5},
            {"ident": 2, "data": "22", "x": 6},
            {"ident": 3, "data": "8.5", "x": 7},
            {"ident": 4, "data": "9.46", "x": 8},
            {"ident": 5, "data": "d5", "x": 9}
        ])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_create_drop_index(self):
        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5},
            {"id": 2, "data": "22", "x": 6},
            {"id": 3, "data": "8.5", "x": 7},
            {"id": 4, "data": "9.46", "x": 8},
            {"id": 5, "data": "d5", "x": 9}
        ])

        insp = Inspector.from_engine(config.db)
        eq_(
            [
                dict(unique=ix['unique'],
                     name=ix['name'],
                     column_names=ix['column_names'])
                for ix in insp.get_indexes('foo')
            ],
            [{'unique': True, 'name': 'ix_data', 'column_names': ['data']}]
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )
Example 13
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table('foo',
                   self.metadata,
                   Column('id', Integer, primary_key=True),
                   Column('data', String(50)),
                   Column('x', Integer),
                   mysql_engine='InnoDB')
        t1.create(self.conn)

        self.conn.execute(t1.insert(), [{
            "id": 1,
            "data": "d1",
            "x": 5
        }, {
            "id": 2,
            "data": "22",
            "x": 6
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8
        }, {
            "id": 5,
            "data": "d5",
            "x": 9
        }])
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    @contextmanager
    def _sqlite_referential_integrity(self):
        self.conn.execute("PRAGMA foreign_keys=ON")
        try:
            yield
        finally:
            self.conn.execute("PRAGMA foreign_keys=OFF")

    def _no_pk_fixture(self):
        nopk = Table('nopk',
                     self.metadata,
                     Column('a', Integer),
                     Column('b', Integer),
                     Column('c', Integer),
                     mysql_engine='InnoDB')
        nopk.create(self.conn)
        self.conn.execute(nopk.insert(), [
            {
                "a": 1,
                "b": 2,
                "c": 3
            },
            {
                "a": 2,
                "b": 4,
                "c": 5
            },
        ])
        return nopk

    def _table_w_index_fixture(self):
        t = Table(
            't_w_ix',
            self.metadata,
            Column('id', Integer, primary_key=True),
            Column('thing', Integer),
            Column('data', String(20)),
        )
        Index('ix_thing', t.c.thing)
        t.create(self.conn)
        return t

    def _boolean_fixture(self):
        t = Table('hasbool', self.metadata,
                  Column('x', Boolean(create_constraint=True, name='ck1')),
                  Column('y', Integer))
        t.create(self.conn)

    def _int_to_boolean_fixture(self):
        t = Table('hasbool', self.metadata, Column('x', Integer))
        t.create(self.conn)

    def test_change_type_boolean_to_int(self):
        self._boolean_fixture()
        with self.op.batch_alter_table("hasbool") as batch_op:
            batch_op.alter_column('x',
                                  type_=Integer,
                                  existing_type=Boolean(create_constraint=True,
                                                        name='ck1'))
        insp = Inspector.from_engine(config.db)

        eq_([
            c['type']._type_affinity
            for c in insp.get_columns('hasbool') if c['name'] == 'x'
        ], [Integer])

    def test_drop_col_schematype(self):
        self._boolean_fixture()
        with self.op.batch_alter_table("hasbool") as batch_op:
            batch_op.drop_column('x')
        insp = Inspector.from_engine(config.db)

        assert 'x' not in (c['name'] for c in insp.get_columns('hasbool'))

    def test_change_type_int_to_boolean(self):
        self._int_to_boolean_fixture()
        with self.op.batch_alter_table("hasbool") as batch_op:
            batch_op.alter_column('x',
                                  type_=Boolean(create_constraint=True,
                                                name='ck1'))
        insp = Inspector.from_engine(config.db)

        if exclusions.against(config, "sqlite"):
            eq_([
                c['type']._type_affinity
                for c in insp.get_columns('hasbool') if c['name'] == 'x'
            ], [Boolean])
        elif exclusions.against(config, "mysql"):
            eq_([
                c['type']._type_affinity
                for c in insp.get_columns('hasbool') if c['name'] == 'x'
            ], [Integer])

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data, tablename='foo'):
        eq_([
            dict(row)
            for row in self.conn.execute("select * from %s" % tablename)
        ], data)

    def test_ix_existing(self):
        self._table_w_index_fixture()

        with self.op.batch_alter_table("t_w_ix") as batch_op:
            batch_op.alter_column('data', type_=String(30))
            batch_op.create_index("ix_data", ["data"])

        insp = Inspector.from_engine(config.db)
        eq_(
            set((ix['name'], tuple(ix['column_names']))
                for ix in insp.get_indexes('t_w_ix')),
            set([('ix_data', ('data', )), ('ix_thing', ('thing', ))]))

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    @exclusions.only_on("sqlite")
    @exclusions.fails("intentionally asserting that this "
                      "doesn't work w/ pragma foreign keys")
    def test_fk_points_to_me_sqlite_refinteg(self):
        with self._sqlite_referential_integrity():
            self._test_fk_points_to_me("auto")

    def _test_fk_points_to_me(self, recreate):
        bar = Table('bar',
                    self.metadata,
                    Column('id', Integer, primary_key=True),
                    Column('foo_id', Integer, ForeignKey('foo.id')),
                    mysql_engine='InnoDB')
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column('data',
                                  new_column_name='newdata',
                                  existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_([(key['referred_table'], key['referred_columns'],
              key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('foo', ['id'], ['foo_id'])])

    def test_selfref_fk_auto(self):
        self._test_selfref_fk("auto")

    @config.requirements.no_referential_integrity
    def test_selfref_fk_recreate(self):
        self._test_selfref_fk("always")

    @exclusions.only_on("sqlite")
    @exclusions.fails("intentionally asserting that this "
                      "doesn't work w/ pragma foreign keys")
    def test_selfref_fk_sqlite_refinteg(self):
        with self._sqlite_referential_integrity():
            self._test_selfref_fk("auto")

    def _test_selfref_fk(self, recreate):
        bar = Table('bar',
                    self.metadata,
                    Column('id', Integer, primary_key=True),
                    Column('bar_id', Integer, ForeignKey('bar.id')),
                    Column('data', String(50)),
                    mysql_engine='InnoDB')
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'data': 'x', 'bar_id': None})
        self.conn.execute(bar.insert(), {'id': 2, 'data': 'y', 'bar_id': 1})

        with self.op.batch_alter_table("bar", recreate=recreate) as batch_op:
            batch_op.alter_column('data',
                                  new_column_name='newdata',
                                  existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_([(key['referred_table'], key['referred_columns'],
              key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('bar', ['id'], ['bar_id'])])

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([{
            "id": 1,
            "data": 0,
            "x": 5
        }, {
            "id": 2,
            "data": 22,
            "x": 6
        }, {
            "id": 3,
            "data": 8,
            "x": 7
        }, {
            "id": 4,
            "data": 9,
            "x": 8
        }, {
            "id": 5,
            "data": 0,
            "x": 9
        }])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([{
            "id": 1,
            "x": 5
        }, {
            "id": 2,
            "x": 6
        }, {
            "id": 3,
            "x": 7
        }, {
            "id": 4,
            "x": 8
        }, {
            "id": 5,
            "x": 9
        }])

    def test_add_pk_constraint(self):
        self._no_pk_fixture()
        with self.op.batch_alter_table("nopk", recreate="always") as batch_op:
            batch_op.create_primary_key('newpk', ['a', 'b'])

        pk_const = Inspector.from_engine(self.conn).get_pk_constraint('nopk')
        with config.requirements.reflects_pk_names.fail_if():
            eq_(pk_const['name'], 'newpk')
        eq_(pk_const['constrained_columns'], ['a', 'b'])

    @config.requirements.check_constraints_w_enforcement
    def test_add_ck_constraint(self):
        with self.op.batch_alter_table("foo", recreate="always") as batch_op:
            batch_op.create_check_constraint("newck", text("x > 0"))

        # we don't support reflection of CHECK constraints,
        # so test this by just running invalid data in
        foo = self.metadata.tables['foo']

        assert_raises_message(exc.IntegrityError, "newck", self.conn.execute,
                              foo.insert(), {
                                  "id": 6,
                                  "data": 5,
                                  "x": -2
                              })

    @config.requirements.sqlalchemy_094
    @config.requirements.unnamed_constraints
    def test_drop_foreign_key(self):
        bar = Table('bar',
                    self.metadata,
                    Column('id', Integer, primary_key=True),
                    Column('foo_id', Integer, ForeignKey('foo.id')),
                    mysql_engine='InnoDB')
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        naming_convention = {
            "fk":
            "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        }
        with self.op.batch_alter_table(
                "bar", naming_convention=naming_convention) as batch_op:
            batch_op.drop_constraint("fk_bar_foo_id_foo", type_="foreignkey")
        eq_(Inspector.from_engine(self.conn).get_foreign_keys('bar'), [])

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([{
            "id": 1,
            "x": 5
        }, {
            "id": 2,
            "x": 6
        }, {
            "id": 3,
            "x": 7
        }, {
            "id": 4,
            "x": 8
        }, {
            "id": 5,
            "x": 9
        }])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([{
            "id": 1,
            "data": "d1",
            "y": 5
        }, {
            "id": 2,
            "data": "22",
            "y": 6
        }, {
            "id": 3,
            "data": "8.5",
            "y": 7
        }, {
            "id": 4,
            "data": "9.46",
            "y": 8
        }, {
            "id": 5,
            "data": "d5",
            "y": 9
        }])

    def test_rename_column_boolean(self):
        bar = Table('bar',
                    self.metadata,
                    Column('id', Integer, primary_key=True),
                    Column('flag', Boolean()),
                    mysql_engine='InnoDB')
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})

        with self.op.batch_alter_table("bar") as batch_op:
            batch_op.alter_column('flag',
                                  new_column_name='bflag',
                                  existing_type=Boolean)

        self._assert_data([
            {
                "id": 1,
                'bflag': True
            },
            {
                "id": 2,
                'bflag': False
            },
        ], 'bar')

    @config.requirements.non_native_boolean
    def test_rename_column_non_native_boolean_no_ck(self):
        bar = Table('bar',
                    self.metadata,
                    Column('id', Integer, primary_key=True),
                    Column('flag', Boolean(create_constraint=False)),
                    mysql_engine='InnoDB')
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})
        self.conn.execute(bar.insert(), {'id': 3, 'flag': 5})

        with self.op.batch_alter_table(
                "bar",
                reflect_args=[
                    Column('flag', Boolean(create_constraint=False))
                ]) as batch_op:
            batch_op.alter_column('flag',
                                  new_column_name='bflag',
                                  existing_type=Boolean)

        self._assert_data([{
            "id": 1,
            'bflag': True
        }, {
            "id": 2,
            'bflag': False
        }, {
            'id': 3,
            'bflag': 5
        }], 'bar')

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([{
            "data": "d1",
            "x": 5
        }, {
            "data": "22",
            "x": 6
        }, {
            "data": "8.5",
            "x": 7
        }, {
            "data": "9.46",
            "x": 8
        }, {
            "data": "d5",
            "x": 9
        }])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([{
            "ident": 1,
            "data": "d1",
            "x": 5
        }, {
            "ident": 2,
            "data": "22",
            "x": 6
        }, {
            "ident": 3,
            "data": "8.5",
            "x": 7
        }, {
            "ident": 4,
            "data": "9.46",
            "x": 8
        }, {
            "ident": 5,
            "data": "d5",
            "x": 9
        }])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([{
            "id": 1,
            "data": "d1",
            "x": 5,
            'data2': 'hi'
        }, {
            "id": 2,
            "data": "22",
            "x": 6,
            'data2': 'hi'
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7,
            'data2': 'hi'
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8,
            'data2': 'hi'
        }, {
            "id": 5,
            "data": "d5",
            "x": 9,
            'data2': 'hi'
        }])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([{
            "id": 1,
            "data": "d1",
            "x": 5,
            'data2': 'hi'
        }, {
            "id": 2,
            "data": "22",
            "x": 6,
            'data2': 'hi'
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7,
            'data2': 'hi'
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8,
            'data2': 'hi'
        }, {
            "id": 5,
            "data": "d5",
            "x": 9,
            'data2': 'hi'
        }])

    def test_create_drop_index(self):
        insp = Inspector.from_engine(config.db)
        eq_(insp.get_indexes('foo'), [])

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.create_index('ix_data', ['data'], unique=True)

        self._assert_data([{
            "id": 1,
            "data": "d1",
            "x": 5
        }, {
            "id": 2,
            "data": "22",
            "x": 6
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8
        }, {
            "id": 5,
            "data": "d5",
            "x": 9
        }])

        insp = Inspector.from_engine(config.db)
        eq_([
            dict(unique=ix['unique'],
                 name=ix['name'],
                 column_names=ix['column_names'])
            for ix in insp.get_indexes('foo')
        ], [{
            'unique': True,
            'name': 'ix_data',
            'column_names': ['data']
        }])

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        insp = Inspector.from_engine(config.db)
        eq_(insp.get_indexes('foo'), [])
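
test_drop_foreign_key above relies on a naming convention so that the reflected, unnamed SQLite foreign key can be addressed by a deterministic name. A hedged sketch of the equivalent migration-script usage follows; the 'bar' table and the constraint name are taken from the test and are otherwise assumptions.

# Sketch only: dropping an unnamed SQLite foreign key by giving batch mode
# a naming convention, so the reflected constraint gets a predictable name.
from alembic import op

naming_convention = {
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
}

def upgrade():
    with op.batch_alter_table(
            "bar", naming_convention=naming_convention) as batch_op:
        batch_op.drop_constraint("fk_bar_foo_id_foo", type_="foreignkey")
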
Example no. 18
class CopyFromTest(TestBase):
    __requires__ = ('sqlalchemy_08', )

    def _fixture(self):
        self.metadata = MetaData()
        self.table = Table(
            'foo',
            self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
        )

        context = op_fixture(dialect="sqlite", as_sql=True)
        self.op = Operations(context)
        return context

    def test_change_type(self):
        context = self._fixture()
        with self.op.batch_alter_table("foo",
                                       copy_from=self.table) as batch_op:
            batch_op.alter_column('data', type_=Integer)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data INTEGER, x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS INTEGER) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo', 'ALTER TABLE _alembic_batch_temp RENAME TO foo')

    def test_change_type_from_schematype(self):
        context = self._fixture()
        self.table.append_column(
            Column('y', Boolean(create_constraint=True, name="ck1")))

        with self.op.batch_alter_table("foo",
                                       copy_from=self.table) as batch_op:
            batch_op.alter_column('y',
                                  type_=Integer,
                                  existing_type=Boolean(create_constraint=True,
                                                        name="ck1"))
        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), x INTEGER, y INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x, y) SELECT foo.id, '
            'foo.data, foo.x, CAST(foo.y AS INTEGER) AS anon_1 FROM foo',
            'DROP TABLE foo', 'ALTER TABLE _alembic_batch_temp RENAME TO foo')

    def test_change_type_to_schematype(self):
        context = self._fixture()
        self.table.append_column(Column('y', Integer))

        with self.op.batch_alter_table("foo",
                                       copy_from=self.table) as batch_op:
            batch_op.alter_column('y',
                                  existing_type=Integer,
                                  type_=Boolean(create_constraint=True,
                                                name="ck1"))
        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), x INTEGER, y BOOLEAN, PRIMARY KEY (id), '
            'CONSTRAINT ck1 CHECK (y IN (0, 1)))',
            'INSERT INTO _alembic_batch_temp (id, data, x, y) SELECT foo.id, '
            'foo.data, foo.x, CAST(foo.y AS BOOLEAN) AS anon_1 FROM foo',
            'DROP TABLE foo', 'ALTER TABLE _alembic_batch_temp RENAME TO foo')

    def test_create_drop_index_w_always(self):
        context = self._fixture()
        with self.op.batch_alter_table("foo",
                                       copy_from=self.table,
                                       recreate='always') as batch_op:
            batch_op.create_index('ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), '
            'x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) '
            'SELECT foo.id, foo.data, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo',
            'CREATE UNIQUE INDEX ix_data ON foo (data)',
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table("foo",
                                       copy_from=self.table,
                                       recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) '
            'SELECT foo.id, foo.data, foo.x FROM foo', 'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo')

    def test_create_drop_index_wo_always(self):
        context = self._fixture()
        with self.op.batch_alter_table("foo",
                                       copy_from=self.table) as batch_op:
            batch_op.create_index('ix_data', ['data'], unique=True)

        context.assert_('CREATE UNIQUE INDEX ix_data ON foo (data)')

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table("foo",
                                       copy_from=self.table) as batch_op:
            batch_op.drop_index('ix_data')

        context.assert_('DROP INDEX ix_data')

    def test_create_drop_index_w_other_ops(self):
        context = self._fixture()
        with self.op.batch_alter_table("foo",
                                       copy_from=self.table) as batch_op:
            batch_op.alter_column('data', type_=Integer)
            batch_op.create_index('ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data INTEGER, x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS INTEGER) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo',
            'CREATE UNIQUE INDEX ix_data ON foo (data)',
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table("foo",
                                       copy_from=self.table) as batch_op:
            batch_op.drop_index('ix_data')
            batch_op.alter_column('data', type_=String)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR, x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS VARCHAR) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo', 'ALTER TABLE _alembic_batch_temp RENAME TO foo')
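
CopyFromTest drives batch mode with copy_from, which supplies a pre-declared Table object so nothing needs to be reflected from a live database; that is what allows the operations to be rendered as plain SQL strings. A minimal sketch under the same assumptions (the 'foo' definition simply mirrors the fixture) is:

# Sketch: passing copy_from lets batch mode work without reflection,
# e.g. when generating SQL offline with "alembic upgrade --sql".
from alembic import op
from sqlalchemy import Table, MetaData, Column, Integer, String

foo = Table(
    'foo', MetaData(),
    Column('id', Integer, primary_key=True),
    Column('data', String(50)),
    Column('x', Integer),
)

def upgrade():
    with op.batch_alter_table('foo', copy_from=foo) as batch_op:
        batch_op.alter_column('data', type_=Integer)
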
Example no. 19
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table('foo',
                   self.metadata,
                   Column('id', Integer, primary_key=True),
                   Column('data', String(50)),
                   Column('x', Integer),
                   mysql_engine='InnoDB')
        t1.create(self.conn)

        self.conn.execute(t1.insert(), [{
            "id": 1,
            "data": "d1",
            "x": 5
        }, {
            "id": 2,
            "data": "22",
            "x": 6
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8
        }, {
            "id": 5,
            "data": "d5",
            "x": 9
        }])
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data):
        eq_([dict(row) for row in self.conn.execute("select * from foo")],
            data)

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    def _test_fk_points_to_me(self, recreate):
        bar = Table('bar',
                    self.metadata,
                    Column('id', Integer, primary_key=True),
                    Column('foo_id', Integer, ForeignKey('foo.id')),
                    mysql_engine='InnoDB')
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column('data',
                                  new_column_name='newdata',
                                  existing_type=String(50))

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([{
            "id": 1,
            "data": 0,
            "x": 5
        }, {
            "id": 2,
            "data": 22,
            "x": 6
        }, {
            "id": 3,
            "data": 8,
            "x": 7
        }, {
            "id": 4,
            "data": 9,
            "x": 8
        }, {
            "id": 5,
            "data": 0,
            "x": 9
        }])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([{
            "id": 1,
            "x": 5
        }, {
            "id": 2,
            "x": 6
        }, {
            "id": 3,
            "x": 7
        }, {
            "id": 4,
            "x": 8
        }, {
            "id": 5,
            "x": 9
        }])

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([{
            "id": 1,
            "x": 5
        }, {
            "id": 2,
            "x": 6
        }, {
            "id": 3,
            "x": 7
        }, {
            "id": 4,
            "x": 8
        }, {
            "id": 5,
            "x": 9
        }])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([{
            "id": 1,
            "data": "d1",
            "y": 5
        }, {
            "id": 2,
            "data": "22",
            "y": 6
        }, {
            "id": 3,
            "data": "8.5",
            "y": 7
        }, {
            "id": 4,
            "data": "9.46",
            "y": 8
        }, {
            "id": 5,
            "data": "d5",
            "y": 9
        }])

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([{
            "data": "d1",
            "x": 5
        }, {
            "data": "22",
            "x": 6
        }, {
            "data": "8.5",
            "x": 7
        }, {
            "data": "9.46",
            "x": 8
        }, {
            "data": "d5",
            "x": 9
        }])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([{
            "ident": 1,
            "data": "d1",
            "x": 5
        }, {
            "ident": 2,
            "data": "22",
            "x": 6
        }, {
            "ident": 3,
            "data": "8.5",
            "x": 7
        }, {
            "ident": 4,
            "data": "9.46",
            "x": 8
        }, {
            "ident": 5,
            "data": "d5",
            "x": 9
        }])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([{
            "id": 1,
            "data": "d1",
            "x": 5,
            'data2': 'hi'
        }, {
            "id": 2,
            "data": "22",
            "x": 6,
            'data2': 'hi'
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7,
            'data2': 'hi'
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8,
            'data2': 'hi'
        }, {
            "id": 5,
            "data": "d5",
            "x": 9,
            'data2': 'hi'
        }])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([{
            "id": 1,
            "data": "d1",
            "x": 5,
            'data2': 'hi'
        }, {
            "id": 2,
            "data": "22",
            "x": 6,
            'data2': 'hi'
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7,
            'data2': 'hi'
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8,
            'data2': 'hi'
        }, {
            "id": 5,
            "data": "d5",
            "x": 9,
            'data2': 'hi'
        }])