Example #1
def create_operations(ctx=None, **kwargs):
    """Create an alembic operations object."""
    if ctx is None:
        ctx = create_migration_ctx(**kwargs)
    operations = Operations(ctx)
    operations.has_table = has_table
    return operations
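For orientation, the pattern shared by all of these examples reduces to a minimal, self-contained sketch. This assumes only SQLAlchemy and Alembic with an in-memory SQLite engine; create_migration_ctx and has_table above are project-specific helpers, not Alembic API:

# Minimal sketch: build an Operations object from a live connection.
from sqlalchemy import create_engine
from alembic.migration import MigrationContext
from alembic.operations import Operations

engine = create_engine("sqlite://")
with engine.connect() as conn:
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    # op now exposes imperative DDL helpers such as add_column,
    # alter_column, rename_table and batch_alter_table.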
Example #3
    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table('foo',
                   self.metadata,
                   Column('id', Integer, primary_key=True),
                   Column('data', String(50)),
                   Column('x', Integer),
                   mysql_engine='InnoDB')
        t1.create(self.conn)

        self.conn.execute(t1.insert(), [{
            "id": 1,
            "data": "d1",
            "x": 5
        }, {
            "id": 2,
            "data": "22",
            "x": 6
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8
        }, {
            "id": 5,
            "data": "d5",
            "x": 9
        }])
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)
Example #4
def upgrade(engine):
    session_maker = sessionmaker(bind=engine)
    session = session_maker()

    r = (session.query(model.ByteHubVersion).order_by(
        model.ByteHubVersion.timestamp.desc()).first())
    if not r:
        current_version = "0"
    else:
        current_version = r.version

    if parse(current_version) >= parse(version.__version__):
        # Up-to-date: nothing to do
        session.close()
        return

    if parse(current_version) < parse(version.__version__):
        print(f"Upgrading ByteHub database schema...")
        with engine.connect() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            try:
                op.add_column("namespace", Column("backend", String(128)))
            except OperationalError:
                pass

    # Add version number
    obj = model.ByteHubVersion()
    obj.version = version.__version__
    session.add(obj)
    session.commit()

    session.close()
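The try/except OperationalError above makes the upgrade tolerant of re-runs where the column already exists. A more explicit alternative, sketched here with plain SQLAlchemy inspection (column_exists is a hypothetical helper, not part of ByteHub), is to check before altering:

from sqlalchemy import inspect

def column_exists(engine, table_name, column_name):
    # Reflect the live table and look for the column by name.
    cols = inspect(engine).get_columns(table_name)
    return any(c["name"] == column_name for c in cols)

# e.g. guard the migration step:
# if not column_exists(engine, "namespace", "backend"):
#     op.add_column("namespace", Column("backend", String(128)))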
Example #5
    def add_new_columns(self):
        """
        Adds columns present in df but not in the SQL table
        for given instance of PandasSpecialEngine.

        Notes
        -----
        Sadly, it seems that we cannot create JSON columns.
        """
        # create deep copies of the columns because we are going to unbind
        # them from the table model (otherwise alembic would think we are
        # adding a column that already exists in the database)
        cols_to_add = [deepcopy(col) for col in self.table.columns
                       if col.name not in self.get_db_columns_names()]
        # check columns are not index levels
        if any((c.name in self.df.index.names for c in cols_to_add)):
            raise ValueError(('Cannot add any column that is part of the df index!\n'
                              "You'll have to update your table primary key or change your "
                              "df index"))
        
        with self.engine.connect() as con:
            ctx = MigrationContext.configure(con)
            op = Operations(ctx)
            for col in cols_to_add:
                col.table = None  # Important! unbind the column from its table
                op.add_column(self.table.name, col, schema=self.schema)
                log(f"Added column {col} (type: {col.type}) in table {self.table.name} "
                    f'(schema="{self.schema}")')
Example #6
    def adapt_dtype_of_empty_db_columns(self):
        """
        Changes the data types of empty columns in the SQL table defined
        in given instance of a PandasSpecialEngine.

        This should only happen in case of data type mismatches.
        This means with columns for which the sqlalchemy table
        model for df and the model for the SQL table have different data types.
        """
        empty_db_columns = self.get_empty_columns()
        db_table = self.get_db_table_schema()
        # if column does not have value in db and there are values
        # in the frame then change the column type if needed
        for col in empty_db_columns:
            # check if the column also exists in df
            if col.name not in self.df.columns:
                continue
            # check same type
            orig_type = db_table.columns[col.name].type.compile(self.engine.dialect)
            dest_type = self.table.columns[col.name].type.compile(self.engine.dialect)
            # remove character count e.g. "VARCHAR(50)" -> "VARCHAR" 
            orig_type = RE_CHARCOUNT_COL_TYPE.sub('', orig_type)
            dest_type = RE_CHARCOUNT_COL_TYPE.sub('', dest_type)
            # if same type or we want to insert TEXT instead of JSON continue
            # (JSON is not supported on some DBs so it's normal to have TEXT instead)
            if ((orig_type == dest_type) or
                ((orig_type == 'JSON') and (dest_type == 'TEXT'))):
                continue
            # grab the col/index from the df
            # so we can check if there are any values
            if col.name in self.df.index.names:
                df_col = self.df.index.get_level_values(col.name)
            else:
                df_col = self.df[col.name]
            if df_col.notna().any():
                # raise an error if we have to modify the dtype but we have a SQLite
                # engine (SQLite does not support data type alteration)
                if self._db_type == 'sqlite':
                    raise ValueError('SQLite does not support column data type alteration!')
                with self.engine.connect() as con:
                    ctx = MigrationContext.configure(con)
                    op = Operations(ctx)
                    new_col = self.table.columns[col.name]
                    # check if postgres (in which case we have to use "using" syntax
                    # to alter columns data types)
                    if self._db_type == 'postgres':
                        escaped_col = str(new_col.compile(dialect=self.engine.dialect))
                        compiled_type = new_col.type.compile(dialect=self.engine.dialect)
                        alter_kwargs = {'postgresql_using':f'{escaped_col}::{compiled_type}'}
                    else:
                        alter_kwargs = {}
                    op.alter_column(table_name=self.table.name,
                                    column_name=new_col.name,
                                    type_=new_col.type,
                                    schema=self.schema,
                                    **alter_kwargs)
                    log(f"Changed type of column {new_col.name} "
                        f"from {col.type} to {new_col.type} "
                        f'in table {self.table.name} (schema="{self.schema}")')
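The postgresql_using keyword built above corresponds to PostgreSQL's ALTER COLUMN ... TYPE ... USING clause, which tells the server how to cast the existing rows. Stripped of the surrounding logic, a call of that shape (hypothetical table and column names) would be:

import sqlalchemy as sa

op.alter_column("events", "payload",
                type_=sa.Integer(),
                postgresql_using="payload::integer")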
Example #7
def test_standalone_op():
    eng, buf = capture_db()

    env = MigrationContext.configure(eng)
    op = Operations(env)

    op.alter_column("t", "c", nullable=True)
    eq_(buf, ['ALTER TABLE t ALTER COLUMN c DROP NOT NULL'])
Example #8
    def alter_add_foreign_key_to_backtest_summary(self):
        ctx = MigrationContext.configure(self.db_client.connector)
        op = Operations(ctx)

        with op.batch_alter_table(
                self.backtest_management_table_name) as batch_op:
            batch_op.create_foreign_key("fk_management_summary",
                                        "backtest_summary",
                                        ["backtest_summary_id"], ["id"])
Example #9
    def _fixture(self):
        migration_context = mock.Mock(opts={})
        op = Operations(migration_context)
        batch = op.batch_alter_table('tname', recreate='never').__enter__()

        with mock.patch("alembic.operations.sa_schema") as mock_schema:
            yield batch
        batch.impl.flush()
        self.mock_schema = mock_schema
Example #11
File: sanity.py Project: kozec/cum
 def fix(self):
     context = MigrationContext.configure(self.parent.engine.connect())
     op = Operations(context)
     for table in self.parent.base.metadata.sorted_tables:
         if table.name == self.table:
             for column in table.columns:
                 if column.name == self.name:
                     with op.batch_alter_table(table.name) as batch_op:
                         batch_op.add_column(column.copy())
                     return
Example #13
    def _fixture(self, schema=None):
        migration_context = mock.Mock(
            opts={}, impl=mock.MagicMock(__dialect__='sqlite'))
        op = Operations(migration_context)
        batch = op.batch_alter_table('tname', recreate='never',
                                     schema=schema).__enter__()

        mock_schema = mock.MagicMock()
        with mock.patch("alembic.operations.schemaobj.sa_schema", mock_schema):
            yield batch
        batch.impl.flush()
        self.mock_schema = mock_schema
Example #14
def add_columns(engine, raw_diffs, table_names):
    with engine.begin() as conn:
        ctx = get_migration_context(conn, table_names)
        op = Operations(ctx)
        columns = _get_columns_to_add(raw_diffs, table_names)

        for col in columns:
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            op.add_column(table_name, col)
Example #15
def change_column_nullable(table_oid, column_index, nullable, engine):
    table = tables.reflect_table_from_oid(table_oid, engine)
    column = table.columns[column_index]
    with engine.begin() as conn:
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.alter_column(table.name,
                        column.name,
                        nullable=nullable,
                        schema=table.schema)
    return tables.reflect_table_from_oid(table_oid,
                                         engine).columns[column_index]
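The nullable= flag maps directly onto SET NOT NULL / DROP NOT NULL; Example #7 above shows the SQL generated for the nullable=True case. Both directions, as a generic sketch:

op.alter_column("t", "c", nullable=True)   # ALTER TABLE t ALTER COLUMN c DROP NOT NULL
op.alter_column("t", "c", nullable=False)  # ALTER TABLE t ALTER COLUMN c SET NOT NULL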
Example #16
    def _alter_column_name(self, base, old_name, new_name):
        # NOTE: Create alembic connection and operation object! By John Doe

        db_conn = config.ENGINE.connect()
        alembic_ctx = MigrationContext.configure(db_conn)
        alembic_op = Operations(alembic_ctx)
        doc_table = get_doc_table(base.metadata.name, config.METADATA,
                                  **base.relational_fields)
        alembic_op.alter_column(doc_table.name,
                                old_name,
                                new_column_name=new_name)
        db_conn.close()
Example #18
def drop_column(
    engine,
    table_oid,
    column_index,
):
    column_index = int(column_index)
    table = tables.reflect_table_from_oid(table_oid, engine)
    column = table.columns[column_index]
    with engine.begin() as conn:
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.drop_column(table.name, column.name, schema=table.schema)
Example #20
    def _fixture(self):
        self.metadata = MetaData()
        self.table = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
        )

        context = op_fixture(dialect="sqlite", as_sql=True)
        self.op = Operations(context)
        return context
Example #21
def get_db_tables(conn):
    """Get current and default table values from the db.

    :param conn: Initialized database connection object.
    :type conn: object
    :returns: tuple
    """
    query = text("SELECT TABLE_NAME from information_schema.tables\
                  WHERE TABLE_NAME\
                  LIKE '%alembic_version%'\
                  AND table_schema = 'refstack'")
    context = alembic_migration.MigrationContext.configure(conn)
    op = Operations(context)
    connection = op.get_bind()
    search = connection.execute(query)
    result = search.fetchall()
    if isinstance(result, Iterable):
        result = [table[0] for table in result]
    else:
        result = None
    # if there is more than one version table, modify the
    # one that does not have the default name, because subunit2sql uses the
    # default name.
    if result:
        current_name =\
            next((table for table in result if table != "alembic_version"),
                 result[0])
        current_name = current_name.decode('utf-8')
        current_version = get_table_version(conn, current_name)
        default_name =\
            next((table for table in result
                  if table == "alembic_version"), None)
        default_version = get_table_version(conn, default_name)
        if len(result) > 1 and not current_version:
            if not default_name:
                # this is the case where there is more than one
                # nonstandard-named alembic table, and no default
                current_name = next(
                    (table for table in result if table != current_name),
                    result[0])
                current_name = current_name.decode('utf-8')
            elif current_name:
                # this is the case where the current-named table
                # exists, but is empty
                current_name = default_name
                current_version = default_version
        current_table = (current_name, current_version)
        default_table = (default_name, default_version)
    else:
        default_table = (None, None)
        current_table = default_table
    return current_table, default_table
Example #23
File: sanity.py Project: kozec/cum
 def fix(self):
     """Uses Alembic batch operations to alter the column datatype in the table.
     """
     context = MigrationContext.configure(self.parent.engine.connect())
     op = Operations(context)
     for table in self.parent.base.metadata.sorted_tables:
         if table.name == self.table:
             for column in table.columns:
                 if column.name == self.column['name']:
                     with op.batch_alter_table(table.name) as batch_op:
                         batch_op.alter_column(column.name,
                                               type_=column.type)
                     return
Example #25
class PostgresqlInlineLiteralTest(TestBase):
    __only_on__ = "postgresql"
    __backend__ = True

    @classmethod
    def setup_class(cls):
        cls.bind = config.db
        cls.bind.execute(
            """
            create table tab (
                col varchar(50)
            )
        """
        )
        cls.bind.execute(
            """
            insert into tab (col) values
                ('old data 1'),
                ('old data 2.1'),
                ('old data 3')
        """
        )

    @classmethod
    def teardown_class(cls):
        cls.bind.execute("drop table tab")

    def setUp(self):
        self.conn = self.bind.connect()
        ctx = MigrationContext.configure(self.conn)
        self.op = Operations(ctx)

    def tearDown(self):
        self.conn.close()

    def test_inline_percent(self):
        # TODO: here's the issue, you need to escape this.
        tab = table("tab", column("col"))
        self.op.execute(
            tab.update()
            .where(tab.c.col.like(self.op.inline_literal("%.%")))
            .values(col=self.op.inline_literal("new data")),
            execution_options={"no_parameters": True},
        )
        eq_(
            self.conn.execute(
                "select count(*) from tab where col='new data'"
            ).scalar(),
            1,
        )
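op.inline_literal exists for offline (--sql) mode, where no database connection is available to bind parameters: the value is rendered directly into the emitted SQL string. The execution_options={"no_parameters": True} flag appears to be there so the DBAPI does not treat the literal % in the statement as a bind-parameter escape.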
Example #26
    def swap_tables(self):
        """Swap tables around to present the exported data.

        Swaps the current tables to old tables, then swaps write tables to current.
        Finally drops the old tables leaving just the current tables.
        """
        connection = self.engine.connect()
        ctx = MigrationContext.configure(connection)
        op = Operations(ctx)

        def gen_table_names(write_table):
            """Generate current and old table names from write tables."""
            # Current tables do not have the prefix 'w'
            current_table = write_table[1:]
            old_table = current_table + "_old"
            return write_table, current_table, old_table

        tables = dict(Base.metadata.tables)
        tables.pop("kvittering")
        tables = tables.keys()
        tables = list(map(gen_table_names, tables))

        # Drop any left-over old tables that may exist
        with ctx.begin_transaction():
            for _, _, old_table in tables:
                try:
                    op.drop_table(old_table)
                except Exception:
                    pass

        # Rename current to old and write to current
        with ctx.begin_transaction():
            for write_table, current_table, old_table in tables:
                # Rename current table to old table
                # No current tables is OK
                try:
                    op.rename_table(current_table, old_table)
                except Exception:
                    pass
                # Rename write table to current table
                op.rename_table(write_table, current_table)

        # Drop any old tables that may exist
        with ctx.begin_transaction():
            for _, _, old_table in tables:
                # Drop old tables
                try:
                    op.drop_table(old_table)
                except Exception:
                    pass
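Per table, the three transactions above reduce to a simple rename dance; a sketch with hypothetical names ('wdata' being the write table and 'data' the current one):

with ctx.begin_transaction():
    op.rename_table("data", "data_old")   # keep the previous data aside
    op.rename_table("wdata", "data")      # promote the write table
    op.drop_table("data_old")             # discard the old copy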
Example #27
def add_columns(engine, diffs):
    with engine.begin() as conn:
        ctx = get_migration_context(conn)
        op = Operations(ctx)
        col_diffs = _filter_diffs(diffs, [DiffTypes.ADD_NULLABLE_COLUMN])
        for diff in col_diffs:
            col = diff.column
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            op.add_column(table_name, col)

    return col_diffs
Example #28
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_3_7_0_0

    init_model_encryption(db_3_7_0_0)

    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    op.create_table(
        'external_identities',
        sa.Column('provider_name', sa.Unicode(255), primary_key=True),
        sa.Column('local_user_id',
                  sa.Integer(),
                  sa.ForeignKey('users.user_id'),
                  primary_key=True),
        sa.Column('external_id', sa.Unicode(255), primary_key=True),
        sa.Column('external_username', sa.Unicode(1024), default=u''),
        sa.Column('access_token', sa.String(1024), default=u''),
        sa.Column('alt_token', sa.String(1024), default=u''),
        sa.Column('token_secret', sa.String(1024), default=u''))
    op.create_index('local_user_id_idx', 'external_identities',
                    ['local_user_id'])
    op.create_index('external_id_idx', 'external_identities', ['external_id'])
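Note that op.create_table returns the constructed Table object, so it can be handed straight to op.bulk_insert to seed rows (Example #41 below does exactly that). A minimal sketch with a hypothetical table:

t = op.create_table(
    'settings',
    sa.Column('key', sa.Unicode(255), primary_key=True),
    sa.Column('value', sa.Unicode(1024)))
op.bulk_insert(t, [{'key': 'theme', 'value': 'dark'}])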
Example #29
class PostgresqlInlineLiteralTest(TestBase):
    __only_on__ = "postgresql"
    __backend__ = True

    @classmethod
    def setup_class(cls):
        cls.bind = config.db
        with config.db.connect() as conn:
            conn.execute(
                text("""
                create table tab (
                    col varchar(50)
                )
            """))
            conn.execute(
                text("""
                insert into tab (col) values
                    ('old data 1'),
                    ('old data 2.1'),
                    ('old data 3')
            """))

    @classmethod
    def teardown_class(cls):
        with cls.bind.connect() as conn:
            conn.execute(text("drop table tab"))

    def setUp(self):
        self.conn = self.bind.connect()
        ctx = MigrationContext.configure(self.conn)
        self.op = Operations(ctx)

    def tearDown(self):
        self.conn.close()

    def test_inline_percent(self):
        # TODO: here's the issue, you need to escape this.
        tab = table("tab", column("col"))
        self.op.execute(
            tab.update().where(tab.c.col.like(
                self.op.inline_literal("%.%"))).values(
                    col=self.op.inline_literal("new data")),
            execution_options={"no_parameters": True},
        )
        eq_(
            self.conn.execute(
                text(
                    "select count(*) from tab where col='new data'")).scalar(),
            1,
        )
Example #30
    def test_missing_column(self):
        adapter = self._get_adapter()
        adapter.build_table()
        with adapter.engine.begin() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            op.drop_column(adapter.get_table().name, 'name')

        doc = {
            "_id": '123',
            "domain": "domain",
            "doc_type": "CommCareCase",
            "name": 'bob'
        }
        with self.assertRaises(MissingColumnWarning):
            adapter.best_effort_save(doc)
Example #31
    def test_missing_column(self):
        adapter = get_indicator_adapter(self.config, raise_errors=True)
        adapter.build_table()
        with adapter.engine.begin() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            op.drop_column(adapter.get_table().name, 'name')

        doc = {
            "_id": '123',
            "domain": "domain",
            "doc_type": "CommCareCase",
            "name": 'bob'
        }
        with self.assertRaises(MissingColumnWarning):
            adapter.best_effort_save(doc)
Example #32
 def migrate_db(self, eggs_in_order, explain=False):
     opts = {'target_metadata': metadata}
     with Operations.context(
             MigrationContext.configure(connection=Session.connection(),
                                        opts=opts)) as op:
         self.op = op
         return super().migrate_db(eggs_in_order, explain=explain)
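Operations.context is a classmethod context manager: it yields an Operations instance and, for the duration of the block, also installs it behind the module-level alembic.op proxy that migration scripts import (compare Example #46). A reduced sketch, reusing the names from the example above:

opts = {'target_metadata': metadata}
ctx = MigrationContext.configure(connection=Session.connection(), opts=opts)
with Operations.context(ctx) as op:
    op.add_column('t', Column('c', Integer()))  # any Operations method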
Example #33
 def create_fk_constraint(self, fk_constraints: list,
                          const_columns: dict) -> bool:
     """ Get list of foreign keys from static list `fk_constraints` and created it  """
     try:
         conn = self.engine.connect()
         ctx = MigrationContext.configure(conn)
         op = Operations(ctx)
         for constraint in fk_constraints:
             dest_table_name = constraint.pop("table_name")
             column_name = constraint.pop("column_name")
             source_table = constraint.pop("source_table")
             dest_column = constraint.pop("dest_column")
             temp = list(const_columns[source_table])
             if dest_column not in temp:
                 op.create_foreign_key(
                     None,
                     source_table,
                     dest_table_name,
                     [dest_column],
                     [column_name],
                     **constraint,
                 )
         return True
     except Exception as err:
         logger.error("create_fk_constraint [error] -> %s" % err)
         return False
     finally:
         conn.close()
Example #34
    def replace(cls, operations: Operations,
                target, replaces=None, replace_with=None) -> None:

        if replaces:
            old_obj = cls._get_object_from_version(operations, replaces)
            drop_old = cls(old_obj).reverse()
            create_new = cls(target)
        elif replace_with:
            old_obj = cls._get_object_from_version(operations, replace_with)
            drop_old = cls(target).reverse()
            create_new = cls(old_obj)
        else:
            raise TypeError("replaces or replace_with is required")

        operations.invoke(drop_old)
        operations.invoke(create_new)
Example #35
def upgrade(plugin_name=None, revision=None, lower="current"):
    database_url = current_app.config.get("SQLALCHEMY_DATABASE_URI")
    if database_url.startswith("sqlite"):
        current_app.db.create_all()
        return

    if plugin_name is None:
        # Get the directory name of the plugin if unspecified
        # Doing it this way doesn't waste the rest of the inspect.stack call
        frame = inspect.currentframe()
        caller_info = inspect.getframeinfo(frame.f_back)
        caller_path = caller_info[0]
        plugin_name = os.path.basename(os.path.dirname(caller_path))

    # Check if the plugin has migrations
    migrations_path = os.path.join(current_app.plugins_dir, plugin_name,
                                   "migrations")
    if os.path.isdir(migrations_path) is False:
        return

    engine = create_engine(database_url, poolclass=pool.NullPool)
    conn = engine.connect()
    context = MigrationContext.configure(conn)
    op = Operations(context)

    # Find the list of migrations to run
    config = Config()
    config.set_main_option("script_location", migrations_path)
    config.set_main_option("version_locations", migrations_path)
    script = ScriptDirectory.from_config(config)

    # Choose base revision for plugin upgrade
    # "current" points to the current plugin version stored in config
    # None represents the absolute base layer (e.g. first installation)
    if lower == "current":
        lower = get_config(plugin_name + "_alembic_version")

    # Do we upgrade to head or to a specific revision
    if revision is None:
        upper = script.get_current_head()
    else:
        upper = revision

    # Apply from lower to upper
    revs = list(script.iterate_revisions(lower=lower, upper=upper))
    revs.reverse()

    try:
        for r in revs:
            with context.begin_transaction():
                r.module.upgrade(op=op)
            # Set revision that succeeded so we don't need
            # to start from the beginning on failure
            set_config(plugin_name + "_alembic_version", r.revision)
    finally:
        conn.close()

    # Set the new latest revision
    set_config(plugin_name + "_alembic_version", upper)
Example #36
File: db.py Project: jkunle/paul
def get_upgrade_op(session):
    """
    Create a migration context and an operations object for performing upgrades.

    :param session: The SQLAlchemy session object.
    """
    context = MigrationContext.configure(session.bind.connect())
    return Operations(context)
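Usage is then one line per DDL step; a hypothetical call site (table and column names are made up for illustration):

from sqlalchemy import Column, Integer

op = get_upgrade_op(session)
op.add_column('songs', Column('play_count', Integer(), default=0))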
Example #37
def update_if_needed(db_location):
    logger.debug('update_if_needed')
    script_directory = get_script_directory()

    def upgrade(rev, context):
        revision = 'head'
        return script_directory._upgrade_revs(revision, rev)

    context = get_migration_context(db_location, script_directory,
                                    {'fn': upgrade})

    op = Operations(context)
    op._install_proxy()

    with context.begin_transaction():
        context.run_migrations()
    context.connection.close()
Example #38
    def rename_table(self, from_name, to_name, fields=None, keep_table=True):
        """Rename table.
        """
        from alembic.migration import MigrationContext
        from alembic.operations import Operations

        # Refresh the connection again
        self._engine = create_engine(self._connection)
        conn = self._engine.connect()
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.rename_table(from_name, to_name)

        if keep_table:
            assert fields is not None, (
                "Fields must be provided to create the table")
            self.create_table(table_name=from_name, fields=fields)
Example #39
    def _alter_column_name(self, base, old_name, new_name):
        # NOTE: Create alembic connection and operation object! By John Doe

        db_conn=config.ENGINE.connect()
        alembic_ctx=MigrationContext.configure(db_conn)
        alembic_op=Operations(alembic_ctx)
        doc_table=get_doc_table(
            base.metadata.name,
            config.METADATA,
            **base.relational_fields
        )
        alembic_op.alter_column(
            doc_table.name,
            old_name,
            new_column_name=new_name
        )
        db_conn.close()
Example #40
 def _make_alembic_op(self, conn):
     '''
     Return an Operations instance, used to generate DDL statements.
     '''
     migration_ctx = MigrationContext.configure(conn)
     op = Operations(migration_ctx)
     return op
Example #41
def alembic_tests():

    print('Alembic tests')
    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)

    try:
        op.drop_table('waste')
    except Exception:
        pass

    t = op.create_table(
        'waste',
        Column('bools', sa.Boolean),
        Column('ubytes', sa.Tinyint),
        Column('shorts', sa.SmallInteger),
        Column('ints', sa.Integer),
        Column('bigints', sa.BigInteger),
        Column('floats', sa.REAL),
        Column('doubles', sa.Float),
        Column('dates', sa.Date),
        Column('datetimes', sa.DateTime),
        Column('varchars', sa.String(10)),
        Column('nvarchars', sa.UnicodeText),
        Column('numerics', sa.Numeric(38, 10)),
    )

    data = [{
        'bools': True,
        'ubytes': 5,
        'shorts': 55,
        'ints': 555,
        'bigints': 5555,
        'floats': 5.0,
        'doubles': 5.5555555,
        'dates': date(2012, 11, 23),
        'datetimes': datetime(2012, 11, 23, 16, 34, 56),
        'varchars': 'bla',
        'nvarchars': 'bla2',
        'numerics': Decimal("1.1")
    }, {
        'bools': False,
        'ubytes': 6,
        'shorts': 66,
        'ints': 666,
        'bigints': 6666,
        'floats': 6.0,
        'doubles': 6.6666666,
        'dates': date(2012, 11, 24),
        'datetimes': datetime(2012, 11, 24, 16, 34, 57),
        'varchars': 'bla',
        'nvarchars': 'bla2',
        'numerics': Decimal("-1.1")
    }]

    op.bulk_insert(t, data)

    res = engine.execute('select * from waste').fetchall()
    assert (res == [tuple(dikt.values()) for dikt in data])
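A hedged compatibility note: engine.execute is the legacy SQLAlchemy 1.x API and was removed in 2.0, where the equivalent read would be:

from sqlalchemy import text

with engine.connect() as conn:
    res = conn.execute(text('select * from waste')).fetchall()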
Example #42
def recheck_alembic_table(conn):
    """check and update alembic version table.

    Should check current alembic version table against conf and rename the
    existing table if the two values don't match.
    """
    conf_table = getattr(CONF, 'version_table')
    conf_table_version = get_table_version(conn, conf_table)
    current_table, default_table = get_db_tables(conn)
    if current_table[0]:
        if current_table[0] != conf_table:
            context = alembic_migration.MigrationContext.configure(conn)
            op = Operations(context)
            if conf_table and not conf_table_version:
                # make sure there is not present-but-empty table
                # that will prevent us from renaming the current table
                op.drop_table(conf_table)
            op.rename_table(current_table[0], conf_table)
Example #43
def add_columns(engine, raw_diffs, table_names):
    changes = defaultdict(list)
    with engine.begin() as conn:
        ctx = get_migration_context(conn, table_names)
        op = Operations(ctx)
        columns = _get_columns_to_add(raw_diffs, table_names)
        for col in columns:
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            changes[table_name].append({
                'type': DiffTypes.ADD_COLUMN,
                'value': col.name
            })
            op.add_column(table_name, col)

    return dict(changes)
Example #44
class PostgresqlInlineLiteralTest(TestCase):

    @classmethod
    def setup_class(cls):
        cls.bind = db_for_dialect("postgresql")
        cls.bind.execute("""
            create table tab (
                col varchar(50)
            )
        """)
        cls.bind.execute("""
            insert into tab (col) values
                ('old data 1'),
                ('old data 2.1'),
                ('old data 3')
        """)

    @classmethod
    def teardown_class(cls):
        cls.bind.execute("drop table tab")

    def setUp(self):
        self.conn = self.bind.connect()
        ctx = MigrationContext.configure(self.conn)
        self.op = Operations(ctx)

    def tearDown(self):
        self.conn.close()

    def test_inline_percent(self):
        # TODO: here's the issue, you need to escape this.
        tab = table('tab', column('col'))
        self.op.execute(
            tab.update().where(
                tab.c.col.like(self.op.inline_literal('%.%'))
            ).values(col=self.op.inline_literal('new data')),
            execution_options={'no_parameters': True}
        )
        eq_(
            self.conn.execute(
                "select count(*) from tab where col='new data'").scalar(),
            1,
        )
Example #46
    def run_migrations(self, **kw):
        """Run migrations as determined by the current command line
        configuration
        as well as versioning information present (or not) in the current
        database connection (if one is present).

        The function accepts optional ``**kw`` arguments.   If these are
        passed, they are sent directly to the ``upgrade()`` and
        ``downgrade()``
        functions within each target revision file.   By modifying the
        ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
        functions accept arguments, parameters can be passed here so that
        contextual information, usually information to identify a particular
        database in use, can be passed from a custom ``env.py`` script
        to the migration functions.

        This function requires that a :class:`.MigrationContext` has
        first been made available via :meth:`.configure`.

        """
        with Operations.context(self._migration_context):
            self.get_context().run_migrations(**kw)
Example #48
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
            mysql_engine='InnoDB'
        )
        t1.create(self.conn)

        self.conn.execute(
            t1.insert(),
            [
                {"id": 1, "data": "d1", "x": 5},
                {"id": 2, "data": "22", "x": 6},
                {"id": 3, "data": "8.5", "x": 7},
                {"id": 4, "data": "9.46", "x": 8},
                {"id": 5, "data": "d5", "x": 9}
            ]
        )
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    def _no_pk_fixture(self):
        nopk = Table(
            'nopk', self.metadata,
            Column('a', Integer),
            Column('b', Integer),
            Column('c', Integer),
            mysql_engine='InnoDB'
        )
        nopk.create(self.conn)
        self.conn.execute(
            nopk.insert(),
            [
                {"a": 1, "b": 2, "c": 3},
                {"a": 2, "b": 4, "c": 5},
            ]

        )
        return nopk

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data, tablename='foo'):
        eq_(
            [dict(row) for row
             in self.conn.execute("select * from %s" % tablename)],
            data
        )

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([
            {"id": 1, "data": 0, "x": 5},
            {"id": 2, "data": 22, "x": 6},
            {"id": 3, "data": 8, "x": 7},
            {"id": 4, "data": 9, "x": 8},
            {"id": 5, "data": 0, "x": 9}
        ])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_add_pk_constraint(self):
        self._no_pk_fixture()
        with self.op.batch_alter_table("nopk", recreate="always") as batch_op:
            batch_op.create_primary_key('newpk', ['a', 'b'])

        pk_const = Inspector.from_engine(self.conn).get_pk_constraint('nopk')
        with config.requirements.reflects_pk_names.fail_if():
            eq_(pk_const['name'], 'newpk')
        eq_(pk_const['constrained_columns'], ['a', 'b'])

    @config.requirements.check_constraints_w_enforcement
    def test_add_ck_constraint(self):
        with self.op.batch_alter_table("foo", recreate="always") as batch_op:
            batch_op.create_check_constraint("newck", text("x > 0"))

        # we don't support reflection of CHECK constraints
        # so test this by just running invalid data in
        foo = self.metadata.tables['foo']

        assert_raises_message(
            exc.IntegrityError,
            "newck",
            self.conn.execute,
            foo.insert(), {"id": 6, "data": 5, "x": -2}
        )

    @config.requirements.sqlalchemy_094
    @config.requirements.unnamed_constraints
    def test_drop_foreign_key(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        naming_convention = {
            "fk":
            "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        }
        with self.op.batch_alter_table(
                "bar", naming_convention=naming_convention) as batch_op:
            batch_op.drop_constraint(
                "fk_bar_foo_id_foo", type_="foreignkey")
        eq_(
            Inspector.from_engine(self.conn).get_foreign_keys('bar'),
            []
        )

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([
            {"id": 1, "data": "d1", "y": 5},
            {"id": 2, "data": "22", "y": 6},
            {"id": 3, "data": "8.5", "y": 7},
            {"id": 4, "data": "9.46", "y": 8},
            {"id": 5, "data": "d5", "y": 9}
        ])

    def test_rename_column_boolean(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean()),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})

        with self.op.batch_alter_table(
            "bar"
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
        ], 'bar')

    @config.requirements.non_native_boolean
    def test_rename_column_non_native_boolean_no_ck(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean(create_constraint=False)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})
        self.conn.execute(bar.insert(), {'id': 3, 'flag': 5})

        with self.op.batch_alter_table(
            "bar",
            reflect_args=[Column('flag', Boolean(create_constraint=False))]
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
            {'id': 3, 'bflag': 5}
        ], 'bar')

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([
            {"data": "d1", "x": 5},
            {"data": "22", "x": 6},
            {"data": "8.5", "x": 7},
            {"data": "9.46", "x": 8},
            {"data": "d5", "x": 9}
        ])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([
            {"ident": 1, "data": "d1", "x": 5},
            {"ident": 2, "data": "22", "x": 6},
            {"ident": 3, "data": "8.5", "x": 7},
            {"ident": 4, "data": "9.46", "x": 8},
            {"ident": 5, "data": "d5", "x": 9}
        ])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_create_drop_index(self):
        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5},
            {"id": 2, "data": "22", "x": 6},
            {"id": 3, "data": "8.5", "x": 7},
            {"id": 4, "data": "9.46", "x": 8},
            {"id": 5, "data": "d5", "x": 9}
        ])

        insp = Inspector.from_engine(config.db)
        eq_(
            [
                dict(unique=ix['unique'],
                     name=ix['name'],
                     column_names=ix['column_names'])
                for ix in insp.get_indexes('foo')
            ],
            [{'unique': True, 'name': 'ix_data', 'column_names': ['data']}]
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )
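A note on the recreate= argument used throughout this class: it accepts 'auto' (the default, which copies and recreates the table only when the backend cannot do the change with ALTER), 'always', and 'never', matching the three code paths these tests exercise.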
Example #49
class CopyFromTest(TestBase):
    __requires__ = ('sqlalchemy_08', )

    def _fixture(self):
        self.metadata = MetaData()
        self.table = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
        )

        context = op_fixture(dialect="sqlite", as_sql=True)
        self.op = Operations(context)
        return context

    def test_change_type(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.alter_column('data', type_=Integer)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data INTEGER, x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS INTEGER) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

    def test_create_drop_index_w_always(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table, recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), '
            'x INTEGER, PRIMARY KEY (id))',
            'CREATE UNIQUE INDEX ix_data ON _alembic_batch_temp (data)',
            'INSERT INTO _alembic_batch_temp (id, data, x) '
            'SELECT foo.id, foo.data, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table(
                "foo", copy_from=self.table, recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR(50), x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) '
            'SELECT foo.id, foo.data, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

    def test_create_drop_index_wo_always(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE UNIQUE INDEX ix_data ON foo (data)'
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.drop_index('ix_data')

        context.assert_(
            'DROP INDEX ix_data'
        )

    def test_create_drop_index_w_other_ops(self):
        context = self._fixture()
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.alter_column('data', type_=Integer)
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data INTEGER, x INTEGER, PRIMARY KEY (id))',
            'CREATE UNIQUE INDEX ix_data ON _alembic_batch_temp (data)',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS INTEGER) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )

        context.clear_assertions()

        Index('ix_data', self.table.c.data, unique=True)
        with self.op.batch_alter_table(
                "foo", copy_from=self.table) as batch_op:
            batch_op.drop_index('ix_data')
            batch_op.alter_column('data', type_=String)

        context.assert_(
            'CREATE TABLE _alembic_batch_temp (id INTEGER NOT NULL, '
            'data VARCHAR, x INTEGER, PRIMARY KEY (id))',
            'INSERT INTO _alembic_batch_temp (id, data, x) SELECT foo.id, '
            'CAST(foo.data AS VARCHAR) AS anon_1, foo.x FROM foo',
            'DROP TABLE foo',
            'ALTER TABLE _alembic_batch_temp RENAME TO foo'
        )
Example #50
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
            mysql_engine='InnoDB'
        )
        t1.create(self.conn)

        self.conn.execute(
            t1.insert(),
            [
                {"id": 1, "data": "d1", "x": 5},
                {"id": 2, "data": "22", "x": 6},
                {"id": 3, "data": "8.5", "x": 7},
                {"id": 4, "data": "9.46", "x": 8},
                {"id": 5, "data": "d5", "x": 9}
            ]
        )
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    @contextmanager
    def _sqlite_referential_integrity(self):
        self.conn.execute("PRAGMA foreign_keys=ON")
        try:
            yield
        finally:
            self.conn.execute("PRAGMA foreign_keys=OFF")

    def _no_pk_fixture(self):
        nopk = Table(
            'nopk', self.metadata,
            Column('a', Integer),
            Column('b', Integer),
            Column('c', Integer),
            mysql_engine='InnoDB'
        )
        nopk.create(self.conn)
        self.conn.execute(
            nopk.insert(),
            [
                {"a": 1, "b": 2, "c": 3},
                {"a": 2, "b": 4, "c": 5},
            ]

        )
        return nopk

    def _table_w_index_fixture(self):
        t = Table(
            't_w_ix', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('thing', Integer),
            Column('data', String(20)),
        )
        Index('ix_thing', t.c.thing)
        t.create(self.conn)
        return t

    def _boolean_fixture(self):
        t = Table(
            'hasbool', self.metadata,
            Column('x', Boolean(create_constraint=True, name='ck1')),
            Column('y', Integer)
        )
        t.create(self.conn)

    def _timestamp_fixture(self):
        t = Table(
            'hasts', self.metadata,
            Column('x', DateTime()),
        )
        t.create(self.conn)
        return t

    def _int_to_boolean_fixture(self):
        t = Table(
            'hasbool', self.metadata,
            Column('x', Integer)
        )
        t.create(self.conn)

    def test_change_type_boolean_to_int(self):
        self._boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.alter_column(
                'x', type_=Integer, existing_type=Boolean(
                    create_constraint=True, name='ck1'))
        insp = Inspector.from_engine(config.db)

        eq_(
            [c['type']._type_affinity for c in insp.get_columns('hasbool')
             if c['name'] == 'x'],
            [Integer]
        )

    def test_no_net_change_timestamp(self):
        t = self._timestamp_fixture()

        import datetime
        self.conn.execute(
            t.insert(),
            {"x": datetime.datetime(2012, 5, 18, 15, 32, 5)}
        )

        with self.op.batch_alter_table("hasts") as batch_op:
            batch_op.alter_column("x", type_=DateTime())

        eq_(
            self.conn.execute(select([t.c.x])).fetchall(),
            [(datetime.datetime(2012, 5, 18, 15, 32, 5),)]
        )

    def test_drop_col_schematype(self):
        self._boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.drop_column('x')
        insp = Inspector.from_engine(config.db)

        assert 'x' not in (c['name'] for c in insp.get_columns('hasbool'))

    def test_change_type_int_to_boolean(self):
        self._int_to_boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.alter_column(
                'x', type_=Boolean(create_constraint=True, name='ck1'))
        insp = Inspector.from_engine(config.db)

        if exclusions.against(config, "sqlite"):
            eq_(
                [c['type']._type_affinity for
                 c in insp.get_columns('hasbool') if c['name'] == 'x'],
                [Boolean]
            )
        elif exclusions.against(config, "mysql"):
            eq_(
                [c['type']._type_affinity for
                 c in insp.get_columns('hasbool') if c['name'] == 'x'],
                [Integer]
            )

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data, tablename='foo'):
        eq_(
            [dict(row) for row
             in self.conn.execute("select * from %s" % tablename)],
            data
        )

    def test_ix_existing(self):
        self._table_w_index_fixture()

        with self.op.batch_alter_table("t_w_ix") as batch_op:
            batch_op.alter_column('data', type_=String(30))
            batch_op.create_index("ix_data", ["data"])

        insp = Inspector.from_engine(config.db)
        eq_(
            set(
                (ix['name'], tuple(ix['column_names'])) for ix in
                insp.get_indexes('t_w_ix')
            ),
            set([
                ('ix_data', ('data',)),
                ('ix_thing', ('thing', ))
            ])
        )

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    @exclusions.only_on("sqlite")
    @exclusions.fails(
        "intentionally asserting that this "
        "doesn't work w/ pragma foreign keys")
    def test_fk_points_to_me_sqlite_refinteg(self):
        with self._sqlite_referential_integrity():
            self._test_fk_points_to_me("auto")

    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_(
            [(key['referred_table'],
             key['referred_columns'], key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('foo', ['id'], ['foo_id'])]
        )

    def test_selfref_fk_auto(self):
        self._test_selfref_fk("auto")

    @config.requirements.no_referential_integrity
    def test_selfref_fk_recreate(self):
        self._test_selfref_fk("always")

    @exclusions.only_on("sqlite")
    @exclusions.fails(
        "intentionally asserting that this "
        "doesn't work w/ pragma foreign keys")
    def test_selfref_fk_sqlite_refinteg(self):
        with self._sqlite_referential_integrity():
            self._test_selfref_fk("auto")

    def _test_selfref_fk(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('bar_id', Integer, ForeignKey('bar.id')),
            Column('data', String(50)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'data': 'x', 'bar_id': None})
        self.conn.execute(bar.insert(), {'id': 2, 'data': 'y', 'bar_id': 1})

        with self.op.batch_alter_table("bar", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_(
            [(key['referred_table'],
             key['referred_columns'], key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('bar', ['id'], ['bar_id'])]
        )

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([
            {"id": 1, "data": 0, "x": 5},
            {"id": 2, "data": 22, "x": 6},
            {"id": 3, "data": 8, "x": 7},
            {"id": 4, "data": 9, "x": 8},
            {"id": 5, "data": 0, "x": 9}
        ])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_add_pk_constraint(self):
        self._no_pk_fixture()
        with self.op.batch_alter_table("nopk", recreate="always") as batch_op:
            batch_op.create_primary_key('newpk', ['a', 'b'])

        pk_const = Inspector.from_engine(self.conn).get_pk_constraint('nopk')
        with config.requirements.reflects_pk_names.fail_if():
            eq_(pk_const['name'], 'newpk')
        eq_(pk_const['constrained_columns'], ['a', 'b'])

    @config.requirements.check_constraints_w_enforcement
    def test_add_ck_constraint(self):
        with self.op.batch_alter_table("foo", recreate="always") as batch_op:
            batch_op.create_check_constraint("newck", text("x > 0"))

        # we dont support reflection of CHECK constraints
        # so test this by just running invalid data in
        foo = self.metadata.tables['foo']

        assert_raises_message(
            exc.IntegrityError,
            "newck",
            self.conn.execute,
            foo.insert(), {"id": 6, "data": 5, "x": -2}
        )

    @config.requirements.sqlalchemy_094
    @config.requirements.unnamed_constraints
    def test_drop_foreign_key(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        naming_convention = {
            "fk":
            "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        }
        with self.op.batch_alter_table(
                "bar", naming_convention=naming_convention) as batch_op:
            batch_op.drop_constraint(
                "fk_bar_foo_id_foo", type_="foreignkey")
        eq_(
            Inspector.from_engine(self.conn).get_foreign_keys('bar'),
            []
        )

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([
            {"id": 1, "data": "d1", "y": 5},
            {"id": 2, "data": "22", "y": 6},
            {"id": 3, "data": "8.5", "y": 7},
            {"id": 4, "data": "9.46", "y": 8},
            {"id": 5, "data": "d5", "y": 9}
        ])

    def test_rename_column_boolean(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean()),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})

        with self.op.batch_alter_table(
            "bar"
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
        ], 'bar')

    @config.requirements.non_native_boolean
    def test_rename_column_non_native_boolean_no_ck(self):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean(create_constraint=False)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'flag': True})
        self.conn.execute(bar.insert(), {'id': 2, 'flag': False})
        self.conn.execute(
            # override Boolean type which as of 1.1 coerces numerics
            # to 1/0
            text("insert into bar (id, flag) values (:id, :flag)"),
            {'id': 3, 'flag': 5})

        with self.op.batch_alter_table(
            "bar",
            reflect_args=[Column('flag', Boolean(create_constraint=False))]
        ) as batch_op:
            batch_op.alter_column(
                'flag', new_column_name='bflag', existing_type=Boolean)

        self._assert_data([
            {"id": 1, 'bflag': True},
            {"id": 2, 'bflag': False},
            {'id': 3, 'bflag': 5}
        ], 'bar')

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([
            {"data": "d1", "x": 5},
            {"data": "22", "x": 6},
            {"data": "8.5", "x": 7},
            {"data": "9.46", "x": 8},
            {"data": "d5", "x": 9}
        ])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([
            {"ident": 1, "data": "d1", "x": 5},
            {"ident": 2, "data": "22", "x": 6},
            {"ident": 3, "data": "8.5", "x": 7},
            {"ident": 4, "data": "9.46", "x": 8},
            {"ident": 5, "data": "d5", "x": 9}
        ])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_create_drop_index(self):
        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5},
            {"id": 2, "data": "22", "x": 6},
            {"id": 3, "data": "8.5", "x": 7},
            {"id": 4, "data": "9.46", "x": 8},
            {"id": 5, "data": "d5", "x": 9}
        ])

        insp = Inspector.from_engine(config.db)
        eq_(
            [
                dict(unique=ix['unique'],
                     name=ix['name'],
                     column_names=ix['column_names'])
                for ix in insp.get_indexes('foo')
            ],
            [{'unique': True, 'name': 'ix_data', 'column_names': ['data']}]
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )
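# Why this class toggles "PRAGMA foreign_keys": SQLite enforces foreign keys
# only while the pragma is ON, and batch mode's move-and-copy (DROP the old
# table, RENAME the temp table) then fails for a table that other rows still
# reference -- exactly what test_fk_points_to_me_sqlite_refinteg asserts.
# A sketch of that failure mode using only the stdlib (table names mirror
# the fixtures; this is an illustration, not part of the test suite):
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE foo (id INTEGER PRIMARY KEY);
    CREATE TABLE bar (id INTEGER PRIMARY KEY,
                      foo_id INTEGER REFERENCES foo(id));
    INSERT INTO foo VALUES (1);
    INSERT INTO bar VALUES (1, 1);
""")
conn.execute("PRAGMA foreign_keys=ON")
try:
    # the DROP step of move-and-copy violates bar's reference to foo
    conn.executescript("""
        CREATE TABLE _alembic_batch_temp (id INTEGER PRIMARY KEY);
        INSERT INTO _alembic_batch_temp SELECT id FROM foo;
        DROP TABLE foo;
        ALTER TABLE _alembic_batch_temp RENAME TO foo;
    """)
except sqlite3.IntegrityError as err:
    print("recreate failed as expected:", err)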
Example n. 51
def setUp(self):
    # assumes the enclosing test base class provides ``self.bind`` (an Engine)
    self.conn = self.bind.connect()
    ctx = MigrationContext.configure(self.conn)
    self.op = Operations(ctx)
Example n. 52
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    engine = engine_from_config(
                config.get_section(config.config_ini_section),
                prefix='sqlalchemy.',
                poolclass=pool.NullPool)

    logger.info('Testing for an old alembic_version table.')

    connection = engine.connect()
    context.configure(
                connection=connection,
                target_metadata=target_metadata,
                version_table='alembic_version'
                )

    script_location = config.get_main_option('script_location')
    found = False
    mc = context.get_context()
    current_db_revision = mc.get_current_revision()
    script = ScriptDirectory.from_config(config)
    """ If there was an existing alembic_version table, we need to
    check that it's current revision is in the history for the tree
    we're working with.
    """
    for x in script.iterate_revisions('head', 'base'):
        if x.revision == current_db_revision:
            """ An alembic_versions table was found and it belongs to
            this alembic tree
            """
            logger.info(
                'An old alembic_version table at revision %s was '
                'found for %s.  Renaming to alembic_version_%s.',
                current_db_revision, script_location, script_location)
            op = Operations(mc)
            try:
                with context.begin_transaction():
                    op.rename_table(
                        'alembic_version', 'alembic_version_%s'
                        % script_location)
                found = True
            except Exception:
                logger.error(('Unable to rename alembic_version to '
                             'alembic_version_%s.'),
                             script_location)
                connection.close()
                return

            break

    if not found:
        logger.info("Didn't find an old alembic_version table.")
    logger.info('Trying alembic_version_%s.', script_location)

    """ We MAY have an alembic_version table that doesn't belong to
    this tree but if we still don't have an alembic_version_<tree>
    table, alembic will create it.
    """
    context.configure(
                connection=connection,
                target_metadata=target_metadata,
                version_table='alembic_version_' + script_location
                )
    mc = context.get_context()
    current_db_revision = mc.get_current_revision()
    if current_db_revision:
        logger.info(
            'Using the alembic_version_%s table at revision %s.',
            script_location, current_db_revision)
    else:
        logger.info('Creating new alembic_version_%s table.',
                    script_location)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
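# Example 52 above is the "online" half of an env.py; the conventional tail
# of such a file dispatches on offline mode.  A sketch of that convention
# (the offline branch follows the stock alembic template and is not part of
# the original snippet; config and target_metadata come from module scope):
def run_migrations_offline():
    """Run migrations in 'offline' mode, emitting SQL without an Engine."""
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()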
Example n. 53
class BatchRoundTripTest(TestBase):
    __requires__ = ('sqlalchemy_08', )
    __only_on__ = "sqlite"

    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
            mysql_engine='InnoDB'
        )
        t1.create(self.conn)

        self.conn.execute(
            t1.insert(),
            [
                {"id": 1, "data": "d1", "x": 5},
                {"id": 2, "data": "22", "x": 6},
                {"id": 3, "data": "8.5", "x": 7},
                {"id": 4, "data": "9.46", "x": 8},
                {"id": 5, "data": "d5", "x": 9}
            ]
        )
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)

    def tearDown(self):
        self.metadata.drop_all(self.conn)
        self.conn.close()

    def _assert_data(self, data):
        eq_(
            [dict(row) for row in self.conn.execute("select * from foo")],
            data
        )

    def test_fk_points_to_me_auto(self):
        self._test_fk_points_to_me("auto")

    # in particular, this tests that the failures
    # on PG and MySQL result in recovery of the batch system,
    # e.g. that the _alembic_batch_temp table is dropped
    @config.requirements.no_referential_integrity
    def test_fk_points_to_me_recreate(self):
        self._test_fk_points_to_me("always")

    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

    def test_change_type(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('data', type_=Integer)

        self._assert_data([
            {"id": 1, "data": 0, "x": 5},
            {"id": 2, "data": 22, "x": 6},
            {"id": 3, "data": 8, "x": 7},
            {"id": 4, "data": 9, "x": 8},
            {"id": 5, "data": 0, "x": 9}
        ])

    def test_drop_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_drop_column_fk_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_column('data')

        self._assert_data([
            {"id": 1, "x": 5},
            {"id": 2, "x": 6},
            {"id": 3, "x": 7},
            {"id": 4, "x": 8},
            {"id": 5, "x": 9}
        ])

    def test_rename_column(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('x', new_column_name='y')

        self._assert_data([
            {"id": 1, "data": "d1", "y": 5},
            {"id": 2, "data": "22", "y": 6},
            {"id": 3, "data": "8.5", "y": 7},
            {"id": 4, "data": "9.46", "y": 8},
            {"id": 5, "data": "d5", "y": 9}
        ])

    def test_drop_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.drop_column('id')

        self._assert_data([
            {"data": "d1", "x": 5},
            {"data": "22", "x": 6},
            {"data": "8.5", "x": 7},
            {"data": "9.46", "x": 8},
            {"data": "d5", "x": 9}
        ])

    def test_rename_column_pk(self):
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.alter_column('id', new_column_name='ident')

        self._assert_data([
            {"ident": 1, "data": "d1", "x": 5},
            {"ident": 2, "data": "22", "x": 6},
            {"ident": 3, "data": "8.5", "x": 7},
            {"ident": 4, "data": "9.46", "x": 8},
            {"ident": 5, "data": "d5", "x": 9}
        ])

    def test_add_column_auto(self):
        # note this uses ALTER
        with self.op.batch_alter_table("foo") as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])

    def test_add_column_recreate(self):
        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.add_column(
                Column('data2', String(50), server_default='hi'))

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5, 'data2': 'hi'},
            {"id": 2, "data": "22", "x": 6, 'data2': 'hi'},
            {"id": 3, "data": "8.5", "x": 7, 'data2': 'hi'},
            {"id": 4, "data": "9.46", "x": 8, 'data2': 'hi'},
            {"id": 5, "data": "d5", "x": 9, 'data2': 'hi'}
        ])
Example n. 54
def setUp(self):
    self.op = Operations(mock.Mock(opts={}))
Example n. 55
class BatchApplyTest(TestBase):
    __requires__ = ('sqlalchemy_08', )

    def setUp(self):
        self.op = Operations(mock.Mock(opts={}))

    def _simple_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('x', String(10)),
            Column('y', Integer)
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _uq_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('x', String()),
            Column('y', Integer),
            UniqueConstraint('y', name='uq1')
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _ix_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('x', String()),
            Column('y', Integer),
            Index('ix1', 'y')
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _fk_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('email', String()),
            Column('user_id', Integer, ForeignKey('user.id'))
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _named_fk_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('email', String()),
            Column('user_id', Integer, ForeignKey('user.id', name='ufk'))
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _selfref_fk_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('parent_id', Integer, ForeignKey('tname.id')),
            Column('data', String)
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _assert_impl(self, impl, colnames=None,
                     ddl_contains=None, ddl_not_contains=None,
                     dialect='default'):
        context = op_fixture(dialect=dialect)

        impl._create(context.impl)

        if colnames is None:
            colnames = ['id', 'x', 'y']
        eq_(impl.new_table.c.keys(), colnames)

        pk_cols = [col for col in impl.new_table.c if col.primary_key]
        eq_(list(impl.new_table.primary_key), pk_cols)

        create_stmt = str(
            CreateTable(impl.new_table).compile(dialect=context.dialect))
        create_stmt = re.sub(r'[\n\t]', '', create_stmt)

        idx_stmt = ""
        for idx in impl.new_table.indexes:
            idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
        idx_stmt = re.sub(r'[\n\t]', '', idx_stmt)

        if ddl_contains:
            assert ddl_contains in create_stmt + idx_stmt
        if ddl_not_contains:
            assert ddl_not_contains not in create_stmt + idx_stmt

        expected = [
            create_stmt,
        ]
        if impl.new_table.indexes:
            expected.append(idx_stmt)

        expected.extend([
            'INSERT INTO _alembic_batch_temp (%(colnames)s) '
            'SELECT %(tname_colnames)s FROM tname' % {
                "colnames": ", ".join([
                    impl.new_table.c[name].name
                    for name in colnames
                    if name in impl.table.c]),
                "tname_colnames":
                ", ".join(
                    "CAST(tname.%s AS %s) AS anon_1" % (
                        name, impl.new_table.c[name].type)
                    if (
                        impl.new_table.c[name].type
                        is not impl.table.c[name].type)
                    else "tname.%s" % name
                    for name in colnames if name in impl.table.c
                )
            },
            'DROP TABLE tname',
            'ALTER TABLE _alembic_batch_temp RENAME TO tname'
        ])
        context.assert_(*expected)
        return impl.new_table

    def test_change_type(self):
        impl = self._simple_fixture()
        impl.alter_column('tname', 'x', type_=Integer)
        new_table = self._assert_impl(impl)
        assert new_table.c.x.type._type_affinity is Integer

    def test_rename_col(self):
        impl = self._simple_fixture()
        impl.alter_column('tname', 'x', name='q')
        new_table = self._assert_impl(impl)
        eq_(new_table.c.x.name, 'q')

    def test_add_col(self):
        impl = self._simple_fixture()
        col = Column('g', Integer)
        # operations.add_column produces a table
        t = self.op._table('tname', col)  # noqa
        impl.add_column('tname', col)
        new_table = self._assert_impl(impl, colnames=['id', 'x', 'y', 'g'])
        eq_(new_table.c.g.name, 'g')

    def test_rename_col_pk(self):
        impl = self._simple_fixture()
        impl.alter_column('tname', 'id', name='foobar')
        new_table = self._assert_impl(
            impl, ddl_contains="PRIMARY KEY (foobar)")
        eq_(new_table.c.id.name, 'foobar')
        eq_(list(new_table.primary_key), [new_table.c.id])

    def test_rename_col_fk(self):
        impl = self._fk_fixture()
        impl.alter_column('tname', 'user_id', name='foobar')
        new_table = self._assert_impl(
            impl, colnames=['id', 'email', 'user_id'],
            ddl_contains='FOREIGN KEY(foobar) REFERENCES "user" (id)')
        eq_(new_table.c.user_id.name, 'foobar')
        eq_(
            list(new_table.c.user_id.foreign_keys)[0]._get_colspec(),
            "user.id"
        )

    def test_drop_col(self):
        impl = self._simple_fixture()
        impl.drop_column('tname', column('x'))
        new_table = self._assert_impl(impl, colnames=['id', 'y'])
        assert 'y' in new_table.c
        assert 'x' not in new_table.c

    def test_drop_col_remove_pk(self):
        impl = self._simple_fixture()
        impl.drop_column('tname', column('id'))
        new_table = self._assert_impl(
            impl, colnames=['x', 'y'], ddl_not_contains="PRIMARY KEY")
        assert 'y' in new_table.c
        assert 'id' not in new_table.c
        assert not new_table.primary_key

    def test_drop_col_remove_fk(self):
        impl = self._fk_fixture()
        impl.drop_column('tname', column('user_id'))
        new_table = self._assert_impl(
            impl, colnames=['id', 'email'], ddl_not_contains="FOREIGN KEY")
        assert 'user_id' not in new_table.c
        assert not new_table.foreign_keys

    def test_drop_col_retain_fk(self):
        impl = self._fk_fixture()
        impl.drop_column('tname', column('email'))
        new_table = self._assert_impl(
            impl, colnames=['id', 'user_id'],
            ddl_contains='FOREIGN KEY(user_id) REFERENCES "user" (id)')
        assert 'email' not in new_table.c
        assert new_table.c.user_id.foreign_keys

    def test_drop_col_retain_fk_selfref(self):
        impl = self._selfref_fk_fixture()
        impl.drop_column('tname', column('data'))
        new_table = self._assert_impl(impl, colnames=['id', 'parent_id'])
        assert 'data' not in new_table.c
        assert new_table.c.parent_id.foreign_keys

    def test_add_fk(self):
        impl = self._simple_fixture()
        impl.add_column('tname', Column('user_id', Integer))
        fk = self.op._foreign_key_constraint(
            'fk1', 'tname', 'user',
            ['user_id'], ['id'])
        impl.add_constraint(fk)
        new_table = self._assert_impl(
            impl, colnames=['id', 'x', 'y', 'user_id'],
            ddl_contains='CONSTRAINT fk1 FOREIGN KEY(user_id) '
            'REFERENCES "user" (id)')
        eq_(
            list(new_table.c.user_id.foreign_keys)[0]._get_colspec(),
            'user.id'
        )

    def test_drop_fk(self):
        impl = self._named_fk_fixture()
        fk = ForeignKeyConstraint([], [], name='ufk')
        impl.drop_constraint(fk)
        new_table = self._assert_impl(
            impl, colnames=['id', 'email', 'user_id'],
            ddl_not_contains="CONSTRANT fk1")
        eq_(
            list(new_table.foreign_keys),
            []
        )

    def test_add_uq(self):
        impl = self._simple_fixture()
        uq = self.op._unique_constraint(
            'uq1', 'tname', ['y']
        )

        impl.add_constraint(uq)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_contains="CONSTRAINT uq1 UNIQUE")

    def test_drop_uq(self):
        impl = self._uq_fixture()

        uq = self.op._unique_constraint(
            'uq1', 'tname', ['y']
        )
        impl.drop_constraint(uq)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_not_contains="CONSTRAINT uq1 UNIQUE")

    def test_add_index(self):
        impl = self._simple_fixture()
        ix = self.op._index('ix1', 'tname', ['y'])

        impl.add_index(ix)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_contains="CREATE INDEX ix1")

    def test_drop_index(self):
        impl = self._ix_fixture()

        ix = self.op._index('ix1', 'tname', ['y'])
        impl.drop_index(ix)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_not_contains="CONSTRAINT uq1 UNIQUE")

    def test_add_table_opts(self):
        impl = self._simple_fixture(table_kwargs={'mysql_engine': 'InnoDB'})
        self._assert_impl(
            impl, ddl_contains="ENGINE=InnoDB",
            dialect='mysql'
        )
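# What _assert_impl is ultimately comparing is the CREATE statement compiled
# for the batch temp table.  A tiny standalone illustration of compiling such
# DDL with CreateTable (a sketch only; the printed output shown below is
# approximate, for the default dialect):
from sqlalchemy import (
    MetaData, Table, Column, Integer, String, UniqueConstraint)
from sqlalchemy.schema import CreateTable

t = Table(
    '_alembic_batch_temp', MetaData(),
    Column('id', Integer, primary_key=True),
    Column('x', String(10)),
    Column('y', Integer),
    UniqueConstraint('y', name='uq1'),
)
print(CreateTable(t))
# CREATE TABLE _alembic_batch_temp (
#         id INTEGER NOT NULL,
#         x VARCHAR(10),
#         y INTEGER,
#         PRIMARY KEY (id),
#         CONSTRAINT uq1 UNIQUE (y)
# )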
Example n. 56
    def update_member(self, id, data):
        member=self.get_member(id)
        if member is None:
            return None

        # NOTE: BaseContext's init method sets its base to the base
        # struct contained in the request, so we need to reset it here
        # to the base struct that is actually in the database - DCarv

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        # NOTE: Set old base struct as active! By John Doe
        self.set_base(member.struct)

        # NOTE: Check for base content changes! By John Doe
        old_base=json2base(member.struct)

        new_base=json2base(data['struct'])

        # NOTE: List all fields that should be deleted! By John Doe
        del_cols=[]
        for old_col_name, old_col in old_base.content.__allstructs__.items():
            if old_col_name not in new_base.content.__allsnames__:
                del_cols.append(old_col)

        # NOTE: If any field will be deleted, delete it from all documents in
        # the base! By John Doe
        if len(del_cols) > 0:
            # NOTE: Create a fake request for DocumentCustomView and
            # DocumentContext! By John Doe

            url="/%s/doc&$$={\"limit\":null}" % new_base.metadata.name
            for col in del_cols:
                params={
                    'path': ("[{\"path\":\"%s\",\"fn\":null,\"mode\":"
                             "\"delete\",\"args\":[]}]") % ("/".join(col.path))
                }
                request=DummyRequest(path=url, params=params)
                request.method='PUT'
                request.matchdict={"base": new_base.metadata.name}
                doc_view=DocumentCustomView(
                    DocumentContextFactory(request), 
                    request
                )
                doc_view.update_collection()

        # NOTE: Check for relation field changes (to ALTER table if needed)!
        # By John Doe
        old_doc_table=get_doc_table(old_base.metadata.name, config.METADATA,
            **old_base.relational_fields)

        new_doc_table=get_doc_table(new_base.metadata.name, config.METADATA,
            **new_base.relational_fields)

        # NOTE: List relational fields that should be deleted! By John Doe
        del_cols=[]
        for old_col in old_doc_table.columns:
            if old_col.name not in new_doc_table.columns:
                del_cols.append(old_col)

        # NOTE: List relational fields that should be added! By John Doe
        new_cols=[]
        for new_col in new_doc_table.columns:
            if new_col.name not in old_doc_table.columns:
                # NOTE: Get liblightbase.lbbase.fields object! By John Doe

                field=new_base.relational_fields[new_col.name]
                custom_col=get_custom_column(field)
                new_cols.append(custom_col)

        # NOTE: Create alembic connection and operation object! By John Doe
        db_conn=config.ENGINE.connect()

        alembic_ctx=MigrationContext.configure(db_conn)
        alembic_op=Operations(alembic_ctx)

        # NOTE: Drop columns! By John Doe
        for col in del_cols:
            alembic_op.drop_column(new_doc_table.name, col.name)

        # TODO: new_col must not be created as required (NOT NULL)! By John Doe

        # NOTE: Add columns! By John Doe
        for col in new_cols:
            alembic_op.add_column(new_doc_table.name, col)

        # TODO: Alter columns? By John Doe

        db_conn.close()

        # NOTE: Check for base name change! By John Doe
        if member.name != data['name']:
            old_name='lb_doc_%s' %(member.name)
            new_name='lb_doc_%s' %(data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )
            old_name='lb_file_%s' %(member.name)
            new_name='lb_file_%s' %(data['name'])
            self.session.execute('ALTER TABLE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )
            old_name='lb_doc_%s_id_doc_seq' %(member.name)
            new_name='lb_doc_%s_id_doc_seq' %(data['name'])
            self.session.execute('ALTER SEQUENCE %s RENAME TO %s' %(
                    old_name, 
                    new_name
                )
            )

        # NOTE: This will add any new fields to the base struct! By John Doe
        for name in data:
            setattr(member, name, data[name])

        # NOTE: Now commits and closes session in the view instead of here
        # flush() pushes operations to DB's buffer - DCarv
        self.session.flush()

        # NOTE: Por alguma razão o objeto "session" estando com
        # "autocommit=True" não "commita" no "close" e por isso executamos um
        # "commit" aqui! "autocommit=True" não comita mais de uma operação em
        # sequência? By Questor
        # session_factory: sessionmaker(
            # class_='Session', 
            # autoflush=True, 
            # bind=Engine(postgresql://lbu:***@127.0.0.1/lb), 
            # autocommit=True, 
            # expire_on_commit=True
        # )
        # registry: <sqlalchemy.\
                # util.\
                # _collections.\
                # ThreadLocalRegistry object at 0x4143f90>
        # ! By Questor
        self.session.commit()

        model.HISTORY.create_member(**{
            'id_base': member.id_base, 
            'author': 'Author', 
            'date': datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S'), 
            'name': member.name, 
            'structure': utils.json2object(member.struct), 
            'status': 'UPDATED'
        })

        self.lbirestart()

        # NOTE: Remove base struct from cache! By John Doe
        model.BASES.bases.pop(member.name)

        return member
Example n. 57
def invoke_for_target(cls, operations: Operations, target):
    op = cls(target)
    return operations.invoke(op)
Example n. 58
def _get_object_from_version(cls, operations: Operations, ident):
    version, objname = ident.split(".")

    module = operations.get_context().script.get_revision(version).module
    obj = getattr(module, objname)
    return obj
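# The two classmethods above follow alembic's operation-plugin pattern: a
# MigrateOperation subclass registers a constructor method on Operations,
# and that constructor builds the op object and hands it to
# operations.invoke().  A condensed sketch of the wiring, adapted from
# alembic's documented plugin recipe (CreateSequenceOp is illustrative):
from alembic.operations import Operations, MigrateOperation

@Operations.register_operation("create_sequence")
class CreateSequenceOp(MigrateOperation):
    def __init__(self, sequence_name):
        self.sequence_name = sequence_name

    @classmethod
    def create_sequence(cls, operations, sequence_name):
        op = cls(sequence_name)
        return operations.invoke(op)

@Operations.implementation_for(CreateSequenceOp)
def create_sequence(operations, operation):
    # render the actual DDL when the op is invoked
    operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)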
Example n. 59
class BatchApplyTest(TestBase):
    __requires__ = ('sqlalchemy_08', )

    def setUp(self):
        self.op = Operations(mock.Mock(opts={}))

    def _simple_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('x', String(10)),
            Column('y', Integer)
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _uq_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('x', String()),
            Column('y', Integer),
            UniqueConstraint('y', name='uq1')
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _ix_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('x', String()),
            Column('y', Integer),
            Index('ix1', 'y')
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _literal_ck_fixture(
            self, copy_from=None, table_args=(), table_kwargs={}):
        m = MetaData()
        if copy_from is not None:
            t = copy_from
        else:
            t = Table(
                'tname', m,
                Column('id', Integer, primary_key=True),
                Column('email', String()),
                CheckConstraint("email LIKE '%@%'")
            )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _sql_ck_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('email', String())
        )
        t.append_constraint(CheckConstraint(t.c.email.like('%@%')))
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _fk_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('email', String()),
            Column('user_id', Integer, ForeignKey('user.id'))
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _multi_fk_fixture(self, table_args=(), table_kwargs={}, schema=None):
        m = MetaData()
        if schema:
            schemaarg = "%s." % schema
        else:
            schemaarg = ""

        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('email', String()),
            Column('user_id_1', Integer, ForeignKey('%suser.id' % schemaarg)),
            Column('user_id_2', Integer, ForeignKey('%suser.id' % schemaarg)),
            Column('user_id_3', Integer),
            Column('user_id_version', Integer),
            ForeignKeyConstraint(
                ['user_id_3', 'user_id_version'],
                ['%suser.id' % schemaarg, '%suser.id_version' % schemaarg]),
            schema=schema
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _named_fk_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('email', String()),
            Column('user_id', Integer, ForeignKey('user.id', name='ufk'))
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _selfref_fk_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('parent_id', Integer, ForeignKey('tname.id')),
            Column('data', String)
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _boolean_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean)
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _boolean_no_ck_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('flag', Boolean(create_constraint=False))
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _enum_fixture(self, table_args=(), table_kwargs={}):
        m = MetaData()
        t = Table(
            'tname', m,
            Column('id', Integer, primary_key=True),
            Column('thing', Enum('a', 'b', 'c'))
        )
        return ApplyBatchImpl(t, table_args, table_kwargs)

    def _assert_impl(self, impl, colnames=None,
                     ddl_contains=None, ddl_not_contains=None,
                     dialect='default', schema=None):
        context = op_fixture(dialect=dialect)

        impl._create(context.impl)

        if colnames is None:
            colnames = ['id', 'x', 'y']
        eq_(impl.new_table.c.keys(), colnames)

        pk_cols = [col for col in impl.new_table.c if col.primary_key]
        eq_(list(impl.new_table.primary_key), pk_cols)

        create_stmt = str(
            CreateTable(impl.new_table).compile(dialect=context.dialect))
        create_stmt = re.sub(r'[\n\t]', '', create_stmt)

        idx_stmt = ""
        for idx in impl.new_table.indexes:
            idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
        idx_stmt = re.sub(r'[\n\t]', '', idx_stmt)

        if ddl_contains:
            assert ddl_contains in create_stmt + idx_stmt
        if ddl_not_contains:
            assert ddl_not_contains not in create_stmt + idx_stmt

        expected = [
            create_stmt,
        ]
        if impl.new_table.indexes:
            expected.append(idx_stmt)

        if schema:
            args = {"schema": "%s." % schema}
        else:
            args = {"schema": ""}

        args['colnames'] = ", ".join([
            impl.new_table.c[name].name
            for name in colnames
            if name in impl.table.c])
        args['tname_colnames'] = ", ".join(
            "CAST(%(schema)stname.%(name)s AS %(type)s) AS anon_1" % {
                'schema': args['schema'],
                'name': name,
                'type': impl.new_table.c[name].type
            }
            if (
                impl.new_table.c[name].type._type_affinity
                is not impl.table.c[name].type._type_affinity)
            else "%(schema)stname.%(name)s" % {
                'schema': args['schema'], 'name': name}
            for name in colnames if name in impl.table.c
        )

        expected.extend([
            'INSERT INTO %(schema)s_alembic_batch_temp (%(colnames)s) '
            'SELECT %(tname_colnames)s FROM %(schema)stname' % args,
            'DROP TABLE %(schema)stname' % args,
            'ALTER TABLE %(schema)s_alembic_batch_temp '
            'RENAME TO %(schema)stname' % args
        ])
        context.assert_(*expected)
        return impl.new_table

    def test_change_type(self):
        impl = self._simple_fixture()
        impl.alter_column('tname', 'x', type_=Integer)
        new_table = self._assert_impl(impl)
        assert new_table.c.x.type._type_affinity is Integer

    def test_rename_col(self):
        impl = self._simple_fixture()
        impl.alter_column('tname', 'x', name='q')
        new_table = self._assert_impl(impl)
        eq_(new_table.c.x.name, 'q')

    def test_rename_col_boolean(self):
        impl = self._boolean_fixture()
        impl.alter_column('tname', 'flag', name='bflag')
        new_table = self._assert_impl(
            impl, ddl_contains="CHECK (bflag IN (0, 1)",
            colnames=["id", "flag"])
        eq_(new_table.c.flag.name, 'bflag')
        eq_(
            len([
                const for const
                in new_table.constraints
                if isinstance(const, CheckConstraint)]),
            1)

    def test_rename_col_boolean_no_ck(self):
        impl = self._boolean_no_ck_fixture()
        impl.alter_column('tname', 'flag', name='bflag')
        new_table = self._assert_impl(
            impl, ddl_not_contains="CHECK",
            colnames=["id", "flag"])
        eq_(new_table.c.flag.name, 'bflag')
        eq_(
            len([
                const for const
                in new_table.constraints
                if isinstance(const, CheckConstraint)]),
            0)

    def test_rename_col_enum(self):
        impl = self._enum_fixture()
        impl.alter_column('tname', 'thing', name='thang')
        new_table = self._assert_impl(
            impl, ddl_contains="CHECK (thang IN ('a', 'b', 'c')",
            colnames=["id", "thing"])
        eq_(new_table.c.thing.name, 'thang')
        eq_(
            len([
                const for const
                in new_table.constraints
                if isinstance(const, CheckConstraint)]),
            1)

    def test_rename_col_literal_ck(self):
        impl = self._literal_ck_fixture()
        impl.alter_column('tname', 'email', name='emol')
        new_table = self._assert_impl(
            # note this is wrong, we don't dig into the SQL
            impl, ddl_contains="CHECK (email LIKE '%@%')",
            colnames=["id", "email"])
        eq_(
            len([c for c in new_table.constraints
                if isinstance(c, CheckConstraint)]), 1)

        eq_(new_table.c.email.name, 'emol')

    def test_rename_col_literal_ck_workaround(self):
        impl = self._literal_ck_fixture(
            copy_from=Table(
                'tname', MetaData(),
                Column('id', Integer, primary_key=True),
                Column('email', String),
            ),
            table_args=[CheckConstraint("emol LIKE '%@%'")])

        impl.alter_column('tname', 'email', name='emol')
        new_table = self._assert_impl(
            impl, ddl_contains="CHECK (emol LIKE '%@%')",
            colnames=["id", "email"])
        eq_(
            len([c for c in new_table.constraints
                if isinstance(c, CheckConstraint)]), 1)
        eq_(new_table.c.email.name, 'emol')

    def test_rename_col_sql_ck(self):
        impl = self._sql_ck_fixture()

        impl.alter_column('tname', 'email', name='emol')
        new_table = self._assert_impl(
            impl, ddl_contains="CHECK (emol LIKE '%@%')",
            colnames=["id", "email"])
        eq_(
            len([c for c in new_table.constraints
                if isinstance(c, CheckConstraint)]), 1)

        eq_(new_table.c.email.name, 'emol')

    def test_add_col(self):
        impl = self._simple_fixture()
        col = Column('g', Integer)
        # operations.add_column produces a table
        t = self.op._table('tname', col)  # noqa
        impl.add_column('tname', col)
        new_table = self._assert_impl(impl, colnames=['id', 'x', 'y', 'g'])
        eq_(new_table.c.g.name, 'g')

    def test_rename_col_pk(self):
        impl = self._simple_fixture()
        impl.alter_column('tname', 'id', name='foobar')
        new_table = self._assert_impl(
            impl, ddl_contains="PRIMARY KEY (foobar)")
        eq_(new_table.c.id.name, 'foobar')
        eq_(list(new_table.primary_key), [new_table.c.id])

    def test_rename_col_fk(self):
        impl = self._fk_fixture()
        impl.alter_column('tname', 'user_id', name='foobar')
        new_table = self._assert_impl(
            impl, colnames=['id', 'email', 'user_id'],
            ddl_contains='FOREIGN KEY(foobar) REFERENCES "user" (id)')
        eq_(new_table.c.user_id.name, 'foobar')
        eq_(
            list(new_table.c.user_id.foreign_keys)[0]._get_colspec(),
            "user.id"
        )

    def test_regen_multi_fk(self):
        impl = self._multi_fk_fixture()
        self._assert_impl(
            impl, colnames=[
                'id', 'email', 'user_id_1', 'user_id_2',
                'user_id_3', 'user_id_version'],
            ddl_contains='FOREIGN KEY(user_id_3, user_id_version) '
            'REFERENCES "user" (id, id_version)')

    # _get_colspec() in 0.8 calls upon fk.column when schema is
    # present.  not sure if we want to try to fix this
    @config.requirements.fail_before_sqla_09
    def test_regen_multi_fk_schema(self):
        impl = self._multi_fk_fixture(schema='foo_schema')
        self._assert_impl(
            impl, colnames=[
                'id', 'email', 'user_id_1', 'user_id_2',
                'user_id_3', 'user_id_version'],
            ddl_contains='FOREIGN KEY(user_id_3, user_id_version) '
            'REFERENCES foo_schema."user" (id, id_version)',
            schema='foo_schema')

    def test_drop_col(self):
        impl = self._simple_fixture()
        impl.drop_column('tname', column('x'))
        new_table = self._assert_impl(impl, colnames=['id', 'y'])
        assert 'y' in new_table.c
        assert 'x' not in new_table.c

    def test_drop_col_remove_pk(self):
        impl = self._simple_fixture()
        impl.drop_column('tname', column('id'))
        new_table = self._assert_impl(
            impl, colnames=['x', 'y'], ddl_not_contains="PRIMARY KEY")
        assert 'y' in new_table.c
        assert 'id' not in new_table.c
        assert not new_table.primary_key

    def test_drop_col_remove_fk(self):
        impl = self._fk_fixture()
        impl.drop_column('tname', column('user_id'))
        new_table = self._assert_impl(
            impl, colnames=['id', 'email'], ddl_not_contains="FOREIGN KEY")
        assert 'user_id' not in new_table.c
        assert not new_table.foreign_keys

    def test_drop_col_retain_fk(self):
        impl = self._fk_fixture()
        impl.drop_column('tname', column('email'))
        new_table = self._assert_impl(
            impl, colnames=['id', 'user_id'],
            ddl_contains='FOREIGN KEY(user_id) REFERENCES "user" (id)')
        assert 'email' not in new_table.c
        assert new_table.c.user_id.foreign_keys

    def test_drop_col_retain_fk_selfref(self):
        impl = self._selfref_fk_fixture()
        impl.drop_column('tname', column('data'))
        new_table = self._assert_impl(impl, colnames=['id', 'parent_id'])
        assert 'data' not in new_table.c
        assert new_table.c.parent_id.foreign_keys

    def test_add_fk(self):
        impl = self._simple_fixture()
        impl.add_column('tname', Column('user_id', Integer))
        fk = self.op._foreign_key_constraint(
            'fk1', 'tname', 'user',
            ['user_id'], ['id'])
        impl.add_constraint(fk)
        new_table = self._assert_impl(
            impl, colnames=['id', 'x', 'y', 'user_id'],
            ddl_contains='CONSTRAINT fk1 FOREIGN KEY(user_id) '
            'REFERENCES "user" (id)')
        eq_(
            list(new_table.c.user_id.foreign_keys)[0]._get_colspec(),
            'user.id'
        )

    def test_drop_fk(self):
        impl = self._named_fk_fixture()
        fk = ForeignKeyConstraint([], [], name='ufk')
        impl.drop_constraint(fk)
        new_table = self._assert_impl(
            impl, colnames=['id', 'email', 'user_id'],
            ddl_not_contains="CONSTRANT fk1")
        eq_(
            list(new_table.foreign_keys),
            []
        )

    def test_add_uq(self):
        impl = self._simple_fixture()
        uq = self.op._unique_constraint(
            'uq1', 'tname', ['y']
        )

        impl.add_constraint(uq)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_contains="CONSTRAINT uq1 UNIQUE")

    def test_drop_uq(self):
        impl = self._uq_fixture()

        uq = self.op._unique_constraint(
            'uq1', 'tname', ['y']
        )
        impl.drop_constraint(uq)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_not_contains="CONSTRAINT uq1 UNIQUE")

    def test_create_index(self):
        impl = self._simple_fixture()
        ix = self.op._index('ix1', 'tname', ['y'])

        impl.create_index(ix)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_contains="CREATE INDEX ix1")

    def test_drop_index(self):
        impl = self._ix_fixture()

        ix = self.op._index('ix1', 'tname', ['y'])
        impl.drop_index(ix)
        self._assert_impl(
            impl, colnames=['id', 'x', 'y'],
            ddl_not_contains="CONSTRAINT uq1 UNIQUE")

    def test_add_table_opts(self):
        impl = self._simple_fixture(table_kwargs={'mysql_engine': 'InnoDB'})
        self._assert_impl(
            impl, ddl_contains="ENGINE=InnoDB",
            dialect='mysql'
        )