Example 1
def upgrade(engine):
    session_maker = sessionmaker(bind=engine)
    session = session_maker()

    r = (session.query(model.ByteHubVersion).order_by(
        model.ByteHubVersion.timestamp.desc()).first())
    if not r:
        current_version = "0"
    else:
        current_version = r.version

    if parse(current_version) >= parse(version.__version__):
        # Up-to-date: nothing to do
        session.close()
        return

    if parse(current_version) < parse(version.__version__):
        print(f"Upgrading ByteHub database schema...")
        with engine.connect() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            try:
                op.add_column("namespace", Column("backend", String(128)))
            except OperationalError:
                pass

    # Add version number
    obj = model.ByteHubVersion()
    obj.version = version.__version__
    session.add(obj)
    session.commit()

    session.close()
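All of the snippets on this page share the same core pattern: wrap a live connection in a MigrationContext, then hand that context to Operations to get the op.* DDL API outside of a regular Alembic migration script. A minimal, self-contained sketch (the table and column names are illustrative):

from alembic.migration import MigrationContext
from alembic.operations import Operations
from sqlalchemy import Column, String, create_engine, text

engine = create_engine("sqlite://")  # assumption: any SQLAlchemy engine will do
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE example_table (id INTEGER)"))  # table to alter
    ctx = MigrationContext.configure(conn)  # bind a migration context to the connection
    op = Operations(ctx)                    # exposes op.add_column(), op.create_table(), ...
    op.add_column("example_table", Column("new_col", String(64)))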
Example 2
def create_operations(ctx=None, **kwargs):
    """Create an alembic operations object."""
    if ctx is None:
        ctx = create_migration_ctx(**kwargs)
    operations = Operations(ctx)
    operations.has_table = has_table
    return operations
Example 3
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_3_7_0_0

    init_model_encryption(db_3_7_0_0)

    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    op.create_table(
        'external_identities',
        sa.Column('provider_name', sa.Unicode(255), primary_key=True),
        sa.Column('local_user_id',
                  sa.Integer(),
                  sa.ForeignKey('users.user_id'),
                  primary_key=True),
        sa.Column('external_id', sa.Unicode(255), primary_key=True),
        sa.Column('external_username', sa.Unicode(1024), default=u''),
        sa.Column('access_token', sa.String(1024), default=u''),
        sa.Column('alt_token', sa.String(1024), default=u''),
        sa.Column('token_secret', sa.String(1024), default=u''))
    op.create_index('local_user_id_idx', 'external_identities',
                    ['local_user_id'])
    op.create_index('external_id_idx', 'external_identities', ['external_id'])
Example 4
 def create_fk_constraint(self, fk_constraints: list,
                          const_columns: dict) -> bool:
     """ Get list of foreign keys from static list `fk_constraints` and created it  """
     try:
         conn = self.engine.connect()
         ctx = MigrationContext.configure(conn)
         op = Operations(ctx)
         for constraint in fk_constraints:
             dest_table_name = constraint.pop("table_name")
             column_name = constraint.pop("column_name")
             source_table = constraint.pop("source_table")
             dest_column = constraint.pop("dest_column")
             temp = list(const_columns[source_table])
             if dest_column not in temp:
                 op.create_foreign_key(
                     None,
                     source_table,
                     dest_table_name,
                     [dest_column],
                     [column_name],
                     **constraint,
                 )
         return True
     except Exception as err:
         logger.error("create_fk_constraint [error] -> %s" % err)
         return False
     finally:
         conn.close()
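The shape of each entry in fk_constraints is only implicit in the pop() calls above. A hypothetical invocation might look like this (all names are illustrative; any keys left over after the pops are forwarded to op.create_foreign_key as keyword arguments):

fk_constraints = [{
    "source_table": "orders",   # table that receives the foreign key
    "dest_column": "user_id",   # local column on source_table
    "table_name": "users",      # referenced table
    "column_name": "id",        # referenced column
    "ondelete": "CASCADE",      # leftover key, passed through to op.create_foreign_key
}]
const_columns = {"orders": []}  # "orders" has no foreign-keyed columns recorded yet
migrator.create_fk_constraint(fk_constraints, const_columns)  # migrator: hypothetical instance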
Example 5
    def add_new_columns(self):
        """
        Adds columns present in df but not in the SQL table
        for given instance of PandasSpecialEngine.

        Notes
        -----
        Sadly, it seems that we cannot create JSON columns.
        """
        # create deep copies of the columns because we are going to unbind
        # them from the table model (otherwise alembic would think we are
        # adding a column that already exists in the database)
        cols_to_add = [deepcopy(col) for col in self.table.columns
                       if col.name not in self.get_db_columns_names()]
        # check columns are not index levels
        if any((c.name in self.df.index.names for c in cols_to_add)):
            raise ValueError(('Cannot add any column that is part of the df index!\n'
                              "You'll have to update your table primary key or change your "
                              "df index"))
        
        with self.engine.connect() as con:
            ctx = MigrationContext.configure(con)
            op = Operations(ctx)
            for col in cols_to_add:
                col.table = None  # Important! unbind the column from its table
                op.add_column(self.table.name, col, schema=self.schema)
                log(f"Added column {col} (type: {col.type}) in table {self.table.name} "
                    f'(schema="{self.schema}")')
Example 6
    def setUp(self):
        self.conn = config.db.connect()
        self.metadata = MetaData()
        t1 = Table('foo',
                   self.metadata,
                   Column('id', Integer, primary_key=True),
                   Column('data', String(50)),
                   Column('x', Integer),
                   mysql_engine='InnoDB')
        t1.create(self.conn)

        self.conn.execute(t1.insert(), [{
            "id": 1,
            "data": "d1",
            "x": 5
        }, {
            "id": 2,
            "data": "22",
            "x": 6
        }, {
            "id": 3,
            "data": "8.5",
            "x": 7
        }, {
            "id": 4,
            "data": "9.46",
            "x": 8
        }, {
            "id": 5,
            "data": "d5",
            "x": 9
        }])
        context = MigrationContext.configure(self.conn)
        self.op = Operations(context)
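A test method built on this fixture can then exercise self.op directly; a hypothetical example, not part of the original suite:

    def test_alter_column(self):
        # widen the 'data' column using the Operations object from setUp
        self.op.alter_column('foo', 'data', type_=String(100))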
Example 7
def alembic_tests():

    print('Alembic tests')
    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)

    try:
        op.drop_table('waste')
    except Exception:
        pass  # table did not exist yet

    t = op.create_table(
        'waste',
        Column('bools', sa.Boolean),
        Column('ubytes', sa.Tinyint),
        Column('shorts', sa.SmallInteger),
        Column('ints', sa.Integer),
        Column('bigints', sa.BigInteger),
        Column('floats', sa.REAL),
        Column('doubles', sa.Float),
        Column('dates', sa.Date),
        Column('datetimes', sa.DateTime),
        Column('varchars', sa.String(10)),
        Column('nvarchars', sa.UnicodeText),
        Column('numerics', sa.Numeric(38, 10)),
    )

    data = [{
        'bools': True,
        'ubytes': 5,
        'shorts': 55,
        'ints': 555,
        'bigints': 5555,
        'floats': 5.0,
        'doubles': 5.5555555,
        'dates': date(2012, 11, 23),
        'datetimes': datetime(2012, 11, 23, 16, 34, 56),
        'varchars': 'bla',
        'nvarchars': 'bla2',
        'numerics': Decimal("1.1")
    }, {
        'bools': False,
        'ubytes': 6,
        'shorts': 66,
        'ints': 666,
        'bigints': 6666,
        'floats': 6.0,
        'doubles': 6.6666666,
        'dates': date(2012, 11, 24),
        'datetimes': datetime(2012, 11, 24, 16, 34, 57),
        'varchars': 'bla',
        'nvarchars': 'bla2',
        'numerics': Decimal("-1.1")
    }]

    op.bulk_insert(t, data)

    res = engine.execute('select * from waste').fetchall()
    assert (res == [tuple(dikt.values()) for dikt in data])
Example 8
def test_alembic_nextbigid_ops(engine):
    # Test the migration operations work
    with engine.connect() as conn:
        context = MigrationContext.configure(conn)
        op = Operations(context)
        op.create_nextbigid_function()
        op.drop_nextbigid_function()
Example 9
def upgrade(plugin_name=None, revision=None, lower="current"):
    database_url = current_app.config.get("SQLALCHEMY_DATABASE_URI")
    if database_url.startswith("sqlite"):
        current_app.db.create_all()
        return

    if plugin_name is None:
        # Get the directory name of the plugin if unspecified
        # Doing it this way doesn't waste the rest of the inspect.stack call
        frame = inspect.currentframe()
        caller_info = inspect.getframeinfo(frame.f_back)
        caller_path = caller_info[0]
        plugin_name = os.path.basename(os.path.dirname(caller_path))

    # Check if the plugin has migrations
    migrations_path = os.path.join(current_app.plugins_dir, plugin_name,
                                   "migrations")
    if os.path.isdir(migrations_path) is False:
        return

    engine = create_engine(database_url, poolclass=pool.NullPool)
    conn = engine.connect()
    context = MigrationContext.configure(conn)
    op = Operations(context)

    # Find the list of migrations to run
    config = Config()
    config.set_main_option("script_location", migrations_path)
    config.set_main_option("version_locations", migrations_path)
    script = ScriptDirectory.from_config(config)

    # Choose base revision for plugin upgrade
    # "current" points to the current plugin version stored in config
    # None represents the absolute base layer (e.g. first installation)
    if lower == "current":
        lower = get_config(plugin_name + "_alembic_version")

    # Do we upgrade to head or to a specific revision
    if revision is None:
        upper = script.get_current_head()
    else:
        upper = revision

    # Apply from lower to upper
    revs = list(script.iterate_revisions(lower=lower, upper=upper))
    revs.reverse()

    try:
        for r in revs:
            with context.begin_transaction():
                r.module.upgrade(op=op)
            # Set revision that succeeded so we don't need
            # to start from the beginning on failure
            set_config(plugin_name + "_alembic_version", r.revision)
    finally:
        conn.close()

    # Set the new latest revision
    set_config(plugin_name + "_alembic_version", upper)
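Each revision module is handed the Operations object via r.module.upgrade(op=op) above, so a plugin migration file only needs to accept that keyword argument. A hypothetical revision module (file name, revision ids, and table are illustrative):

# hypothetical migrations/1b2c3d4e5f6a_add_flag.py inside a plugin
import sqlalchemy as sa

revision = "1b2c3d4e5f6a"
down_revision = None

def upgrade(op=None):
    # op is the alembic Operations object passed in by the loop above
    op.add_column("example_plugin_table", sa.Column("flag", sa.Boolean(), default=False))

def downgrade(op=None):
    op.drop_column("example_plugin_table", "flag")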
Example 10
    def adapt_dtype_of_empty_db_columns(self):
        """
        Changes the data types of empty columns in the SQL table defined
        in given instance of a PandasSpecialEngine.

        This should only happen in the case of data type mismatches,
        i.e. for columns where the sqlalchemy table model built from df
        and the model reflected from the SQL table have different data types.
        """
        empty_db_columns = self.get_empty_columns()
        db_table = self.get_db_table_schema()
        # if column does not have value in db and there are values
        # in the frame then change the column type if needed
        for col in empty_db_columns:
            # check if the column also exists in df
            if col.name not in self.df.columns:
                continue
            # check same type
            orig_type = db_table.columns[col.name].type.compile(self.engine.dialect)
            dest_type = self.table.columns[col.name].type.compile(self.engine.dialect)
            # remove character count e.g. "VARCHAR(50)" -> "VARCHAR" 
            orig_type = RE_CHARCOUNT_COL_TYPE.sub('', orig_type)
            dest_type = RE_CHARCOUNT_COL_TYPE.sub('', dest_type)
            # if same type or we want to insert TEXT instead of JSON continue
            # (JSON is not supported on some DBs so it's normal to have TEXT instead)
            if ((orig_type == dest_type) or
                ((orig_type == 'JSON') and (dest_type == 'TEXT'))):
                continue
            # grab the col/index from the df
            # so we can check if there are any values
            if col.name in self.df.index.names:
                df_col = self.df.index.get_level_values(col.name)
            else:
                df_col = self.df[col.name]
            if df_col.notna().any():
                # raise an error if we have to modify the dtype but we have a SQLite
                # engine (SQLite does not support column data type alteration)
                if self._db_type == 'sqlite':
                    raise ValueError('SQLite does not support column data type alteration!')
                with self.engine.connect() as con:
                    ctx = MigrationContext.configure(con)
                    op = Operations(ctx)
                    new_col = self.table.columns[col.name]
                    # check if postgres (in which case we have to use "using" syntax
                    # to alter column data types)
                    if self._db_type == 'postgres':
                        escaped_col = str(new_col.compile(dialect=self.engine.dialect))
                        compiled_type = new_col.type.compile(dialect=self.engine.dialect)
                        alter_kwargs = {'postgresql_using':f'{escaped_col}::{compiled_type}'}
                    else:
                        alter_kwargs = {}
                    op.alter_column(table_name=self.table.name,
                                    column_name=new_col.name,
                                    type_=new_col.type,
                                    schema=self.schema,
                                    **alter_kwargs)
                    log(f"Changed type of column {new_col.name} "
                        f"from {col.type} to {new_col.type} "
                        f'in table {self.table.name} (schema="{self.schema}")')
Example 11
 def _make_alembic_op(self, conn):
     '''
     Return an Operations instance, which is used to
     generate DDL statements
     '''
     migration_ctx = MigrationContext.configure(conn)
     op = Operations(migration_ctx)
     return op
Example 12
File: db.py Project: jkunle/paul
def get_upgrade_op(session):
    """
    Create a migration context and an operations object for performing upgrades.

    :param session: The SQLAlchemy session object.
    """
    context = MigrationContext.configure(session.bind.connect())
    return Operations(context)
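A hypothetical caller (the table and column names are illustrative):

from sqlalchemy import Column, DateTime

op = get_upgrade_op(session)  # session: an active SQLAlchemy session
op.add_column('songs', Column('last_played', DateTime()))  # hypothetical table/column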
Example 13
def test_standalone_op():
    eng, buf = capture_db()

    env = MigrationContext.configure(eng)
    op = Operations(env)

    op.alter_column("t", "c", nullable=True)
    eq_(buf, ['ALTER TABLE t ALTER COLUMN c DROP NOT NULL'])
Example 14
    def alter_add_foreign_key_to_backtest_summary(self):
        ctx = MigrationContext.configure(self.db_client.connector)
        op = Operations(ctx)

        with op.batch_alter_table(
                self.backtest_management_table_name) as batch_op:
            batch_op.create_foreign_key("fk_management_summary",
                                        "backtest_summary",
                                        ["backtest_summary_id"], ["id"])
Example 15
def upgrade():
    dburl = "mysql+mysqlconnector://user:password@localhost/dbname?charset=utf8"
    syncinst = SyncDB(Base, dburl)
    engine = syncinst.getengine()
    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    op.add_column('nt_plfbgcsjb',
                  Column('FILECOUNT', TINYINT, nullable=True, default=0))
Example 16
    def _fixture(self):
        migration_context = mock.Mock(opts={})
        op = Operations(migration_context)
        batch = op.batch_alter_table('tname', recreate='never').__enter__()

        with mock.patch("alembic.operations.sa_schema") as mock_schema:
            yield batch
        batch.impl.flush()
        self.mock_schema = mock_schema
Example 17
def init_online(app: Flask):
    db: SQLAlchemy = app.extensions['sqlalchemy'].db
    conn = db.engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    try:
        op.drop_table('seeds')
    except Exception:
        pass  # table did not exist yet
    op.create_table('seeds', sa.Column('file', sa.String(255), primary_key=True))
Example 18
File: sanity.py Project: kozec/cum
 def fix(self):
     context = MigrationContext.configure(self.parent.engine.connect())
     op = Operations(context)
     for table in self.parent.base.metadata.sorted_tables:
         if table.name == self.table:
             for column in table.columns:
                 if column.name == self.name:
                     with op.batch_alter_table(table.name) as batch_op:
                         batch_op.add_column(column.copy())
                     return
Example 19
def update_ego_grid_ehv_substation(**kwargs):

    postgres_hook = PostgresHook(postgres_conn_id='postgres_oedb')
    engine = postgres_hook.get_sqlalchemy_engine()
    conn = engine.connect()

    # alembic
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)

    # add column
    op.add_column('ego_grid_ehv_substation',
                  Column('otg_id', BigInteger),
                  schema='model_draft')

    # load the tables
    meta = MetaData()
    conn.execute("SET search_path TO model_draft, grid, public")

    ego_grid_hvmv_substation = Table('ego_grid_hvmv_substation',
                                     meta,
                                     autoload=True,
                                     autoload_with=conn,
                                     postgresql_ignore_search_path=True)
    ego_grid_ehv_substation = Table('ego_grid_ehv_substation',
                                    meta,
                                    autoload=True,
                                    autoload_with=conn,
                                    postgresql_ignore_search_path=True)
    otg_ehvhv_bus_data = Table('otg_ehvhv_bus_data',
                               meta,
                               autoload=True,
                               autoload_with=conn,
                               postgresql_ignore_search_path=True)

    # operations with "ego_grid_ehv_substation" table

    # update
    ehv_substation_update = ego_grid_ehv_substation.update().values(
        otg_id=otg_ehvhv_bus_data.c.bus_i).where(
            and_(
                otg_ehvhv_bus_data.c.base_kv > 110,
                otg_ehvhv_bus_data.c.osm_substation_id == cast(
                    func.trim(ego_grid_ehv_substation.c.osm_id, 'nwr'),
                    BigInteger)))

    # delete
    ehv_substation_delete = ego_grid_ehv_substation.delete().where(
        ego_grid_ehv_substation.c.otg_id.is_(None))

    # execution
    conn.execute(ehv_substation_update)
    conn.execute(ehv_substation_delete)
Example 20
def change_column_nullable(table_oid, column_index, nullable, engine):
    table = tables.reflect_table_from_oid(table_oid, engine)
    column = table.columns[column_index]
    with engine.begin() as conn:
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.alter_column(table.name,
                        column.name,
                        nullable=nullable,
                        schema=table.schema)
    return tables.reflect_table_from_oid(table_oid,
                                         engine).columns[column_index]
Example 21
 def drop_fk(self, fk_constraints: list):
     try:
         conn = self.engine.connect()
         ctx = MigrationContext.configure(conn)
         op = Operations(ctx)
         # [('todo', 'todo_ibfk_1')]
         for fk in fk_constraints:
             op.drop_constraint(fk[1], fk[0], type_="foreignkey")
     except Exception as err:
         logger.error("fk_drop [error] -> %s" % err)
     finally:
         conn.close()
Example 22
    def update(self):
        code = self.migration_code
        migrate = _get_migrate_function(code)
        schema_name = self.context.content_metadata().schema

        with self.engine.connect() as conn:
            migration_context = MigrationContext.configure(
                conn, opts={"include_schemas": False})
            op = Operations(migration_context)
            if not self.engine.dialect.has_schema(self.engine, schema_name):
                conn.execute(CreateSchema(schema_name))
            migrate(op)
Example 23
def drop_column(
    engine,
    table_oid,
    column_index,
):
    column_index = int(column_index)
    table = tables.reflect_table_from_oid(table_oid, engine)
    column = table.columns[column_index]
    with engine.begin() as conn:
        ctx = MigrationContext.configure(conn)
        op = Operations(ctx)
        op.drop_column(table.name, column.name, schema=table.schema)
Example 24
def add_columns(engine, raw_diffs, table_names):
    with engine.begin() as conn:
        ctx = get_migration_context(conn, table_names)
        op = Operations(ctx)
        columns = _get_columns_to_add(raw_diffs, table_names)

        for col in columns:
            table_name = col.table.name
            # the column has a reference to a table definition that already
            # has the column defined, so remove that and add the column
            col.table = None
            op.add_column(table_name, col)
Example 25
    def _fixture(self):
        self.metadata = MetaData()
        self.table = Table(
            'foo', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer),
        )

        context = op_fixture(dialect="sqlite", as_sql=True)
        self.op = Operations(context)
        return context
Example 26
    def _fixture(self, schema=None):
        migration_context = mock.Mock(
            opts={}, impl=mock.MagicMock(__dialect__='sqlite'))
        op = Operations(migration_context)
        batch = op.batch_alter_table('tname', recreate='never',
                                     schema=schema).__enter__()

        mock_schema = mock.MagicMock()
        with mock.patch("alembic.operations.schemaobj.sa_schema", mock_schema):
            yield batch
        batch.impl.flush()
        self.mock_schema = mock_schema
Example 27
    def update_column(self, table_name, column_name, col_type, **options):
        """ Updated existing table column with new column """
        try:
            conn = self.engine.connect()
            ctx = MigrationContext.configure(conn)
            op = Operations(ctx)

            # op.alter_column(table_name, column_name,type_=col_type,postgresql_using=f"{column_name}::{col_type}") #FIXME not working
        except Exception as err:
            logger.error("update_column [error] -> %s" % err)
        finally:
            conn.close()
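The commented-out alter_column call above is flagged as not working. Example 10 earlier on this page builds the postgresql_using expression from the compiled column and type instead, which is one pattern worth trying here; a hedged sketch of that approach, assuming table is a reflected Table object containing the column:

            new_col = table.columns[column_name]
            escaped_col = str(new_col.compile(dialect=self.engine.dialect))    # safely quoted column name
            compiled_type = new_col.type.compile(dialect=self.engine.dialect)  # e.g. "VARCHAR(50)"
            op.alter_column(table_name, column_name, type_=new_col.type,
                            postgresql_using=f"{escaped_col}::{compiled_type}")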
Example 28
    def _alter_column_name(self, base, old_name, new_name):
        # NOTE: Create an alembic connection and Operations object! By John Doe

        db_conn = config.ENGINE.connect()
        alembic_ctx = MigrationContext.configure(db_conn)
        alembic_op = Operations(alembic_ctx)
        doc_table = get_doc_table(base.metadata.name, config.METADATA,
                                  **base.relational_fields)
        alembic_op.alter_column(doc_table.name,
                                old_name,
                                new_column_name=new_name)
        db_conn.close()
Example 29
def get_db_tables(conn):
    """Get current and default table values from the db.

    :param conn: Initialized database connection object.
    :type conn: object
    :returns: tuple
    """
    query = text("SELECT TABLE_NAME from information_schema.tables\
                  WHERE TABLE_NAME\
                  LIKE '%alembic_version%'\
                  AND table_schema = 'refstack'")
    context = alembic_migration.MigrationContext.configure(conn)
    op = Operations(context)
    connection = op.get_bind()
    search = connection.execute(query)
    result = search.fetchall()
    if isinstance(result, Iterable):
        result = [table[0] for table in result]
    else:
        result = None
    # if there is more than one version table, modify the
    # one that does not have the default name, because subunit2sql uses the
    # default name.
    if result:
        current_name =\
            next((table for table in result if table != "alembic_version"),
                 result[0])
        current_name = current_name.decode('utf-8')
        current_version = get_table_version(conn, current_name)
        default_name =\
            next((table for table in result
                  if table == "alembic_version"), None)
        default_version = get_table_version(conn, default_name)
        if len(result) > 1 and not current_version:
            if not default_name:
                # this is the case where there is more than one
                # nonstandard-named alembic table, and no default
                current_name = next(
                    (table for table in result if table != current_name),
                    result[0])
                current_name = current_name.decode('utf-8')
            elif current_name:
                # this is the case where the current-named table
                # exists, but is empty
                current_name = default_name
                current_version = default_version
        current_table = (current_name, current_version)
        default_table = (default_name, default_version)
    else:
        default_table = (None, None)
        current_table = default_table
    return current_table, default_table
Example 30
File: sanity.py Project: kozec/cum
 def fix(self):
     """Uses Alembic batch operations to alter the column datatype in the table.
     """
     context = MigrationContext.configure(self.parent.engine.connect())
     op = Operations(context)
     for table in self.parent.base.metadata.sorted_tables:
         if table.name == self.table:
             for column in table.columns:
                 if column.name == self.column['name']:
                     with op.batch_alter_table(table.name) as batch_op:
                         batch_op.alter_column(column.name,
                                               type_=column.type)
                     return