def upgrade(migrate_engine):
    """
    Upgrade operations go here. Don't create your own engine; bind
    migrate_engine to your metadata.

    Creates the ``external_identities`` table (composite primary key of
    provider name, local user id and external id) plus lookup indexes on
    the local user id and the external id.
    """
    _reset_base(migrate_engine)

    from rhodecode.lib.dbmigrate.schema import db_3_7_0_0
    init_model_encryption(db_3_7_0_0)

    # Build an alembic Operations facade on top of the migrate engine's
    # connection so we can issue DDL directly.
    migration_ctx = MigrationContext.configure(migrate_engine.connect())
    operations = Operations(migration_ctx)

    columns = [
        sa.Column('provider_name', sa.Unicode(255), primary_key=True),
        sa.Column('local_user_id', sa.Integer(),
                  sa.ForeignKey('users.user_id'), primary_key=True),
        sa.Column('external_id', sa.Unicode(255), primary_key=True),
        sa.Column('external_username', sa.Unicode(1024), default=u''),
        sa.Column('access_token', sa.String(1024), default=u''),
        sa.Column('alt_token', sa.String(1024), default=u''),
        sa.Column('token_secret', sa.String(1024), default=u''),
    ]
    operations.create_table('external_identities', *columns)

    # Secondary indexes for the two common lookup paths.
    operations.create_index('local_user_id_idx', 'external_identities',
                            ['local_user_id'])
    operations.create_index('external_id_idx', 'external_identities',
                            ['external_id'])
def init_online(app: Flask):
    """Re-create the ``seeds`` table for the given Flask application.

    Drops any existing ``seeds`` table (best effort) and creates a fresh
    one with a single ``file`` primary-key column.

    :param app: Flask application whose SQLAlchemy extension supplies
        the engine to run DDL against.
    """
    db: SQLAlchemy = app.extensions['sqlalchemy'].db
    conn = db.engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)
    try:
        op.drop_table('seeds')
    except Exception:
        # Best effort: the table may not exist yet on first run. A bare
        # `except:` was too broad (it also swallowed SystemExit and
        # KeyboardInterrupt); Exception keeps the intent without that.
        pass
    op.create_table('seeds',
                    sa.Column('file', sa.String(255), primary_key=True))
def alembic_tests():
    """Smoke-test alembic DDL + bulk insert against the module ``engine``.

    Drops/recreates a ``waste`` table covering the common column types,
    bulk-inserts two rows, then reads them back and asserts the round
    trip is lossless.
    """
    print('Alembic tests')

    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)

    try:
        op.drop_table('waste')
    except Exception:
        # Best effort: table may not exist on a fresh database. Narrowed
        # from a bare `except:` so SystemExit/KeyboardInterrupt propagate.
        pass

    # NOTE(review): sa.Tinyint is not a public SQLAlchemy core type —
    # presumably provided by a dialect-specific shim in this project; confirm.
    t = op.create_table(
        'waste',
        Column('bools', sa.Boolean),
        Column('ubytes', sa.Tinyint),
        Column('shorts', sa.SmallInteger),
        Column('ints', sa.Integer),
        Column('bigints', sa.BigInteger),
        Column('floats', sa.REAL),
        Column('doubles', sa.Float),
        Column('dates', sa.Date),
        Column('datetimes', sa.DateTime),
        Column('varchars', sa.String(10)),
        Column('nvarchars', sa.UnicodeText),
        Column('numerics', sa.Numeric(38, 10)),
    )

    data = [{
        'bools': True,
        'ubytes': 5,
        'shorts': 55,
        'ints': 555,
        'bigints': 5555,
        'floats': 5.0,
        'doubles': 5.5555555,
        'dates': date(2012, 11, 23),
        'datetimes': datetime(2012, 11, 23, 16, 34, 56),
        'varchars': 'bla',
        'nvarchars': 'bla2',
        'numerics': Decimal("1.1"),
    }, {
        'bools': False,
        'ubytes': 6,
        'shorts': 66,
        'ints': 666,
        'bigints': 6666,
        'floats': 6.0,
        'doubles': 6.6666666,
        'dates': date(2012, 11, 24),
        'datetimes': datetime(2012, 11, 24, 16, 34, 57),
        'varchars': 'bla',
        'nvarchars': 'bla2',
        'numerics': Decimal("-1.1"),
    }]
    op.bulk_insert(t, data)

    # Verify every inserted value comes back unchanged, in order.
    res = engine.execute('select * from waste').fetchall()
    assert (res == [tuple(dikt.values()) for dikt in data])
def create_missing_database_entities(Model, engine):
    """Sync the live database schema up to the declarative metadata.

    For each table in ``Model.metadata``: if the table already exists in
    the database, add any columns it is missing; otherwise create the
    whole table. Existing tables/columns are never dropped or altered.

    :param Model: declarative base (or model) exposing ``.metadata``.
    :param engine: SQLAlchemy engine to inspect and run DDL against.

    NOTE: this block was Python 2 (`print` statements, ``dict.has_key``)
    while the rest of the file uses f-strings; ported to Python 3 so the
    module can actually be imported.
    """
    m = Model.metadata
    current_info = get_current_database_info(engine)
    print(current_info)

    conn = engine.connect()
    ctx = MigrationContext.configure(conn)
    op = Operations(ctx)

    print("metadata", m)
    for table_name in m.tables:
        table = m.tables[table_name]
        if table_name in current_info:
            # Table exists — add only the columns the database lacks.
            for col in table.columns:
                print("col", col)
                if col.name not in current_info[table_name]:
                    print("  IN TABLE: %s CREATING COLUMN: %s"
                          % (table_name, col.name))
                    op.add_column(table_name, mimic_column(col))
                    print("  ... done")
        else:
            # Table missing entirely — create it with all its columns.
            args = [table_name] + [mimic_column(c) for c in table.columns]
            print("CREATING TABLE: " + repr(args))
            op.create_table(*args)