def downgrade():
    """Revert queue_members.uniqueid to a plain (non-unique) string column."""
    # Was unable to find a way to use op.alter_column() to remove the
    # unique index property.
    if op.get_context().bind.dialect.name == 'mssql':
        # MSSQL materializes the unique property as a named constraint.
        op.drop_constraint('uq_queue_members_uniqueid', 'queue_members')
    # Drop-and-recreate loses existing uniqueid data — presumably acceptable
    # for this downgrade path; TODO confirm.
    op.drop_column('queue_members', 'uniqueid')
    op.add_column('queue_members',
                  sa.Column(name='uniqueid', type_=sa.String(80),
                            nullable=False))
def downgrade():
    """Drop testjob.caseorder and recreate the legacy test/user tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('testjob', 'caseorder')
    # Recreate the pre-upgrade 'test' table (schema only; data not restored).
    op.create_table('test',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('jobName', sa.VARCHAR(length=64), nullable=True),
        sa.Column('jobType', sa.INTEGER(), nullable=True),
        sa.Column('relateCases', sa.BLOB(), nullable=True),
        sa.Column('relateDevices', sa.BLOB(), nullable=True),
        sa.Column('testapk', sa.VARCHAR(length=512), nullable=True),
        sa.Column('appPackage', sa.VARCHAR(length=64), nullable=True),
        sa.Column('appActivity', sa.VARCHAR(length=64), nullable=True),
        sa.Column('result', sa.INTEGER(), nullable=True),
        sa.Column('reportID', sa.INTEGER(), nullable=True),
        sa.Column('status', sa.INTEGER(), nullable=True),
        sa.Column('createdtime', sa.DATETIME(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Recreate the pre-upgrade 'user' table.
    op.create_table('user',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('email', sa.VARCHAR(length=64), nullable=True),
        sa.Column('username', sa.VARCHAR(length=64), nullable=True),
        sa.Column('password', sa.VARCHAR(length=128), nullable=True),
        sa.Column('createdtime', sa.DATETIME(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
def downgrade(active_plugins=None, options=None):
    """Revert tasks.status to the previous enum type."""
    old_status_enum = _define_enums()['task_status_old']
    op.drop_column('tasks', 'status')
    op.add_column('tasks',
                  sa.Column('status', old_status_enum, nullable=True))
def downgrade():
    """Detach environments from zones: drop the FK, then the column."""
    fk_name = 'fk_environments_zone_id_zones'
    op.drop_constraint(fk_name, 'environments', type_='foreignkey')
    op.drop_column('environments', 'zone_id')
def downgrade(): connection = op.get_bind() # Create old AZ fields op.add_column('services', Column('availability_zone', String(length=255))) op.add_column('share_instances', Column('availability_zone', String(length=255))) # Migrate data az_table = utils.load_table('availability_zones', connection) share_instances_table = utils.load_table('share_instances', connection) services_table = utils.load_table('services', connection) for az in connection.execute(az_table.select()): op.execute( share_instances_table.update().where( share_instances_table.c.availability_zone_id == az.id ).values({'availability_zone': az.name}) ) op.execute( services_table.update().where( services_table.c.availability_zone_id == az.id ).values({'availability_zone': az.name}) ) # Remove AZ_id columns and AZ table op.drop_constraint('service_az_id_fk', 'services', type_='foreignkey') op.drop_column('services', 'availability_zone_id') op.drop_constraint('si_az_id_fk', 'share_instances', type_='foreignkey') op.drop_column('share_instances', 'availability_zone_id') op.drop_table('availability_zones')
def downgrade():
    """Re-tighten statistic_visitor.referred and drop machine_statistic.version."""
    ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('statistic_visitor', 'referred',
                    existing_type=mysql.VARCHAR(collation=u'utf8_unicode_ci',
                                                length=128),
                    nullable=False,
                    existing_server_default=sa.text(u"''"))
    op.drop_column('machine_statistic', 'version')
def upgrade():
    """Replace external_identities.local_user_name with a local_user_id FK.

    Backfills local_user_id from users by matching on user_name, then swaps
    the primary key and foreign key to use the numeric id.
    """
    conn = op.get_bind()
    op.add_column('external_identities',
                  sa.Column('local_user_id', sa.Integer()))
    external_identities_t = table(
        'external_identities',
        sa.Column('local_user_name', sa.Unicode(50)),
        sa.Column('local_user_id', sa.Integer))
    users_t = table(
        'users',
        sa.Column('user_name', sa.Unicode(50)),
        sa.Column('id', sa.Integer))
    # Backfill local_user_id by correlating on the user name.
    stmt = external_identities_t.update().values(local_user_id=users_t.c.id). \
        where(users_t.c.user_name == external_identities_t.c.local_user_name)
    conn.execute(stmt)
    # BUG FIX: op.drop_constraint() takes ``type_``, not ``type`` — the
    # original ``type='primary'``/``type='foreignkey'`` keywords raise a
    # TypeError when the migration runs.
    op.drop_constraint('pk_external_identities', 'external_identities',
                       type_='primary')
    op.drop_constraint('fk_external_identities_local_user_name_users',
                       'external_identities', type_='foreignkey')
    op.drop_column('external_identities', 'local_user_name')
    op.create_primary_key('pk_external_identities', 'external_identities',
                          cols=['external_id', 'local_user_id',
                                'provider_name'])
    # None -> let the naming convention pick the FK constraint name.
    op.create_foreign_key(None, 'external_identities', 'users',
                          remote_cols=['id'], local_cols=['local_user_id'],
                          onupdate='CASCADE', ondelete='CASCADE')
def upgrade(pyramid_env):
    """Convert column header langstrings into column synthesis posts, then
    drop the obsolete idea_message_column.header_id column."""
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()
    with transaction.manager:
        # take the first sysadmin as creator
        sysadmin_role = db.query(m.Role).filter(
            m.Role.name == R_SYSADMIN).first()
        creator_id = m.User.default_db.query(m.User).join(
            m.User.roles).filter(m.Role.id == sysadmin_role.id)[0:1][0].id
        # Map column id -> header langstring id straight from the table,
        # since the ORM attribute is about to be removed.
        columns_headers = dict(list(db.execute(
            "SELECT id, header_id FROM idea_message_column")))
        columns = db.query(m.IdeaMessageColumn).all()
        for column in columns:
            synthesis = column.get_column_synthesis()
            header_id = columns_headers.get(column.id, None)
            # Only create a synthesis where a header exists and none was made.
            if header_id is not None and synthesis is None:
                name_en = column.name.closest_entry('en') or column.name.first_original()
                name_fr = column.name.closest_entry('fr') or column.name.first_original()
                subject_ls = m.LangString.create(
                    u"Synthesis: {}".format(name_en.value), 'en')
                subject_ls.add_value(
                    u"Synthèse : {}".format(name_fr.value), 'fr')
                # don't clone, reuse the same langstring
                body_ls = m.LangString.get(header_id)
                column.create_column_synthesis(
                    subject=subject_ls,
                    body=body_ls,
                    creator_id=creator_id)
    with context.begin_transaction():
        op.drop_column('idea_message_column', 'header_id')
def downgrade():
    """Rebuild the legacy challenges.flags JSON column from Keys rows."""
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    op.add_column('challenges', sa.Column('flags', sa.TEXT(), nullable=True))
    op.drop_column('challenges', 'type')
    rows = conn.execute("SELECT id, flags from challenges").fetchall()
    for row in rows:
        challenge_id = row[0]
        # Serialize every Key of this challenge back into the old JSON shape.
        flag_dicts = [
            {'flag': key.flag, 'type': key.key_type}
            for key in Keys.query.filter_by(chal=challenge_id).all()
        ]
        conn.execute(text('UPDATE challenges SET flags=:flags WHERE id=:id'),
                     id=challenge_id,
                     flags=json.dumps(flag_dicts))
def upgrade():
    """Normalize investments: add industries/involvements/value_units lookup
    tables with FK columns, add fdi_notes, and drop the government column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('investments', 'value',
        type_=mysql.FLOAT(display_width=11),
    )
    op.create_table('industries',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_industries_name'), 'industries', ['name'],
                    unique=True)
    op.create_table('involvements',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_involvements_name'), 'involvements', ['name'],
                    unique=True)
    op.create_table('value_units',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_value_units_name'), 'value_units', ['name'],
                    unique=True)
    op.add_column(u'investments',
                  sa.Column('fdi_notes', sa.String(length=1024), nullable=True))
    op.add_column(u'investments',
                  sa.Column('industry_id', sa.Integer(), nullable=True))
    op.add_column(u'investments',
                  sa.Column('involvement_id', sa.Integer(), nullable=True))
    op.add_column(u'investments',
                  sa.Column('value_unit_id', sa.Integer(), nullable=True))
    op.create_index(op.f('ix_investments_industry_id'), 'investments',
                    ['industry_id'], unique=False)
    op.create_index(op.f('ix_investments_involvement_id'), 'investments',
                    ['involvement_id'], unique=False)
    op.create_index(op.f('ix_investments_value_unit_id'), 'investments',
                    ['value_unit_id'], unique=False)
    # None -> constraint names come from the naming convention.
    op.create_foreign_key(None, 'investments', 'industries',
                          ['industry_id'], ['id'])
    op.create_foreign_key(None, 'investments', 'involvements',
                          ['involvement_id'], ['id'])
    op.create_foreign_key(None, 'investments', 'value_units',
                          ['value_unit_id'], ['id'])
    op.drop_column(u'investments', 'government')
def upgrade():
    """Move disruption.localization_id into the new
    associate_disruption_pt_object table, creating pt_object rows as needed."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('associate_disruption_pt_object',
        sa.Column('disruption_id', postgresql.UUID(), nullable=False),
        sa.Column('pt_object_id', postgresql.UUID(), nullable=False),
        sa.ForeignKeyConstraint(['disruption_id'], ['disruption.id'], ),
        sa.ForeignKeyConstraint(['pt_object_id'], ['pt_object.id'], ),
        sa.PrimaryKeyConstraint('disruption_id', 'pt_object_id',
                                name='disruption_pt_object_pk')
    )
    connection = op.get_bind()
    # Pair every disruption with the pt_object matching its localization uri.
    result = connection.execute(
        'select pt.id as pt_id, dd.id as dis_id, dd.created_at as created_at,'
        ' dd.localization_id as loc_id '
        'from disruption as dd left OUTER join pt_object as pt '
        'on pt.uri = dd.localization_id')
    for row in result:
        # Pt_object exist in database
        if row['pt_id']:
            pt_object_id = row['pt_id']
        else:
            # Pt_object not exist in database: create a stop_area pt_object,
            # reusing the disruption id as its primary key.
            # NOTE(review): values are interpolated with str.format rather
            # than bind parameters — they come from our own DB, but confirm
            # none can contain quotes.
            op.execute(
                "INSERT INTO pt_object (created_at, id, type, uri) VALUES ('{}', '{}', '{}', '{}')".
                format(row['created_at'], row['dis_id'], 'stop_area',
                       row['loc_id']))
            pt_object_id = row['dis_id']
        op.execute(
            "INSERT INTO associate_disruption_pt_object (disruption_id, pt_object_id) VALUES ('{}', '{}')".
            format(row['dis_id'], pt_object_id))
    op.drop_column(u'disruption', 'localization_id')
def upgrade():
    """Split special_message_id into start/finish drawing message columns."""
    ### commands auto generated by Alembic - please adjust! ###
    new_columns = ('special_message_finish_drawing_id',
                   'special_message_start_drawing_id')
    for name in new_columns:
        op.add_column(u'messages_drawing_move',
                      sa.Column(name, sa.String(length=2000), nullable=True))
    op.drop_column(u'messages_drawing_move', 'special_message_id')
    for name in new_columns:
        op.create_index(op.f('ix_messages_drawing_move_' + name),
                        'messages_drawing_move', [name], unique=False)
def downgrade():
    """Drop company logo/header FKs and columns, plus the config_files table."""
    # MySQL-specific: FK names were auto-assigned as company_ibfk_N.
    op.execute("alter table company DROP FOREIGN KEY company_ibfk_1")
    op.execute("alter table company DROP FOREIGN KEY company_ibfk_2")
    for column in ('logo_id', 'header_id'):
        op.drop_column('company', column)
    op.drop_table('config_files')
def upgrade():
    """Drop privilege table, replace engagement_order.id with a uuid column,
    and add unique name constraints to the label tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('privilege')
    # NOTE(review): adding a NOT NULL column without a server default fails
    # on a non-empty table on most backends — confirm engagement_order is
    # empty or add a default before running.
    op.add_column('engagement_order',
                  sa.Column('uuid', sa.String(length=2), nullable=False))
    op.drop_column('engagement_order', 'id')
    # None -> constraint names come from the naming convention.
    op.create_unique_constraint(None, 'job_label', ['name'])
    op.create_unique_constraint(None, 'personal_label', ['name'])
def upgrade():
    """Re-parent notes from sections to notebooks."""
    ### commands auto generated by Alembic - please adjust! ###
    # Sever and remove the old section linkage.
    op.drop_constraint(u'notes_section_id_fkey', 'notes', type_='foreignkey')
    op.drop_column('notes', 'section_id')
    op.drop_table('sections')
    # Attach notes to notebooks instead (nullable: no data backfill here).
    op.add_column('notes', sa.Column('notebook_id', sa.Integer(),
                                     nullable=True))
    op.create_foreign_key(None, 'notes', 'notebooks', ['notebook_id'], ['id'])
def upgrade():
    """Replace the enum resource 'type' columns with strings backed by the
    new resource_type lookup table."""
    resource_type = op.create_table(
        'resource_type',
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('name'),
        mysql_charset='utf8',
        mysql_engine='InnoDB'
    )

    # Seed resource_type with every type currently in use.
    resource = sa.Table('resource', sa.MetaData(),
                        type_string_col("type", "resource"))
    op.execute(resource_type.insert().from_select(
        ['name'], sa.select([resource.c.type]).distinct()))

    for table in ["resource", "resource_history"]:
        # Rename the enum column aside, add the string column, copy the
        # values across, then drop the old column and enforce NOT NULL.
        op.alter_column(table, "type", new_column_name="old_type",
                        existing_type=type_enum)
        op.add_column(table, type_string_col("type", table))
        sa_table = sa.Table(table, sa.MetaData(),
                            type_string_col("type", table),
                            type_enum_col('old_type'))
        op.execute(sa_table.update().values(
            {sa_table.c.type: sa_table.c.old_type}))
        op.drop_column(table, "old_type")
        op.alter_column(table, "type", nullable=False,
                        existing_type=type_string)
def downgrade():
    """Downgrade database schema and/or data back to the previous revision."""
    # Drop both audit FKs first, then the columns they guarded.
    for fk_name, table_name in (("fk_assessments_audits", "assessments"),
                                ("fk_issues_audits", "issues")):
        op.drop_constraint(fk_name, table_name, type_="foreignkey")
    op.drop_column("assessments", "audit_id")
    op.drop_column("issues", "audit_id")
def downgrade():
    """Remove the user_id/followed_by_id pair from relationships."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): autogenerate emitted None for the FK constraint names;
    # most backends require an explicit name to drop a constraint — fill in
    # the real names before running this downgrade.
    op.drop_constraint(None, 'relationships', type_='foreignkey')
    op.drop_constraint(None, 'relationships', type_='foreignkey')
    op.drop_constraint('unique_idx_user_id_followed_by_id', 'relationships',
                       type_='unique')
    op.drop_column('relationships', 'user_id')
    op.drop_column('relationships', 'followed_by_id')
def downgrade():
    """Drop the menu price column and all store-related tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'menu', 'price')
    # Child tables first, 'store' last, to respect FK dependencies.
    for table_name in ('store_comment', 'menu_store_relation',
                       'board_category', 'store'):
        op.drop_table(table_name)
def upgrade():
    """Drop COLUMN_NAME from every table in TABLE_NAMES, failing loudly."""
    for t_name in TABLE_NAMES:
        try:
            op.drop_column(t_name, COLUMN_NAME)
        except Exception:
            # Log which column failed, then re-raise so the migration aborts.
            LOG.error(_LE("Column '%s' could not be dropped"), COLUMN_NAME)
            raise
def downgrade():
    """Raw-SQL downgrade: revert group/job/recipe-related schema additions.

    Uses multi-clause MySQL ALTERs so each table is rewritten only once.
    """
    op.execute(""" ALTER TABLE tg_group MODIFY group_name VARCHAR(16) DEFAULT NULL, DROP COLUMN ldap, DROP COLUMN root_password """)
    op.drop_column('user_group', 'is_owner')
    op.execute(""" ALTER TABLE job DROP FOREIGN KEY job_group_id_fk, DROP COLUMN group_id, DROP INDEX status, DROP INDEX result """)
    op.execute(""" ALTER TABLE recipe_set DROP INDEX status, DROP INDEX result, DROP INDEX priority """)
    op.execute(""" ALTER TABLE recipe DROP INDEX status, DROP INDEX result """)
def upgrade():
    """Introduce permissions/roles tables, tighten modules columns, and drop
    the legacy admin_roles/group_roles tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('permissions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('role', sa.String(length=20), nullable=True),
        sa.Column('module', sa.String(length=3), nullable=True),
        sa.Column('permission', sa.String(length=80), nullable=False),
        sa.Column('read', sa.Boolean(), nullable=False),
        sa.Column('write', sa.Boolean(), nullable=False),
        sa.Column('update', sa.Boolean(), nullable=False),
        sa.Column('delete', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('roles',
        sa.Column('role', sa.String(length=20), nullable=False),
        sa.Column('module_abbreviation', sa.String(length=3), nullable=False),
        sa.PrimaryKeyConstraint('role', 'module_abbreviation')
    )
    op.drop_table('admin_roles')
    op.drop_table('group_roles')
    op.alter_column('modules', 'abbreviation',
                    existing_type=sa.VARCHAR(length=20), nullable=False)
    op.alter_column('modules', 'active',
                    existing_type=sa.BOOLEAN(), nullable=False)
    op.alter_column('modules', 'description',
                    existing_type=sa.VARCHAR(length=200), nullable=False)
    op.drop_column('modules', 'id')
def upgrade(active_plugins=None, options=None):
    """Drop stories.priority and delete its now-unused enum types."""
    op.drop_column('stories', 'priority')
    # Need to explicitly delete enums during migrations for Postgres
    bind = op.get_bind()
    for enum in _define_enums().values():
        enum.drop(bind)
def upgrade():
    """Replace free-form TTS columns on radio_station with FK-backed
    audioformat/samplerate/voice lookup tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('radio_ttsaudioformat',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('name', sa.String(length=30), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('radio_ttssamplerate',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('value', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('radio_ttsvoice',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('name', sa.String(length=30), nullable=False),
        sa.Column('language_id', sa.Integer(), nullable=True),
        sa.Column('gender_code', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['language_id'], ['radio_language.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column(u'radio_station',
                  sa.Column('tts_audioformat_id', sa.Integer(), nullable=True))
    op.add_column(u'radio_station',
                  sa.Column('tts_samplerate_id', sa.Integer(), nullable=True))
    op.add_column(u'radio_station',
                  sa.Column('tts_voice_id', sa.Integer(), nullable=True))
    # Old free-form columns are dropped; no data migration is performed here.
    op.drop_column(u'radio_station', 'tts_sample_rate')
    op.drop_column(u'radio_station', 'tts_gender')
    op.drop_column(u'radio_station', 'tts_audio_format')
    op.drop_column(u'radio_station', 'tts_accent')
    op.drop_column(u'radio_station', 'tts_language_id')
def downgrade():
    """Re-require user.mobile and drop order.is_quick."""
    ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('user', 'mobile',
                    existing_type=mysql.VARCHAR(length=11),
                    nullable=False,
                    existing_server_default=sa.text(u"''"))
    op.drop_column('order', 'is_quick')
def downgrade():
    """Convert try_number back to "self-retries used" and drop max_tries.

    Processes task instances in batches until none has max_tries != -1.
    """
    engine = settings.engine
    if engine.dialect.has_table(engine, 'task_instance'):
        connection = op.get_bind()
        sessionmaker = sa.orm.sessionmaker()
        session = sessionmaker(bind=connection)
        dagbag = DagBag(settings.DAGS_FOLDER)
        query = session.query(sa.func.count(TaskInstance.max_tries)).filter(
            TaskInstance.max_tries != -1
        )
        while query.scalar():
            tis = session.query(TaskInstance).filter(
                TaskInstance.max_tries != -1
            ).limit(BATCH_SIZE).all()
            for ti in tis:
                dag = dagbag.get_dag(ti.dag_id)
                if not dag or not dag.has_task(ti.task_id):
                    # Task no longer exists in the DAG: nothing to compute.
                    ti.try_number = 0
                else:
                    task = dag.get_task(ti.task_id)
                    # max_tries - try_number is number of times a task instance
                    # left to retry by itself. So the current try_number should be
                    # max number of self retry (task.retries) minus number of
                    # times left for task instance to try the task.
                    ti.try_number = max(0, task.retries - (ti.max_tries -
                                                           ti.try_number))
                ti.max_tries = -1
                session.merge(ti)
            session.commit()
        session.commit()
    op.drop_column('task_instance', 'max_tries')
def downgrade():
    """Restore the pre-uniqhash unique constraints on releases and pres."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('releases_uniq', 'releases', type_='unique')
    op.create_unique_constraint('releases_name_group_id_posted_key',
                                'releases', ['name', 'group_id', 'posted'])
    op.drop_column('releases', 'uniqhash')
    op.drop_constraint('pres_uniq', 'pres', type_='unique')
    op.create_unique_constraint('pres_name_key', 'pres', ['name'])
def downgrade():
    """Drop the release column and its index from installation_structures."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(
        op.f('ix_installation_structures_release'),
        table_name='installation_structures'
    )
    op.drop_column('installation_structures', 'release')
def downgrade():
    """Revert analytic-context schema: recreate census_block_series and
    series_segment, drop analytic_context tables, and shrink sort_value."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('series', 'analytic_context_key')
    op.drop_index('ix_homepath_census_block', table_name='homepath_listing')
    op.drop_index(op.f('ix_craigslist_listing_subdomain'),
                  table_name='craigslist_listing')
    # Recreate the association tables removed by the upgrade (schema only).
    op.create_table('census_block_series',
        sa.Column('state_fp', sa.VARCHAR(length=2), autoincrement=False,
                  nullable=False),
        sa.Column('county_fp', sa.VARCHAR(length=3), autoincrement=False,
                  nullable=False),
        sa.Column('tract_ce', sa.VARCHAR(length=6), autoincrement=False,
                  nullable=False),
        sa.Column('block_ce', sa.VARCHAR(length=1), autoincrement=False,
                  nullable=False),
        sa.Column('series_key', sa.VARCHAR(length=40), autoincrement=False,
                  nullable=False),
        sa.ForeignKeyConstraint(['series_key'], [u'series.key'],
            name=u'fk_census_block_series_series_key_series'),
        sa.ForeignKeyConstraint(['state_fp', 'county_fp', 'tract_ce',
                                 'block_ce'],
            [u'census_block.state_fp', u'census_block.county_fp',
             u'census_block.tract_ce', u'census_block.block_ce'],
            name=u'fk_census_block_series'),
        sa.PrimaryKeyConstraint('state_fp', 'county_fp', 'tract_ce',
                                'block_ce', 'series_key',
                                name=u'pk_census_block_series')
    )
    op.create_table('series_segment',
        sa.Column('series_key', sa.VARCHAR(length=40), autoincrement=False,
                  nullable=False),
        sa.Column('dimension_id', sa.VARCHAR(length=20), autoincrement=False,
                  nullable=False),
        sa.Column('segment_id', sa.VARCHAR(length=20), autoincrement=False,
                  nullable=False),
        sa.ForeignKeyConstraint(['dimension_id', 'segment_id'],
            [u'segment.dimension_id', u'segment.id'],
            name=u'fk_series_segment'),
        sa.ForeignKeyConstraint(['dimension_id'], [u'dimension.id'],
            name=u'fk_series_segment_dimension_id_dimension'),
        sa.ForeignKeyConstraint(['series_key'], [u'series.key'],
            name=u'fk_series_segment_series_key_series'),
        sa.PrimaryKeyConstraint('series_key', 'dimension_id', 'segment_id',
                                name=u'pk_series_segment')
    )
    op.drop_index('ix_census_block_segment_census_block',
                  table_name='census_block_segment')
    op.drop_table('census_block_segment')
    op.drop_index(op.f('ix_analytic_context_segment_analytic_context_key'),
                  table_name='analytic_context_segment')
    op.drop_index('ix_analytic_context_segment',
                  table_name='analytic_context_segment')
    op.drop_table('analytic_context_segment')
    op.drop_table('analytic_context')
    # Remove census_block segments and shrink segment.sort_value.
    op.execute(segment.delete(segment.c.dimension_id == 'census_block'))
    op.alter_column('segment', 'sort_value', type_=sa.Unicode(10))
def downgrade():
    """Drop the profile columns that were added to users."""
    ### commands auto generated by Alembic - please adjust! ###
    for column in ('name', 'member_since', 'location', 'last_seen',
                   'about_me'):
        op.drop_column('users', column)
def upgrade():
    """Drop the now-unused users.salt column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'salt')
def downgrade():
    """Remove the seeking fields from Artist."""
    # ### commands auto generated by Alembic - please adjust! ###
    for column in ('seeking_talent', 'seeking_description'):
        op.drop_column('Artist', column)
def downgrade():
    """Drop the genres column from both venues and artists."""
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name in ('venues', 'artists'):
        op.drop_column(table_name, 'genres')
def downgrade():
    """Drop the exclusive_testsets column from test_sets."""
    op.drop_column('test_sets', 'exclusive_testsets')
def downgrade():
    """Unlink transactions from transaction_months and drop that table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): autogenerate emitted None for the FK constraint name;
    # most backends require the real name here — fill it in before running.
    op.drop_constraint(None, 'transactions', type_='foreignkey')
    op.drop_column('transactions', 'transaction_month_id')
    op.drop_table('transaction_months')
def downgrade():
    """Drop the users.avatar_hash column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'avatar_hash')
def downgrade():
    """Drop the user.language column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'language')
def upgrade():
    """Replace post.upload with a post.photo column."""
    # ### commands auto generated by Alembic - please adjust! ###
    photo_column = sa.Column('photo', sa.String(length=256), nullable=True)
    op.add_column('post', photo_column)
    op.drop_column('post', 'upload')
def downgrade():
    """Drop the last_seen and about_me columns from user."""
    # ### commands auto generated by Alembic - please adjust! ###
    for column in ('last_seen', 'about_me'):
        op.drop_column('user', column)
def downgrade():
    """Restore post.upload and drop post.photo."""
    # ### commands auto generated by Alembic - please adjust! ###
    upload_column = sa.Column('upload', sa.VARCHAR(length=256),
                              autoincrement=False, nullable=True)
    op.add_column('post', upload_column)
    op.drop_column('post', 'photo')
def downgrade():
    """Drop the first/last public-transport and admin columns from the
    'stat' schema tables."""
    pt_fields = ('id', 'name', 'coord', 'admin_id', 'admin_name',
                 'admin_insee')
    # journeys carries a last_pt_* and a first_pt_* copy of each field.
    for prefix in ('last', 'first'):
        for field in pt_fields:
            op.drop_column('journeys', '{}_pt_{}'.format(prefix, field),
                           schema='stat')
    for column in ('from_admin_insee', 'to_admin_insee'):
        op.drop_column('journey_sections', column, schema='stat')
    for column in ('arrival_admin_name', 'departure_admin_name'):
        op.drop_column('journey_request', column, schema='stat')
def downgrade():
    """Drop the organisation.custom_welcome_message_key column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('organisation', 'custom_welcome_message_key')
def downgrade():
    """Unlink containers from users and drop the user_id column."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): autogenerate emitted None for the FK constraint name;
    # most backends require the real name here — fill it in before running.
    op.drop_constraint(None, 'containers', type_='foreignkey')
    op.drop_column('containers', 'user_id')
def downgrade():
    """Drop the ps_endpoints.notify_early_inuse_ringing column."""
    op.drop_column('ps_endpoints', 'notify_early_inuse_ringing')
def downgrade():
    """Unlink todos from todolist and drop the todolist table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): autogenerate emitted None for the FK constraint name;
    # most backends require the real name here — fill it in before running.
    op.drop_constraint(None, 'todos', type_='foreignkey')
    op.drop_column('todos', 'listId')
    op.drop_table('todolist')
def downgrade():
    """Drop the pause-tracking columns from agent_login_status."""
    for column in ('paused', 'paused_reason'):
        op.drop_column('agent_login_status', column)
def downgrade():
    """Drop the audit/state columns added to machine, identifier and cluster."""
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name, column in (('machine', 'is_active'),
                               ('machine', 'CreatedAt'),
                               ('identifier', 'CreatedAt'),
                               ('cluster', 'ModifiedAt'),
                               ('cluster', 'DeletedAt'),
                               ('cluster', 'CreatedAt')):
        op.drop_column(table_name, column)
def upgrade():
    """Drop legacy contest_user credential columns and the solution link."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('contest_user', 'login_name')
    op.drop_column('contest_user', 'password_hash')
    op.drop_index('ix_solution_contest_user_id', table_name='solution')
    op.drop_column('solution', 'contest_user_id')
def downgrade():
    """Drop the TOUR.category column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('TOUR', 'category')
def downgrade():
    """Drop the bundle.owner_id column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('bundle', 'owner_id')
def upgrade():
    """Drop the travels.user_id column and its MySQL foreign key."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('travels_ibfk_2', 'travels', type_='foreignkey')
    op.drop_column('travels', 'user_id')
def downgrade():
    """Drop the station.short_description column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('station', 'short_description')
def downgrade():
    """Drop the users.private_favorites column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'private_favorites')
def downgrade():
    """Restore the truncate_hive comment and drop the copy/update timing
    columns from the import/export statistics tables."""
    op.alter_column('import_tables', 'truncate_hive',
                    existing_type=mysql.TINYINT(display_width=4),
                    comment='Truncate Hive table before loading it. ',
                    existing_comment='<NOT USED>',
                    existing_nullable=False,
                    existing_server_default=sa.text("'1'"))
    # Both import tables carry the same six copy-timing columns.
    copy_columns = ('copy_schema_stop', 'copy_schema_start',
                    'copy_schema_duration', 'copy_data_stop',
                    'copy_data_start', 'copy_data_duration')
    for table_name in ('import_statistics_last', 'import_statistics'):
        for column in copy_columns:
            op.drop_column(table_name, column)
    # Both export tables carry the same three update-statistics columns.
    update_columns = ('update_statistics_duration', 'update_statistics_start',
                      'update_statistics_stop')
    for table_name in ('export_statistics', 'export_statistics_last'):
        for column in update_columns:
            op.drop_column(table_name, column)
def downgrade():
    """Drop the study.short_title column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('study', 'short_title')
def downgrade():
    """Drop the plans.amount column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('plans', 'amount')
def downgrade():
    """Drop the image.created_date column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('image', 'created_date')
def downgrade():
    """Remove the users email/password_hash columns and the email index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_column('users', 'password_hash')
    op.drop_column('users', 'email')
def downgrade():
    """Drop the goal.goal_name column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('goal', 'goal_name')
def downgrade():
    """Drop the product.timestamp column and its index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_product_timestamp'), table_name='product')
    op.drop_column('product', 'timestamp')