def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(u'industry_index_ibfk_1', 'industry_index',
                       type_='foreignkey')
    op.drop_constraint('PRIMARY', 'industry_index', type_='primary')
    op.add_column('industry_index',
                  sa.Column('date', mysql.DATETIME(), nullable=False))
    op.create_primary_key("industry_index_pk", "industry_index",
                          ['solarsystem_id', 'activity', 'date'])
    op.create_foreign_key("industry_index_ibfk_1", 'industry_index',
                          'solar_system', ['solarsystem_id'], ['id'])
def downgrade():
    # temporarily drop fkey constraints so we can drop the primary key
    op.drop_constraint('actions_task_id_fkey', 'actions')
    op.drop_constraint('task_geometries_task_id_fkey', 'task_geometries')
    op.drop_constraint('tasks_id_unique', 'tasks')
    op.drop_constraint('tasks_pkey', 'tasks')

    # change the challenge slug column to be nullable again
    op.alter_column('tasks', 'challenge_slug',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.create_primary_key('tasks_pkey', 'tasks', ['id'])

    # recreate the foreign keys
    op.create_foreign_key('actions_task_id_fkey', 'actions', 'tasks',
                          ['task_id'], ['id'])
    op.create_foreign_key('task_geometries_task_id_fkey', 'task_geometries',
                          'tasks', ['task_id'], ['id'])
def upgrade():
    conn = op.get_bind()
    op.add_column('external_identities',
                  sa.Column('local_user_id', sa.Integer()))
    external_identities_t = table('external_identities',
                                  sa.Column('local_user_name', sa.Unicode(50)),
                                  sa.Column('local_user_id', sa.Integer))
    users_t = table('users',
                    sa.Column('user_name', sa.Unicode(50)),
                    sa.Column('id', sa.Integer))
    stmt = external_identities_t.update() \
        .values(local_user_id=users_t.c.id) \
        .where(users_t.c.user_name == external_identities_t.c.local_user_name)
    conn.execute(stmt)

    # note: drop_constraint takes type_, not type
    op.drop_constraint('pk_external_identities', 'external_identities',
                       type_='primary')
    op.drop_constraint('fk_external_identities_local_user_name_users',
                       'external_identities', type_='foreignkey')
    op.drop_column('external_identities', 'local_user_name')
    op.create_primary_key('pk_external_identities', 'external_identities',
                          cols=['external_id', 'local_user_id',
                                'provider_name'])
    op.create_foreign_key(None, 'external_identities', 'users',
                          remote_cols=['id'], local_cols=['local_user_id'],
                          onupdate='CASCADE', ondelete='CASCADE')
def downgrade():
    context = op.get_context()
    dialect = context.bind.dialect.name

    op.drop_constraint(name=PK_NAME, table_name=TABLE_NAME, type_='primary')
    op.add_column(TABLE_NAME, sa.Column('id', sa.String(32)))

    if dialect == 'ibm_db_sa':
        # DB2 doesn't support nullable column in primary key
        op.alter_column(table_name=TABLE_NAME, column_name='id',
                        nullable=False)

    with migration.remove_fks_from_table(TABLE_NAME):
        op.create_primary_key(name=PK_NAME, table_name=TABLE_NAME,
                              cols=['id'])
def upgrade():
    # Turn state_id into state
    op.add_column('bank_transactions',
                  sa.Column('state', sa.Unicode, nullable=False,
                            server_default='pending'))
    for (state_id, state) in states:
        op.execute(
            bank_transactions.update()
            .where(bank_transactions.c.state_id == state_id)
            .values({'state': state})
        )
    op.alter_column('bank_transactions', 'state', server_default=None)
    op.drop_column('bank_transactions', 'state_id')

    # Drop old keys and ID column
    op.drop_constraint('bank_transaction_states_identifier_key',
                       'bank_transaction_states')
    op.drop_constraint('bank_transaction_states_pkey',
                       'bank_transaction_states')
    op.drop_column('bank_transaction_states', 'id')

    # Create new keys
    op.create_primary_key('bank_transaction_states_pkey',
                          'bank_transaction_states', ['identifier'])
    op.create_foreign_key('bank_transactions_state_fkey', 'bank_transactions',
                          'bank_transaction_states', ['state'], ['identifier'])
def upgrade():
    ''' Upgrade the database to this revision '''
    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        create_table('lifetime_except',
                     sa.Column('id', GUID()),
                     sa.Column('scope', sa.String(25)),
                     sa.Column('name', sa.String(255)),
                     sa.Column('did_type', DIDType.db_type()),
                     sa.Column('account', sa.String(25)),
                     sa.Column('comments', sa.String(4000)),
                     sa.Column('pattern', sa.String(255)),
                     sa.Column('state', LifetimeExceptionsState.db_type()),
                     sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
                     sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow,
                               onupdate=datetime.datetime.utcnow),
                     sa.Column('expires_at', sa.DateTime))

        create_primary_key('LIFETIME_EXCEPT_PK', 'lifetime_except',
                           ['id', 'scope', 'name', 'did_type', 'account'])
        create_check_constraint('LIFETIME_EXCEPT_SCOPE_NN', 'lifetime_except',
                                'scope is not null')
        create_check_constraint('LIFETIME_EXCEPT_NAME_NN', 'lifetime_except',
                                'name is not null')
        create_check_constraint('LIFETIME_EXCEPT_DID_TYPE_NN', 'lifetime_except',
                                'did_type is not null')
def upgrade():
    schema = context.get_context().config.get_main_option('schema')
    op.create_primary_key(
        'layergroup_treeitem_pkey', 'layergroup_treeitem', ['id'],
        schema=schema
    )
def downgrade():
    op.drop_constraint('FOIMinistryRequestDocuments_pkey',
                       'FOIMinistryRequestDocuments', type_='primary')
    op.create_primary_key("FOIMinistryRequestDocuments_pkey",
                          "FOIMinistryRequestDocuments",
                          ["foiministrydocumentid"])
def table_upgrade(table_name, table_info):
    columns = table_info.get('columns')
    keys = table_info.get('keys')
    seq_column = table_info.get('seq_column')

    # Delete the old primary key if one was specified
    if keys:
        op.drop_constraint('%s_pkey' % table_name, table_name, type_='primary')

    # Create a sequence and add associated column if specified
    if seq_column:
        seq = Sequence('%s_%s_seq' % (table_name, seq_column))
        op.execute(CreateSequence(seq))
        op.add_column(table_name,
                      sa.Column(seq_column, sa.Integer(),
                                server_default=seq.next_value(),
                                nullable=False))

    # Apply the new column settings if specified
    if columns:
        for column_name, column_info in columns.items():
            op.alter_column(table_name, column_name,
                            existing_type=column_info['type'],
                            nullable=column_info['nullable']['new'])

    # Create the new primary key if one was specified
    if keys:
        op.create_primary_key('%s_pkey' % table_name, table_name, keys['new'])
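# A hedged usage sketch for table_upgrade above: the 'widgets' table, its
# columns, and the key layout are hypothetical, invented only to show the
# expected shape of table_info; they do not come from any real migration.
def upgrade():
    table_upgrade('widgets', {
        # replace the old single-column PK with a composite one
        'keys': {'old': ['id'], 'new': ['tenant_id', 'widget_name']},
        # back the new 'seq' column with its own sequence
        'seq_column': 'seq',
        # tighten nullability on tenant_id while we're at it
        'columns': {
            'tenant_id': {'type': sa.Integer(),
                          'nullable': {'old': True, 'new': False}},
        },
    })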
def downgrade():
    op.drop_constraint('board_auto_location_pkey', 'board_auto_location',
                       type_='primary')
    op.drop_index('ix_board_auto_location_geonameid', 'board_auto_location')
    op.drop_constraint('board_auto_location_board_id_fkey',
                       'board_auto_location', type_='foreignkey')
    op.rename_table('board_auto_location', 'board_location')
    op.create_primary_key('board_location_pkey', 'board_location',
                          ['board_id', 'geonameid'])
    op.create_index('ix_board_location_geonameid', 'board_location',
                    ['geonameid'])
    op.create_foreign_key('board_location_board_id_fkey', 'board_location',
                          'board', ['board_id'], ['id'])

    op.drop_constraint('board_auto_domain_pkey', 'board_auto_domain',
                       type_='primary')
    op.drop_index('ix_board_auto_domain_domain', 'board_auto_domain')
    op.drop_constraint('board_auto_domain_board_id_fkey', 'board_auto_domain',
                       type_='foreignkey')
    op.rename_table('board_auto_domain', 'board_domain')
    op.create_primary_key('board_domain_pkey', 'board_domain',
                          ['board_id', 'domain'])
    op.create_index('ix_board_domain_domain', 'board_domain', ['domain'])
    op.create_foreign_key('board_domain_board_id_fkey', 'board_domain',
                          'board', ['board_id'], ['id'])
def upgrade():
    # OpenStack has decided that "down" migrations are not supported.
    # The downgrade() method has been omitted for this reason.
    op.add_column('listener_statistics',
                  sa.Column('amphora_id', sa.String(36), nullable=False))
    op.drop_constraint('fk_listener_statistics_listener_id',
                       'listener_statistics', type_='foreignkey')
    op.drop_constraint('PRIMARY', 'listener_statistics', type_='primary')
    op.create_primary_key('pk_listener_statistics', 'listener_statistics',
                          ['listener_id', 'amphora_id'])
    op.create_foreign_key('fk_listener_statistics_listener_id',
                          'listener_statistics', 'listener',
                          ['listener_id'], ['id'])
    op.create_foreign_key('fk_listener_statistic_amphora_id',
                          'listener_statistics', 'amphora',
                          ['amphora_id'], ['id'])
def upgrade():
    ''' upgrade method '''
    # Create new bad_pfns table
    create_table('bad_pfns',
                 sa.Column('path', sa.String(2048)),
                 sa.Column('state', BadPFNStatus.db_type(name='BAD_PFNS_STATE_CHK'),
                           default=BadPFNStatus.SUSPICIOUS),
                 sa.Column('reason', sa.String(255)),
                 sa.Column('account', sa.String(25)),
                 sa.Column('expires_at', sa.DateTime),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('BAD_PFNS_PK', 'bad_pfns', ['path', 'state'])
        create_foreign_key('BAD_PFNS_ACCOUNT_FK', 'bad_pfns', 'accounts',
                           ['account'], ['account'])

    # Add new state to bad_replicas table
    if context.get_context().dialect.name != 'sqlite':
        drop_constraint('BAD_REPLICAS_STATE_CHK', 'bad_replicas', type_='check')
        create_check_constraint(name='BAD_REPLICAS_STATE_CHK',
                                source='bad_replicas',
                                condition="state in ('B', 'D', 'L', 'R', 'S', 'T')")

        # Add new column to bad_replicas table
        add_column('bad_replicas', sa.Column('expires_at', sa.DateTime()))

        # Change PK
        drop_constraint('BAD_REPLICAS_STATE_PK', 'bad_replicas', type_='primary')
        create_primary_key('BAD_REPLICAS_STATE_PK', 'bad_replicas',
                           ['scope', 'name', 'rse_id', 'state', 'created_at'])

        # Add new Index to Table
        create_index('BAD_REPLICAS_EXPIRES_AT_IDX', 'bad_replicas', ['expires_at'])
def downgrade():
    op.drop_column('cm_survey_question', 'question_category_id',
                   mssql_drop_foreign_key=True)
    op.add_column('cm_survey_question',
                  sa.Column('question_category_id', sa.Integer(),
                            sa.ForeignKey('question_category.question_category_id')))
    op.drop_constraint('response_pk', 'cm_response')
    op.drop_column('cm_response', 'survey_question_id',
                   mssql_drop_foreign_key=True)
    op.add_column('cm_response',
                  sa.Column('survey_question_id', sa.Integer(),
                            sa.ForeignKey('survey_question.survey_question_id'),
                            nullable=False))
    op.create_primary_key('response_pk', 'cm_response',
                          ['survey_question_id', 'person_id'])
def upgrade():
    op.add_column(source_table_name,
                  sa.Column('uuid',
                            sa.String(UUID_LENGTH),
                            nullable=False,
                            server_default=sa.text('uuid_generate_v4()')))
    op.add_column(favorite_table_name,
                  sa.Column('source_uuid', sa.String(UUID_LENGTH)))

    source_table = sql.table(source_table_name,
                             sql.column('id'),
                             sql.column('uuid'))
    id_uuid_query = sql.select([source_table.c.id, source_table.c.uuid])
    id_uuid_rows = op.get_bind().execute(id_uuid_query).fetchall()

    favorite_table = sql.table(favorite_table_name,
                               sql.column('source_id'),
                               sql.column('source_uuid'))
    for id_, uuid in id_uuid_rows:
        op.execute(favorite_table.update()
                   .where(favorite_table.c.source_id == id_)
                   .values(source_uuid=uuid))
    op.alter_column(favorite_table_name, 'source_uuid', nullable=False)

    op.execute('ALTER TABLE dird_favorite DROP CONSTRAINT dird_favorite_pkey CASCADE')
    op.execute('ALTER TABLE dird_favorite DROP CONSTRAINT dird_favorite_source_id_fkey CASCADE')
    op.execute('ALTER TABLE dird_source DROP CONSTRAINT dird_source_pkey CASCADE')

    op.create_primary_key('dird_source_pkey', source_table_name, ['uuid'])
    op.create_primary_key('dird_favorite_pkey', favorite_table_name,
                          ['source_uuid', 'contact_id', 'user_uuid'])
    op.create_foreign_key('dird_favorite_source_uuid_fkey',
                          'dird_favorite',
                          'dird_source',
                          ['source_uuid'],
                          ['uuid'],
                          ondelete='CASCADE')

    op.drop_column(source_table_name, 'id')
    op.drop_column(favorite_table_name, 'source_id')
def upgrade():
    ''' Upgrade the database to this revision '''
    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        create_table('collection_replicas',
                     sa.Column('scope', sa.String(25)),
                     sa.Column('name', sa.String(255)),
                     sa.Column('did_type',
                               sa.Enum(DIDType,
                                       name='COLLECTION_REPLICAS_TYPE_CHK',
                                       create_constraint=True,
                                       values_callable=lambda obj: [e.value for e in obj])),
                     sa.Column('rse_id', GUID()),
                     sa.Column('bytes', sa.BigInteger),
                     sa.Column('length', sa.BigInteger),
                     sa.Column('state',
                               sa.Enum(ReplicaState,
                                       name='COLLECTION_REPLICAS_STATE_CHK',
                                       create_constraint=True,
                                       values_callable=lambda obj: [e.value for e in obj]),
                               default=ReplicaState.UNAVAILABLE),
                     sa.Column('accessed_at', sa.DateTime),
                     sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
                     sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow,
                               onupdate=datetime.datetime.utcnow))

        create_primary_key('COLLECTION_REPLICAS_PK', 'collection_replicas',
                           ['scope', 'name', 'rse_id'])
        create_foreign_key('COLLECTION_REPLICAS_LFN_FK', 'collection_replicas',
                           'dids', ['scope', 'name'], ['scope', 'name'])
        create_foreign_key('COLLECTION_REPLICAS_RSE_ID_FK', 'collection_replicas',
                           'rses', ['rse_id'], ['id'])
        create_check_constraint('COLLECTION_REPLICAS_SIZE_NN', 'collection_replicas',
                                'bytes IS NOT NULL')
        create_check_constraint('COLLECTION_REPLICAS_STATE_NN', 'collection_replicas',
                                'state IS NOT NULL')
        create_index('COLLECTION_REPLICAS_RSE_ID_IDX', 'collection_replicas',
                     ['rse_id'])
def downgrade():
    op.create_table(
        'related',
        sa.Column('id', sa.UnicodeText, nullable=False),
        sa.Column('type', sa.UnicodeText, nullable=False),
        sa.Column('title', sa.UnicodeText),
        sa.Column('description', sa.UnicodeText),
        sa.Column('image_url', sa.UnicodeText),
        sa.Column('url', sa.UnicodeText),
        sa.Column('created', sa.TIMESTAMP),
        sa.Column('owner_id', sa.UnicodeText),
        sa.Column('view_count', sa.Integer, nullable=False, server_default='0'),
        sa.Column('featured', sa.Integer, nullable=False, server_default='0')
    )
    op.create_table(
        'related_dataset',
        sa.Column('id', sa.UnicodeText, nullable=False),
        sa.Column('dataset_id', sa.UnicodeText, nullable=False),
        sa.Column('related_id', sa.UnicodeText, nullable=False),
        sa.Column('status', sa.UnicodeText)
    )
    op.create_primary_key('related_pkey', 'related', ['id'])
    op.create_primary_key('related_dataset_pkey', 'related_dataset', ['id'])
    op.create_foreign_key(
        'related_dataset_dataset_id_fkey',
        'related_dataset', 'package',
        ['dataset_id'], ['id']
    )
    op.create_foreign_key(
        'related_dataset_related_id_fkey',
        'related_dataset', 'related',
        ['related_id'], ['id']
    )
def test_create_pk_legacy_kwarg(self):
    context = op_fixture()
    op.create_primary_key(name=None, table_name='sometable',
                          cols=['router_id', 'l3_agent_id'])
    context.assert_(
        "ALTER TABLE sometable ADD PRIMARY KEY (router_id, l3_agent_id)")
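# For contrast, a minimal sketch of the same operation via the current
# positional signature (constraint_name, table_name, columns). The test name
# and the 'pk_sometable' constraint name are hypothetical, carried over from
# the legacy-kwarg test above rather than taken from Alembic's own suite.
def test_create_pk_positional_sketch(self):
    context = op_fixture()
    op.create_primary_key('pk_sometable', 'sometable',
                          ['router_id', 'l3_agent_id'])
    # with an explicit name, Alembic emits ADD CONSTRAINT <name>
    context.assert_(
        "ALTER TABLE sometable ADD CONSTRAINT pk_sometable "
        "PRIMARY KEY (router_id, l3_agent_id)")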
def create_offers_table() -> None:
    op.create_table(
        'user_offers_for_cleanings',
        sa.Column(
            'user_id',  # 'user' is a reserved word in postgres, so going with user_id instead
            sa.Integer,
            sa.ForeignKey('users.id', ondelete='CASCADE'),
            nullable=False,
        ),
        sa.Column(
            'cleaning_id',  # going with cleaning_id for consistency
            sa.Integer,
            sa.ForeignKey('cleanings.id', ondelete='CASCADE'),
            nullable=False,
        ),
        sa.Column('status', sa.Text, nullable=False, server_default='pending'),
        *timestamps(),
    )
    op.create_primary_key('pk_user_offers_for_cleanings',
                          'user_offers_for_cleanings',
                          ['user_id', 'cleaning_id'])
    op.execute("""
        CREATE TRIGGER update_user_offers_for_cleanings_modtime
            BEFORE UPDATE
            ON user_offers_for_cleanings
            FOR EACH ROW
        EXECUTE PROCEDURE update_updated_at_column();
    """)
def upgrade():
    if skip_based_on_legacy_engine_version(op, __name__):
        return

    op.create_table(
        'user_following_group',
        sa.Column('follower_id', sa.UnicodeText, nullable=False),
        sa.Column('object_id', sa.UnicodeText, nullable=False),
        sa.Column('datetime', sa.TIMESTAMP, nullable=False)
    )
    op.create_primary_key(
        'user_following_group_pkey',
        'user_following_group', ['follower_id', 'object_id']
    )
    op.create_foreign_key(
        'user_following_group_user_id_fkey',
        'user_following_group', 'user',
        ['follower_id'], ['id'],
        onupdate='CASCADE', ondelete='CASCADE'
    )
    op.create_foreign_key(
        'user_following_group_group_id_fkey',
        'user_following_group', 'group',
        ['object_id'], ['id'],
        onupdate='CASCADE', ondelete='CASCADE'
    )
def upgrade():
    # n.b. all this does is move the current methods from the species to the
    # default form; I'm just gonna add ones for alternate forms manually and
    # let the CSV reload take care of that
    op.rename_table('pokemon_species_evolution', 'evolution_methods')
    op.add_column(
        'evolution_methods',
        sa.Column('evolved_form_id', sa.Integer,
                  sa.ForeignKey('pokemon_forms.id'))
    )

    subquery = sa.select(
        [pokemon_forms.c.id],
        sa.and_(
            pokemon_forms.c.species_id == evolution_methods.c.evolved_species_id,
            pokemon_forms.c.is_default
        )
    )
    op.execute(evolution_methods.update()
               .values({'evolved_form_id': subquery}))

    op.drop_column('evolution_methods', 'evolved_species_id')
    op.alter_column('evolution_methods', 'evolved_form_id', nullable=False)
    op.create_primary_key(None, 'evolution_methods', ['evolved_form_id'])
def downgrade():
    op.drop_constraint('fk_vfolder_attachment_vfolder_vfolders',
                       'vfolder_attachment', type_='foreignkey')
    op.drop_constraint('fk_vfolder_attachment_kernel_kernels',
                       'vfolder_attachment', type_='foreignkey')
    op.drop_constraint('pk_kernels', 'kernels', type_='primary')
    op.drop_index(op.f('ix_kernels_sess_id'), table_name='kernels')
    op.drop_index(op.f('ix_kernels_sess_id_role'), table_name='kernels')
    op.alter_column('kernels', 'sess_id',
                    existing_type=sa.String(length=64),
                    type_=postgresql.UUID(),
                    nullable=False,
                    existing_server_default=sa.text('uuid_generate_v4()'),
                    postgresql_using='sess_id::uuid')
    op.create_primary_key('pk_kernels', 'kernels', ['sess_id'])
    op.drop_column('kernels', 'id')
    op.drop_column('kernels', 'role')
    op.create_foreign_key('fk_vfolder_attachment_vfolder_vfolders',
                          'vfolder_attachment', 'vfolders',
                          ['vfolder'], ['id'])
    op.create_foreign_key('fk_vfolder_attachment_kernel_kernels',
                          'vfolder_attachment', 'kernels',
                          ['kernel'], ['sess_id'])
def downgrade():
    ''' downgrade method '''
    drop_table('bad_pfns')
    drop_index('BAD_REPLICAS_EXPIRES_AT_IDX', 'bad_replicas')

    if context.get_context().dialect.name != 'sqlite':
        if context.get_context().dialect.name == 'postgresql':
            # For Postgres the ENUM Type needs to be renamed first
            # (identifiers take double quotes, not single quotes)
            op.execute('ALTER TYPE "BAD_REPLICAS_STATE_CHK" '
                       'RENAME TO "BAD_REPLICAS_STATE_CHK_OLD"')  # pylint: disable=no-member
        else:
            drop_constraint('BAD_REPLICAS_STATE_CHK', 'bad_replicas', type_='check')
        create_check_constraint(name='BAD_REPLICAS_STATE_CHK',
                                source='bad_replicas',
                                condition="state in ('B', 'D', 'L', 'R', 'S')")
        if context.get_context().dialect.name == 'postgresql':
            # For Postgres the ENUM Type needs to be changed to the new one
            # and the old one needs to be dropped
            op.execute('ALTER TABLE bad_replicas ALTER COLUMN state '
                       'TYPE "BAD_REPLICAS_STATE_CHK"')  # pylint: disable=no-member
            op.execute('DROP TYPE "BAD_REPLICAS_STATE_CHK_OLD"')  # pylint: disable=no-member
        drop_column('bad_replicas', 'expires_at')
        drop_constraint('BAD_REPLICAS_STATE_PK', 'bad_replicas', type_='primary')
        create_primary_key('BAD_REPLICAS_STATE_PK', 'bad_replicas',
                           ['scope', 'name', 'rse_id', 'created_at'])
def upgrade_data_broker():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user_affiliation',
                  sa.Column('frec_id', sa.Integer(), nullable=True))
    op.add_column('user_affiliation',
                  sa.Column('user_affiliation_id', sa.Integer(),
                            nullable=False, primary_key=True))
    op.create_index(op.f('ix_user_affiliation_cgac_id'), 'user_affiliation',
                    ['cgac_id'], unique=False)
    op.create_index(op.f('ix_user_affiliation_frec_id'), 'user_affiliation',
                    ['frec_id'], unique=False)
    op.create_index(op.f('ix_user_affiliation_user_id'), 'user_affiliation',
                    ['user_id'], unique=False)
    op.create_foreign_key('user_affiliation_frec_fk', 'user_affiliation',
                          'frec', ['frec_id'], ['frec_id'], ondelete='CASCADE')
    op.drop_constraint('user_affiliation_pkey', 'user_affiliation',
                       type_='primary')
    op.create_primary_key('user_affiliation_pkey', 'user_affiliation',
                          ['user_affiliation_id'])
    op.alter_column('user_affiliation', 'cgac_id',
                    existing_type=sa.INTEGER(), nullable=True)
def upgrade(): print "Renaming tables: " print " group_member_role -> member_role" op.rename_table("group_member_role", "member_role", "public") print " group_member -> member" op.rename_table("group_member", "member", "public") print "Reworking table: group_document_assignment -> doc_principal" op.rename_table("group_document_assignment", "doc_principal", "public") op.drop_constraint("group_document_assignment_doc_id_fkey", "doc_principal") op.drop_constraint("group_document_assignment_group_id_fkey", "doc_principal") op.drop_constraint("group_document_assignment_pkey", "doc_principal") op.add_column("doc_principal", sa.Column("activity", sa.Unicode(16), primary_key=True, server_default="group_assignment", nullable=False)), op.add_column("doc_principal", sa.Column("date", sa.DateTime(timezone=False), server_default=sa.sql.text("now()"), nullable=False)), op.alter_column("doc_principal", "group_id", new_column_name="principal_id") op.create_foreign_key("doc_principal_doc_id_fkey", "doc_principal", "doc", ["doc_id"], ["doc_id"]) op.create_foreign_key("doc_principal_principal_id_fkey", "doc_principal", "principal", ["principal_id"], ["principal_id"]) op.create_primary_key("doc_principal_pkey", "doc_principal", ["doc_id", "principal_id", "activity"]) print "Dropping table: user_doc" op.drop_table("user_doc") print "Dropping table: currently_editing_document" op.drop_table("currently_editing_document")
def upgrade(): op.drop_constraint("radio_observation_pkey", "radio_observation") op.create_primary_key("radio_observation_pkey", "radio_observation", ["classroom_id", "observed_at", "relationship_id"]) op.drop_column('radio_observation', 'remote_id') op.drop_column('radio_observation', 'id') op.drop_column('radio_observation', 'local_id')
def upgrade(active_plugins=None, options=None):
    inspector = reflection.Inspector.from_engine(op.get_bind())
    unique_constraints = inspector.get_unique_constraints(
        'gp_policy_target_groups')
    op.drop_constraint('gp_policy_target_groups_ibfk_nsp',
                       'gp_policy_target_groups', 'foreignkey')
    for constraint in unique_constraints:
        if constraint['column_names'] == ['network_service_policy_id']:
            op.drop_constraint(constraint['name'], 'gp_policy_target_groups',
                               'unique')
            break
    op.create_foreign_key('gp_policy_target_groups_ibfk_nsp',
                          source='gp_policy_target_groups',
                          referent='gp_network_service_policies',
                          local_cols=['network_service_policy_id'],
                          remote_cols=['id'])

    with migration.remove_fks_from_table(
            'gpm_service_policy_ipaddress_mappings'):
        op.drop_constraint(
            None,
            table_name='gpm_service_policy_ipaddress_mappings',
            type_='primary')
        op.create_primary_key(
            name='pk_policytargetgroup_servicepolicyid',
            table_name='gpm_service_policy_ipaddress_mappings',
            cols=['policy_target_group', 'service_policy_id'])
def upgrade():
    ''' upgrade method '''
    create_table('sources',
                 sa.Column('request_id', GUID()),
                 sa.Column('scope', sa.String(25)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('rse_id', GUID()),
                 sa.Column('dest_rse_id', GUID()),
                 sa.Column('url', sa.String(2048)),
                 sa.Column('ranking', sa.Integer),
                 sa.Column('bytes', sa.BigInteger),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('SOURCES_PK', 'sources',
                           ['request_id', 'rse_id', 'scope', 'name'])
        create_foreign_key('SOURCES_REQ_ID_FK', 'sources', 'requests',
                           ['request_id'], ['id'])
        create_foreign_key('SOURCES_REPLICAS_FK', 'sources', 'replicas',
                           ['scope', 'name', 'rse_id'],
                           ['scope', 'name', 'rse_id'])
        create_foreign_key('SOURCES_RSES_FK', 'sources', 'rses',
                           ['rse_id'], ['id'])
        create_foreign_key('SOURCES_DST_RSES_FK', 'sources', 'rses',
                           ['dest_rse_id'], ['id'])
        create_check_constraint('SOURCES_CREATED_NN', 'sources',
                                'created_at is not null')
        create_check_constraint('SOURCES_UPDATED_NN', 'sources',
                                'updated_at is not null')
        create_index('SOURCES_SRC_DST_IDX', 'sources',
                     ['rse_id', 'dest_rse_id'])
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(u'industry_index_ibfk_1', 'industry_index',
                       type_='foreignkey')
    op.drop_constraint('PRIMARY', 'industry_index', type_='primary')
    op.drop_column('industry_index', 'date')
    op.create_primary_key("industry_index_pk", "industry_index",
                          ['solarsystem_id', 'activity'])
    op.create_foreign_key("industry_index_ibfk_1", 'industry_index',
                          'solar_system', ['solarsystem_id'], ['id'])
def upgrade():
    ''' Upgrade the database to this revision '''
    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        create_table('sources',
                     sa.Column('request_id', GUID()),
                     sa.Column('scope', sa.String(25)),
                     sa.Column('name', sa.String(255)),
                     sa.Column('rse_id', GUID()),
                     sa.Column('dest_rse_id', GUID()),
                     sa.Column('url', sa.String(2048)),
                     sa.Column('ranking', sa.Integer),
                     sa.Column('bytes', sa.BigInteger),
                     sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
                     sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow,
                               onupdate=datetime.datetime.utcnow))

        create_primary_key('SOURCES_PK', 'sources',
                           ['request_id', 'rse_id', 'scope', 'name'])
        create_foreign_key('SOURCES_REQ_ID_FK', 'sources', 'requests',
                           ['request_id'], ['id'])
        create_foreign_key('SOURCES_REPLICA_FK', 'sources', 'replicas',
                           ['scope', 'name', 'rse_id'],
                           ['scope', 'name', 'rse_id'])
        create_foreign_key('SOURCES_RSES_FK', 'sources', 'rses',
                           ['rse_id'], ['id'])
        create_foreign_key('SOURCES_DST_RSES_FK', 'sources', 'rses',
                           ['dest_rse_id'], ['id'])
        create_check_constraint('SOURCES_CREATED_NN', 'sources',
                                'created_at is not null')
        create_check_constraint('SOURCES_UPDATED_NN', 'sources',
                                'updated_at is not null')
        create_index('SOURCES_SRC_DST_IDX', 'sources',
                     ['rse_id', 'dest_rse_id'])
def upgrade():
    if skip_based_on_legacy_engine_version(op, __name__):
        return

    op.create_table(
        'related',
        sa.Column('id', sa.UnicodeText, nullable=False),
        sa.Column('type', sa.UnicodeText, nullable=False),
        sa.Column('title', sa.UnicodeText),
        sa.Column('description', sa.UnicodeText),
        sa.Column('image_url', sa.UnicodeText),
        sa.Column('url', sa.UnicodeText),
        sa.Column('created', sa.TIMESTAMP),
        sa.Column('owner_id', sa.UnicodeText)
    )
    op.create_table(
        'related_dataset',
        sa.Column('id', sa.UnicodeText, nullable=False),
        sa.Column('dataset_id', sa.UnicodeText, nullable=False),
        sa.Column('related_id', sa.UnicodeText, nullable=False),
        sa.Column('status', sa.UnicodeText)
    )
    op.create_primary_key('related_pkey', 'related', ['id'])
    op.create_primary_key('related_dataset_pkey', 'related_dataset', ['id'])
    op.create_foreign_key(
        'related_dataset_dataset_id_fkey',
        'related_dataset', 'package',
        ['dataset_id'], ['id']
    )
    op.create_foreign_key(
        'related_dataset_related_id_fkey',
        'related_dataset', 'related',
        ['related_id'], ['id']
    )
def downgrade():
    op.drop_constraint('lecture_allocation_criteria_pkey',
                       'lecture_allocation_criteria', type_='primary')
    op.create_primary_key('lecture_allocation_criteria_pkey',
                          'lecture_allocation_criteria',
                          ['lecture', 'criterion'])
def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_table("bike_usage") op.drop_constraint("bike_pkey", "bike") op.add_column( "bike", sa.Column("id", sa.Integer(), autoincrement=True, primary_key=True)) op.add_column( "bike", sa.Column("readable_name", sa.String(length=255), nullable=False)) op.create_primary_key( "pk_bike_id", "bike", [ "id", ], ) op.create_index(op.f("ix_bike_uuid"), "bike", ["uuid"], unique=True) op.create_unique_constraint(None, "bike", ["readable_name"]) op.create_table( "bike_usages", sa.Column("bike_id", sa.BigInteger(), nullable=False), sa.Column("availability_id", sa.BigInteger(), nullable=False), sa.ForeignKeyConstraint( ["availability_id"], ["availability.id"], ), sa.ForeignKeyConstraint( ["bike_id"], ["bike.id"], ), sa.PrimaryKeyConstraint("bike_id", "availability_id"), )
def upgrade():
    if skip_based_on_legacy_engine_version(op, __name__):
        return

    op.add_column('authorization_group_user',
                  sa.Column('id', sa.UnicodeText))
    op.create_primary_key(
        'authorization_group_user_pkey',
        'authorization_group_user', ['id']
    )
def upgrade():
    op.add_column(
        'groups_tenants',
        sa.Column('role_id', sa.Integer()),
    )
    op.create_foreign_key(
        'groups_tenants_role_id_fkey',
        'groups_tenants',
        'roles',
        ['role_id'],
        ['id'],
    )
    op.create_primary_key(
        'groups_tenants_pkey',
        'groups_tenants',
        ['group_id', 'tenant_id'],
    )

    # Define tables with just the columns needed
    # to generate the UPDATE sql expression below
    groups_tenants = sa.table(
        'groups_tenants',
        sa.column('group_id', sa.Integer),
        sa.column('role_id', sa.Integer),
    )
    roles = sa.table(
        'roles',
        sa.column('id', sa.Integer),
        sa.column('name', sa.Text),
    )

    # Set 'user' role as the default for every group in a tenant
    op.execute(groups_tenants.update().values(
        role_id=(sa.select([roles.c.id]).where(roles.c.name == 'user'))))
    op.alter_column('groups_tenants', 'role_id', nullable=False)
def upgrade():
    ''' Upgrade the database to this revision '''
    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        create_table('configs',
                     sa.Column('section', sa.String(128)),
                     sa.Column('opt', sa.String(128)),
                     sa.Column('value', sa.String(4000)),
                     sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
                     sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow,
                               onupdate=datetime.datetime.utcnow))
        create_primary_key('configs_pk', 'configs', ['section', 'opt'])
        create_check_constraint('configs_created_nn', 'configs',
                                'created_at is not null')
        create_check_constraint('configs_updated_nn', 'configs',
                                'updated_at is not null')

        create_table('configs_history',
                     sa.Column('section', sa.String(128)),
                     sa.Column('opt', sa.String(128)),
                     sa.Column('value', sa.String(4000)),
                     sa.Column('created_at', sa.DateTime),
                     sa.Column('updated_at', sa.DateTime))
        create_primary_key('configs_history_pk', 'configs_history',
                           ['section', 'opt', 'updated_at'])
def upgrade():
    if skip_based_on_legacy_engine_version(op, __name__):
        return

    op.create_table(
        'activity',
        sa.Column('id', sa.UnicodeText, nullable=False),
        sa.Column('timestamp', sa.TIMESTAMP),
        sa.Column('user_id', sa.UnicodeText),
        sa.Column('object_id', sa.UnicodeText),
        sa.Column('revision_id', sa.UnicodeText),
        sa.Column('activity_type', sa.UnicodeText),
        sa.Column('data', sa.UnicodeText),
    )
    op.create_table(
        'activity_detail',
        sa.Column('id', sa.UnicodeText, nullable=False),
        sa.Column('activity_id', sa.UnicodeText, nullable=False),
        sa.Column('object_id', sa.UnicodeText),
        sa.Column('object_type', sa.UnicodeText),
        sa.Column('activity_type', sa.UnicodeText),
        sa.Column('data', sa.UnicodeText),
    )
    op.create_primary_key('activity_pkey', 'activity', ['id'])
    op.create_primary_key('activity_detail_pkey', 'activity_detail', ['id'])
    op.create_foreign_key(
        'activity_detail_activity_id_fkey',
        'activity_detail', 'activity',
        ['activity_id'], ['id']
    )
def downgrade():
    ''' downgrade method '''
    if context.get_context().dialect.name not in ('sqlite', 'mysql'):
        drop_constraint('messages_event_type_nn', 'messages', type_='check')
        drop_constraint('messages_payload_nn', 'messages', type_='check')
        drop_constraint('messages_created_nn', 'messages', type_='check')
        drop_constraint('messages_updated_nn', 'messages', type_='check')

    if context.get_context().dialect.name != 'sqlite':
        drop_constraint('messages_pk', 'messages', type_='primary')

    rename_table('messages', 'callbacks')

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('callbacks_pk', 'callbacks', ['id'])
        create_check_constraint('callbacks_event_type_nn', 'callbacks',
                                'event_type is not null')
        create_check_constraint('callbacks_payload_nn', 'callbacks',
                                'payload is not null')
        create_check_constraint('callbacks_created_nn', 'callbacks',
                                'created_at is not null')
        create_check_constraint('callbacks_updated_nn', 'callbacks',
                                'updated_at is not null')
def upgrade():
    ''' Upgrade the database to this revision '''
    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        schema = context.get_context().version_table_schema \
            if context.get_context().version_table_schema else ''
        add_column('dids', sa.Column('closed_at', sa.DateTime), schema=schema)
        add_column('contents_history', sa.Column('deleted_at', sa.DateTime),
                   schema=schema)

        create_table('naming_conventions',
                     sa.Column('scope', sa.String(get_schema_value('SCOPE_LENGTH'))),
                     sa.Column('regexp', sa.String(255)),
                     sa.Column('convention_type', KeyType.db_type()),
                     sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow),
                     sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow,
                               onupdate=datetime.datetime.utcnow))
        create_primary_key('NAMING_CONVENTIONS_PK', 'naming_conventions',
                           ['scope'])
        create_foreign_key('NAMING_CONVENTIONS_SCOPE_FK', 'naming_conventions',
                           'scopes', ['scope'], ['scope'])
        create_check_constraint('NAMING_CONVENTIONS_CREATED_NN',
                                'naming_conventions', 'created_at is not null')
        create_check_constraint('NAMING_CONVENTIONS_UPDATED_NN',
                                'naming_conventions', 'updated_at is not null')
def upgrade():
    bind = op.get_bind()
    engine = bind.engine
    op.add_column(
        ML2_PORT_BINDING,
        sa.Column('status', sa.String(length=16), nullable=False,
                  server_default=constants.PORT_BINDING_STATUS_ACTIVE))
    if (engine.name == MYSQL_ENGINE and not ndb.ndb_status(engine)):
        op.execute("ALTER TABLE ml2_port_bindings DROP PRIMARY KEY,"
                   "ADD PRIMARY KEY(port_id, host);")
    else:
        inspector = insp.from_engine(bind)
        fk_name = utils.get_foreign_key_constraint_name(engine,
                                                        'ml2_port_bindings',
                                                        'port_id')
        op.drop_constraint(fk_name, ML2_PORT_BINDING, type_='foreignkey')
        pk_constraint = inspector.get_pk_constraint(ML2_PORT_BINDING)
        op.drop_constraint(pk_constraint.get('name'), ML2_PORT_BINDING,
                           type_='primary')
        op.create_primary_key(op.f('pk_ml2_port_bindings'), ML2_PORT_BINDING,
                              ['port_id', 'host'])
        op.create_foreign_key(fk_name, ML2_PORT_BINDING, 'ports',
                              ["port_id"], ["id"], ondelete='CASCADE')
def upgrade():
    # rename information -> data
    op.alter_column('projects', 'information', type_=sa.JSON,
                    new_column_name='data')

    # connect to the database
    bind = op.get_bind()
    session = Session(bind=bind)

    # add the new keys as columns
    op.add_column('ci_commits', sa.Column('hexsha', sa.String))
    op.add_column('ci_commits', sa.Column('new_id', sa.Integer))
    op.add_column('batches', sa.Column('ci_commit_new_id', sa.Integer))

    total = 8286  # +/-
    ci_commits = session.query(CiCommitOld)
    for ci_commit_id, ci_commit in enumerate(ci_commits):
        print(f"{100 * ci_commit_id / total:.2f}%")
        ci_commit.hexsha = ci_commit.id
        ci_commit.new_id = ci_commit_id
        for batch in ci_commit.batches:
            batch.ci_commit_new_id = ci_commit_id
    # persist the ORM changes before making hexsha non-nullable
    session.commit()
    op.alter_column('ci_commits', 'hexsha', nullable=False)

    print("remove the old key")
    op.execute('ALTER TABLE ci_commits DROP CONSTRAINT ci_commits_pkey CASCADE')
    op.drop_column('ci_commits', 'id')
    op.drop_column('batches', 'ci_commit_id')

    print("replace the old keys")
    op.alter_column('ci_commits', 'new_id', type_=sa.Integer,
                    new_column_name='id')
    op.alter_column('batches', 'ci_commit_new_id', type_=sa.Integer,
                    new_column_name='ci_commit_id')
    op.create_primary_key('ci_commits_pkey', 'ci_commits', ['id'])
    op.create_foreign_key('fk_batchs_ci_commits_id', 'batches', 'ci_commits',
                          ['ci_commit_id'], ['id'], ondelete='CASCADE')

    print("we need a new index")
    op.drop_index('ix_project_it')
    op.create_index('ci_commits_hexsha_idx', 'ci_commits',
                    ['project_id', 'hexsha'])

    # auto-increment
    op.execute("CREATE SEQUENCE ci_commits_id_seq START WITH 10000 "
               "INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
    op.execute("ALTER TABLE ci_commits ALTER COLUMN id SET DEFAULT "
               "nextval('ci_commits_id_seq'::regclass)")
def upgrade():
    ''' upgrade method '''
    if context.get_context().dialect.name != 'sqlite':
        add_column('collection_replicas',
                   sa.Column('available_replicas_cnt', sa.BigInteger()))
        add_column('collection_replicas',
                   sa.Column('available_bytes', sa.BigInteger()))

    create_table('updated_col_rep',
                 sa.Column('id', GUID()),
                 sa.Column('scope', sa.String(25)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('did_type', DIDType.db_type(name='UPDATED_COL_REP_TYPE_CHK')),
                 sa.Column('rse_id', GUID()),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('UPDATED_COL_REP_PK', 'updated_col_rep', ['id'])
        create_check_constraint('UPDATED_COL_REP_SCOPE_NN', 'updated_col_rep',
                                'scope IS NOT NULL')
        create_check_constraint('UPDATED_COL_REP_NAME_NN', 'updated_col_rep',
                                'name IS NOT NULL')
        create_index('UPDATED_COL_REP_SNR_IDX', 'updated_col_rep',
                     ['scope', 'name', 'rse_id'])
def downgrade():
    # Remove columns from component run association tables
    op.drop_constraint(
        "fk_component_runs_inputs_io_pointers",
        "component_runs_inputs",
        type_="foreignkey",
    )
    op.drop_constraint(
        "fk_component_runs_outputs_io_pointers",
        "component_runs_outputs",
        type_="foreignkey",
    )
    op.drop_column("component_runs_inputs", "input_path_value")
    op.drop_column("component_runs_outputs", "output_path_value")

    # Remove column from iopointer
    op.drop_constraint("io_pointers_pkey", "io_pointers", type_="primary")
    op.drop_column("io_pointers", "value")
    op.create_primary_key("io_pointers_pkey", "io_pointers", ["name"])

    # Add fk constraints
    op.create_foreign_key(
        "component_runs_inputs_input_path_name_fkey",
        "component_runs_inputs",
        "io_pointers",
        ["input_path_name"],
        ["name"],
    )
    op.create_foreign_key(
        "component_runs_outputs_output_path_name_fkey",
        "component_runs_outputs",
        "io_pointers",
        ["output_path_name"],
        ["name"],
    )
def upgrade():
    c = get_context()
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        insp = Inspector.from_engine(c.connection.engine)
        for t in ['groups_permissions', 'groups_resources_permissions',
                  'users_groups', 'resources']:
            for constraint in insp.get_foreign_keys(t):
                if constraint['referred_columns'] == ['group_name']:
                    # note: drop_constraint takes type_, not type
                    op.drop_constraint(constraint['name'], t,
                                       type_='foreignkey')

    op.drop_column('groups', 'id')
    op.alter_column('groups', 'group_name',
                    type_=sa.String(128),
                    existing_type=sa.String(50))
    op.create_primary_key('groups_pkey', 'groups', cols=['group_name'])

    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(None, 'groups_permissions', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['group_name'],
                              onupdate='CASCADE', ondelete='CASCADE')
        op.create_foreign_key(None, 'groups_resources_permissions', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['group_name'],
                              onupdate='CASCADE', ondelete='CASCADE')
        op.create_foreign_key(None, 'users_groups', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['group_name'],
                              onupdate='CASCADE', ondelete='CASCADE')
        op.create_foreign_key(None, 'resources', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['owner_group_name'],
                              onupdate='CASCADE', ondelete='SET NULL')
def upgrade():
    ''' Upgrade the database to this revision '''
    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        create_table('heartbeats',
                     sa.Column('executable', sa.String(512)),
                     sa.Column('hostname', sa.String(128)),
                     sa.Column('pid', sa.Integer(), autoincrement=False),
                     sa.Column('thread_id', sa.BigInteger(), autoincrement=False),
                     sa.Column('thread_name', sa.String(64)),
                     sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
                     sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow,
                               onupdate=datetime.datetime.utcnow))

        create_primary_key('heartbeats_pk', 'heartbeats',
                           ['executable', 'hostname', 'pid', 'thread_id'])
        create_index('HEARTBEATS_UPDATED_AT', 'heartbeats', ['updated_at'])
        create_check_constraint('heartbeats_created_nn', 'heartbeats',
                                'created_at is not null')
        create_check_constraint('heartbeats_updated_nn', 'heartbeats',
                                'updated_at is not null')
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('organisation_ref_name_type_key', 'organisation')
    op.drop_constraint('activity_reporting_org_id_fkey', 'activity',
                       type_="foreignkey")
    op.drop_constraint('participation_organisation_id_fkey', 'participation',
                       type_="foreignkey")
    op.drop_constraint('transaction_provider_org_id_fkey', 'transaction',
                       type_="foreignkey")
    op.drop_constraint('transaction_receiver_org_id_fkey', 'transaction',
                       type_="foreignkey")
    op.add_column('transaction',
                  sa.Column(u'receiver_org_ref', sa.VARCHAR(), nullable=True))
    op.add_column('transaction',
                  sa.Column(u'provider_org_ref', sa.VARCHAR(), nullable=True))
    op.drop_column('transaction', 'receiver_org_id')
    op.drop_column('transaction', 'provider_org_id')
    op.add_column('participation',
                  sa.Column(u'organisation_ref', sa.VARCHAR(), nullable=False))
    op.drop_column('participation', 'organisation_id')
    op.add_column('activity',
                  sa.Column(u'reporting_org_ref', sa.VARCHAR(), nullable=False))
    op.drop_column('activity', 'reporting_org_id')
    op.drop_constraint('organisation_pkey', 'organisation', type_="primary")
    op.drop_column('organisation', 'id')
    op.create_primary_key('organisation_pkey', 'organisation', ['ref'])
    op.create_foreign_key('activity_reporting_org_ref_fkey', 'activity',
                          'organisation', ["reporting_org_ref"], ["ref"])
    op.create_foreign_key('participation_organisation_ref_fkey',
                          'participation', 'organisation',
                          ["organisation_ref"], ["ref"])
    op.create_foreign_key('transaction_provider_org_ref_fkey', 'transaction',
                          'organisation', ["provider_org_ref"], ["ref"])
    op.create_foreign_key('transaction_receiver_org_ref_fkey', 'transaction',
                          'organisation', ["receiver_org_ref"], ["ref"])
def upgrade():
    if driver_name == 'postgresql':
        temp_credits_role.create(op.get_bind(), checkfirst=False)
        op.execute(
            'ALTER TABLE credits ALTER COLUMN role TYPE temp_credits_role'
            ' USING role::text::temp_credits_role'
        )
        old_credits_role.drop(op.get_bind(), checkfirst=False)
        new_credits_role.create(op.get_bind(), checkfirst=False)
        op.execute(
            'ALTER TABLE credits ALTER COLUMN role TYPE credits_role'
            ' USING role::text::credits_role'
        )
        temp_credits_role.drop(op.get_bind(), checkfirst=False)
    else:
        op.alter_column(
            'credits', 'role',
            existing_type=old_credits_role,
            type_=new_credits_role,
        )
    op.drop_constraint('credits_pkey', 'credits', type_='primary')
    op.create_primary_key('credits_pkey', 'credits',
                          ['work_id', 'person_id', 'role'])
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('activity_reporting_org_ref_fkey', 'activity',
                       type_="foreignkey")
    op.drop_constraint('participation_organisation_ref_fkey', 'participation',
                       type_="foreignkey")
    op.drop_constraint('transaction_provider_org_ref_fkey', 'transaction',
                       type_="foreignkey")
    op.drop_constraint('transaction_receiver_org_ref_fkey', 'transaction',
                       type_="foreignkey")
    op.drop_constraint('organisation_pkey', 'organisation', type_="primary")
    op.add_column('organisation',
                  sa.Column('id', sa.Integer(), nullable=False,
                            primary_key=True))
    op.create_primary_key('organisation_pkey', 'organisation', ['id'])
    op.add_column('activity',
                  sa.Column('reporting_org_id', sa.Integer(), nullable=True))
    op.drop_column('activity', u'reporting_org_ref')
    op.add_column('participation',
                  sa.Column('organisation_id', sa.Integer(), nullable=False))
    op.drop_column('participation', u'organisation_ref')
    op.add_column('transaction',
                  sa.Column('provider_org_id', sa.Integer(), nullable=True))
    op.add_column('transaction',
                  sa.Column('receiver_org_id', sa.Integer(), nullable=True))
    op.drop_column('transaction', u'provider_org_ref')
    op.drop_column('transaction', u'receiver_org_ref')
    op.create_foreign_key('activity_reporting_org_id_fkey', 'activity',
                          'organisation', ["reporting_org_id"], ["id"])
    op.create_foreign_key('participation_organisation_id_fkey',
                          'participation', 'organisation',
                          ["organisation_id"], ["id"])
    op.create_foreign_key('transaction_provider_org_id_fkey', 'transaction',
                          'organisation', ["provider_org_id"], ["id"])
    op.create_foreign_key('transaction_receiver_org_id_fkey', 'transaction',
                          'organisation', ["receiver_org_id"], ["id"])
    op.create_unique_constraint("organisation_ref_name_type_key",
                                "organisation", ["ref", "name", "type"])
def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table( "status", sa.Column("user", sa.String(), nullable=False), sa.Column("worker", sa.String(), nullable=False), sa.Column("status", sa.String(), nullable=True), sa.PrimaryKeyConstraint("user", "worker"), ) op.drop_table("payout") op.drop_constraint("block_pkey", "block") op.create_primary_key("block_pkey", "block", ["hash"]) op.create_table( "payout", sa.Column("id", sa.Integer(), nullable=False), sa.Column("blockhash", sa.String(), nullable=True), sa.Column("user", sa.String(), nullable=True), sa.Column("shares", sa.BigInteger(), nullable=True), sa.Column("amount", sa.BigInteger(), nullable=True), sa.Column("transaction_id", sa.String(), nullable=True), sa.ForeignKeyConstraint(["blockhash"], ["block.hash"]), sa.ForeignKeyConstraint(["transaction_id"], ["transaction.txid"]), sa.PrimaryKeyConstraint("id"), sa.UniqueConstraint("user", "blockhash"), sa.CheckConstraint("amount>0", "min_payout_amount"), )
def upgrade():
    # In order to sanitize the data during migration,
    # the current records in the table need to be verified
    # and all the duplicate records which violate the PK
    # constraint need to be removed.
    context = op.get_context()
    if context.bind.dialect.name == 'postgresql':
        op.execute('DELETE FROM %(table)s WHERE id in ('
                   'SELECT %(table)s.id FROM %(table)s LEFT OUTER JOIN '
                   '(SELECT MIN(id) as id, router_id, l3_agent_id '
                   ' FROM %(table)s GROUP BY router_id, l3_agent_id) AS temp '
                   'ON %(table)s.id = temp.id WHERE temp.id is NULL);'
                   % {'table': TABLE_NAME})
    else:
        op.execute('DELETE %(table)s FROM %(table)s LEFT OUTER JOIN '
                   '(SELECT MIN(id) as id, router_id, l3_agent_id '
                   ' FROM %(table)s GROUP BY router_id, l3_agent_id) AS temp '
                   'ON %(table)s.id = temp.id WHERE temp.id is NULL;'
                   % {'table': TABLE_NAME})

    op.drop_column(TABLE_NAME, 'id')
    op.create_primary_key(
        name=PK_NAME,
        table_name=TABLE_NAME,
        cols=['router_id', 'l3_agent_id']
    )
def upgrade(): op.drop_constraint("queues_pkey", "queues") op.execute(CreateSequence(Sequence("queues_id_seq"))) op.add_column("queues", Column("id", Integer, server_default=text("nextval('queues_id_seq'::regclass)"))) op.create_primary_key("queues_pkey", "queues", ["id"]) Email.__table__.create(bind=engine) op.create_table( "queue_notifications", Column("queue_id", Integer, ForeignKey("queues.id")), Column("email_id", Integer, ForeignKey("emails.id")), ) op.add_column("users", Column("email_id", Integer, ForeignKey("emails.id"))) conn = op.get_bind() s = select([text("users.email")]).select_from(text("users")) users = conn.execute(s) table_user = User.__table__ for row in users: ins = Email.__table__.insert().values(address=row["email"]) result_insert_email = conn.execute(ins) upd = ( table_user.update() .values(email_id=result_insert_email.inserted_primary_key[0]) .where(text("users.email = :email")) ) conn.execute(upd, email=row["email"]) op.drop_column("users", "email")
def downgrade():
    op.drop_constraint('location_pkey', 'location', type_='primary')
    op.create_primary_key('location_pkey', 'location', ['id'])
    op.drop_constraint('location_board_id_fkey', 'location',
                       type_='foreignkey')
    op.drop_constraint('location_board_id_name_key', 'location',
                       type_='unique')
    op.create_unique_constraint('location_name_key', 'location', ['name'])
    op.drop_index(op.f('ix_location_board_id'), table_name='location')
    op.drop_column('location', 'board_id')
def upgrade_payment():
    op.create_table(
        'fail',
        sa.Column('report_id', sa.Integer()),
        sa.Column('count', sa.Integer(), nullable=False, index=True),
        sa.Column('create_timestamp', sa.Integer(), nullable=False),
    )
    op.create_primary_key("pk_fail_report_id", "fail", ["report_id"])
def upgrade():
    for table, columns in DESCRIPTIONS:
        op.execute("""CREATE TEMPORARY TABLE tmp_{table} ON COMMIT DROP AS
                          SELECT DISTINCT * FROM {table};
                      TRUNCATE {table};
                      INSERT INTO {table} SELECT * FROM tmp_{table};"""
                   .format(table=table))
        op.create_primary_key('{}_pkey'.format(table), table, columns)
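# For reference, what one pass of the loop above expands to. The entry here is
# hypothetical -- DESCRIPTIONS is defined elsewhere in the real migration, and
# 'artist_credit' with its ['id'] key list is invented for illustration:
#
#   DESCRIPTIONS = [('artist_credit', ['id'])]
#
# would emit, in order:
#
#   CREATE TEMPORARY TABLE tmp_artist_credit ON COMMIT DROP AS
#       SELECT DISTINCT * FROM artist_credit;
#   TRUNCATE artist_credit;
#   INSERT INTO artist_credit SELECT * FROM tmp_artist_credit;
#
# followed by:
#
#   op.create_primary_key('artist_credit_pkey', 'artist_credit', ['id'])
#
# i.e. duplicates are squeezed out via the temp table before the PK, which
# would otherwise reject them, is created.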
def upgrade(): rule_table = sql.table("rule", sa.column("type")) op.execute(rule_table.delete().where(rule_table.c.type != "ban")) op.drop_column("rule", "type") op.rename_table("rule", "ban") op.drop_table("rule_ban") op.drop_constraint("pk_rule", "ban") op.create_primary_key("pk_ban", "ban", ["id"])
def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column("block", sa.Column("merged_type", sa.String(), nullable=True)) op.add_column("bonus_payout", sa.Column("merged_type", sa.String(), nullable=True)) op.add_column("merge_address", sa.Column("merged_coin", sa.String(), nullable=False, primary_key=True)) op.drop_constraint("merge_address_pkey", "merge_address") op.create_primary_key("merge_address_pkey", "merge_address", ["merged_coin", "user"]) op.add_column("payout", sa.Column("merged_type", sa.String(), nullable=True))