def downgrade(migrate_engine):
    """Remove the 'state' column from every dataset-instance table."""
    metadata.bind = migrate_engine
    metadata.reflect()
    for tbl in DATASET_INSTANCE_TABLE_NAMES:
        drop_column('state', tbl, metadata)
def downgrade(migrate_engine):
    """Drop the 'filename_override_metadata' column added by the upgrade."""
    metadata.bind = migrate_engine
    metadata.reflect()
    target = 'job_external_output_metadata'
    drop_column('filename_override_metadata', target, metadata)
def drop_timestamps(metadata, table_name):
    """Remove the create_time and update_time columns from *table_name*."""
    target = Table(table_name, metadata, autoload=True)
    # Drop in the same order the original helper used.
    for column_name in ("create_time", "update_time"):
        drop_column(column_name, target)
def downgrade(migrate_engine):
    """Drop the job.imported column, reversing the upgrade."""
    metadata.bind = migrate_engine
    metadata.reflect()
    job_table = 'job'
    drop_column('imported', job_table, metadata)
def downgrade(migrate_engine):
    """Remove the worker_process.pid column, reversing the upgrade."""
    # This migration builds its own MetaData rather than using a module-level one.
    meta = MetaData()
    meta.bind = migrate_engine
    meta.reflect()
    drop_column('pid', 'worker_process', meta)
def downgrade(migrate_engine):
    """Drop the galaxy_session.last_action column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    session_table = "galaxy_session"
    drop_column("last_action", session_table, metadata)
def downgrade(migrate_engine):
    """Drop the tool_shed_repository.installed_changeset_revision column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    repo_table = 'tool_shed_repository'
    drop_column('installed_changeset_revision', repo_table, metadata)
def downgrade(migrate_engine):
    """Drop the label and uuid columns from the workflow_step table."""
    metadata.bind = metadata.bind if False else migrate_engine
    metadata.reflect()
    # Same drop order as the original: label first, then uuid.
    for column_name in ("label", "uuid"):
        drop_column(column_name, "workflow_step", metadata)
def downgrade(migrate_engine):
    """Drop the stored_workflow.from_path column.

    Fix: every sibling migration in this file calls ``metadata.reflect()``
    after binding and before dropping columns; this one omitted it, leaving
    the shared metadata potentially stale or empty when drop_column runs.
    """
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_column("from_path", "stored_workflow", metadata)
def downgrade(migrate_engine):
    """Drop the page_revision.content_format column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    revision_table = 'page_revision'
    drop_column('content_format', revision_table, metadata)
def downgrade(migrate_engine):
    """Drop the request.notify column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    request_table = 'request'
    drop_column('notify', request_table, metadata)
def downgrade(migrate_engine):
    """Drop the dataset.job_id column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    dataset_table = 'dataset'
    drop_column('job_id', dataset_table, metadata)
def downgrade(migrate_engine):
    """Reverse the security/library upgrade: drop the two foreign keys added
    to metadata_file and history_dataset_association, drop every table the
    upgrade created, then remove the columns and index it added.

    The table-drop order in TABLES matters: dependent association tables are
    dropped before the tables they reference.
    """
    metadata.bind = migrate_engine
    metadata.reflect()
    # NOTE: all new data added in the upgrade method is eliminated here via table drops
    # Drop 1 foreign key constraint from the metadata_file table
    MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    LibraryDatasetDatasetAssociation_table = Table(
        "library_dataset_dataset_association", metadata, autoload=True)
    try:
        cons = ForeignKeyConstraint(
            [MetadataFile_table.c.lda_id],
            [LibraryDatasetDatasetAssociation_table.c.id],
            name='metadata_file_lda_id_fkey')
        # Drop the constraint
        cons.drop()
    except Exception:
        # Best-effort: log and continue so the remaining teardown still runs.
        log.exception(
            "Dropping foreign key constraint 'metadata_file_lda_id_fkey' from table 'metadata_file' failed."
        )
    # Drop 1 foreign key constraint from the history_dataset_association table
    HistoryDatasetAssociation_table = Table("history_dataset_association",
                                            metadata,
                                            autoload=True)
    LibraryDatasetDatasetAssociation_table = Table(
        "library_dataset_dataset_association", metadata, autoload=True)
    try:
        cons = ForeignKeyConstraint(
            [
                HistoryDatasetAssociation_table.c.
                copied_from_library_dataset_dataset_association_id
            ],
            [LibraryDatasetDatasetAssociation_table.c.id],
            name=
            'history_dataset_association_copied_from_library_dataset_da_fkey')
        # Drop the constraint
        cons.drop()
    except Exception:
        # Best-effort: log and continue so the remaining teardown still runs.
        log.exception(
            "Dropping foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' from table 'history_dataset_association' failed."
        )
    # Drop all of the new tables above
    TABLES = [
        UserGroupAssociation_table,
        UserRoleAssociation_table,
        GroupRoleAssociation_table,
        Group_table,
        DatasetPermissions_table,
        LibraryPermissions_table,
        LibraryFolderPermissions_table,
        LibraryDatasetPermissions_table,
        LibraryDatasetDatasetAssociationPermissions_table,
        LibraryItemInfoPermissions_table,
        LibraryItemInfoTemplatePermissions_table,
        DefaultUserPermissions_table,
        DefaultHistoryPermissions_table,
        Role_table,
        LibraryDatasetDatasetInfoAssociation_table,
        LibraryDataset_table,
        LibraryDatasetDatasetAssociation_table,
        LibraryDatasetDatasetInfoTemplateAssociation_table,
        JobExternalOutputMetadata_table,
        Library_table,
        LibraryFolder_table,
        LibraryItemInfoTemplateElement_table,
        LibraryInfoTemplateAssociation_table,
        LibraryFolderInfoTemplateAssociation_table,
        LibraryDatasetInfoTemplateAssociation_table,
        LibraryInfoAssociation_table,
        LibraryFolderInfoAssociation_table,
        LibraryDatasetInfoAssociation_table,
        LibraryItemInfoElement_table,
        LibraryItemInfo_table,
        LibraryItemInfoTemplate_table,
    ]
    for table in TABLES:
        drop_table(table)
    # Drop the index on the Job.state column - changeset 2192
    drop_index('ix_job_state', 'job', 'state', metadata)
    # Drop 1 column from the stored_workflow table - changeset 2328
    drop_column('importable', 'stored_workflow', metadata)
    # Drop 1 column from the metadata_file table
    drop_column('lda_id', 'metadata_file', metadata)
    # Drop 1 column from the history_dataset_association table
    drop_column('copied_from_library_dataset_dataset_association_id',
                HistoryDatasetAssociation_table)
    # Drop 2 columns from the galaxy_user table
    User_table = Table("galaxy_user", metadata, autoload=True)
    drop_column('deleted', User_table)
    drop_column('purged', User_table)
def downgrade(migrate_engine):
    """Drop the dbkey column from both visualization tables."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # Same drop order as the original: base table first, then revisions.
    for viz_table in ('visualization', 'visualization_revision'):
        drop_column('dbkey', viz_table, metadata)
def downgrade(migrate_engine):
    """Drop the galaxy_user.form_values_id column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    user_table = 'galaxy_user'
    drop_column('form_values_id', user_table, metadata)
def downgrade(migrate_engine):
    """Drop the workflow-invocation tables and columns added by the upgrade."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # Remove the tables created by upgrade() first.
    for tbl in TABLES:
        drop_table(tbl)
    # Then strip the columns added to workflow_invocation, in the
    # same order the original performed the drops.
    for column_name in ("state", "scheduler", "uuid", "history_id", "handler"):
        drop_column(column_name, "workflow_invocation", metadata)
    drop_column("action", "workflow_invocation_step", metadata)
def downgrade(migrate_engine):
    """Drop history.importable and the history share-association table."""
    metadata.bind = migrate_engine
    metadata.reflect()
    history_table = 'history'
    drop_column('importable', history_table, metadata)
    drop_table(HistoryUserShareAssociation_table)
def downgrade(migrate_engine):
    """Remove the metadata_file.uuid column, reversing the upgrade."""
    # This migration builds its own MetaData rather than using a module-level one.
    meta = MetaData()
    meta.bind = migrate_engine
    meta.reflect()
    drop_column('uuid', 'metadata_file', meta)
def downgrade(migrate_engine):
    """Drop the history.slug column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    history_table = 'history'
    drop_column('slug', history_table, metadata)
def downgrade(migrate_engine):
    """Drop the interactivetool_entry_point.requires_domain column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    entry_point_table = 'interactivetool_entry_point'
    drop_column('requires_domain', entry_point_table, metadata)
def downgrade(migrate_engine):
    """Drop job_external_output_metadata.is_valid (skipped on SQLite)."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # SQLAlchemy Migrate has a bug when dropping a boolean column in SQLite
    if migrate_engine.name == 'sqlite':
        return
    drop_column("is_valid", "job_external_output_metadata", metadata)
def downgrade(migrate_engine):
    """Drop the cloudauthz.create_time column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    cloudauthz = Table("cloudauthz", metadata, autoload=True)
    drop_column('create_time', cloudauthz)
def upgrade(migrate_engine):
    """Remove the obsolete tool_dependency.installed_changeset_revision column."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    dependency_table = 'tool_dependency'
    drop_column('installed_changeset_revision', dependency_table, metadata)
def downgrade(migrate_engine):
    """Drop the dataset.created_from_basename column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    dataset_table = 'dataset'
    drop_column('created_from_basename', dataset_table, metadata)
def downgrade(migrate_engine):
    """Drop the job.handler column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    job_table = 'job'
    drop_column('handler', job_table, metadata)
def downgrade(migrate_engine):
    """Drop the stored_workflow.slug column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    workflow_table = 'stored_workflow'
    drop_column('slug', workflow_table, metadata)
def downgrade(migrate_engine):
    """Drop the tool_shed_repository.ctx_rev column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    repo_table = 'tool_shed_repository'
    drop_column('ctx_rev', repo_table, metadata)
def downgrade(migrate_engine):
    """Drop the implicitly_converted_dataset_association.ldda_parent_id column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    icda_table = 'implicitly_converted_dataset_association'
    drop_column('ldda_parent_id', icda_table, metadata)
def upgrade(migrate_engine):
    """Rename the 'sequencer' table to 'external_service' and rewire the
    tables that referenced it.

    Steps, in order: rename the table (and its PK sequence on PostgreSQL);
    add and populate sample_dataset.external_service_id; rename the
    sequencer_type_id column via add/copy/drop; create and populate the
    request_type_external_service_association table; finally drop
    request_type.sequencer_id. Statement order matters — later SQL reads
    columns/tables created earlier in this function.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    # rename 'sequencer' table to 'external_service'
    Sequencer_table = Table("sequencer", metadata, autoload=True)
    Sequencer_table.rename('external_service')
    # if running PostgreSQL, rename the primary key sequence too
    if migrate_engine.name in ['postgres', 'postgresql']:
        cmd = "ALTER SEQUENCE sequencer_id_seq RENAME TO external_service_id_seq"
        migrate_engine.execute(cmd)
    # Add 'external_services_id' column to 'sample_dataset' table
    SampleDataset_table = Table("sample_dataset", metadata, autoload=True)
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and a index in SQLite
    if migrate_engine.name != 'sqlite':
        col = Column("external_service_id",
                     Integer,
                     ForeignKey("external_service.id",
                                name='sample_dataset_external_services_id_fk'),
                     index=True)
    else:
        col = Column("external_service_id", Integer, index=True)
    add_column(col,
               SampleDataset_table,
               index_name="ix_sample_dataset_external_service_id")
    # populate the column
    cmd = "SELECT sample_dataset.id, request_type.sequencer_id " \
        + " FROM sample_dataset, sample, request, request_type " \
        + " WHERE sample.id=sample_dataset.sample_id and request.id=sample.request_id and request.request_type_id=request_type.id " \
        + " ORDER BY sample_dataset.id"
    try:
        result = migrate_engine.execute(cmd)
        for r in result:
            sample_dataset_id = int(r[0])
            sequencer_id = int(r[1])
            # NOTE(review): the integer is wrapped in quotes in the SQL
            # ('%i') — most backends coerce it, but verify on all supported
            # databases.
            cmd = "UPDATE sample_dataset SET external_service_id='%i' where id=%i" % (
                sequencer_id, sample_dataset_id)
            migrate_engine.execute(cmd)
    except Exception:
        # Best-effort population: log and continue with the schema changes.
        log.exception("Exception executing SQL command: %s", cmd)
    # rename 'sequencer_type_id' column to 'external_service_type_id' in the table 'external_service'
    # create the column as 'external_service_type_id'
    ExternalServices_table = Table("external_service", metadata, autoload=True)
    col = Column("external_service_type_id", TrimmedString(255))
    add_column(col, ExternalServices_table)
    # populate this new column
    cmd = "UPDATE external_service SET external_service_type_id=sequencer_type_id"
    migrate_engine.execute(cmd)
    # remove the 'sequencer_type_id' column
    drop_column('sequencer_type_id', ExternalServices_table)
    # create 'request_type_external_service_association' table
    RequestTypeExternalServiceAssociation_table = Table(
        "request_type_external_service_association", metadata,
        Column("id", Integer, primary_key=True),
        Column("request_type_id",
               Integer,
               ForeignKey("request_type.id"),
               index=True),
        Column("external_service_id",
               Integer,
               ForeignKey("external_service.id"),
               index=True))
    create_table(RequestTypeExternalServiceAssociation_table)
    # populate 'request_type_external_service_association' table
    cmd = "SELECT id, sequencer_id FROM request_type ORDER BY id ASC"
    result = migrate_engine.execute(cmd)
    results_list = result.fetchall()
    # Proceed only if request_types exists
    for row in results_list:
        request_type_id = row[0]
        sequencer_id = row[1]
        if not sequencer_id:
            # Missing sequencer becomes a SQL NULL literal in the INSERT below.
            sequencer_id = 'null'
        cmd = "INSERT INTO request_type_external_service_association VALUES ( %s, %s, %s )" % (
            nextval(migrate_engine,
                    'request_type_external_service_association'),
            request_type_id, sequencer_id)
        migrate_engine.execute(cmd)
    # drop the 'sequencer_id' column in the 'request_type' table
    drop_column('sequencer_id', 'request_type', metadata)