def upgrade(migrate_engine):
    """Rebuild the workflow invocation tables with the current schema.

    Existing rows are discarded: both tables are dropped and recreated.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Step 1: drop the existing tables — the step table first, since it
    # references workflow_invocation.
    for name in ("workflow_invocation_step", "workflow_invocation"):
        existing = Table(name, metadata, autoload=True)
        drop_table(existing)
        metadata.remove(existing)

    # Step 2: recreate both tables.
    invocation_table = Table(
        "workflow_invocation", metadata,
        Column("id", Integer, primary_key=True),
        Column("create_time", DateTime, default=now),
        Column("update_time", DateTime, default=now, onupdate=now),
        Column("workflow_id", Integer, ForeignKey("workflow.id"), index=True, nullable=False),
    )
    invocation_step_table = Table(
        "workflow_invocation_step", metadata,
        Column("id", Integer, primary_key=True),
        Column("create_time", DateTime, default=now),
        Column("update_time", DateTime, default=now, onupdate=now),
        Column("workflow_invocation_id", Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False),
        Column("workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True, nullable=False),
        Column("job_id", Integer, ForeignKey("job.id"), index=True, nullable=True),
    )
    create_table(invocation_table)
    create_table(invocation_step_table)
def downgrade(migrate_engine):
    """Remove the dynamic-tool columns and then the dynamic_tool table."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # FK columns referencing dynamic_tool must go before the table itself.
    for table_name in ("workflow_step", "job"):
        drop_column("dynamic_tool_id", table_name, metadata)
    drop_table(DynamicTool_table)
def upgrade(migrate_engine):
    """Drop the validation_error table; add validated-state columns to both
    dataset-instance tables instead."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_table(validation_error_table)
    # A Column object may belong to only one Table, so fresh Column instances
    # are constructed inside the loop for each target table.
    for table_name in ("history_dataset_association", "library_dataset_dataset_association"):
        instance_table = Table(table_name, metadata, autoload=True)
        add_column(
            Column('validated_state', TrimmedString(64), default='unknown', server_default="unknown", nullable=False),
            instance_table,
            metadata,
        )
        add_column(Column('validated_state_message', TEXT), instance_table, metadata)
def downgrade(migrate_engine):
    """Revert the history-audit migration.

    Removes the new triggers, copies the newest audited update_time per
    history back onto history.update_time (best effort), drops the audit
    table, and reinstalls the old triggers.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    # drop existing timestamp triggers
    new_triggers.remove(migrate_engine)
    try:
        # update history.update_time with vals from audit table
        # NOTE(review): UPDATE ... FROM syntax — presumably PostgreSQL-only,
        # hence the best-effort try/except; confirm other backends are
        # expected to fall through here.
        put_em_back = """
            UPDATE history h
            SET update_time = a.max_update_time
            FROM (
                SELECT history_id, max(update_time) as max_update_time
                FROM history_audit
                GROUP BY history_id
            ) a
            WHERE h.id = a.history_id
        """
        migrate_engine.execute(put_em_back)
    except Exception:
        # Deliberately non-fatal: losing the restored timestamps is
        # acceptable, aborting the downgrade is not.
        print("Unable to put update_times back")
    # drop audit table
    drop_table(AuditTable)
    # install old timestamp triggers
    old_triggers.install_timestamp_triggers(migrate_engine)
def downgrade(migrate_engine):
    """Drop the page tables and the galaxy_user.username column."""
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_column('username', 'galaxy_user', metadata)
    # Revisions reference pages, so the revision table goes first.
    for table in (PageRevision_table, Page_table):
        drop_table(table)
def downgrade(migrate_engine):
    """Restore the pre-145 workflow_step_connection schema.

    Renames the current connection table aside, recreates the old-style
    table (input_step_id/input_name instead of input_step_input_id), copies
    the rows back by joining through workflow_step_input, then drops the
    renamed table and workflow_step_input.
    """
    metadata.bind = migrate_engine
    NewWorkflowStepConnection_table = Table("workflow_step_connection", metadata, autoload=True)
    # Indexes are dropped before the rename so they cannot clash with the
    # indexes created for the replacement table of the same name.
    for index in NewWorkflowStepConnection_table.indexes:
        index.drop()
    NewWorkflowStepConnection_table.rename("workflow_step_connection_predowngrade145")
    # Try to deregister that table to work around some caching problems it seems.
    NewWorkflowStepConnection_table.deregister()
    metadata._remove_table("workflow_step_connection", metadata.schema)
    metadata.reflect()
    OldWorkflowStepConnection_table = Table(
        "workflow_step_connection", metadata,
        Column("id", Integer, primary_key=True),
        Column("output_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("input_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("output_name", TEXT),
        Column("input_name", TEXT),
        Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
    )
    create_table(OldWorkflowStepConnection_table)
    # Repopulate: each new-style row maps back to an (input step, input name)
    # pair via its workflow_step_input row.
    insert_step_connections_cmd = \
        "INSERT INTO workflow_step_connection (output_step_id, input_step_id, output_name, input_name, input_subworkflow_step_id) " + \
        "SELECT wsc.output_step_id, wsi.workflow_step_id, wsc.output_name, wsi.name, wsc.input_subworkflow_step_id " + \
        "FROM workflow_step_connection_predowngrade145 AS wsc JOIN workflow_step_input AS wsi ON wsc.input_step_input_id = wsi.id ORDER BY wsc.id"
    migrate_engine.execute(insert_step_connections_cmd)
    for table in (NewWorkflowStepConnection_table, WorkflowStepInput_table):
        drop_table(table)
def downgrade(migrate_engine):
    """Drop the HDA column added by this migration, then the new tables."""
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_column('hidden_beneath_collection_instance_id', 'history_dataset_association', metadata)
    # Tables go in reverse creation order to respect FK dependencies.
    for new_table in reversed(TABLES):
        drop_table(new_table)
def downgrade(migrate_engine):
    """Drop the extended-metadata LDDA column, then the new tables."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # TODO: Dropping a column used in a foreign key fails in MySQL, need to remove the FK first.
    drop_column('extended_metadata_id', 'library_dataset_dataset_association', metadata)
    # Tables go in reverse creation order to respect FK dependencies.
    for new_table in reversed(TABLES):
        drop_table(new_table)
def downgrade(migrate_engine):
    """Remove every rating association table added by this migration."""
    metadata.bind = migrate_engine
    metadata.reflect()
    rating_tables = (
        VisualizationRatingAssociation_table,
        PageRatingAssociation_table,
        StoredWorkflowRatingAssociation_table,
        HistoryDatasetAssociationRatingAssociation_table,
        HistoryRatingAssociation_table,
    )
    for rating_table in rating_tables:
        drop_table(rating_table)
def downgrade(migrate_engine):
    """Drop the implicit-output association table and the populated-state
    columns on dataset_collection."""
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_table(JobToImplicitOutputDatasetCollectionAssociation_table)
    collection_table = Table("dataset_collection", metadata, autoload=True)
    for column_name in ('populated_state', 'populated_state_message'):
        drop_column(column_name, collection_table)
def downgrade(migrate_engine):
    """Drop the new tables and the populated-state columns on
    dataset_collection."""
    metadata.bind = migrate_engine
    metadata.reflect()
    for new_table in TABLES:
        drop_table(new_table)
    collection_table = Table("dataset_collection", metadata, autoload=True)
    for column_name in ('populated_state', 'populated_state_message'):
        drop_column(column_name, collection_table)
def upgrade(migrate_engine):
    """Remove the transfer-job column and table.

    Each drop is guarded separately: the original single try/except meant a
    failure while dropping the genome_index_tool_data column silently skipped
    the transfer_job table drop, and the log message blamed the table drop
    either way. Both steps remain best-effort.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    try:
        # Drop the FK column first so the table drop is not blocked by it.
        drop_column(transfer_job_id.name, 'genome_index_tool_data', metadata)
    except Exception:
        log.exception("Dropping transfer_job_id column from genome_index_tool_data failed")
    try:
        drop_table(TransferJob_table)
    except Exception:
        log.exception("Dropping transfer_job table failed")
def downgrade(migrate_engine):
    """Rename external_service back to sequencer and restore the old schema.

    Steps: rename the table (and its PK sequence on PostgreSQL), re-add
    request_type.sequencer_id and repopulate it from the association table,
    drop that association table, recreate sequencer_type_id from
    external_service_type_id, and drop the columns the upgrade added.
    """
    metadata.bind = migrate_engine
    metadata.reflect()
    # SQLite does not always update foreign key constraints when the target
    # table is renamed, so we start with the table rename.
    # rename the 'external_service' table to 'sequencer'
    ExternalServices_table = Table("external_service", metadata, autoload=True)
    ExternalServices_table.rename('sequencer')
    # if running PostgreSQL, rename the primary key sequence too
    if migrate_engine.name in ['postgres', 'postgresql']:
        cmd = "ALTER SEQUENCE external_service_id_seq RENAME TO sequencer_id_seq"
        migrate_engine.execute(cmd)
    # create the 'sequencer_id' column in the 'request_type' table
    col = Column("sequencer_id", Integer, ForeignKey("sequencer.id"), nullable=True)
    add_column(col, 'request_type', metadata)
    # populate 'sequencer_id' column in the 'request_type' table from the
    # 'request_type_external_service_association' table
    cmd = "SELECT request_type_id, external_service_id FROM request_type_external_service_association ORDER BY id ASC"
    result = migrate_engine.execute(cmd)
    results_list = result.fetchall()
    # NOTE(review): if a request_type has several association rows, later
    # rows overwrite earlier ones (ordered by id ASC), so the highest-id
    # association wins — presumably intentional; confirm.
    for row in results_list:
        request_type_id = row[0]
        external_service_id = row[1]
        # Values come from the database itself (integers), not user input,
        # so the %i interpolation is safe here.
        cmd = "UPDATE request_type SET sequencer_id=%i WHERE id=%i" % (external_service_id, request_type_id)
        migrate_engine.execute(cmd)
    # remove the 'request_type_external_service_association' table
    RequestTypeExternalServiceAssociation_table = Table("request_type_external_service_association", metadata, autoload=True)
    drop_table(RequestTypeExternalServiceAssociation_table)
    # rename 'external_service_type_id' column to 'sequencer_type_id' in the table 'sequencer'
    # create the column 'sequencer_type_id'
    Sequencer_table = Table("sequencer", metadata, autoload=True)
    col = Column("sequencer_type_id", TrimmedString(255))  # should also have nullable=False
    add_column(col, Sequencer_table, metadata)
    # populate this new column
    cmd = "UPDATE sequencer SET sequencer_type_id=external_service_type_id"
    migrate_engine.execute(cmd)
    # remove the 'external_service_type_id' column
    drop_column('external_service_type_id', Sequencer_table)
    # drop the 'external_service_id' column in the 'sample_dataset' table
    drop_column('external_service_id', 'sample_dataset', metadata)
def downgrade(migrate_engine):
    """Strip the sharing/publishing columns from visualization, then drop
    the new tables."""
    metadata.bind = migrate_engine
    metadata.reflect()
    visualization = Table("visualization", metadata, autoload=True)
    for column_name in ('deleted', 'importable', 'slug', 'published'):
        drop_column(column_name, visualization)
    for new_table in TABLES:
        drop_table(new_table)
def downgrade(migrate_engine):
    """Reverse the library-output job additions: index, column, then table."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # Remove the ix_dataset_state index from dataset.state.
    drop_index('ix_dataset_state', 'dataset', 'state', metadata)
    # Remove job.library_folder_id.
    drop_column('library_folder_id', 'job', metadata)
    # Remove the job-to-output-library-dataset association table last.
    drop_table(JobToOutputLibraryDataset_table)
def downgrade(migrate_engine):
    """Remove the added columns, then drop the new tables in reverse order."""
    metadata.bind = migrate_engine
    metadata.reflect()
    column_removals = (
        ("implicit_collection_jobs_id", "history_dataset_collection_association"),
        ("job_id", "history_dataset_collection_association"),
        ("implicit_collection_jobs_id", "workflow_invocation_step"),
        ("state", "workflow_invocation_step"),
        ("element_count", "dataset_collection"),
    )
    for column_name, table_name in column_removals:
        drop_column(column_name, table_name, metadata)
    for new_table in reversed(get_new_tables()):
        drop_table(new_table)
def downgrade(migrate_engine):
    """Drop the new tables, then the columns added to the invocation tables."""
    metadata.bind = migrate_engine
    metadata.reflect()
    for new_table in TABLES:
        drop_table(new_table)
    for column_name in ("state", "scheduler", "uuid", "history_id", "handler"):
        drop_column(column_name, "workflow_invocation", metadata)
    drop_column("action", "workflow_invocation_step", metadata)
def upgrade(migrate_engine):
    """Replace the original library_item_info tables with the new template
    tables.

    NOTE: all existing library item info template data is eliminated here
    via the table drops.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    for old_name in OLD_TABLE_NAMES:
        drop_table(old_name, metadata)
    for new_table in NEW_TABLES:
        create_table(new_table)
def downgrade(migrate_engine):
    """Remove the subworkflow-related columns, then the new tables."""
    metadata.bind = migrate_engine
    metadata.reflect()
    column_removals = (
        ("subworkflow_id", "workflow_step"),
        ("parent_workflow_id", "workflow"),
        ("input_subworkflow_step_id", "workflow_step_connection"),
        ("label", "workflow_output"),
        ("uuid", "workflow_output"),
    )
    for column_name, table_name in column_removals:
        drop_column(column_name, table_name, metadata)
    for new_table in TABLES:
        drop_table(new_table)
def downgrade(migrate_engine):
    """Drop the tagging tables; association tables go before the tag table
    they reference."""
    metadata.bind = migrate_engine
    metadata.reflect()
    tag_tables = (
        HistoryDatasetAssociationTagAssociation_table,
        DatasetTagAssociation_table,
        HistoryTagAssociation_table,
        Tag_table,
    )
    for tag_table in tag_tables:
        drop_table(tag_table)
def downgrade(migrate_engine):
    """Best-effort drop of the HDA column, then drop the new tables."""
    metadata.bind = migrate_engine
    metadata.reflect()
    try:
        hda = Table("history_dataset_association", metadata, autoload=True)
        hda.c.hidden_beneath_collection_instance_id.drop()
    except Exception:
        # Non-fatal: keep going so the table drops below still run.
        log.exception("Dropping HDA column failed.")
    # Tables go in reverse creation order to respect FK dependencies.
    for new_table in reversed(TABLES):
        drop_table(new_table)
def downgrade(migrate_engine):
    """Drop the workflow tag tables and the per-user tag columns."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # Remove the two workflow tag association tables.
    drop_table(WorkflowTagAssociation_table)
    drop_table(StoredWorkflowTagAssociation_table)
    # Remove the user_id column from each remaining tag association table.
    for table_name in (
        'page_tag_association',
        'history_dataset_association_tag_association',
        'history_tag_association',
    ):
        drop_column('user_id', table_name, metadata)
def upgrade(migrate_engine):
    """Introduce workflow_step_input and rework workflow_step_connection.

    The existing connection table is renamed aside, a replacement pointing
    at workflow_step_input rows is created, and the old rows are migrated
    across with raw SQL before the renamed table is dropped.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    OldWorkflowStepConnection_table = Table("workflow_step_connection", metadata, autoload=True)
    # FK constraints and indexes are dropped up front so they cannot clash
    # with those created for the replacement table of the same name.
    for fkc in OldWorkflowStepConnection_table.foreign_key_constraints:
        mfkc = MigrateForeignKeyConstraint([_.parent for _ in fkc.elements], [_.column for _ in fkc.elements], name=fkc.name)
        try:
            # Best effort — some backends (e.g. SQLite) may not support this.
            mfkc.drop()
        except Exception:
            log.exception("Dropping foreign key constraint '%s' from table '%s' failed", mfkc.name, OldWorkflowStepConnection_table)
    for index in OldWorkflowStepConnection_table.indexes:
        drop_index(index, OldWorkflowStepConnection_table)
    OldWorkflowStepConnection_table.rename("workflow_step_connection_preupgrade145")
    # Try to deregister that table to work around some caching problems it seems.
    OldWorkflowStepConnection_table.deregister()
    metadata._remove_table("workflow_step_connection", metadata.schema)
    metadata.reflect()
    NewWorkflowStepConnection_table = Table(
        "workflow_step_connection", metadata,
        Column("id", Integer, primary_key=True),
        Column("output_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("input_step_input_id", Integer, ForeignKey("workflow_step_input.id"), index=True),
        Column("output_name", TEXT),
        Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
    )
    for table in (WorkflowStepInput_table, NewWorkflowStepConnection_table):
        create_table(table)
    # One workflow_step_input row per distinct (step, input name) pair.
    insert_step_inputs_cmd = \
        "INSERT INTO workflow_step_input (workflow_step_id, name) " + \
        "SELECT DISTINCT input_step_id, input_name FROM workflow_step_connection_preupgrade145"
    migrate_engine.execute(insert_step_inputs_cmd)
    # Re-link each old connection to its new workflow_step_input row by
    # matching on (step id, input name).
    insert_step_connections_cmd = \
        "INSERT INTO workflow_step_connection (output_step_id, input_step_input_id, output_name, input_subworkflow_step_id) " + \
        "SELECT wsc.output_step_id, wsi.id, wsc.output_name, wsc.input_subworkflow_step_id " + \
        "FROM workflow_step_connection_preupgrade145 AS wsc JOIN workflow_step_input AS wsi ON wsc.input_step_id = wsi.workflow_step_id AND wsc.input_name = wsi.name ORDER BY wsc.id"
    migrate_engine.execute(insert_step_connections_cmd)
    drop_table(OldWorkflowStepConnection_table)
def downgrade(migrate_engine):
    """Drop the run-related tables; associations go before the run table."""
    metadata.bind = migrate_engine
    metadata.reflect()
    for run_table in (SampleRunAssociation_table, RequestTypeRunAssociation_table, Run_table):
        drop_table(run_table)
def downgrade(migrate_engine):
    """Remove the task table added by this migration."""
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_table(Task_table)
def downgrade(migrate_engine):
    """Remove the tool_dependency table added by this migration."""
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_table(ToolDependency_table)
def upgrade(migrate_engine):
    """Remove all of the legacy cloud tables, dependents first."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    cloud_tables = (
        CloudSnapshot_table,
        CloudStore_table,
        CloudInstance_table,
        UCI_table,
        CloudImage_table,
        CloudUserCredentials_table,
        CloudProvider_table,
    )
    for cloud_table in cloud_tables:
        drop_table(cloud_table)
def downgrade(migrate_engine):
    """Drop every table created by this migration, newest first so FK
    dependencies are respected."""
    metadata.bind = migrate_engine
    metadata.reflect()
    for created_table in reversed(tables):
        drop_table(created_table)
def downgrade(migrate_engine):
    """Remove the genome_index_tool_data table added by this migration."""
    metadata.bind = migrate_engine
    metadata.reflect()
    drop_table(GenomeIndexToolData_table)
def downgrade(migrate_engine):
    """Drop every table added by this migration, in creation order."""
    metadata.bind = migrate_engine
    metadata.reflect()
    for created_table in TABLES:
        drop_table(created_table)