def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in tables:
        create_table(table)
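
All of these snippets lean on a handful of shared helpers. A minimal sketch of what create_table presumably looks like, assuming it wraps Table.create() with error logging so a failed DDL statement doesn't abort the whole migration (the real helper may differ):

import logging

log = logging.getLogger(__name__)


def create_table(table):
    """Sketch: create ``table``, logging instead of raising on failure."""
    try:
        table.create()
    except Exception:
        log.exception("Creating table '%s' failed.", table.name)
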
Example #2
def downgrade(migrate_engine):
    metadata.bind = migrate_engine

    NewWorkflowStepConnection_table = Table("workflow_step_connection", metadata, autoload=True)
    for index in NewWorkflowStepConnection_table.indexes:
        index.drop()
    NewWorkflowStepConnection_table.rename("workflow_step_connection_predowngrade145")
    # Try to deregister the table to work around what appear to be metadata caching problems.
    NewWorkflowStepConnection_table.deregister()
    metadata._remove_table("workflow_step_connection", metadata.schema)
    metadata.reflect()

    OldWorkflowStepConnection_table = Table(
        "workflow_step_connection", metadata,
        Column("id", Integer, primary_key=True),
        Column("output_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("input_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("output_name", TEXT),
        Column("input_name", TEXT),
        Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
    )
    create_table(OldWorkflowStepConnection_table)

    insert_step_connections_cmd = \
        "INSERT INTO workflow_step_connection (output_step_id, input_step_id, output_name, input_name, input_subworkflow_step_id) " + \
        "SELECT wsc.output_step_id, wsi.workflow_step_id, wsc.output_name, wsi.name, wsc.input_subworkflow_step_id " + \
        "FROM workflow_step_connection_predowngrade145 AS wsc JOIN workflow_step_input AS wsi ON wsc.input_step_input_id = wsi.id ORDER BY wsc.id"
    migrate_engine.execute(insert_step_connections_cmd)

    for table in (NewWorkflowStepConnection_table, WorkflowStepInput_table):
        drop_table(table)
Example #3
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    History_column = Column("history_id", Integer, ForeignKey("history.id"), nullable=True)
    State_column = Column("state", TrimmedString(64))

    # TODO: Handle indexes correctly
    SchedulerId_column = Column("scheduler", TrimmedString(255))
    HandlerId_column = Column("handler", TrimmedString(255))
    WorkflowUUID_column = Column("uuid", UUIDType, nullable=True)
    add_column(History_column, "workflow_invocation", metadata)
    add_column(State_column, "workflow_invocation", metadata)
    add_column(SchedulerId_column, "workflow_invocation", metadata, index_name="id_workflow_invocation_scheduler")
    add_column(HandlerId_column, "workflow_invocation", metadata, index_name="id_workflow_invocation_handler")
    add_column(WorkflowUUID_column, "workflow_invocation", metadata)

    # All previous invocations have been scheduled...
    cmd = "UPDATE workflow_invocation SET state = 'scheduled'"
    try:
        migrate_engine.execute(cmd)
    except Exception:
        log.exception("failed to update past workflow invocation states.")

    WorkflowInvocationStepAction_column = Column("action", JSONType, nullable=True)
    add_column(WorkflowInvocationStepAction_column, "workflow_invocation_step", metadata)
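
add_column above takes either a table name or a Table object, plus optional index_name/unique_name keywords. A hedged sketch (reusing the log from the first sketch), assuming it delegates to SQLAlchemy Migrate's changeset Column.create(), which accepts those keywords:

from sqlalchemy import Table


def add_column(column, table, metadata, **kwds):
    """Sketch: attach ``column`` to ``table`` (a Table or a table name)."""
    if isinstance(table, str):
        table = Table(table, metadata, autoload=True)
    try:
        # SQLAlchemy Migrate's Column.create() understands index_name= and unique_name=.
        column.create(table, **kwds)
    except Exception:
        log.exception("Adding column '%s' to table '%s' failed.", column.name, table.name)
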
Example #4
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # 1) Drop
    for table_name in ["workflow_invocation_step", "workflow_invocation"]:
        t = Table(table_name, metadata, autoload=True)
        drop_table(t)
        metadata.remove(t)

    # 2) Re-add
    WorkflowInvocation_table = Table("workflow_invocation", metadata,
                                     Column("id", Integer, primary_key=True),
                                     Column("create_time", DateTime, default=now),
                                     Column("update_time", DateTime, default=now, onupdate=now),
                                     Column("workflow_id", Integer, ForeignKey("workflow.id"), index=True, nullable=False))

    WorkflowInvocationStep_table = Table("workflow_invocation_step", metadata,
                                         Column("id", Integer, primary_key=True),
                                         Column("create_time", DateTime, default=now),
                                         Column("update_time", DateTime, default=now, onupdate=now),
                                         Column("workflow_invocation_id", Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False),
                                         Column("workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True, nullable=False),
                                         Column("job_id", Integer, ForeignKey("job.id"), index=True, nullable=True))

    for table in [WorkflowInvocation_table, WorkflowInvocationStep_table]:
        create_table(table)
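
drop_table is called both with Table objects (as here) and, in a later example, with bare table names; a sketch under that assumption:

from sqlalchemy import Table


def drop_table(table, metadata=None):
    """Sketch: drop a table given a Table object or its name."""
    if isinstance(table, str):
        table = Table(table, metadata, autoload=True)
    try:
        table.drop()
    except Exception:
        log.exception("Dropping table '%s' failed.", table.name)
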
Example #5
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    if migrate_engine.name in ['postgres', 'postgresql']:
        subworkflow_id_column = Column("subworkflow_id", Integer, ForeignKey("workflow.id"), nullable=True)
        input_subworkflow_step_id_column = Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), nullable=True)
        parent_workflow_id_column = Column("parent_workflow_id", Integer, ForeignKey("workflow.id"), nullable=True)
    else:
        subworkflow_id_column = Column("subworkflow_id", Integer, nullable=True)
        input_subworkflow_step_id_column = Column("input_subworkflow_step_id", Integer, nullable=True)
        parent_workflow_id_column = Column("parent_workflow_id", Integer, nullable=True)
    add_column(subworkflow_id_column, "workflow_step", metadata)
    add_column(input_subworkflow_step_id_column, "workflow_step_connection", metadata)
    add_column(parent_workflow_id_column, "workflow", metadata)
    workflow_output_label_column = Column("label", TrimmedString(255))
    workflow_output_uuid_column = Column("uuid", UUIDType, nullable=True)
    add_column(workflow_output_label_column, "workflow_output", metadata)
    add_column(workflow_output_uuid_column, "workflow_output", metadata)

    # Make stored_workflow_id nullable, since now workflows can belong to either
    # a stored workflow or a parent workflow.
    alter_column("stored_workflow_id", "workflow", metadata, nullable=True)

    for table in TABLES:
        # Indexes are automatically created when the tables are.
        create_table(table)
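
The branch above that only attaches ForeignKey objects on PostgreSQL presumably sidesteps SQLAlchemy Migrate's trouble adding foreign-keyed columns on SQLite, a bug a later example calls out explicitly. alter_column then loosens the NOT NULL constraint in place; a sketch, assuming the helper resolves the table name and calls the .alter() method Migrate patches onto columns:

from sqlalchemy import Table


def alter_column(column_name, table, metadata, **kwds):
    """Sketch: alter an existing column in place, e.g. nullable=True."""
    if isinstance(table, str):
        table = Table(table, metadata, autoload=True)
    try:
        getattr(table.c, column_name).alter(**kwds)
    except Exception:
        log.exception("Modifying column '%s' of table '%s' failed.", column_name, table.name)
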
Example #6
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    # Add columns & create indices for supporting sharing to visualization table.
    Visualization_table = Table("visualization", metadata, autoload=True)
    deleted_column = Column("deleted", Boolean, default=False, index=True)
    add_column(deleted_column,
               Visualization_table,
               metadata,
               index_name="ix_visualization_deleted")
    try:
        # Fill column with default value.
        cmd = "UPDATE visualization SET deleted = %s" % engine_false(
            migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Updating column 'deleted' of table 'visualization' failed.")

    importable_column = Column("importable",
                               Boolean,
                               default=False,
                               index=True)
    add_column(importable_column,
               Visualization_table,
               metadata,
               index_name='ix_visualization_importable')
    try:
        # Fill column with default value.
        cmd = "UPDATE visualization SET importable = %s" % engine_false(
            migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Updating column 'importable' of table 'visualization' failed.")

    slug_column = Column("slug", TEXT)
    add_column(slug_column, Visualization_table, metadata)
    # Index needs to be added separately because MySQL cannot index a TEXT/BLOB
    # column without specifying mysql_length
    add_index('ix_visualization_slug', Visualization_table, 'slug')

    published_column = Column("published", Boolean, index=True)
    add_column(published_column,
               Visualization_table,
               metadata,
               index_name='ix_visualization_published')
    try:
        # Fill column with default value.
        cmd = "UPDATE visualization SET published = %s" % engine_false(
            migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Updating column 'published' of table 'visualization' failed.")
Example #7
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    try:
        if migrate_engine.name == 'mysql':
            # Strip slug index prior to creation so we can do it manually.
            slug_index = None
            for ix in Page_table.indexes:
                if ix.name == 'ix_page_slug':
                    slug_index = ix
            if slug_index is not None:
                Page_table.indexes.remove(slug_index)
        Page_table.create()
        if migrate_engine.name == 'mysql':
            # Create slug index manually afterward.
            i = Index("ix_page_slug", Page_table.c.slug, mysql_length=200)
            i.create()
    except Exception:
        log.exception("Could not create page table")
    create_table(PageRevision_table)

    col = Column('username',
                 String(255),
                 index=True,
                 unique=True,
                 default=False)
    add_column(col,
               'galaxy_user',
               metadata,
               index_name='ix_galaxy_user_username',
               unique_name='username')
Example #8
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(RepositoryDependency_table)
    create_table(RepositoryRepositoryDependencyAssociation_table)
Example #9
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    if migrate_engine.name in ['postgres', 'postgresql']:
        workflow_dynamic_tool_id_column = Column("dynamic_tool_id",
                                                 Integer,
                                                 ForeignKey("dynamic_tool.id"),
                                                 nullable=True)
        job_workflow_dynamic_tool_id_column = Column(
            "dynamic_tool_id",
            Integer,
            ForeignKey("dynamic_tool.id"),
            nullable=True)
    else:
        workflow_dynamic_tool_id_column = Column("dynamic_tool_id",
                                                 Integer,
                                                 nullable=True)
        job_workflow_dynamic_tool_id_column = Column("dynamic_tool_id",
                                                     Integer,
                                                     nullable=True)

    add_column(workflow_dynamic_tool_id_column, "workflow_step", metadata)
    add_column(job_workflow_dynamic_tool_id_column, "job", metadata)
Example #10
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in get_new_tables():
        create_table(table)

    # Set default for creation to scheduled, actual mapping has new as default.
    workflow_invocation_step_state_column = Column("state", TrimmedString(64), default="scheduled")
    if migrate_engine.name in ['postgres', 'postgresql']:
        implicit_collection_jobs_id_column = Column("implicit_collection_jobs_id", Integer, ForeignKey("implicit_collection_jobs.id"), nullable=True)
        job_id_column = Column("job_id", Integer, ForeignKey("job.id"), nullable=True)
    else:
        implicit_collection_jobs_id_column = Column("implicit_collection_jobs_id", Integer, nullable=True)
        job_id_column = Column("job_id", Integer, nullable=True)
    dataset_collection_element_count_column = Column("element_count", Integer, nullable=True)

    add_column(implicit_collection_jobs_id_column, "history_dataset_collection_association", metadata)
    add_column(job_id_column, "history_dataset_collection_association", metadata)
    add_column(dataset_collection_element_count_column, "dataset_collection", metadata)

    implicit_collection_jobs_id_column = Column("implicit_collection_jobs_id", Integer, ForeignKey("implicit_collection_jobs.id"), nullable=True)
    add_column(implicit_collection_jobs_id_column, "workflow_invocation_step", metadata)
    add_column(workflow_invocation_step_state_column, "workflow_invocation_step", metadata)

    cmd = \
        "UPDATE dataset_collection SET element_count = " + \
        "(SELECT (CASE WHEN count(*) > 0 THEN count(*) ELSE 0 END) FROM dataset_collection_element WHERE " + \
        "dataset_collection_element.dataset_collection_id = dataset_collection.id)"
    migrate_engine.execute(cmd)
Example #11
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(HistoryUserShareAssociation_table)
    col = Column('importable', Boolean, index=True, default=False)
    add_column(col, 'history', metadata, index_name='ix_history_importable')
Example #12
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()

    try:
        create_table(TransferJob_table)
        add_column(transfer_job_id, 'genome_index_tool_data', metadata, index_name='ix_genome_index_tool_data_transfer_job_id')
    except Exception:
        log.exception("Creating transfer_job table failed")
Example #13
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(Page_table)
    create_table(PageRevision_table)

    col = Column('username', String(255), index=True, unique=True, default=False)
    add_column(col, 'galaxy_user', metadata, index_name='ix_galaxy_user_username', unique_name='username')
Example #14
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(MigrateTools_table)
    try:
        cmd = "INSERT INTO migrate_tools VALUES ('GalaxyTools', 'lib/galaxy/tool_shed/migrate', %d)" % 1
        migrate_engine.execute(cmd)
    except Exception:
        log.exception("Inserting into table 'migrate_tools' failed.")
Example #15
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    # TODO: Find a better name for this column...
    HiddenBeneathCollection_column = Column("hidden_beneath_collection_instance_id", Integer, ForeignKey("history_dataset_collection_association.id"), nullable=True)
    add_column(HiddenBeneathCollection_column, 'history_dataset_association', metadata)
Example #16
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Drop all of the original library_item_info tables
    # NOTE: all existing library item info template data is eliminated here via table drops
    for table_name in OLD_TABLE_NAMES:
        drop_table(table_name, metadata)

    # Create all new tables above
    for table in NEW_TABLES:
        create_table(table)
Example #17
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)
    extended_metadata_ldda_col = Column("extended_metadata_id",
                                        Integer,
                                        ForeignKey("extended_metadata.id"),
                                        nullable=True)
    add_column(extended_metadata_ldda_col,
               'library_dataset_dataset_association', metadata)
Example #18
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(HistoryRatingAssociation_table)

    # Create history_dataset_association_rating_association table.
    try:
        HistoryDatasetAssociationRatingAssociation_table.create()
    except Exception as e:
        # MySQL cannot handle long index names; when we see this error, create the index name manually.
        if migrate_engine.name == 'mysql' and \
                "identifier name 'ix_history_dataset_association_rating_association_history_dataset_association_id' is too long" in str(e).lower():
            add_index('ix_hda_rating_association_hda_id',
                      HistoryDatasetAssociationRatingAssociation_table,
                      'history_dataset_association_id')
        else:
            log.exception(
                "Creating history_dataset_association_rating_association table failed."
            )

    create_table(StoredWorkflowRatingAssociation_table)
    create_table(PageRatingAssociation_table)
    create_table(VisualizationRatingAssociation_table)
Example #19
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create the job_to_output_library_dataset table
    create_table(JobToOutputLibraryDataset_table)

    # Create the library_folder_id column
    col = Column("library_folder_id", Integer, ForeignKey('library_folder.id', name='job_library_folder_id_fk'), index=True)
    add_column(col, 'job', metadata, index_name='ix_job_library_folder_id')

    # Create the ix_dataset_state index
    add_index('ix_dataset_state', 'dataset', 'state', metadata)
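
add_index appears in two shapes: add_index(name, table, column) against a reflected Table and add_index(name, 'dataset', 'state', metadata) against a table name. A sketch covering both, including the mysql_length escape hatch an earlier example needs for TEXT columns on MySQL:

from sqlalchemy import Index, Table


def add_index(index_name, table, column_name, metadata=None, **kwds):
    """Sketch: create an index; pass mysql_length= for TEXT columns on MySQL."""
    if isinstance(table, str):
        table = Table(table, metadata, autoload=True)
    try:
        Index(index_name, getattr(table.c, column_name), **kwds).create()
    except Exception:
        log.exception("Adding index '%s' to table '%s' failed.", index_name, table.name)
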
Example #20
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(JobToImplicitOutputDatasetCollectionAssociation_table)

    dataset_collection_table = Table("dataset_collection", metadata, autoload=True)
    # server_default is required because the column is non-nullable; existing rows must be backfilled on the server side
    populated_state_column = Column('populated_state', TrimmedString(64), default='ok', server_default="ok", nullable=False)
    add_column(populated_state_column, dataset_collection_table, metadata)

    populated_message_column = Column('populated_state_message', TEXT, nullable=True)
    add_column(populated_message_column, dataset_collection_table, metadata)
Example #21
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    try:
        hda_table = Table("history_dataset_association",
                          metadata,
                          autoload=True)
        HiddenBeneathCollection_column.create(hda_table)
    except Exception:
        log.exception("Creating HDA column failed.")
Example #22
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(Tag_table)
    create_table(HistoryTagAssociation_table)
    create_table(DatasetTagAssociation_table)
    create_table(HistoryDatasetAssociationTagAssociation_table)
Example #23
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create user_id column in history_tag_association table.
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        c = Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True)
    else:
        c = Column("user_id", Integer, index=True)
    add_column(c, 'history_tag_association', metadata, index_name='ix_history_tag_association_user_id')

    # Populate column so that user_id is the id of the user who owns the history (and, up to now, was the only person able to tag the history).
    migrate_engine.execute(
        "UPDATE history_tag_association SET user_id=( SELECT user_id FROM history WHERE history_tag_association.history_id = history.id )")

    # Create user_id column in history_dataset_association_tag_association table.
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        c = Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True)
    else:
        c = Column("user_id", Integer, index=True)
    add_column(c, 'history_dataset_association_tag_association', metadata, index_name='ix_history_dataset_association_tag_association_user_id')

    # Populate column so that user_id is the id of the user who owns the history_dataset_association (and, up to now, was the only person able to tag the dataset).
    migrate_engine.execute(
        "UPDATE history_dataset_association_tag_association SET user_id=( SELECT history.user_id FROM history, history_dataset_association WHERE history_dataset_association.history_id = history.id AND history_dataset_association.id = history_dataset_association_tag_association.history_dataset_association_id)")

    # Create user_id column in page_tag_association table.
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        c = Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True)
    else:
        c = Column("user_id", Integer, index=True)
    add_column(c, 'page_tag_association', metadata, index_name='ix_page_tag_association_user_id')

    # Populate column so that user_id is the id of the user who owns the page (and, up to now, was the only person able to tag the page).
    migrate_engine.execute(
        "UPDATE page_tag_association SET user_id=( SELECT user_id FROM page WHERE page_tag_association.page_id = page.id )")

    # Create stored_workflow_tag_association table.
    create_table(StoredWorkflowTagAssociation_table)

    # Create workflow_tag_association table.
    create_table(WorkflowTagAssociation_table)
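
The same SQLite workaround is spelled out three times in this example; a hypothetical helper (not part of the original module) would capture it once:

def user_id_column(migrate_engine):
    """Hypothetical: build a user_id column, omitting the ForeignKey on SQLite
    to dodge the SQLAlchemy Migrate bug with FK-plus-index columns noted above."""
    if migrate_engine.name != 'sqlite':
        return Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True)
    return Column("user_id", Integer, index=True)
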
Example #24
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(DynamicTool_table)

    workflow_dynamic_tool_id_column = Column("dynamic_tool_id",
                                             Integer,
                                             ForeignKey("dynamic_tool.id"),
                                             nullable=True)
    add_column(workflow_dynamic_tool_id_column, "workflow_step", metadata)
    job_workflow_dynamic_tool_id_column = Column("dynamic_tool_id",
                                                 Integer,
                                                 ForeignKey("dynamic_tool.id"),
                                                 nullable=True)
    add_column(job_workflow_dynamic_tool_id_column, "job", metadata)
Example #25
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(validation_error_table)

    history_dataset_association_table = Table("history_dataset_association",
                                              metadata,
                                              autoload=True)
    library_dataset_dataset_association_table = Table(
        "library_dataset_dataset_association", metadata, autoload=True)
    for dataset_instance_table in [
            history_dataset_association_table,
            library_dataset_dataset_association_table
    ]:
        drop_column('validated_state', dataset_instance_table, metadata)
        drop_column('validated_state_message', dataset_instance_table,
                    metadata)
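
drop_column takes a column name plus either a reflected Table or a table name with metadata; a sketch using the .drop() method Migrate patches onto columns:

from sqlalchemy import Table


def drop_column(column_name, table, metadata=None):
    """Sketch: drop a column by name from a Table object or a table name."""
    if isinstance(table, str):
        table = Table(table, metadata, autoload=True)
    try:
        getattr(table.c, column_name).drop()
    except Exception:
        log.exception("Dropping column '%s' from table '%s' failed.", column_name, table.name)
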
Example #26
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    OldWorkflowStepConnection_table = Table("workflow_step_connection", metadata, autoload=True)
    for fkc in OldWorkflowStepConnection_table.foreign_key_constraints:
        mfkc = MigrateForeignKeyConstraint([_.parent for _ in fkc.elements], [_.column for _ in fkc.elements], name=fkc.name)
        try:
            mfkc.drop()
        except Exception:
            log.exception("Dropping foreign key constraint '%s' from table '%s' failed", mfkc.name, OldWorkflowStepConnection_table)

    for index in OldWorkflowStepConnection_table.indexes:
        drop_index(index, OldWorkflowStepConnection_table)
    OldWorkflowStepConnection_table.rename("workflow_step_connection_preupgrade145")
    # Try to deregister the table to work around what appear to be metadata caching problems.
    OldWorkflowStepConnection_table.deregister()
    metadata._remove_table("workflow_step_connection", metadata.schema)
    metadata.reflect()

    NewWorkflowStepConnection_table = Table(
        "workflow_step_connection", metadata,
        Column("id", Integer, primary_key=True),
        Column("output_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
        Column("input_step_input_id", Integer, ForeignKey("workflow_step_input.id"), index=True),
        Column("output_name", TEXT),
        Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
    )
    for table in (WorkflowStepInput_table, NewWorkflowStepConnection_table):
        create_table(table)

    insert_step_inputs_cmd = \
        "INSERT INTO workflow_step_input (workflow_step_id, name) " + \
        "SELECT DISTINCT input_step_id, input_name FROM workflow_step_connection_preupgrade145"
    migrate_engine.execute(insert_step_inputs_cmd)

    insert_step_connections_cmd = \
        "INSERT INTO workflow_step_connection (output_step_id, input_step_input_id, output_name, input_subworkflow_step_id) " + \
        "SELECT wsc.output_step_id, wsi.id, wsc.output_name, wsc.input_subworkflow_step_id " + \
        "FROM workflow_step_connection_preupgrade145 AS wsc JOIN workflow_step_input AS wsi ON wsc.input_step_id = wsi.workflow_step_id AND wsc.input_name = wsi.name ORDER BY wsc.id"
    migrate_engine.execute(insert_step_connections_cmd)
    drop_table(OldWorkflowStepConnection_table)
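
MigrateForeignKeyConstraint and drop_index are the only helpers in this example not sketched above. The constraint class is presumably SQLAlchemy Migrate's changeset ForeignKeyConstraint imported under an alias, and drop_index the obvious inverse of add_index:

from migrate.changeset.constraint import ForeignKeyConstraint as MigrateForeignKeyConstraint


def drop_index(index, table):
    """Sketch: drop an Index object from ``table``."""
    try:
        index.drop()
    except Exception:
        log.exception("Dropping index '%s' from table '%s' failed.", index.name, table.name)
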
Example #27
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add all of the new tables above
    create_table(UserAddress_table)
    # Add 1 column to the request_type table
    col = Column("deleted", Boolean, index=True, default=False)
    add_column(col, 'request_type', metadata, index_name='ix_request_type_deleted')

    # Delete the submitted column.
    # This fails on SQLite, so skip the drop there; the leftover column causes no conflicts later.
    Request_table = Table("request", metadata, autoload=True)
    # SQLAlchemy Migrate has a bug when dropping a boolean column in SQLite
    if migrate_engine.name != 'sqlite':
        drop_column('submitted', Request_table)
    col = Column("state", TrimmedString(255), index=True)
    add_column(col, Request_table, metadata, index_name='ix_request_state')
Example #28
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # create table + index
    AuditTable.drop(migrate_engine, checkfirst=True)
    create_table(AuditTable)

    # populate with update_time from every history
    copy_update_times = """
        INSERT INTO history_audit (history_id, update_time)
        SELECT id, update_time FROM history
    """
    migrate_engine.execute(copy_update_times)

    # drop existing timestamp triggers
    old_triggers.drop_timestamp_triggers(migrate_engine)

    # install new timestamp triggers
    new_triggers.install(migrate_engine)
Example #29
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    create_table(dataset_source_table)
    create_table(dataset_hash_table)
    create_table(dataset_source_hash_table)
Example #30
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(RequestEvent_table)
    # move the current state of all existing requests to the request_event table
    cmd = \
        "INSERT INTO request_event " + \
        "SELECT %s AS id," + \
        "%s AS create_time," + \
        "%s AS update_time," + \
        "request.id AS request_id," + \
        "request.state AS state," + \
        "'%s' AS comment " + \
        "FROM request;"
    cmd = cmd % (nextval(migrate_engine,
                         'request_event'), localtimestamp(migrate_engine),
                 localtimestamp(migrate_engine), 'Imported from request table')
    migrate_engine.execute(cmd)

    drop_column('state', 'request', metadata)
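
nextval and localtimestamp build the engine-specific SQL fragments spliced into the INSERT above; a sketch consistent with that usage:

def nextval(migrate_engine, table, col='id'):
    """Sketch: expression for the next primary-key value of ``table``."""
    if migrate_engine.name in ('postgres', 'postgresql'):
        return "nextval('%s_%s_seq')" % (table, col)
    elif migrate_engine.name in ('mysql', 'sqlite'):
        return "null"  # these engines assign the autoincrement id themselves
    raise Exception("Unable to convert data for unknown database type: %s" % migrate_engine.name)


def localtimestamp(migrate_engine):
    """Sketch: expression for the current local timestamp."""
    if migrate_engine.name in ('postgres', 'postgresql', 'mysql'):
        return "LOCALTIMESTAMP"
    elif migrate_engine.name == 'sqlite':
        return "current_date || ' ' || current_time"
    raise Exception("Unable to convert data for unknown database type: %s" % migrate_engine.name)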