# Example #1
# 0
def upgrade(migrate_engine):
    """Add status and error_message columns to tool_dependency and drop
    its uninstalled column (except on SQLite)."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    tool_dependency = Table("tool_dependency", metadata, autoload=True)

    # On SQLite the column is added nullable; other engines enforce NOT NULL.
    if migrate_engine.name == 'sqlite':
        status_col = Column("status", TrimmedString(255))
    else:
        status_col = Column("status", TrimmedString(255), nullable=False)
    try:
        status_col.create(tool_dependency)
        assert status_col is tool_dependency.c.status
    except Exception as e:
        print("Adding status column to the tool_dependency table failed: %s" % e)

    error_col = Column("error_message", TEXT)
    try:
        error_col.create(tool_dependency)
        assert error_col is tool_dependency.c.error_message
    except Exception as e:
        print("Adding error_message column to the tool_dependency table failed: %s" % e)

    if migrate_engine.name != 'sqlite':
        # This breaks in sqlite due to failure to drop check constraint.
        # TODO move to alembic.
        try:
            tool_dependency.c.uninstalled.drop()
        except Exception as e:
            print("Dropping uninstalled column from the tool_dependency table failed: %s" % e)
def upgrade(migrate_engine):
    """Create workflow-invocation tables and add scheduling-related columns.

    Adds history_id, state, scheduler, handler, and uuid columns to
    workflow_invocation, marks all existing invocations as 'scheduled',
    and adds an action column to workflow_invocation_step.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    History_column = Column("history_id", Integer, ForeignKey("history.id"), nullable=True)
    State_column = Column("state", TrimmedString(64))

    # TODO: Handle indexes correctly
    SchedulerId_column = Column("scheduler", TrimmedString(255))
    HandlerId_column = Column("handler", TrimmedString(255))
    WorkflowUUID_column = Column("uuid", UUIDType, nullable=True)
    add_column(History_column, "workflow_invocation", metadata)
    add_column(State_column, "workflow_invocation", metadata)
    # Fixed: keyword was misspelled "index_nane", so the scheduler index was
    # never created (or the call failed on an unexpected kwarg).
    add_column(SchedulerId_column, "workflow_invocation", metadata, index_name="id_workflow_invocation_scheduler")
    add_column(HandlerId_column, "workflow_invocation", metadata, index_name="id_workflow_invocation_handler")
    add_column(WorkflowUUID_column, "workflow_invocation", metadata)

    # All previous invocations have been scheduled...
    cmd = "UPDATE workflow_invocation SET state = 'scheduled'"
    try:
        migrate_engine.execute(cmd)
    except Exception:
        log.exception("failed to update past workflow invocation states.")

    WorkflowInvocationStepAction_column = Column("action", JSONType, nullable=True)
    add_column(WorkflowInvocationStepAction_column, "workflow_invocation_step", metadata)
# Example #3
# 0
# File: __init__.py  Project: mvdbeek/galaxy
class RepositoryReview(Base, Dictifiable, _HasTable):
    """A user's review of a specific changeset revision of a repository."""

    __tablename__ = 'repository_review'

    id = Column(Integer, primary_key=True)
    create_time = Column(DateTime, default=now)
    update_time = Column(DateTime, default=now, onupdate=now)
    repository_id = Column(ForeignKey('repository.id'), index=True)
    changeset_revision = Column(TrimmedString(255), index=True)
    user_id = Column(ForeignKey('galaxy_user.id'), index=True, nullable=False)
    approved = Column(TrimmedString(255))
    rating = Column(Integer, index=True)
    deleted = Column(Boolean, index=True, default=False)
    repository = relationship('Repository', back_populates='reviews')
    # Take care when using the mapper below!  It should be used only when a new review is being created for a repository change set revision.
    # Keep in mind that repository_metadata records can be removed from the database for certain change set revisions when metadata is being
    # reset on a repository!
    # View-only join on (repository_id, changeset_revision) — there is no real
    # FK pair, hence the explicit foreign_keys/primaryjoin lambdas.
    repository_metadata = relationship(
        'RepositoryMetadata',
        viewonly=True,
        foreign_keys=lambda:
        [RepositoryReview.repository_id, RepositoryReview.changeset_revision],
        primaryjoin=lambda: (
            (RepositoryReview.repository_id == RepositoryMetadata.repository_id
             )  # type: ignore[has-type]
            & (RepositoryReview.changeset_revision == RepositoryMetadata.
               changeset_revision)),  # type: ignore[has-type]
        back_populates='reviews')
    user = relationship('User', back_populates='repository_reviews')

    # Non-deleted component reviews attached to this review.
    component_reviews = relationship(
        'ComponentReview',
        viewonly=True,
        primaryjoin=lambda:
        ((RepositoryReview.id == ComponentReview.repository_review_id
          )  # type: ignore[has-type]
         & (ComponentReview.deleted == false())),  # type: ignore[has-type]
        back_populates='repository_review')

    # Same as component_reviews but restricted to private ones.
    private_component_reviews = relationship(
        'ComponentReview',
        viewonly=True,
        primaryjoin=lambda:
        ((RepositoryReview.id == ComponentReview.repository_review_id
          )  # type: ignore[has-type]
         & (ComponentReview.deleted == false()) &
         (ComponentReview.private == true())))  # type: ignore[has-type]

    # Keys exposed by Dictifiable serialization.
    dict_collection_visible_keys = [
        'id', 'repository_id', 'changeset_revision', 'user_id', 'rating',
        'deleted'
    ]
    dict_element_visible_keys = [
        'id', 'repository_id', 'changeset_revision', 'user_id', 'rating',
        'deleted'
    ]
    approved_states = Bunch(NO='no', YES='yes')

    def __init__(self, deleted=False, **kwd):
        """Create a review; extra keyword args map to columns via the base."""
        super().__init__(**kwd)
        self.deleted = deleted
def downgrade(migrate_engine):
    """Drop the tool-version tables and recreate the legacy tool_id_guid_map table."""
    metadata.bind = migrate_engine

    tool_id_guid_map = Table(
        "tool_id_guid_map",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("create_time", DateTime, default=now),
        Column("update_time", DateTime, default=now, onupdate=now),
        Column("tool_id", String(255)),
        Column("tool_version", TEXT),
        Column("tool_shed", TrimmedString(255)),
        Column("repository_owner", TrimmedString(255)),
        Column("repository_name", TrimmedString(255)),
        Column("guid", TEXT),
        # mysql_length caps the indexed prefix so the unique index fits MySQL limits.
        Index('ix_tool_id_guid_map_guid', 'guid', unique=True, mysql_length=200),
    )

    metadata.reflect()
    # Each DDL step is best-effort: a failure is logged but does not abort the rest.
    for operation, failure_message in (
        (ToolVersionAssociation_table.drop, "Dropping tool_version_association table failed."),
        (ToolVersion_table.drop, "Dropping tool_version table failed."),
        (tool_id_guid_map.create, "Creating tool_id_guid_map table failed."),
    ):
        try:
            operation()
        except Exception:
            log.exception(failure_message)
# Example #5
# 0
def upgrade(migrate_engine):
    """Add remote_repository_url and homepage_url columns to the repository table."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    repository = Table("repository", metadata, autoload=True)
    remote_url_col = Column("remote_repository_url", TrimmedString(255))
    homepage_url_col = Column("homepage_url", TrimmedString(255))
    try:
        # Create both columns, then verify they attached to the table.
        remote_url_col.create(repository)
        homepage_url_col.create(repository)
        assert remote_url_col is repository.c.remote_repository_url
        assert homepage_url_col is repository.c.homepage_url
    except Exception:
        log.exception("Adding remote_repository_url and homepage_url columns to the repository table failed.")
# Example #6
# 0
def upgrade(migrate_engine):
    """Add a status column to the tool_dependency table.

    Fixed: the body used Python 2 syntax (`print __doc__`,
    `except Exception, e`, print statements), which is a SyntaxError
    under Python 3; converted to Python 3 equivalents.
    """
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    ToolDependency_table = Table("tool_dependency", metadata, autoload=True)
    # SQLite cannot add a NOT NULL column to an existing table.
    if migrate_engine.name == 'sqlite':
        col = Column("status", TrimmedString(255))
    else:
        col = Column("status", TrimmedString(255), nullable=False)
    try:
        col.create(ToolDependency_table)
        assert col is ToolDependency_table.c.status
    except Exception as e:
        print("Adding status column to the tool_dependency table failed: %s" % str(e))
def upgrade(migrate_engine):
    """Create collection-job tables and add linkage/state/count columns."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for new_table in get_new_tables():
        create_table(new_table)

    # Set default for creation to scheduled, actual mapping has new as default.
    step_state_col = Column("state", TrimmedString(64), default="scheduled")
    # Real foreign keys only on PostgreSQL; other engines get plain integers.
    if migrate_engine.name in ['postgres', 'postgresql']:
        hdca_icj_col = Column("implicit_collection_jobs_id", Integer, ForeignKey("implicit_collection_jobs.id"), nullable=True)
        hdca_job_col = Column("job_id", Integer, ForeignKey("job.id"), nullable=True)
    else:
        hdca_icj_col = Column("implicit_collection_jobs_id", Integer, nullable=True)
        hdca_job_col = Column("job_id", Integer, nullable=True)
    element_count_col = Column("element_count", Integer, nullable=True)

    add_column(hdca_icj_col, "history_dataset_collection_association", metadata)
    add_column(hdca_job_col, "history_dataset_collection_association", metadata)
    add_column(element_count_col, "dataset_collection", metadata)

    step_icj_col = Column("implicit_collection_jobs_id", Integer, ForeignKey("implicit_collection_jobs.id"), nullable=True)
    add_column(step_icj_col, "workflow_invocation_step", metadata)
    add_column(step_state_col, "workflow_invocation_step", metadata)

    # Backfill element_count from the existing element rows.
    cmd = (
        "UPDATE dataset_collection SET element_count = "
        "(SELECT (CASE WHEN count(*) > 0 THEN count(*) ELSE 0 END) FROM dataset_collection_element WHERE "
        "dataset_collection_element.dataset_collection_id = dataset_collection.id)"
    )
    migrate_engine.execute(cmd)
def upgrade(migrate_engine):
    """Create collection tables and add populated-state columns to dataset_collection.

    Fixed: the body used Python 2 print statements (`print __doc__`,
    `print str(e)`), a SyntaxError under Python 3; converted to calls.
    """
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()

    for table in TABLES:
        __create(table)

    try:
        dataset_collection_table = Table("dataset_collection",
                                         metadata,
                                         autoload=True)
        # need server_default because column is non-null
        populated_state_column = Column('populated_state',
                                        TrimmedString(64),
                                        default='ok',
                                        server_default="ok",
                                        nullable=False)
        populated_state_column.create(dataset_collection_table)

        populated_message_column = Column('populated_state_message',
                                          TEXT,
                                          nullable=True)
        populated_message_column.create(dataset_collection_table)
    except Exception as e:
        print(str(e))
        log.exception("Creating dataset collection populated column failed.")
# Example #9
# 0
def upgrade(migrate_engine):
    """Add a content_format column to the page_revision table."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()

    # server_default is required because the column is NOT NULL on existing rows.
    format_col = Column('content_format', TrimmedString(32), default='html', server_default="html", nullable=False)
    add_column(format_col, 'page_revision', metadata)
# Example #10
# 0
def upgrade(migrate_engine):
    """Add a ctx_rev column to the tool_shed_repository table."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    ctx_rev_col = Column("ctx_rev", TrimmedString(10))
    add_column(ctx_rev_col, 'tool_shed_repository', metadata)
def upgrade(migrate_engine):
    """Add subworkflow linkage columns and create the subworkflow tables.

    Fixed: `print __doc__` was Python 2 statement syntax (SyntaxError under
    Python 3); converted to a call.
    """
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()

    subworkflow_id_column = Column("subworkflow_id",
                                   Integer,
                                   ForeignKey("workflow.id"),
                                   nullable=True)
    __add_column(subworkflow_id_column, "workflow_step", metadata)

    input_subworkflow_step_id_column = Column("input_subworkflow_step_id",
                                              Integer,
                                              ForeignKey("workflow_step.id"),
                                              nullable=True)
    __add_column(input_subworkflow_step_id_column, "workflow_step_connection",
                 metadata)

    parent_workflow_id_column = Column("parent_workflow_id",
                                       Integer,
                                       ForeignKey("workflow.id"),
                                       nullable=True)
    __add_column(parent_workflow_id_column, "workflow", metadata)

    workflow_output_label_column = Column("label", TrimmedString(255))
    workflow_output_uuid_column = Column("uuid", UUIDType, nullable=True)
    __add_column(workflow_output_label_column, "workflow_output", metadata)
    __add_column(workflow_output_uuid_column, "workflow_output", metadata)

    # Make stored_workflow_id nullable, since now workflows can belong to either
    # a stored workflow or a parent workflow.
    __alter_column("workflow", "stored_workflow_id", metadata, nullable=True)

    for table in TABLES:
        __create(table)
# Example #12
# 0
def upgrade(migrate_engine):
    """Drop validation_error; add validated-state columns to dataset instance tables."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    drop_table(validation_error_table)

    hda_table = Table("history_dataset_association", metadata, autoload=True)
    ldda_table = Table("library_dataset_dataset_association", metadata, autoload=True)
    for instance_table in (hda_table, ldda_table):
        # server_default is required because the column is NOT NULL on existing rows.
        validated_state_col = Column('validated_state',
                                     TrimmedString(64),
                                     default='unknown',
                                     server_default="unknown",
                                     nullable=False)
        add_column(validated_state_col, instance_table, metadata)

        validated_message_col = Column('validated_state_message', TEXT)
        add_column(validated_message_col, instance_table, metadata)
# Example #13
# 0
def upgrade(migrate_engine):
    """Add installed_changeset_revision to tool_shed_repository and backfill it."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    tool_shed_repository = Table("tool_shed_repository",
                                 metadata,
                                 autoload=True)
    installed_rev_col = Column("installed_changeset_revision", TrimmedString(255))
    try:
        installed_rev_col.create(tool_shed_repository)
        assert installed_rev_col is tool_shed_repository.c.installed_changeset_revision
    except Exception:
        log.exception(
            "Adding installed_changeset_revision column to the tool_shed_repository table failed."
        )
    # Update each row by setting the value of installed_changeset_revison to be the value of changeset_revision.
    # This will be problematic if the value of changeset_revision was updated to something other than the value
    # that it was when the repository was installed (because the install path determined in real time will attempt to
    # find the repository using the updated changeset_revison instead of the required installed_changeset_revision),
    # but at the time this script was written, this scenario is extremely unlikely.
    select_cmd = (
        "SELECT id AS id, "
        "installed_changeset_revision AS installed_changeset_revision, "
        "changeset_revision AS changeset_revision "
        "FROM tool_shed_repository;"
    )
    tool_shed_repositories = migrate_engine.execute(select_cmd).fetchall()
    update_count = 0
    for row in tool_shed_repositories:
        update_cmd = (
            "UPDATE tool_shed_repository "
            "SET installed_changeset_revision = '%s' "
            "WHERE changeset_revision = '%s';" % (row.changeset_revision, row.changeset_revision)
        )
        migrate_engine.execute(update_cmd)
        update_count += 1
    print("Updated the installed_changeset_revision column for ", update_count,
          " rows in the tool_shed_repository table.  ")
def upgrade(migrate_engine):
    """Add subworkflow linkage columns, relax stored_workflow_id, create new tables."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    # Real foreign keys only on PostgreSQL; other engines get plain integers.
    if migrate_engine.name in ['postgres', 'postgresql']:
        subworkflow_id_column = Column("subworkflow_id", Integer, ForeignKey("workflow.id"), nullable=True)
        input_subworkflow_step_id_column = Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), nullable=True)
        parent_workflow_id_column = Column("parent_workflow_id", Integer, ForeignKey("workflow.id"), nullable=True)
    else:
        subworkflow_id_column = Column("subworkflow_id", Integer, nullable=True)
        input_subworkflow_step_id_column = Column("input_subworkflow_step_id", Integer, nullable=True)
        parent_workflow_id_column = Column("parent_workflow_id", Integer, nullable=True)
    __add_column(subworkflow_id_column, "workflow_step", metadata)
    __add_column(input_subworkflow_step_id_column, "workflow_step_connection", metadata)
    __add_column(parent_workflow_id_column, "workflow", metadata)

    workflow_output_label_column = Column("label", TrimmedString(255))
    workflow_output_uuid_column = Column("uuid", UUIDType, nullable=True)
    __add_column(workflow_output_label_column, "workflow_output", metadata)
    __add_column(workflow_output_uuid_column, "workflow_output", metadata)

    # Make stored_workflow_id nullable, since now workflows can belong to either
    # a stored workflow or a parent workflow.
    __alter_column("workflow", "stored_workflow_id", metadata, nullable=True)

    for table in TABLES:
        # Indexes are automatically created when the tables are.
        __create(table)
def upgrade(migrate_engine):
    """Add an indexed state column to every dataset instance table that exists."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Load whichever instance tables are present; missing ones are just logged.
    loaded_tables = []
    for table_name in DATASET_INSTANCE_TABLE_NAMES:
        try:
            loaded_tables.append((table_name, Table(table_name, metadata, autoload=True)))
        except NoSuchTableError:
            log.debug("Failed loading table %s" % table_name)
    if loaded_tables:
        for table_name, instance_table in loaded_tables:
            index_name = "ix_%s_state" % table_name
            try:
                state_col = Column("state",
                                   TrimmedString(64),
                                   index=True,
                                   nullable=True)
                state_col.create(instance_table, index_name=index_name)
                assert state_col is instance_table.c.state
            except Exception:
                log.exception("Adding column 'state' to %s table failed.",
                              table_name)
# Example #16
# 0
# File: __init__.py  Project: mvdbeek/galaxy
class ComponentReview(Base, Dictifiable, _HasTable):
    """A review of one component within a RepositoryReview."""

    __tablename__ = 'component_review'

    id = Column(Integer, primary_key=True)
    create_time = Column(DateTime, default=now)
    update_time = Column(DateTime, default=now, onupdate=now)
    repository_review_id = Column(ForeignKey("repository_review.id"),
                                  index=True)
    component_id = Column(ForeignKey("component.id"), index=True)
    comment = Column(TEXT)
    private = Column(Boolean, default=False)
    approved = Column(TrimmedString(255))
    rating = Column(Integer)
    deleted = Column(Boolean, index=True, default=False)
    repository_review = relationship('RepositoryReview',
                                     back_populates='component_reviews')
    component = relationship('Component')

    # Keys exposed by Dictifiable serialization.
    dict_collection_visible_keys = [
        'id', 'repository_review_id', 'component_id', 'private', 'approved',
        'rating', 'deleted'
    ]
    dict_element_visible_keys = [
        'id', 'repository_review_id', 'component_id', 'private', 'approved',
        'rating', 'deleted'
    ]
    approved_states = Bunch(NO='no', YES='yes', NA='not_applicable')

    def __init__(self, private=False, approved=False, deleted=False, **kwd):
        """Create a component review; extra keyword args map to columns."""
        super().__init__(**kwd)
        self.private = private
        self.approved = approved
        self.deleted = deleted
# Example #17
# 0
def upgrade(migrate_engine):
    """Add installed_changeset_revision to tool_shed_repository and backfill it."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    installed_rev_col = Column("installed_changeset_revision", TrimmedString(255))
    add_column(installed_rev_col, 'tool_shed_repository', metadata)
    # Update each row by setting the value of installed_changeset_revison to be the value of changeset_revision.
    # This will be problematic if the value of changeset_revision was updated to something other than the value
    # that it was when the repository was installed (because the install path determined in real time will attempt to
    # find the repository using the updated changeset_revison instead of the required installed_changeset_revision),
    # but at the time this script was written, this scenario is extremely unlikely.
    select_cmd = (
        "SELECT id AS id, "
        "installed_changeset_revision AS installed_changeset_revision, "
        "changeset_revision AS changeset_revision "
        "FROM tool_shed_repository;"
    )
    tool_shed_repositories = migrate_engine.execute(select_cmd).fetchall()
    update_count = 0
    for row in tool_shed_repositories:
        update_cmd = (
            "UPDATE tool_shed_repository "
            f"SET installed_changeset_revision = '{row.changeset_revision}' "
            f"WHERE changeset_revision = '{row.changeset_revision}';"
        )
        migrate_engine.execute(update_cmd)
        update_count += 1
    print("Updated the installed_changeset_revision column for ", update_count,
          " rows in the tool_shed_repository table.")
# Example #18
# 0
# File: __init__.py  Project: mvdbeek/galaxy
class APIKeys(Base, _HasTable):
    """An API key issued to a user; the key string is unique and indexed."""

    __tablename__ = 'api_keys'

    id = Column(Integer, primary_key=True)
    create_time = Column(DateTime, default=now)
    user_id = Column(ForeignKey('galaxy_user.id'), index=True)
    key = Column(TrimmedString(32), index=True, unique=True)
    user = relationship('User', back_populates='api_keys')
# Example #19
# 0
def upgrade(migrate_engine):
    """Add label and uuid columns to the workflow_step table."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()

    step_label_col = Column("label", TrimmedString(255))
    step_uuid_col = Column("uuid", UUIDType, nullable=True)
    __add_column(step_label_col, "workflow_step", metadata)
    __add_column(step_uuid_col, "workflow_step", metadata)
# Example #20
# 0
def upgrade(migrate_engine):
    """Add an indexed state column to each dataset instance table."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for name in DATASET_INSTANCE_TABLE_NAMES:
        state_col = Column("state", TrimmedString(64), index=True, nullable=True)
        add_column(state_col, name, metadata, index_name=f"ix_{name}_state")
# Example #21
# 0
def upgrade(migrate_engine):
    """Add a submitted flag to request and an indexed bar_code column to sample."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    submitted_col = Column('submitted', Boolean, default=False)
    add_column(submitted_col, 'request', metadata)

    bar_code_col = Column("bar_code", TrimmedString(255), index=True)
    add_column(bar_code_col, 'sample', metadata, index_name='ix_sample_bar_code')
# Example #22
# 0
def upgrade(migrate_engine):
    """Add status/error_message to tool_dependency; drop its uninstalled column."""
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    tool_dependency = Table("tool_dependency", metadata, autoload=True)
    # SQLite cannot add a NOT NULL column to an existing table.
    if migrate_engine.name == 'sqlite':
        status_col = Column("status", TrimmedString(255))
    else:
        status_col = Column("status", TrimmedString(255), nullable=False)
    add_column(status_col, tool_dependency, metadata)

    error_message_col = Column("error_message", TEXT)
    add_column(error_message_col, tool_dependency, metadata)

    # SQLAlchemy Migrate has a bug when dropping a boolean column in SQLite
    # TODO move to alembic.
    if migrate_engine.name != 'sqlite':
        drop_column('uninstalled', tool_dependency)
# Example #23
# 0
def downgrade(migrate_engine):
    """Define the legacy tool_id_guid_map table and drop tool_version_association.

    Fixed: `except Exception, e` was Python 2 syntax (SyntaxError under
    Python 3); converted to `except Exception as e`.
    """
    metadata.bind = migrate_engine

    ToolIdGuidMap_table = Table(
        "tool_id_guid_map", metadata,
        Column("id", Integer, primary_key=True),
        Column("create_time", DateTime, default=now),
        Column("update_time", DateTime, default=now, onupdate=now),
        Column("tool_id", String(255)),
        Column("tool_version", TEXT),
        Column("tool_shed", TrimmedString(255)),
        Column("repository_owner", TrimmedString(255)),
        Column("repository_name", TrimmedString(255)),
        Column("guid", TEXT, index=True, unique=True))

    metadata.reflect()
    try:
        ToolVersionAssociation_table.drop()
    except Exception as e:
        log.debug("Dropping tool_version_association table failed: %s" % str(e))
# Example #24
# 0
def downgrade(migrate_engine):
    """Revert external_service back to the legacy sequencer schema.

    Renames the table, restores the request_type.sequencer_id column and
    backfills it, drops the association table, and renames the type column.
    The steps are strictly order-dependent (the rename must precede the FK
    creation), so do not reorder them.
    """
    metadata.bind = migrate_engine
    metadata.reflect()

    # SQLite does not always update foreign key constraints when the target
    # table is renamed, so we start with the table rename.
    # rename the 'external_service' table to 'sequencer'
    ExternalServices_table = Table("external_service", metadata, autoload=True)
    ExternalServices_table.rename('sequencer')

    # if running PostgreSQL, rename the primary key sequence too
    if migrate_engine.name in ['postgres', 'postgresql']:
        cmd = "ALTER SEQUENCE external_service_id_seq RENAME TO sequencer_id_seq"
        migrate_engine.execute(cmd)

    # create the 'sequencer_id' column in the 'request_type' table
    col = Column("sequencer_id",
                 Integer,
                 ForeignKey("sequencer.id"),
                 nullable=True)
    add_column(col, 'request_type', metadata)

    # populate 'sequencer_id' column in the 'request_type' table from the
    # 'request_type_external_service_association' table
    cmd = "SELECT request_type_id, external_service_id FROM request_type_external_service_association ORDER BY id ASC"
    result = migrate_engine.execute(cmd)
    results_list = result.fetchall()
    for row in results_list:
        request_type_id = row[0]
        external_service_id = row[1]
        # %i formatting is safe here: both values come from integer id columns.
        cmd = "UPDATE request_type SET sequencer_id=%i WHERE id=%i" % (
            external_service_id, request_type_id)
        migrate_engine.execute(cmd)

    # remove the 'request_type_external_service_association' table
    RequestTypeExternalServiceAssociation_table = Table(
        "request_type_external_service_association", metadata, autoload=True)
    drop_table(RequestTypeExternalServiceAssociation_table)

    # rename 'external_service_type_id' column to 'sequencer_type_id' in the table 'sequencer'
    # create the column 'sequencer_type_id'
    Sequencer_table = Table("sequencer", metadata, autoload=True)
    col = Column("sequencer_type_id",
                 TrimmedString(255))  # should also have nullable=False
    add_column(col, Sequencer_table, metadata)

    # populate this new column
    cmd = "UPDATE sequencer SET sequencer_type_id=external_service_type_id"
    migrate_engine.execute(cmd)

    # remove the 'external_service_type_id' column
    drop_column('external_service_type_id', Sequencer_table)

    # drop the 'external_service_id' column in the 'sample_dataset' table
    drop_column('external_service_id', 'sample_dataset', metadata)
# Example #25
# 0
def upgrade():
    """Add an info column to the task table.

    Fixed: Python 2 syntax (`print __doc__`, `except Exception, e`) was a
    SyntaxError under Python 3; also corrected the error message, which
    said "table table" instead of "task table" (the adjacent log.debug
    message shows the intended wording).
    """
    print(__doc__)
    metadata.reflect()
    try:
        task_table = Table("task", metadata, autoload=True)
        c = Column("info", TrimmedString(255), nullable=True)
        c.create(task_table)
        assert c is task_table.c.info
    except Exception as e:
        print("Adding info column to task table failed: %s" % str(e))
        log.debug("Adding info column to task table failed: %s" % str(e))
def upgrade(migrate_engine):
    """Add an indexed subindex column to the run table."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    try:
        run_table = Table("run", metadata, autoload=True)
        subindex_col = Column("subindex", TrimmedString(255), index=True)
        subindex_col.create(run_table, index_name="ix_run_subindex")
        assert subindex_col is run_table.c.subindex
    except Exception as e:
        print("Adding the subindex column to the run table failed: ", str(e))
# Example #27
# 0
def upgrade(migrate_engine):
    """Add a nullable info column to the task table."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    try:
        task_table = Table("task", metadata, autoload=True)
        info_col = Column("info", TrimmedString(255), nullable=True)
        info_col.create(task_table)
        assert info_col is task_table.c.info
    except Exception:
        log.exception("Adding info column to task table failed.")
def upgrade(migrate_engine):
    """Add a ctx_rev column to the tool_shed_repository table."""
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    repo_table = Table("tool_shed_repository", metadata, autoload=True)
    ctx_rev_col = Column("ctx_rev", TrimmedString(10))
    try:
        ctx_rev_col.create(repo_table)
        assert ctx_rev_col is repo_table.c.ctx_rev
    except Exception:
        log.exception("Adding ctx_rev column to the tool_shed_repository table failed.")
def upgrade(migrate_engine):
    """Add an indexed type column to the repository table.

    Fixed: the body used Python 2 syntax (`print __doc__`,
    `except Exception, e`), a SyntaxError under Python 3; converted to
    Python 3 equivalents and normalized spacing to PEP 8.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    Repository_table = Table("repository", metadata, autoload=True)
    c = Column("type", TrimmedString(255), index=True)
    try:
        # Create
        c.create(Repository_table, index_name="ix_repository_type")
        assert c is Repository_table.c.type
    except Exception as e:
        print("Adding type column to the repository table failed: %s" % str(e))
def upgrade():
    """Add a provider column to the galaxy_user_openid table.

    Fixed: the body used Python 2 syntax (`print __doc__`,
    `except Exception, e`), a SyntaxError under Python 3; converted to
    Python 3 equivalents and normalized spacing to PEP 8.
    """
    print(__doc__)
    metadata.reflect()

    try:
        OpenID_table = Table("galaxy_user_openid", metadata, autoload=True)
        c = Column("provider", TrimmedString(255))
        c.create(OpenID_table)
        assert c is OpenID_table.c.provider
    except Exception as e:
        print("Adding provider column to galaxy_user_openid table failed: %s" % str(e))
        log.debug("Adding provider column to galaxy_user_openid table failed: %s" % str(e))