Example #1
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    Request_table = Table("request", metadata, autoload=True)

    # create the column again as JSONType
    col = Column("notification", JSONType())
    add_column(col, Request_table)

    cmd = "SELECT id, user_id, notify FROM request"
    result = migrate_engine.execute(cmd)
    for r in result:
        id = int(r[0])
        notify_new = dict(email=[], sample_states=[], body='', subject='')
        cmd = "UPDATE request SET notification='%s' WHERE id=%i" % (
            dumps(notify_new), id)
        migrate_engine.execute(cmd)

    # SQLAlchemy Migrate has a bug when dropping a boolean column in SQLite
    if migrate_engine.name != 'sqlite':
        drop_column('notify', Request_table)
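These listings are excerpts from Galaxy's SQLAlchemy Migrate version scripts: names like metadata, log, add_column and JSONType come from a module-level preamble that the excerpts omit. A minimal sketch of such a preamble, assuming Galaxy's helper module paths (the exact import list varies per script, so treat this as a reconstruction rather than the actual header):

# Hypothetical reconstruction of the shared module preamble these
# excerpts assume; exact imports differ between migration scripts.
import logging
from json import dumps, loads

from sqlalchemy import (TEXT, Boolean, Column, ForeignKey,
                        ForeignKeyConstraint, Index, Integer, MetaData, Table)
from sqlalchemy.exc import NoSuchTableError

# Galaxy-specific column types and migration helpers (assumed paths)
from galaxy.model.custom_types import JSONType, TrimmedString, UUIDType
from galaxy.model.migrate.versions.util import (add_column, add_index,
                                                alter_column, create_table,
                                                drop_column, engine_false,
                                                localtimestamp, nextval)

log = logging.getLogger(__name__)
metadata = MetaData()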
Example #2
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()

    Sample_table = Table("sample", metadata, autoload=True)
    drop_column('folder_id', Sample_table)
    drop_column('library_id', Sample_table)
    drop_column('dataset_files', Sample_table)

    Request_table = Table("request", metadata, autoload=True)
    col = Column('folder_id',
                 Integer,
                 ForeignKey('library_folder.id'),
                 index=True)
    add_column(col, Request_table, metadata, index_name='ix_request_folder_id')

    col = Column('library_id', Integer, ForeignKey("library.id"), index=True)
    add_column(col,
               Request_table,
               metadata,
               index_name='ix_request_library_id')

    drop_column('datatx_info', 'request_type', metadata)
Example #3
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    Visualization_table = Table("visualization", metadata, autoload=True)
    Visualization_revision_table = Table("visualization_revision",
                                         metadata,
                                         autoload=True)

    # Create dbkey columns.
    x = Column("dbkey", TEXT)
    add_column(x, Visualization_table, metadata)
    y = Column("dbkey", TEXT)
    add_column(y, Visualization_revision_table, metadata)
    # Indexes need to be added separately because MySQL cannot index a TEXT/BLOB
    # column without specifying mysql_length
    add_index("ix_visualization_dbkey", Visualization_table, 'dbkey')
    add_index("ix_visualization_revision_dbkey", Visualization_revision_table,
              'dbkey')

    all_viz = migrate_engine.execute(
        "SELECT visualization.id AS viz_id, visualization_revision.id AS viz_rev_id, visualization_revision.config "
        "FROM visualization_revision "
        "LEFT JOIN visualization ON visualization.id = visualization_revision.visualization_id"
    )
    for viz in all_viz:
        viz_id = viz['viz_id']
        viz_rev_id = viz['viz_rev_id']
        if viz[Visualization_revision_table.c.config]:
            dbkey = loads(viz[Visualization_revision_table.c.config]).get(
                'dbkey', "").replace("'", "\\'")
            migrate_engine.execute(
                "UPDATE visualization_revision SET dbkey='{}' WHERE id={}".
                format(dbkey, viz_rev_id))
            migrate_engine.execute(
                "UPDATE visualization SET dbkey='{}' WHERE id={}".format(
                    dbkey, viz_id))
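The comment in Example #3 notes that MySQL cannot index a TEXT/BLOB column without a key prefix length, which is why the indexes are added through a helper rather than inline. When such an index is created explicitly, SQLAlchemy exposes that prefix through the mysql_length dialect keyword; a minimal sketch in the same context (the prefix length 200 is an arbitrary assumption):

# MySQL rejects CREATE INDEX on a TEXT column unless a key prefix
# length is supplied; other backends ignore this dialect keyword.
i = Index("ix_visualization_dbkey",
          Visualization_table.c.dbkey,
          mysql_length=200)
i.create()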
Example #4
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    Job_table = Table("job", metadata, autoload=True)
    col = Column("user_id",
                 Integer,
                 ForeignKey("galaxy_user.id"),
                 index=True,
                 nullable=True)
    add_column(col, Job_table, metadata, index_name='ix_job_user_id')
    try:
        cmd = "SELECT job.id AS galaxy_job_id, " \
            + "galaxy_session.user_id AS galaxy_user_id " \
            + "FROM job " \
            + "JOIN galaxy_session ON job.session_id = galaxy_session.id;"
        job_users = migrate_engine.execute(cmd).fetchall()
        print("Updating user_id column in job table for ", len(job_users),
              " rows...")
        print("")
        update_count = 0
        for row in job_users:
            # Skip rows whose galaxy_session has no user; there is
            # nothing to update for them.
            if row.galaxy_user_id:
                cmd = "UPDATE job SET user_id = %d WHERE id = %d" % (
                    int(row.galaxy_user_id), int(row.galaxy_job_id))
                update_count += 1
                migrate_engine.execute(cmd)
        print("Updated column 'user_id' for ", update_count,
              " rows of table 'job'.")
        print(
            len(job_users) - update_count,
            " rows have no user_id since the value was NULL in the galaxy_session table."
        )
        print("")
    except Exception:
        log.exception("Updating column 'user_id' of table 'job' failed.")
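Example #4 issues one UPDATE statement per job row. The correlated-subquery pattern used in Example #6 below would do the same backfill in a single set-based statement; a sketch under the same schema (rows whose session has a NULL user_id simply remain NULL):

# Alternative: one set-based UPDATE instead of a per-row loop,
# mirroring the correlated-subquery style used in Example #6.
migrate_engine.execute(
    "UPDATE job SET user_id = "
    "( SELECT user_id FROM galaxy_session "
    "WHERE galaxy_session.id = job.session_id )")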
Example #5
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add the datatx_info column in 'request_type' table
    col = Column("datatx_info", JSONType())
    add_column(col, 'request_type', metadata)

    # Delete the library_id column in 'request' table
    Request_table = Table("request", metadata, autoload=True)
    drop_column('library_id', Request_table)

    # Delete the folder_id column in 'request' table
    drop_column('folder_id', Request_table)

    # Add the dataset_files column in 'sample' table
    Sample_table = Table("sample", metadata, autoload=True)
    col = Column("dataset_files", JSONType())
    add_column(col, Sample_table)

    # Add the library_id column in 'sample' table
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        col = Column("library_id",
                     Integer,
                     ForeignKey("library.id"),
                     index=True)
    else:
        col = Column("library_id", Integer, index=True)
    add_column(col, Sample_table, index_name='ix_sample_library_id')

    # Add the folder_id column in 'sample' table
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        col = Column("folder_id",
                     Integer,
                     ForeignKey("library_folder.id"),
                     index=True)
    else:
        col = Column("folder_id", Integer, index=True)
    add_column(col, Sample_table, index_name='ix_sample_library_folder_id')
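Examples #5 and #6 repeat the same SQLite workaround: the column is created without its ForeignKey because SQLAlchemy Migrate cannot add a column with both a ForeignKey and an index there. If the pattern recurs, it could be factored into a small helper; a hypothetical sketch (add_fk_column is not part of Galaxy's utilities):

def add_fk_column(migrate_engine, name, target, table, metadata, index_name):
    # On SQLite, create the column without the FK to dodge the
    # SQLAlchemy Migrate bug; other engines get the real ForeignKey.
    if migrate_engine.name != 'sqlite':
        col = Column(name, Integer, ForeignKey(target), index=True)
    else:
        col = Column(name, Integer, index=True)
    add_column(col, table, metadata, index_name=index_name)

With it, each conditional block above would reduce to a single call such as add_fk_column(migrate_engine, 'library_id', 'library.id', Sample_table, metadata, 'ix_sample_library_id').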
Example #6
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create user_id column in history_tag_association table.
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        c = Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True)
    else:
        c = Column("user_id", Integer, index=True)
    add_column(c, 'history_tag_association', metadata, index_name='ix_history_tag_association_user_id')

    # Populate column so that user_id is the id of the user who owns the history (and, up to now, was the only person able to tag the history).
    migrate_engine.execute(
        "UPDATE history_tag_association SET user_id=( SELECT user_id FROM history WHERE history_tag_association.history_id = history.id )")

    # Create user_id column in history_dataset_association_tag_association table.
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        c = Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True)
    else:
        c = Column("user_id", Integer, index=True)
    add_column(c, 'history_dataset_association_tag_association', metadata, index_name='ix_history_dataset_association_tag_association_user_id')

    # Populate column so that user_id is the id of the user who owns the history_dataset_association (and, up to now, was the only person able to tag the dataset).
    migrate_engine.execute(
        "UPDATE history_dataset_association_tag_association SET user_id=( SELECT history.user_id FROM history, history_dataset_association WHERE history_dataset_association.history_id = history.id AND history_dataset_association.id = history_dataset_association_tag_association.history_dataset_association_id)")

    # Create user_id column in page_tag_association table.
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        c = Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True)
    else:
        c = Column("user_id", Integer, index=True)
    add_column(c, 'page_tag_association', metadata, index_name='ix_page_tag_association_user_id')

    # Populate column so that user_id is the id of the user who owns the page (and, up to now, was the only person able to tag the page).
    migrate_engine.execute(
        "UPDATE page_tag_association SET user_id=( SELECT user_id FROM page WHERE page_tag_association.page_id = page.id )")

    # Create stored_workflow_tag_association table.
    create_table(StoredWorkflowTagAssociation_table)

    # Create workflow_tag_association table.
    create_table(WorkflowTagAssociation_table)
Example #7
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create published column in history table.
    History_table = Table("history", metadata, autoload=True)
    c = Column("published", Boolean, index=True)
    add_column(c, History_table, metadata, index_name='ix_history_published')
    if migrate_engine.name != 'sqlite':
        # Create index for published column in history table.
        try:
            i = Index("ix_history_published", History_table.c.published)
            i.create()
        except Exception:
            # MySQL doesn't have a named index, but alter should work
            History_table.c.published.alter(unique=False)

    # Create published column in stored workflows table.
    StoredWorkflow_table = Table("stored_workflow", metadata, autoload=True)
    c = Column("published", Boolean, index=True)
    add_column(c,
               StoredWorkflow_table,
               metadata,
               index_name='ix_stored_workflow_published')
    if migrate_engine.name != 'sqlite':
        # Create index for published column in stored workflows table.
        try:
            i = Index("ix_stored_workflow_published",
                      StoredWorkflow_table.c.published)
            i.create()
        except Exception:
            # MySQL doesn't have a named index, but alter should work
            StoredWorkflow_table.c.published.alter(unique=False)

    # Create importable column in page table.
    Page_table = Table("page", metadata, autoload=True)
    c = Column("importable", Boolean, index=True)
    add_column(c, Page_table, metadata, index_name='ix_page_importable')
    if migrate_engine.name != 'sqlite':
        # Create index for importable column in page table.
        try:
            i = Index("ix_page_importable", Page_table.c.importable)
            i.create()
        except Exception:
            # MySQL doesn't have a named index, but alter should work
            Page_table.c.importable.alter(unique=False)
Example #8
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add the datatx_info column in 'request_type' table
    col = Column("datatx_info", JSONType())
    add_column(col, 'request_type', metadata)

    # Delete the library_id column in 'request' table
    Request_table = Table("request", metadata, autoload=True)
    # TODO: Dropping a column used in a foreign key fails in MySQL, need to remove the FK first.
    drop_column('library_id', Request_table)

    # Delete the folder_id column in 'request' table
    # TODO: Dropping a column used in a foreign key fails in MySQL, need to remove the FK first.
    drop_column('folder_id', Request_table)

    # Add the dataset_files column in 'sample' table
    Sample_table = Table("sample", metadata, autoload=True)
    col = Column("dataset_files", JSONType())
    add_column(col, Sample_table, metadata)

    # Add the library_id column in 'sample' table
    col = Column("library_id", Integer, ForeignKey("library.id"), index=True)
    add_column(col, Sample_table, metadata, index_name='ix_sample_library_id')

    # Add the folder_id column in 'sample' table
    col = Column("folder_id",
                 Integer,
                 ForeignKey("library_folder.id"),
                 index=True)
    add_column(col,
               Sample_table,
               metadata,
               index_name='ix_sample_library_folder_id')
Example #9
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create the folder_id column
    col = Column("folder_id",
                 Integer,
                 ForeignKey('library_folder.id'),
                 index=True)
    add_column(col, 'request', metadata, index_name='ix_request_folder_id')
    # Create the type column in form_definition
    FormDefinition_table = Table("form_definition", metadata, autoload=True)
    col = Column("type", TrimmedString(255), index=True)
    add_column(col,
               FormDefinition_table,
               metadata,
               index_name='ix_form_definition_type')
    col = Column("layout", JSONType)
    add_column(col, FormDefinition_table, metadata)
Example #10
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    History_column = Column("history_id",
                            Integer,
                            ForeignKey("history.id"),
                            nullable=True)
    State_column = Column("state", TrimmedString(64))

    # TODO: Handle indexes correctly
    SchedulerId_column = Column("scheduler", TrimmedString(255))
    HandlerId_column = Column("handler", TrimmedString(255))
    WorkflowUUID_column = Column("uuid", UUIDType, nullable=True)
    add_column(History_column, "workflow_invocation", metadata)
    add_column(State_column, "workflow_invocation", metadata)
    add_column(SchedulerId_column,
               "workflow_invocation",
               metadata,
               index_name="id_workflow_invocation_scheduler")
    add_column(HandlerId_column,
               "workflow_invocation",
               metadata,
               index_name="id_workflow_invocation_handler")
    add_column(WorkflowUUID_column, "workflow_invocation", metadata)

    # All previous invocations have been scheduled...
    cmd = "UPDATE workflow_invocation SET state = 'scheduled'"
    try:
        migrate_engine.execute(cmd)
    except Exception:
        log.exception("failed to update past workflow invocation states.")

    WorkflowInvocationStepAction_column = Column("action",
                                                 JSONType,
                                                 nullable=True)
    add_column(WorkflowInvocationStepAction_column, "workflow_invocation_step",
               metadata)
Example #11
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # rename 'sequencer' table to 'external_service'
    Sequencer_table = Table("sequencer", metadata, autoload=True)
    Sequencer_table.rename('external_service')

    # if running PostgreSQL, rename the primary key sequence too
    if migrate_engine.name in ['postgres', 'postgresql']:
        cmd = "ALTER SEQUENCE sequencer_id_seq RENAME TO external_service_id_seq"
        migrate_engine.execute(cmd)

    # Add 'external_service_id' column to 'sample_dataset' table
    SampleDataset_table = Table("sample_dataset", metadata, autoload=True)
    col = Column("external_service_id",
                 Integer,
                 ForeignKey("external_service.id",
                            name='sample_dataset_external_services_id_fk'),
                 index=True)
    add_column(col,
               SampleDataset_table,
               metadata,
               index_name="ix_sample_dataset_external_service_id")

    # populate the column
    cmd = "SELECT sample_dataset.id, request_type.sequencer_id " \
          + " FROM sample_dataset, sample, request, request_type " \
          + " WHERE sample.id=sample_dataset.sample_id and request.id=sample.request_id and request.request_type_id=request_type.id " \
          + " ORDER BY sample_dataset.id"
    try:
        result = migrate_engine.execute(cmd)
        for r in result:
            sample_dataset_id = int(r[0])
            sequencer_id = int(r[1])
            cmd = "UPDATE sample_dataset SET external_service_id='%i' where id=%i" % (
                sequencer_id, sample_dataset_id)
            migrate_engine.execute(cmd)
    except Exception:
        log.exception("Exception executing SQL command: %s", cmd)

    # rename 'sequencer_type_id' column to 'external_service_type_id' in the table 'external_service'
    # create the column as 'external_service_type_id'
    ExternalServices_table = Table("external_service", metadata, autoload=True)
    col = Column("external_service_type_id", TrimmedString(255))
    add_column(col, ExternalServices_table, metadata)

    # populate this new column
    cmd = "UPDATE external_service SET external_service_type_id=sequencer_type_id"
    migrate_engine.execute(cmd)

    # remove the 'sequencer_type_id' column
    drop_column('sequencer_type_id', ExternalServices_table)

    # create 'request_type_external_service_association' table
    RequestTypeExternalServiceAssociation_table = Table(
        "request_type_external_service_association", metadata,
        Column("id", Integer, primary_key=True),
        Column("request_type_id",
               Integer,
               ForeignKey("request_type.id"),
               index=True),
        Column("external_service_id",
               Integer,
               ForeignKey("external_service.id"),
               index=True))
    create_table(RequestTypeExternalServiceAssociation_table)

    # populate 'request_type_external_service_association' table
    cmd = "SELECT id, sequencer_id FROM request_type ORDER BY id ASC"
    result = migrate_engine.execute(cmd)
    results_list = result.fetchall()
    # Proceed only if request_type rows exist
    for row in results_list:
        request_type_id = row[0]
        sequencer_id = row[1]
        if not sequencer_id:
            sequencer_id = 'null'
        cmd = "INSERT INTO request_type_external_service_association VALUES ( %s, %s, %s )" % (
            nextval(migrate_engine,
                    'request_type_external_service_association'),
            request_type_id, sequencer_id)
        migrate_engine.execute(cmd)

    # TODO: Dropping a column used in a foreign key fails in MySQL, need to remove the FK first.
    drop_column('sequencer_id', 'request_type', metadata)
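The nextval, localtimestamp and engine_false helpers used when composing raw SQL in these scripts dispatch on the engine name to paper over dialect differences. A rough sketch of their behavior, modeled on Galaxy's migration utilities (treat the exact return values as assumptions):

# Sketch of the dialect helpers used in the raw SQL above; return
# values follow Galaxy's migration utilities but are assumed here.
def nextval(migrate_engine, table, col='id'):
    # PostgreSQL needs an explicit sequence value; MySQL and SQLite
    # autoincrement when the id column is inserted as NULL.
    if migrate_engine.name in ['postgres', 'postgresql']:
        return "nextval('%s_%s_seq')" % (table, col)
    return 'null'

def localtimestamp(migrate_engine):
    if migrate_engine.name in ['postgres', 'postgresql', 'mysql']:
        return 'LOCALTIMESTAMP'
    return "current_date || ' ' || current_time"  # SQLite

def engine_false(migrate_engine):
    if migrate_engine.name in ['postgres', 'postgresql']:
        return 'FALSE'
    return 0  # MySQL and SQLite store booleans as 0/1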
Example #12
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    jobs_table = Table("job", metadata, autoload=True)
    job_messages_column = Column("job_messages", JSONType, nullable=True)
    add_column(job_messages_column, jobs_table, metadata)
    job_job_stdout_column = Column("job_stdout", TEXT, nullable=True)
    add_column(job_job_stdout_column, jobs_table, metadata)
    job_job_stderr_column = Column("job_stderr", TEXT, nullable=True)
    add_column(job_job_stderr_column, jobs_table, metadata)

    tasks_table = Table("task", metadata, autoload=True)
    task_job_messages_column = Column("job_messages", JSONType, nullable=True)
    add_column(task_job_messages_column, tasks_table, metadata)
    task_job_stdout_column = Column("job_stdout", TEXT, nullable=True)
    add_column(task_job_stdout_column, tasks_table, metadata)
    task_job_stderr_column = Column("job_stderr", TEXT, nullable=True)
    add_column(task_job_stderr_column, tasks_table, metadata)

    for table in [jobs_table, tasks_table]:
        alter_column('stdout', table, name='tool_stdout')
        alter_column('stderr', table, name='tool_stderr')
Example #13
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add 2 new columns to the galaxy_user table
    try:
        User_table = Table("galaxy_user", metadata, autoload=True)
    except NoSuchTableError:
        User_table = None
        log.debug("Failed loading table galaxy_user")
    if User_table is not None:
        col = Column('deleted', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_deleted')
        col = Column('purged', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_purged')
    # Add 1 new column to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    if HistoryDatasetAssociation_table is not None:
        try:
            col = Column('copied_from_library_dataset_dataset_association_id',
                         Integer,
                         nullable=True)
            col.create(HistoryDatasetAssociation_table)
            assert col is HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id
        except Exception:
            log.exception(
                "Adding column 'copied_from_library_dataset_dataset_association_id' to history_dataset_association table failed."
            )
    # Add 1 new column to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    if MetadataFile_table is not None:
        try:
            col = Column('lda_id', Integer, index=True, nullable=True)
            col.create(MetadataFile_table,
                       index_name='ix_metadata_file_lda_id')
            assert col is MetadataFile_table.c.lda_id
        except Exception:
            log.exception(
                "Adding column 'lda_id' to metadata_file table failed.")
    # Add 1 new column to the stored_workflow table - changeset 2328
    try:
        StoredWorkflow_table = Table(
            "stored_workflow",
            metadata,
            Column("latest_workflow_id",
                   Integer,
                   ForeignKey("workflow.id",
                              use_alter=True,
                              name='stored_workflow_latest_workflow_id_fk'),
                   index=True),
            autoload=True,
            extend_existing=True)
    except NoSuchTableError:
        StoredWorkflow_table = None
        log.debug("Failed loading table stored_workflow")
    if StoredWorkflow_table is not None:
        try:
            col = Column('importable', Boolean, default=False)
            col.create(StoredWorkflow_table)
            assert col is StoredWorkflow_table.c.importable
        except Exception:
            log.exception(
                "Adding column 'importable' to stored_workflow table failed.")
    # Create an index on the Job.state column - changeset 2192
    add_index('ix_job_state', 'job', 'state', metadata)
    # Add all of the new tables above
    metadata.create_all()
    # Add 1 foreign key constraint to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [
                    HistoryDatasetAssociation_table.c.
                    copied_from_library_dataset_dataset_association_id
                ], [LibraryDatasetDatasetAssociation_table.c.id],
                name=
                'history_dataset_association_copied_from_library_dataset_da_fkey'
            )
            # Create the constraint
            cons.create()
        except Exception:
            log.exception(
                "Adding foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' to table 'history_dataset_association' failed."
            )
    # Add 1 foreign key constraint to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if migrate_engine.name != 'sqlite':
        # SQLite can't add a foreign key via ALTER TABLE.
        if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
            try:
                cons = ForeignKeyConstraint(
                    [MetadataFile_table.c.lda_id],
                    [LibraryDatasetDatasetAssociation_table.c.id],
                    name='metadata_file_lda_id_fkey')
                # Create the constraint
                cons.create()
            except Exception:
                log.exception(
                    "Adding foreign key constraint 'metadata_file_lda_id_fkey' to table 'metadata_file' failed."
                )
    # Make sure we have at least 1 user
    cmd = "SELECT * FROM galaxy_user;"
    users = migrate_engine.execute(cmd).fetchall()
    if users:
        cmd = "SELECT * FROM role;"
        roles = migrate_engine.execute(cmd).fetchall()
        if not roles:
            # Create private roles for each user - pass 1
            cmd = \
                "INSERT INTO role " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "email AS name," + \
                "email AS description," + \
                "'private' As type," + \
                "%s AS deleted " + \
                "FROM galaxy_user " + \
                "ORDER BY id;"
            cmd = cmd % (nextval(migrate_engine,
                                 'role'), localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 2
            if migrate_engine.name in ['postgres', 'postgresql', 'sqlite']:
                cmd = "UPDATE role SET description = 'Private role for ' || description;"
            elif migrate_engine.name == 'mysql':
                cmd = "UPDATE role SET description = CONCAT( 'Private role for ', description );"
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 3
            cmd = \
                "INSERT INTO user_role_association " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "role.id AS role_id," + \
                "%s AS create_time," + \
                "%s AS update_time " + \
                "FROM galaxy_user, role " + \
                "WHERE galaxy_user.email = role.name " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % (nextval(migrate_engine, 'user_role_association'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine))
            migrate_engine.execute(cmd)
            # Create default permissions for each user
            cmd = \
                "INSERT INTO default_user_permissions " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM galaxy_user " + \
                "JOIN user_role_association ON user_role_association.user_id = galaxy_user.id " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % nextval(migrate_engine, 'default_user_permissions')
            migrate_engine.execute(cmd)
            # Create default history permissions for each active history associated with a user

            cmd = \
                "INSERT INTO default_history_permissions " + \
                "SELECT %s AS id," + \
                "history.id AS history_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE history.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(
                migrate_engine,
                'default_history_permissions'), engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create "manage permissions" dataset_permissions for all activate-able datasets
            cmd = \
                "INSERT INTO dataset_permissions " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "'manage permissions' AS action," + \
                "history_dataset_association.dataset_id AS dataset_id," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN history_dataset_association ON history_dataset_association.history_id = history.id " + \
                "JOIN dataset ON history_dataset_association.dataset_id = dataset.id " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE dataset.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(migrate_engine, 'dataset_permissions'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
Example #14
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    add_column(from_path_column, "stored_workflow", metadata)
Example #15
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    add_column(handler_col, 'job', metadata, index_name="ix_job_handler")
Example #16
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()

    c = Column("installed_changeset_revision", TrimmedString(255))
    add_column(c, "tool_dependency", metadata)
Example #17
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    add_column(hidden_col, 'stored_workflow', metadata)
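The final examples reference from_path_column, handler_col and hidden_col, column objects defined at module level in their respective scripts. Plausible definitions reconstructed from the target tables (the exact types and defaults are assumptions):

# Assumed module-level definitions for the one-line migrations above.
from_path_column = Column("from_path", TEXT, nullable=True)      # stored_workflow
handler_col = Column("handler", TrimmedString(255), index=True)  # job
hidden_col = Column("hidden", Boolean, default=False)            # stored_workflow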