Code example #1
def add_sequencer(migrate_engine, sequencer_index, sequencer_form_definition_id, sequencer_info):
    '''Adds a new sequencer to the sequencer table along with its form values.'''
    # Create a new form values record with the supplied sequencer information
    values = dumps({'field_0': sequencer_info.get('host', ''),
                    'field_1': sequencer_info.get('username', ''),
                    'field_2': sequencer_info.get('password', ''),
                    'field_3': sequencer_info.get('data_dir', ''),
                    'field_4': sequencer_info.get('rename_dataset', '')})
    cmd = "INSERT INTO form_values VALUES ( %s, %s, %s, %s, '%s' )" % (nextval(migrate_engine, 'form_values'),
                                                                       localtimestamp(migrate_engine),
                                                                       localtimestamp(migrate_engine),
                                                                       sequencer_form_definition_id,
                                                                       values)
    migrate_engine.execute(cmd)
    sequencer_form_values_id = get_latest_id(migrate_engine, 'form_values')
    # Create a new sequencer record with reference to the form value created above.
    name = 'Sequencer_%i' % sequencer_index
    desc = ''
    version = ''
    sequencer_type_id = 'simple_unknown_sequencer'
    cmd = "INSERT INTO sequencer VALUES ( %s, %s, %s, '%s', '%s', '%s', '%s', %s, %s, %s )" % (
        nextval(migrate_engine, 'sequencer'),
        localtimestamp(migrate_engine),
        localtimestamp(migrate_engine),
        name,
        desc,
        sequencer_type_id,
        version,
        sequencer_form_definition_id,
        sequencer_form_values_id,
        engine_false(migrate_engine))
    migrate_engine.execute(cmd)
    return get_latest_id(migrate_engine, 'sequencer')
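These snippets lean on a handful of dialect-abstraction helpers (nextval, localtimestamp, engine_false, get_latest_id) that are defined elsewhere in Galaxy's migration utilities. A minimal sketch of plausible implementations, for orientation only; the real helpers may differ in detail:

# Hedged sketch: plausible implementations of the helpers used by the
# snippets on this page; the real ones live in Galaxy's migration
# utilities and may differ.

def nextval(migrate_engine, table, col='id'):
    # PostgreSQL needs an explicit sequence value; MySQL/SQLite
    # auto-increment when NULL is inserted into the id column.
    if migrate_engine.name in ['postgres', 'postgresql']:
        return "nextval('%s_%s_seq')" % (table, col)
    elif migrate_engine.name in ['mysql', 'sqlite']:
        return "null"
    raise Exception("Unknown database type: %s" % migrate_engine.name)


def localtimestamp(migrate_engine):
    # SQL expression yielding the current timestamp, per dialect.
    if migrate_engine.name in ['postgres', 'postgresql', 'mysql']:
        return "LOCALTIMESTAMP"
    elif migrate_engine.name == 'sqlite':
        return "current_date || ' ' || current_time"
    raise Exception("Unknown database type: %s" % migrate_engine.name)


def engine_false(migrate_engine):
    # Literal FALSE, per dialect (MySQL/SQLite store booleans as 0/1).
    if migrate_engine.name in ['postgres', 'postgresql']:
        return "FALSE"
    elif migrate_engine.name in ['mysql', 'sqlite']:
        return 0
    raise Exception("Unknown database type: %s" % migrate_engine.name)


def get_latest_id(migrate_engine, table):
    # Fetch the id assigned to the most recently inserted row.
    return migrate_engine.execute("SELECT MAX(id) FROM %s" % table).scalar()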
Code example #2
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    try:
        SampleDataset_table.create()
    except Exception:
        log.exception("Creating sample_dataset table failed.")

    cmd = "SELECT id, dataset_files FROM sample"
    result = migrate_engine.execute(cmd)
    for r in result:
        sample_id = r[0]
        if r[1]:
            dataset_files = loads(r[1])
            for df in dataset_files:
                if isinstance(df, dict):
                    # Strip stray quotes from the status value, and strip quotes
                    # plus any embedded filepath from the size value.
                    status = df.get('status', '').replace('"', '').replace("'", "")
                    size = df.get('size', '').replace('"', '').replace("'", "") \
                        .replace(df.get('filepath', ''), '').strip()
                    cmd = "INSERT INTO sample_dataset VALUES (%s, %s, %s, %s, '%s', '%s', '%s', '%s', '%s')"
                    cmd = cmd % (nextval(migrate_engine, 'sample_dataset'),
                                 localtimestamp(migrate_engine),
                                 localtimestamp(migrate_engine),
                                 str(sample_id),
                                 df.get('name', ''),
                                 df.get('filepath', ''),
                                 status,
                                 "",
                                 size)
                    # Execute only for dict entries, so a stale cmd is never re-run.
                    migrate_engine.execute(cmd)

    # Delete the dataset_files column in the Sample table
    try:
        Sample_table = Table("sample", metadata, autoload=True)
    except NoSuchTableError:
        Sample_table = None
        log.debug("Failed loading table sample")
    if Sample_table is not None:
        try:
            Sample_table.c.dataset_files.drop()
        except Exception:
            log.exception(
                "Deleting column 'dataset_files' from the 'sample' table failed."
            )
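For orientation, the dataset_files column being migrated holds a JSON-encoded list of dicts. The field names below are taken from the loop above, but the concrete value is hypothetical:

# Hypothetical pre-migration value of sample.dataset_files (illustration
# only; field names taken from the loop above).
dataset_files = [
    {'name': '3.bed',
     'filepath': '/data/sample_files/3.bed',
     'status': "'Complete'",  # stray quotes are stripped during migration
     # the embedded filepath is stripped from size, leaving '1.2 Kb'
     'size': '/data/sample_files/3.bed 1.2 Kb'},
]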
Code example #3
File: 0027_request_events.py Project: msauria/galaxy
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(RequestEvent_table)
    # move the current state of all existing requests to the request_event table
    cmd = \
        "INSERT INTO request_event " + \
        "SELECT %s AS id," + \
        "%s AS create_time," + \
        "%s AS update_time," + \
        "request.id AS request_id," + \
        "request.state AS state," + \
        "'%s' AS comment " + \
        "FROM request;"
    cmd = cmd % (nextval(migrate_engine, 'request_event'),
                 localtimestamp(migrate_engine),
                 localtimestamp(migrate_engine),
                 'Imported from request table')
    migrate_engine.execute(cmd)

    drop_column('state', 'request', metadata)
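Examples #3 and #4 are two revisions of the same migration: this one calls the create_table and drop_column wrappers where example #4 below inlines the equivalent try/except logic. A hedged sketch of what those wrappers presumably do:

# Hedged sketch of the create_table / drop_column wrappers used above;
# the real helpers live in Galaxy's migration utilities and may differ.

def create_table(table):
    try:
        table.create()
    except Exception:
        log.exception("Creating %s table failed.", table.name)


def drop_column(column_name, table_name, metadata):
    try:
        table = Table(table_name, metadata, autoload=True)
        # Column.drop() is provided by sqlalchemy-migrate, as in example #2
        getattr(table.c, column_name).drop()
    except Exception:
        log.exception("Dropping column '%s' from table '%s' failed.",
                      column_name, table_name)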
Code example #4
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add new request_event table
    try:
        RequestEvent_table.create()
    except Exception:
        log.exception("Creating request_event table failed.")
    # move the current state of all existing requests to the request_event table

    cmd = \
        "INSERT INTO request_event " + \
        "SELECT %s AS id," + \
        "%s AS create_time," + \
        "%s AS update_time," + \
        "request.id AS request_id," + \
        "request.state AS state," + \
        "'%s' AS comment " + \
        "FROM request;"
    cmd = cmd % (nextval(migrate_engine, 'request_event'),
                 localtimestamp(migrate_engine),
                 localtimestamp(migrate_engine),
                 'Imported from request table')
    migrate_engine.execute(cmd)

    if migrate_engine.name != 'sqlite':
        # Delete the state column
        try:
            Request_table = Table("request", metadata, autoload=True)
        except NoSuchTableError:
            Request_table = None
            log.debug("Failed loading table request")
        if Request_table is not None:
            try:
                Request_table.c.state.drop()
            except Exception:
                log.exception(
                    "Deleting column 'state' from the 'request' table failed.")
Code example #5
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create the tables.
    try:
        ToolVersion_table.create()
    except Exception:
        log.exception("Creating tool_version table failed.")
    try:
        ToolVersionAssociation_table.create()
    except Exception:
        log.exception("Creating tool_version_association table failed.")
    # Populate the tool_version table with tools included in installed tool shed repositories.
    cmd = "SELECT id, metadata FROM tool_shed_repository"
    result = migrate_engine.execute(cmd)
    count = 0
    for row in result:
        if row[1]:
            tool_shed_repository_id = row[0]
            repository_metadata = loads(_sniffnfix_pg9_hex(str(row[1])))
            # Create a new row in the tool_version table for each tool included in the
            # repository.  We will NOT handle tool_version_associations because we do
            # not have the information we need to do so.
            tools = repository_metadata.get('tools', [])
            for tool_dict in tools:
                cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
                    (nextval(migrate_engine, 'tool_version'), localtimestamp(migrate_engine), localtimestamp(migrate_engine), tool_dict['guid'], tool_shed_repository_id)
                migrate_engine.execute(cmd)
                count += 1
    print("Added %d rows to the new tool_version table." % count)
    # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
    ToolIdGuidMap_table = Table("tool_id_guid_map", metadata, autoload=True)
    try:
        ToolIdGuidMap_table.drop()
    except Exception:
        log.exception("Dropping tool_id_guid_map table failed.")
Code example #6
def create_sequencer_form_definition(migrate_engine):
    '''
    Create a new form_definition containing 5 fields (host, username, password,
    data_dir & rename_datasets) which describe the existing datatx_info JSON
    dict in the request_type table.
    '''
    # create new form_definition_current in the db
    cmd = "INSERT INTO form_definition_current VALUES ( %s, %s, %s, %s, %s )" % (
        nextval(migrate_engine, 'form_definition_current'),
        localtimestamp(migrate_engine),
        localtimestamp(migrate_engine),
        'NULL',
        engine_false(migrate_engine))
    migrate_engine.execute(cmd)
    # get this form_definition_current id
    form_definition_current_id = get_latest_id(migrate_engine, 'form_definition_current')
    # create new form_definition in the db
    form_definition_name = 'Generic sequencer form'
    form_definition_desc = ''
    form_definition_fields = []
    fields = [('Host', 'TextField'),
              ('User name', 'TextField'),
              ('Password', 'PasswordField'),
              ('Data directory', 'TextField')]
    for index, (label, field_type) in enumerate(fields):
        form_definition_fields.append({'name': 'field_%i' % index,
                                       'label': label,
                                       'helptext': '',
                                       'visible': True,
                                       'required': False,
                                       'type': field_type,
                                       'selectlist': [],
                                       'layout': 'none',
                                       'default': ''})
    form_definition_fields.append({'name': 'field_%i' % len(fields),
                                   'label': 'Prepend the experiment name and sample name to the dataset name?',
                                   'helptext': 'Galaxy datasets are renamed by prepending the experiment name and sample name to the dataset name, ensuring dataset names remain unique in Galaxy even when multiple datasets have the same name on the sequencer.',
                                   'visible': True,
                                   'required': False,
                                   'type': 'SelectField',
                                   'selectlist': ['Do not rename',
                                                  'Prepend sample name',
                                                  'Prepend experiment name',
                                                  'Prepend experiment and sample name'],
                                   'layout': 'none',
                                   'default': ''})
    form_definition_type = 'Sequencer Information Form'
    form_definition_layout = dumps('[]')
    cmd = "INSERT INTO form_definition VALUES ( %s, %s, %s, '%s', '%s', %s, '%s', '%s', '%s' )"
    cmd = cmd % (nextval(migrate_engine, 'form_definition'),
                 localtimestamp(migrate_engine),
                 localtimestamp(migrate_engine),
                 form_definition_name,
                 form_definition_desc,
                 form_definition_current_id,
                 dumps(form_definition_fields),
                 form_definition_type,
                 form_definition_layout)
    migrate_engine.execute(cmd)
    # get this form_definition id
    form_definition_id = get_latest_id(migrate_engine, 'form_definition')
    # update the form_definition_id column in form_definition_current
    cmd = "UPDATE form_definition_current SET latest_form_id=%i WHERE id=%i" % (form_definition_id, form_definition_current_id)
    migrate_engine.execute(cmd)
    return form_definition_id
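Read together with example #1, a driver along these lines could tie the two helpers together, creating the shared form definition once and then one sequencer per request_type row. The loop below is hypothetical (the query and the final linkage step are illustrative, not the actual migration code):

# Hypothetical driver showing how the two helpers fit together.
from json import loads

def migrate_sequencers(migrate_engine):
    form_definition_id = create_sequencer_form_definition(migrate_engine)
    result = migrate_engine.execute("SELECT id, datatx_info FROM request_type")
    for index, row in enumerate(result, start=1):
        datatx_info = loads(row[1]) if row[1] else {}
        sequencer_id = add_sequencer(migrate_engine, index,
                                     form_definition_id, datatx_info)
        # the real migration would also point request_type at the new
        # sequencer, e.g. via an UPDATE on request_type.sequencer_id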
Code example #7
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add 2 new columns to the galaxy_user table
    try:
        User_table = Table("galaxy_user", metadata, autoload=True)
    except NoSuchTableError:
        User_table = None
        log.debug("Failed loading table galaxy_user")
    if User_table is not None:
        col = Column('deleted', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_deleted')
        col = Column('purged', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_purged')
    # Add 1 new column to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    if HistoryDatasetAssociation_table is not None:
        try:
            col = Column('copied_from_library_dataset_dataset_association_id',
                         Integer,
                         nullable=True)
            col.create(HistoryDatasetAssociation_table)
            assert col is HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id
        except Exception:
            log.exception(
                "Adding column 'copied_from_library_dataset_dataset_association_id' to history_dataset_association table failed."
            )
    # Add 1 new column to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    if MetadataFile_table is not None:
        try:
            col = Column('lda_id', Integer, index=True, nullable=True)
            col.create(MetadataFile_table,
                       index_name='ix_metadata_file_lda_id')
            assert col is MetadataFile_table.c.lda_id
        except Exception:
            log.exception(
                "Adding column 'lda_id' to metadata_file table failed.")
    # Add 1 new column to the stored_workflow table - changeset 2328
    try:
        StoredWorkflow_table = Table(
            "stored_workflow",
            metadata,
            Column("latest_workflow_id",
                   Integer,
                   ForeignKey("workflow.id",
                              use_alter=True,
                              name='stored_workflow_latest_workflow_id_fk'),
                   index=True),
            autoload=True,
            extend_existing=True)
    except NoSuchTableError:
        StoredWorkflow_table = None
        log.debug("Failed loading table stored_workflow")
    if StoredWorkflow_table is not None:
        try:
            col = Column('importable', Boolean, default=False)
            col.create(StoredWorkflow_table)
            assert col is StoredWorkflow_table.c.importable
        except Exception:
            log.exception(
                "Adding column 'importable' to stored_workflow table failed.")
    # Create an index on the Job.state column - changeset 2192
    add_index('ix_job_state', 'job', 'state', metadata)
    # Add all of the new tables above
    metadata.create_all()
    # Add 1 foreign key constraint to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id],
                [LibraryDatasetDatasetAssociation_table.c.id],
                name='history_dataset_association_copied_from_library_dataset_da_fkey')
            # Create the constraint
            cons.create()
        except Exception:
            log.exception(
                "Adding foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' to table 'history_dataset_association' failed."
            )
    # Add 1 foreign key constraint to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if migrate_engine.name != 'sqlite':
        # Sqlite can't alter table add foreign key.
        if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
            try:
                cons = ForeignKeyConstraint(
                    [MetadataFile_table.c.lda_id],
                    [LibraryDatasetDatasetAssociation_table.c.id],
                    name='metadata_file_lda_id_fkey')
                # Create the constraint
                cons.create()
            except Exception:
                log.exception(
                    "Adding foreign key constraint 'metadata_file_lda_id_fkey' to table 'metadata_file' failed."
                )
    # Make sure we have at least 1 user
    cmd = "SELECT * FROM galaxy_user;"
    users = migrate_engine.execute(cmd).fetchall()
    if users:
        cmd = "SELECT * FROM role;"
        roles = migrate_engine.execute(cmd).fetchall()
        if not roles:
            # Create private roles for each user - pass 1
            cmd = \
                "INSERT INTO role " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "email AS name," + \
                "email AS description," + \
                "'private' As type," + \
                "%s AS deleted " + \
                "FROM galaxy_user " + \
                "ORDER BY id;"
            cmd = cmd % (nextval(migrate_engine, 'role'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 2
            if migrate_engine.name in ['postgres', 'postgresql', 'sqlite']:
                cmd = "UPDATE role SET description = 'Private role for ' || description;"
            elif migrate_engine.name == 'mysql':
                cmd = "UPDATE role SET description = CONCAT( 'Private role for ', description );"
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 3
            cmd = \
                "INSERT INTO user_role_association " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "role.id AS role_id," + \
                "%s AS create_time," + \
                "%s AS update_time " + \
                "FROM galaxy_user, role " + \
                "WHERE galaxy_user.email = role.name " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % (nextval(migrate_engine, 'user_role_association'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine))
            migrate_engine.execute(cmd)
            # Create default permissions for each user
            cmd = \
                "INSERT INTO default_user_permissions " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM galaxy_user " + \
                "JOIN user_role_association ON user_role_association.user_id = galaxy_user.id " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % nextval(migrate_engine, 'default_user_permissions')
            migrate_engine.execute(cmd)
            # Create default history permissions for each active history associated with a user
            cmd = \
                "INSERT INTO default_history_permissions " + \
                "SELECT %s AS id," + \
                "history.id AS history_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE history.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(migrate_engine, 'default_history_permissions'),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create "manage permissions" dataset_permissions for all activate-able datasets
            cmd = \
                "INSERT INTO dataset_permissions " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "'manage permissions' AS action," + \
                "history_dataset_association.dataset_id AS dataset_id," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN history_dataset_association ON history_dataset_association.history_id = history.id " + \
                "JOIN dataset ON history_dataset_association.dataset_id = dataset.id " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE dataset.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(migrate_engine, 'dataset_permissions'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
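Example #7 also relies on add_column and add_index wrappers. Judging from the inline col.create(..., index_name=...) calls later in the same function, they are probably thin wrappers along the lines of this hedged sketch (real signatures may differ):

# Hedged sketch of the add_column / add_index wrappers used above; the
# real helpers live in Galaxy's migration utilities and may differ.
from sqlalchemy import Index, Table

def add_column(column, table, index_name=None):
    try:
        # sqlalchemy-migrate's Column.create, as used inline in example #7
        column.create(table, index_name=index_name)
    except Exception:
        log.exception("Adding column '%s' to table '%s' failed.",
                      column.name, table.name)


def add_index(index_name, table_name, column_name, metadata):
    try:
        table = Table(table_name, metadata, autoload=True)
        Index(index_name, getattr(table.c, column_name)).create()
    except Exception:
        log.exception("Adding index '%s' to table '%s' failed.",
                      index_name, table_name)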