def upgrade(migrate_engine):
    """Add 'metadata', 'includes_datatypes' and 'update_available' columns
    to the tool_shed_repository table, initializing both boolean columns
    to false.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    repository_table = Table("tool_shed_repository", metadata, autoload=True)
    false_value = engine_false(migrate_engine)

    # JSON blob holding the repository's metadata; nullable on purpose.
    add_column(Column("metadata", JSONType(), nullable=True),
               repository_table, metadata)

    # Indexed flag recording whether the repository provides datatypes.
    add_column(Column("includes_datatypes", Boolean, index=True, default=False),
               repository_table, metadata,
               index_name="ix_tool_shed_repository_includes_datatypes")
    try:
        migrate_engine.execute(
            "UPDATE tool_shed_repository SET includes_datatypes=%s" % false_value)
    except Exception:
        log.exception(
            "Updating column 'includes_datatypes' of table 'tool_shed_repository' failed."
        )

    # Flag recording whether a newer revision exists in the tool shed.
    add_column(Column("update_available", Boolean, default=False),
               repository_table, metadata)
    try:
        migrate_engine.execute(
            "UPDATE tool_shed_repository SET update_available=%s" % false_value)
    except Exception:
        log.exception(
            "Updating column 'update_available' of table 'tool_shed_repository' failed."
        )
# ---- Example 2 (scraping-artifact separator; was "示例#2" / "0") ----
def upgrade(migrate_engine):
    """Create the sharing-support tables and add the sharing columns
    (deleted, importable, slug, published) to the visualization table.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    for table in TABLES:
        create_table(table)

    # Columns & indices supporting sharing on the visualization table.
    visualization_table = Table("visualization", metadata, autoload=True)

    add_column(Column("deleted", Boolean, default=False, index=True),
               visualization_table, metadata,
               index_name="ix_visualization_deleted")
    try:
        # Fill column with default value.
        migrate_engine.execute("UPDATE visualization SET deleted = %s" %
                               engine_false(migrate_engine))
    except Exception:
        log.exception(
            "Updating column 'deleted' of table 'visualization' failed.")

    add_column(Column("importable", Boolean, default=False, index=True),
               visualization_table, metadata,
               index_name='ix_visualization_importable')
    try:
        # Fill column with default value.
        migrate_engine.execute("UPDATE visualization SET importable = %s" %
                               engine_false(migrate_engine))
    except Exception:
        log.exception(
            "Updating column 'importable' of table 'visualization' failed.")

    add_column(Column("slug", TEXT), visualization_table, metadata)
    # Index needs to be added separately because MySQL cannot index a TEXT/BLOB
    # column without specifying mysql_length
    add_index('ix_visualization_slug', visualization_table, 'slug')

    add_column(Column("published", Boolean, index=True),
               visualization_table, metadata,
               index_name='ix_visualization_published')
    try:
        # Fill column with default value.
        migrate_engine.execute("UPDATE visualization SET published = %s" %
                               engine_false(migrate_engine))
    except Exception:
        log.exception(
            "Updating column 'published' of table 'visualization' failed.")
# ---- Example 3 (scraping-artifact separator; was "示例#3" / "0") ----
def upgrade(migrate_engine):
    """Index galaxy_user.deleted/purged, backfill their NULLs with false,
    and index hda.copied_from_library_dataset_dataset_association_id.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    user_table = Table("galaxy_user", metadata, autoload=True)
    # The next add_index() calls are not needed any more after commit
    # 7ee93c0995123b0f357abd649326295dfa06766c , but harmless
    add_index('ix_galaxy_user_deleted', user_table, 'deleted')
    add_index('ix_galaxy_user_purged', user_table, 'purged')

    # Set the default data in the galaxy_user table, but only for null values
    false_value = engine_false(migrate_engine)
    try:
        migrate_engine.execute(
            "UPDATE galaxy_user SET deleted = %s WHERE deleted is null" %
            false_value)
    except Exception:
        log.exception(
            "Setting default data for galaxy_user.deleted column failed.")
    try:
        migrate_engine.execute(
            "UPDATE galaxy_user SET purged = %s WHERE purged is null" %
            false_value)
    except Exception:
        log.exception(
            "Setting default data for galaxy_user.purged column failed.")

    add_index('ix_hda_copied_from_library_dataset_dataset_association_id',
              'history_dataset_association',
              'copied_from_library_dataset_dataset_association_id', metadata)
def upgrade(migrate_engine):
    """Add a 'purged' column to library_dataset, default it to false, then
    mark as purged every deleted LibraryDataset with no active LDDAs.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    add_column(Column('purged', Boolean, index=True, default=False),
               'library_dataset', metadata,
               index_name='ix_library_dataset_purged')

    # Update the purged flag to the default False
    try:
        migrate_engine.execute("UPDATE library_dataset SET purged = %s;" %
                               engine_false(migrate_engine))
    except Exception:
        log.exception(
            "Setting default data for library_dataset.purged column failed.")

    # Update the purged flag for those LibraryDatasets whose purged flag should
    # be True.  This happens when the LibraryDataset has no active
    # LibraryDatasetDatasetAssociations.
    deleted_rows = migrate_engine.execute(
        "SELECT * FROM library_dataset WHERE deleted = %s;" %
        engine_true(migrate_engine)).fetchall()
    for deleted_ld in deleted_rows:
        ld_id = int(deleted_ld.id)
        active_lddas = migrate_engine.execute(
            "SELECT * FROM library_dataset_dataset_association WHERE library_dataset_id = %d AND library_dataset_dataset_association.deleted = %s;" % (
                ld_id, engine_false(migrate_engine))).fetchall()
        if not active_lddas:
            print("Updating purged column to True for LibraryDataset id : ",
                  ld_id)
            migrate_engine.execute(
                "UPDATE library_dataset SET purged = %s WHERE id = %d;" % (
                    engine_true(migrate_engine), ld_id))
# ---- Example 5 (scraping-artifact separator; was "示例#5" / "0") ----
def upgrade(migrate_engine):
    """Add 'uninstalled' and 'dist_to_shed' boolean columns to the
    tool_shed_repository table and initialize both to false.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    ToolShedRepository_table = Table("tool_shed_repository",
                                     metadata,
                                     autoload=True)
    # Use the shared add_column() helper (consistent with the other
    # migrations in this file) instead of raw Column.create(), and keep the
    # column creation separate from the data update: previously a failed
    # UPDATE was logged as "Adding ... column failed", misattributing the
    # error.
    c = Column("uninstalled", Boolean, default=False)
    add_column(c, ToolShedRepository_table, metadata)
    try:
        migrate_engine.execute(
            "UPDATE tool_shed_repository SET uninstalled=%s" %
            engine_false(migrate_engine))
    except Exception:
        log.exception(
            "Updating column 'uninstalled' of table 'tool_shed_repository' failed."
        )
    c = Column("dist_to_shed", Boolean, default=False)
    add_column(c, ToolShedRepository_table, metadata)
    try:
        migrate_engine.execute(
            "UPDATE tool_shed_repository SET dist_to_shed=%s" %
            engine_false(migrate_engine))
    except Exception:
        log.exception(
            "Updating column 'dist_to_shed' of table 'tool_shed_repository' failed."
        )
def add_sequencer(migrate_engine, sequencer_index, sequencer_form_definition_id, sequencer_info):
    '''
    Add a new sequencer to the sequencer table along with its form values.

    :param migrate_engine: SQLAlchemy engine the migration runs against.
    :param sequencer_index: integer used to build the generated sequencer
        name ('Sequencer_<index>').
    :param sequencer_form_definition_id: id of the form_definition row the
        new form_values row references.
    :param sequencer_info: dict with optional keys 'host', 'username',
        'password', 'data_dir' and 'rename_dataset'; missing keys default
        to the empty string.
    :returns: id of the newly inserted sequencer row.
    '''
    # Create a new form values record with the supplied sequencer information
    values = dumps({'field_0': sequencer_info.get('host', ''),
                    'field_1': sequencer_info.get('username', ''),
                    'field_2': sequencer_info.get('password', ''),
                    'field_3': sequencer_info.get('data_dir', ''),
                    'field_4': sequencer_info.get('rename_dataset', '')})
    # NOTE(review): values are interpolated directly into the SQL string;
    # presumably safe because the data originates from the migration itself,
    # not from user input — confirm if this is ever fed external data.
    cmd = "INSERT INTO form_values VALUES ( %s, %s, %s, %s, '%s' )" % (nextval(migrate_engine, 'form_values'),
                                                                       localtimestamp(migrate_engine),
                                                                       localtimestamp(migrate_engine),
                                                                       sequencer_form_definition_id,
                                                                       values)
    migrate_engine.execute(cmd)
    # Fetch the id of the form_values row just inserted.
    sequencer_form_values_id = get_latest_id(migrate_engine, 'form_values')
    # Create a new sequencer record with reference to the form value created above.
    name = 'Sequencer_%i' % sequencer_index
    desc = ''
    version = ''
    sequencer_type_id = 'simple_unknown_sequencer'
    cmd = "INSERT INTO sequencer VALUES ( %s, %s, %s, '%s', '%s', '%s', '%s', %s, %s, %s )" % (
        nextval(migrate_engine, 'sequencer'),
        localtimestamp(migrate_engine),
        localtimestamp(migrate_engine),
        name,
        desc,
        sequencer_type_id,
        version,
        sequencer_form_definition_id,
        sequencer_form_values_id,
        engine_false(migrate_engine))
    migrate_engine.execute(cmd)
    return get_latest_id(migrate_engine, 'sequencer')
def downgrade(migrate_engine):
    """Revert: drop tool_shed_repository.tool_shed_status and restore the
    'update_available' column (initialized to false).

    No-op on sqlite, where dropping a column is not possible from a
    migration script.
    """
    metadata.bind = migrate_engine
    metadata.reflect()

    try:
        ToolShedRepository_table = Table("tool_shed_repository",
                                         metadata,
                                         autoload=True)
    except NoSuchTableError:
        ToolShedRepository_table = None
        log.debug("Failed loading table tool_shed_repository")
    if ToolShedRepository_table is not None:
        # For some unknown reason it is no longer possible to drop a column in a migration script if using the sqlite database.
        if migrate_engine.name != 'sqlite':
            drop_column('tool_shed_status', ToolShedRepository_table)
            c = Column("update_available", Boolean, default=False)
            # Pass metadata explicitly — consistent with every other
            # add_column() call in these migrations (previously omitted).
            add_column(c, ToolShedRepository_table, metadata)
            try:
                migrate_engine.execute(
                    "UPDATE tool_shed_repository SET update_available=%s" %
                    engine_false(migrate_engine))
            except Exception:
                log.exception(
                    "Updating column 'update_available' of table 'tool_shed_repository' failed."
                )
# ---- Example 8 (scraping-artifact separator; was "示例#8" / "0") ----
def upgrade(migrate_engine):
    """Add 'uninstalled' and 'dist_to_shed' boolean columns to the
    tool_shed_repository table, both initialized to false.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    repo_table = Table("tool_shed_repository", metadata, autoload=True)
    false_value = engine_false(migrate_engine)

    add_column(Column("uninstalled", Boolean, default=False), repo_table, metadata)
    try:
        migrate_engine.execute("UPDATE tool_shed_repository SET uninstalled=%s" % false_value)
    except Exception:
        log.exception("Updating column 'uninstalled' of table 'tool_shed_repository' failed.")

    add_column(Column("dist_to_shed", Boolean, default=False), repo_table, metadata)
    try:
        migrate_engine.execute("UPDATE tool_shed_repository SET dist_to_shed=%s" % false_value)
    except Exception:
        log.exception("Updating column 'dist_to_shed' of table 'tool_shed_repository' failed.")
def upgrade(migrate_engine):
    """Add an indexed 'imported' boolean column to the job table and
    initialize it to false.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create and initialize imported column in job table.
    job_table = Table("job", metadata, autoload=True)
    imported_col = Column("imported", Boolean, default=False, index=True)
    add_column(imported_col, job_table, metadata, index_name="ix_job_imported")
    try:
        cmd = "UPDATE job SET imported=%s" % engine_false(migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception("Updating column 'imported' of table 'job' failed.")
def downgrade(migrate_engine):
    """Revert: drop tool_shed_repository.tool_shed_status and restore the
    'update_available' column (initialized to false).
    """
    metadata.bind = migrate_engine
    metadata.reflect()

    repo_table = Table("tool_shed_repository", metadata, autoload=True)
    drop_column('tool_shed_status', repo_table)
    add_column(Column("update_available", Boolean, default=False),
               repo_table, metadata)
    try:
        cmd = ("UPDATE tool_shed_repository SET update_available=%s" %
               engine_false(migrate_engine))
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Updating column 'update_available' of table 'tool_shed_repository' failed."
        )
# ---- Example 11 (scraping-artifact separator; was "示例#11" / "0") ----
def upgrade(migrate_engine):
    """Add an 'importing' column to the history table (initialized to
    false) and create the job_import_history_archive table.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add column to history table and initialize.
    try:
        history_table = Table("history", metadata, autoload=True)
        importing_col.create(history_table, index_name="ix_history_importing")
        assert importing_col is history_table.c.importing
        # Initialize column to false.
        cmd = "UPDATE history SET importing=%s" % engine_false(migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception("Adding column 'importing' to history table failed.")

    # Create job_import_history_archive table.
    try:
        JobImportHistoryArchive_table.create()
    except Exception:
        log.exception("Creating job_import_history_archive table failed.")
def create_sequencer_form_definition(migrate_engine):
    '''
    Create a new form_definition containing 5 fields (host, username, password,
    data_dir & rename_datasets) which described the existing datatx_info json
    dict in the request_type table.

    :param migrate_engine: SQLAlchemy engine the migration runs against.
    :returns: id of the newly created form_definition row.
    '''
    # create new form_definition_current in the db
    cmd = "INSERT INTO form_definition_current VALUES ( %s, %s, %s, %s, %s )" % (
        nextval(migrate_engine, 'form_definition_current'),
        localtimestamp(migrate_engine),
        localtimestamp(migrate_engine),
        'NULL',
        engine_false(migrate_engine))
    migrate_engine.execute(cmd)
    # get this form_definition_current id
    form_definition_current_id = get_latest_id(migrate_engine, 'form_definition_current')
    # create new form_definition in the db
    form_definition_name = 'Generic sequencer form'
    form_definition_desc = ''
    form_definition_fields = []
    # The first four fields map 1:1 onto the old datatx_info dict keys.
    fields = [('Host', 'TextField'),
              ('User name', 'TextField'),
              ('Password', 'PasswordField'),
              ('Data directory', 'TextField')]
    for index, (label, field_type) in enumerate(fields):
        form_definition_fields.append({'name': 'field_%i' % index,
                                       'label': label,
                                       'helptext': '',
                                       'visible': True,
                                       'required': False,
                                       'type': field_type,
                                       'selectlist': [],
                                       'layout': 'none',
                                       'default': ''})
    # Fifth field: select list controlling dataset renaming behavior.
    # NOTE(review): 'Preprend sample name' is a typo in a stored/runtime
    # value; left untouched since existing rows may contain it.
    form_definition_fields.append({'name': 'field_%i' % len(fields),
                                   'label': 'Prepend the experiment name and sample name to the dataset name?',
                                   'helptext': 'Galaxy datasets are renamed by prepending the experiment name and sample name to the dataset name, ensuring dataset names remain unique in Galaxy even when multiple datasets have the same name on the sequencer.',
                                   'visible': True,
                                   'required': False,
                                   'type': 'SelectField',
                                   'selectlist': ['Do not rename',
                                                  'Preprend sample name',
                                                  'Prepend experiment name',
                                                  'Prepend experiment and sample name'],
                                   'layout': 'none',
                                   'default': ''})
    form_definition_type = 'Sequencer Information Form'
    form_definition_layout = dumps('[]')
    cmd = "INSERT INTO form_definition VALUES ( %s, %s, %s, '%s', '%s', %s, '%s', '%s', '%s' )"
    cmd = cmd % (nextval(migrate_engine, 'form_definition'),
                 localtimestamp(migrate_engine),
                 localtimestamp(migrate_engine),
                 form_definition_name,
                 form_definition_desc,
                 form_definition_current_id,
                 dumps(form_definition_fields),
                 form_definition_type,
                 form_definition_layout)
    migrate_engine.execute(cmd)
    # get this form_definition id
    form_definition_id = get_latest_id(migrate_engine, 'form_definition')
    # update the form_definition_id column in form_definition_current
    cmd = "UPDATE form_definition_current SET latest_form_id=%i WHERE id=%i" % (form_definition_id, form_definition_current_id)
    migrate_engine.execute(cmd)
    return form_definition_id
# ---- Example 13 (scraping-artifact separator; was "示例#13" / "0") ----
def upgrade(migrate_engine):
    """Add an indexed 'deleted' column (default false) to each of the three
    library *info association* tables and backfill it with false.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # (table name, index name) for each association table to modify.
    targets = [
        ("library_info_association",
         "ix_library_info_association_deleted"),
        ("library_folder_info_association",
         "ix_library_folder_info_association_deleted"),
        ("library_dataset_dataset_info_association",
         "ix_library_dataset_dataset_info_association_deleted"),
    ]
    for table_name, index_name in targets:
        try:
            assoc_table = Table(table_name, metadata, autoload=True)
            col = Column("deleted", Boolean, index=True, default=False)
            col.create(assoc_table, index_name=index_name)
            assert col is assoc_table.c.deleted
        except Exception:
            log.exception(
                "Adding column 'deleted' to '%s' table failed." % table_name)
        cmd = "UPDATE %s SET deleted = %s" % (table_name,
                                              engine_false(migrate_engine))
        try:
            migrate_engine.execute(cmd)
        except Exception:
            log.exception("deleted to false in %s failed." % table_name)
def upgrade(migrate_engine):
    """Security-model migration: add deleted/purged columns to galaxy_user,
    link HDAs and metadata files to library datasets, make stored workflows
    importable, create the new permission tables, and seed a private role
    plus default permissions for every existing user.
    """
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add 2 new columns to the galaxy_user table
    try:
        User_table = Table("galaxy_user", metadata, autoload=True)
    except NoSuchTableError:
        User_table = None
        log.debug("Failed loading table galaxy_user")
    if User_table is not None:
        # NOTE(review): add_column() is called without the metadata argument
        # here, unlike most other call sites — confirm the helper's
        # signature permits this.
        col = Column('deleted', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_deleted')
        col = Column('purged', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_purged')
    # Add 1 new column to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    if HistoryDatasetAssociation_table is not None:
        try:
            col = Column('copied_from_library_dataset_dataset_association_id',
                         Integer,
                         nullable=True)
            col.create(HistoryDatasetAssociation_table)
            assert col is HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id
        except Exception:
            log.exception(
                "Adding column 'copied_from_library_dataset_dataset_association_id' to history_dataset_association table failed."
            )
    # Add 1 new column to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    if MetadataFile_table is not None:
        try:
            col = Column('lda_id', Integer, index=True, nullable=True)
            col.create(MetadataFile_table,
                       index_name='ix_metadata_file_lda_id')
            assert col is MetadataFile_table.c.lda_id
        except Exception:
            log.exception(
                "Adding column 'lda_id' to metadata_file table failed.")
    # Add 1 new column to the stored_workflow table - changeset 2328
    try:
        StoredWorkflow_table = Table(
            "stored_workflow",
            metadata,
            Column("latest_workflow_id",
                   Integer,
                   ForeignKey("workflow.id",
                              use_alter=True,
                              name='stored_workflow_latest_workflow_id_fk'),
                   index=True),
            autoload=True,
            extend_existing=True)
    except NoSuchTableError:
        StoredWorkflow_table = None
        log.debug("Failed loading table stored_workflow")
    if StoredWorkflow_table is not None:
        try:
            col = Column('importable', Boolean, default=False)
            col.create(StoredWorkflow_table)
            assert col is StoredWorkflow_table.c.importable
        except Exception:
            log.exception(
                "Adding column 'importable' to stored_workflow table failed.")
    # Create an index on the Job.state column - changeset 2192
    add_index('ix_job_state', 'job', 'state', metadata)
    # Add all of the new tables above
    metadata.create_all()
    # Add 1 foreign key constraint to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [
                    HistoryDatasetAssociation_table.c.
                    copied_from_library_dataset_dataset_association_id
                ], [LibraryDatasetDatasetAssociation_table.c.id],
                name=
                'history_dataset_association_copied_from_library_dataset_da_fkey'
            )
            # Create the constraint
            cons.create()
        except Exception:
            log.exception(
                "Adding foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' to table 'history_dataset_association' failed."
            )
    # Add 1 foreign key constraint to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if migrate_engine.name != 'sqlite':
        # Sqlite can't alter table add foreign key.
        if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
            try:
                cons = ForeignKeyConstraint(
                    [MetadataFile_table.c.lda_id],
                    [LibraryDatasetDatasetAssociation_table.c.id],
                    name='metadata_file_lda_id_fkey')
                # Create the constraint
                cons.create()
            except Exception:
                log.exception(
                    "Adding foreign key constraint 'metadata_file_lda_id_fkey' to table 'metadata_file' failed."
                )
    # Make sure we have at least 1 user
    cmd = "SELECT * FROM galaxy_user;"
    users = migrate_engine.execute(cmd).fetchall()
    if users:
        cmd = "SELECT * FROM role;"
        roles = migrate_engine.execute(cmd).fetchall()
        # Only seed roles/permissions on a pristine security model (no roles yet).
        if not roles:
            # Create private roles for each user - pass 1
            cmd = \
                "INSERT INTO role " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "email AS name," + \
                "email AS description," + \
                "'private' As type," + \
                "%s AS deleted " + \
                "FROM galaxy_user " + \
                "ORDER BY id;"
            # NOTE(review): nextval() is interpolated once into the statement;
            # presumably it expands to a per-row sequence expression on engines
            # that need it — confirm against the nextval() helper.
            cmd = cmd % (nextval(migrate_engine,
                                 'role'), localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 2
            # NOTE(review): if the dialect is neither postgres-family, sqlite,
            # nor mysql, neither branch runs and the pass-1 INSERT is executed
            # a second time below — latent bug for other dialects.
            if migrate_engine.name in ['postgres', 'postgresql', 'sqlite']:
                cmd = "UPDATE role SET description = 'Private role for ' || description;"
            elif migrate_engine.name == 'mysql':
                cmd = "UPDATE role SET description = CONCAT( 'Private role for ', description );"
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 3
            cmd = \
                "INSERT INTO user_role_association " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "role.id AS role_id," + \
                "%s AS create_time," + \
                "%s AS update_time " + \
                "FROM galaxy_user, role " + \
                "WHERE galaxy_user.email = role.name " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % (nextval(migrate_engine, 'user_role_association'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine))
            migrate_engine.execute(cmd)
            # Create default permissions for each user
            cmd = \
                "INSERT INTO default_user_permissions " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM galaxy_user " + \
                "JOIN user_role_association ON user_role_association.user_id = galaxy_user.id " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % nextval(migrate_engine, 'default_user_permissions')
            migrate_engine.execute(cmd)
            # Create default history permissions for each active history associated with a user

            cmd = \
                "INSERT INTO default_history_permissions " + \
                "SELECT %s AS id," + \
                "history.id AS history_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE history.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(
                migrate_engine,
                'default_history_permissions'), engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create "manage permissions" dataset_permissions for all activate-able datasets
            cmd = \
                "INSERT INTO dataset_permissions " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "'manage permissions' AS action," + \
                "history_dataset_association.dataset_id AS dataset_id," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN history_dataset_association ON history_dataset_association.history_id = history.id " + \
                "JOIN dataset ON history_dataset_association.dataset_id = dataset.id " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE dataset.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(migrate_engine, 'dataset_permissions'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
# ---- Example 15 (scraping-artifact separator; was "示例#15" / "0") ----
def upgrade(migrate_engine):
    """Add an indexed boolean 'inheritable' column (backfilled to false) to the
    library_info_association and library_folder_info_association tables.

    On SQLite, first restore the 'request' table if the previous migration
    script removed it.
    """
    print(__doc__)
    metadata.bind = migrate_engine

    # SQLite only: the previous migration script may have deleted the
    # 'request' table; recreate it so later migrations still find it.
    if migrate_engine.name == 'sqlite' and not migrate_engine.has_table('request'):
        # Reflect just the tables that 'request' holds foreign keys into.
        metadata.reflect(only=['form_values', 'request_type', 'galaxy_user'])
        request_table = Table(
            'request',
            metadata,
            Column("id", Integer, primary_key=True),
            Column("create_time", DateTime, default=now),
            Column("update_time", DateTime, default=now, onupdate=now),
            Column("name", TrimmedString(255), nullable=False),
            Column("desc", TEXT),
            Column("form_values_id", Integer, ForeignKey("form_values.id"), index=True),
            Column("request_type_id", Integer, ForeignKey("request_type.id"), index=True),
            Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True),
            Column("deleted", Boolean, index=True, default=False),
        )
        try:
            request_table.create()
        except Exception:
            log.exception("Creating request table failed.")

    metadata.reflect()

    def _add_inheritable(table_name):
        # Add an indexed boolean 'inheritable' column to table_name, then
        # backfill all existing rows with the engine-specific false literal.
        # Each step gets its own try/except so a failed ALTER still lets the
        # UPDATE attempt run (matching the original best-effort behavior).
        try:
            target_table = Table(table_name, metadata, autoload=True)
            inheritable = Column("inheritable", Boolean, index=True, default=False)
            inheritable.create(target_table, index_name='ix_%s_inheritable' % table_name)
            assert inheritable is target_table.c.inheritable
        except Exception:
            log.exception("Adding column 'inheritable' to '%s' table failed." % table_name)
        backfill = "UPDATE %s SET inheritable = %s" % (table_name, engine_false(migrate_engine))
        try:
            migrate_engine.execute(backfill)
        except Exception:
            log.exception("Setting value of column inheritable to false in %s failed." % table_name)

    _add_inheritable("library_info_association")
    _add_inheritable("library_folder_info_association")
# --- Example #16 ---
def upgrade(migrate_engine):
    """Add sharing support to the visualization table: share/tag/annotation
    association tables plus deleted, importable, slug, and published columns.
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    visualization_table = Table("visualization", metadata, autoload=True)

    # Table recording which users a visualization has been shared with.
    try:
        VisualizationUserShareAssociation_table.create()
    except Exception:
        log.exception(
            "Creating visualization_user_share_association table failed.")

    def _add_flag_column(column, name):
        # Attach an indexed boolean column to the visualization table and
        # backfill existing rows with the engine-specific false literal.
        try:
            column.create(visualization_table,
                          index_name="ix_visualization_%s" % name)
            assert column is getattr(visualization_table.c, name)
            migrate_engine.execute(
                "UPDATE visualization SET %s = %s" %
                (name, engine_false(migrate_engine)))
        except Exception:
            log.exception(
                "Adding %s column to visualization table failed." % name)

    _add_flag_column(Column("deleted", Boolean, default=False, index=True),
                     "deleted")
    _add_flag_column(Column("importable", Boolean, default=False, index=True),
                     "importable")

    # Slug column; its index is created separately below.
    slug_column = Column("slug", TEXT)
    try:
        slug_column.create(visualization_table)
        assert slug_column is visualization_table.c.slug
    except Exception:
        log.exception("Adding slug column to visualization table failed.")

    # MySQL cannot index an unbounded TEXT column, so build the index by hand
    # with a 100-character prefix there; other engines take the plain Index.
    try:
        if migrate_engine.name == 'mysql':
            migrate_engine.execute(
                "CREATE INDEX ix_visualization_slug ON visualization ( slug ( 100 ) )")
        else:
            Index("ix_visualization_slug",
                  visualization_table.c.slug).create()
    except Exception:
        log.exception("Adding index 'ix_visualization_slug' failed.")

    _add_flag_column(Column("published", Boolean, index=True), "published")

    # Tag association table.
    try:
        VisualizationTagAssociation_table.create()
    except Exception:
        log.exception("Creating visualization_tag_association table failed.")

    # Annotation association table.
    try:
        VisualizationAnnotationAssociation_table.create()
    except Exception:
        log.exception(
            "Creating visualization_annotation_association table failed.")

    # As with slug, the annotation TEXT column needs a manual prefixed index
    # on MySQL; create it explicitly to sidestep engine errors.
    try:
        if migrate_engine.name == 'mysql':
            migrate_engine.execute(
                "CREATE INDEX ix_visualization_annotation_association_annotation ON visualization_annotation_association ( annotation ( 100 ) )")
        else:
            Index("ix_visualization_annotation_association_annotation",
                  VisualizationAnnotationAssociation_table.c.annotation).create()
    except Exception:
        log.exception(
            "Adding index 'ix_visualization_annotation_association_annotation' failed."
        )