Code Example #1
File: 0053_item_ratings.py Project: scrathat/galaxy
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    create_table(HistoryRatingAssociation_table)

    # Create history_dataset_association_rating_association table.
    try:
        HistoryDatasetAssociationRatingAssociation_table.create()
    except Exception as e:
        # MySQL limits identifier names to 64 characters; when we hit that
        # error, create the index manually under a shorter name.
        if migrate_engine.name == 'mysql' and \
                "identifier name 'ix_history_dataset_association_rating_association_history_dataset_association_id' is too long" in str(e).lower():
            add_index('ix_hda_rating_association_hda_id',
                      HistoryDatasetAssociationRatingAssociation_table,
                      'history_dataset_association_id')
        else:
            log.exception(
                "Creating history_dataset_association_rating_association table failed."
            )

    create_table(StoredWorkflowRatingAssociation_table)
    create_table(PageRatingAssociation_table)
    create_table(VisualizationRatingAssociation_table)
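These migration scripts lean on small wrappers (create_table, add_index, drop_index, add_column) from Galaxy's migration utilities rather than raw SQLAlchemy calls. To make the call sites above readable, here is a hypothetical minimal sketch of add_index, assuming SQLAlchemy's Index API and a module-level logger; Galaxy's real helper may differ:

import logging

from sqlalchemy import Index, Table

log = logging.getLogger(__name__)


def add_index(index_name, table, column_name, metadata=None):
    # Accept either a reflected Table or a table name plus MetaData to reflect it from.
    if not isinstance(table, Table):
        table = Table(table, metadata, autoload=True)
    try:
        # metadata.bind is set by upgrade(), so create() knows which engine to use.
        Index(index_name, table.c[column_name]).create()
    except Exception:
        log.exception("Adding index '%s' to table '%s' failed.", index_name, table.name)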
Code Example #2
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    Visualization_table = Table("visualization", metadata, autoload=True)
    Visualization_revision_table = Table("visualization_revision", metadata, autoload=True)

    # Create dbkey columns. A Column object can belong to only one table, so
    # each table gets its own instance.
    dbkey_column = Column("dbkey", TEXT)
    add_column(dbkey_column, Visualization_table, metadata)
    dbkey_revision_column = Column("dbkey", TEXT)
    add_column(dbkey_revision_column, Visualization_revision_table, metadata)
    # Indexes need to be added separately because MySQL cannot index a TEXT/BLOB
    # column without specifying mysql_length
    add_index("ix_visualization_dbkey", Visualization_table, 'dbkey')
    add_index("ix_visualization_revision_dbkey", Visualization_revision_table, 'dbkey')

    all_viz = migrate_engine.execute("SELECT visualization.id as viz_id, visualization_revision.id as viz_rev_id, visualization_revision.config FROM visualization_revision \
                    LEFT JOIN visualization ON visualization.id=visualization_revision.visualization_id")
    for viz in all_viz:
        viz_id = viz['viz_id']
        viz_rev_id = viz['viz_rev_id']
        if viz[Visualization_revision_table.c.config]:
            # config is stored as JSON; extract its dbkey and escape single
            # quotes before interpolating into the UPDATE statements below.
            dbkey = loads(viz[Visualization_revision_table.c.config]).get('dbkey', "").replace("'", "\\'")
            migrate_engine.execute("UPDATE visualization_revision SET dbkey='%s' WHERE id=%s" % (dbkey, viz_rev_id))
            migrate_engine.execute("UPDATE visualization SET dbkey='%s' WHERE id=%s" % (dbkey, viz_id))
Code Example #3
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # TABLES is a module-level list of Table definitions for this migration.
    for table in TABLES:
        create_table(table)

    # Add columns & create indices for supporting sharing to visualization table.
    Visualization_table = Table("visualization", metadata, autoload=True)
    deleted_column = Column("deleted", Boolean, default=False, index=True)
    add_column(deleted_column,
               Visualization_table,
               metadata,
               index_name="ix_visualization_deleted")
    try:
        # Fill column with default value.
        cmd = "UPDATE visualization SET deleted = %s" % engine_false(
            migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Updating column 'deleted' of table 'visualization' failed.")

    importable_column = Column("importable",
                               Boolean,
                               default=False,
                               index=True)
    add_column(importable_column,
               Visualization_table,
               metadata,
               index_name='ix_visualization_importable')
    try:
        # Fill column with default value.
        cmd = "UPDATE visualization SET importable = %s" % engine_false(
            migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Updating column 'importable' of table 'visualization' failed.")

    slug_column = Column("slug", TEXT)
    add_column(slug_column, Visualization_table, metadata)
    # Index needs to be added separately because MySQL cannot index a TEXT/BLOB
    # column without specifying mysql_length
    add_index('ix_visualization_slug', Visualization_table, 'slug')

    published_column = Column("published", Boolean, index=True)
    add_column(published_column,
               Visualization_table,
               metadata,
               index_name='ix_visualization_published')
    try:
        # Fill column with default value.
        cmd = "UPDATE visualization SET published = %s" % engine_false(
            migrate_engine)
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Updating column 'published' of table 'visualization' failed.")
Code Example #4
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # 'indexes' is a module-level sequence of (index_name, table, column) tuples.
    for ix, table, col in indexes:
        add_index(ix, table, col, metadata)
Code Example #5
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    add_index('ix_hda_ta_history_dataset_association_id',
              'history_dataset_association_tag_association',
              'history_dataset_association_id', metadata)
Code Example #6
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    if migrate_engine.name == 'mysql':
        add_index('ix_hdadaa_history_dataset_association_id',
                  'history_dataset_association_display_at_authorization',
                  'history_dataset_association_id', metadata)
Code Example #7
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    StoredWorkflow_table = Table("stored_workflow", metadata, autoload=True)
    c = Column("slug", TEXT)
    add_column(c, StoredWorkflow_table, metadata)
    # Index needs to be added separately because MySQL cannot index a TEXT/BLOB
    # column without specifying mysql_length
    add_index('ix_stored_workflow_slug', StoredWorkflow_table, 'slug')
Code Example #8
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    HistoryDatasetAssociationTagAssociation_table = Table(
        'history_dataset_association_tag_association', metadata, autoload=True)
    # Only add the index if no existing index already covers exactly this column.
    if not any([c.name for c in index.columns] == ['history_dataset_association_id']
               for index in HistoryDatasetAssociationTagAssociation_table.indexes):
        add_index('ix_hda_ta_history_dataset_association_id',
                  HistoryDatasetAssociationTagAssociation_table,
                  'history_dataset_association_id')
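The inline any() check in Code Example #8 generalizes to a small predicate that other migrations could reuse; a sketch (the helper name is hypothetical):

def index_exists_on(table, column_names):
    # True if some existing index covers exactly the given columns, in order.
    return any([c.name for c in index.columns] == list(column_names)
               for index in table.indexes)

With it, the guard above reads: if not index_exists_on(HistoryDatasetAssociationTagAssociation_table, ['history_dataset_association_id']).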
Code Example #9
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    Page_table = Table("page", metadata, autoload=True)
    try:
        # SQLite doesn't support .alter, so we need to drop and recreate
        drop_index("ix_page_slug", Page_table, 'slug')

        add_index("ix_page_slug", Page_table, 'slug', unique=False)
    except Exception:
        # MySQL doesn't have a named index here, but alter should work
        Page_table.c.slug.alter(unique=False)
Code Example #10
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create the job_to_output_library_dataset table
    create_table(JobToOutputLibraryDataset_table)

    # Create the library_folder_id column
    col = Column("library_folder_id", Integer, ForeignKey('library_folder.id', name='job_library_folder_id_fk'), index=True)
    add_column(col, 'job', metadata, index_name='ix_job_library_folder_id')

    # Create the ix_dataset_state index
    add_index('ix_dataset_state', 'dataset', 'state', metadata)
Code Example #11
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    User_table = Table("galaxy_user", metadata, autoload=True)
    # The next add_index() calls are no longer needed after commit
    # 7ee93c0995123b0f357abd649326295dfa06766c, but are harmless.
    add_index('ix_galaxy_user_deleted', User_table, 'deleted')
    add_index('ix_galaxy_user_purged', User_table, 'purged')
    # Set the default data in the galaxy_user table, but only for null values
    cmd = "UPDATE galaxy_user SET deleted = %s WHERE deleted is null" % engine_false(
        migrate_engine)
    try:
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Setting default data for galaxy_user.deleted column failed.")
    cmd = "UPDATE galaxy_user SET purged = %s WHERE purged is null" % engine_false(
        migrate_engine)
    try:
        migrate_engine.execute(cmd)
    except Exception:
        log.exception(
            "Setting default data for galaxy_user.purged column failed.")
    add_index('ix_hda_copied_from_library_dataset_dataset_association_id',
              'history_dataset_association',
              'copied_from_library_dataset_dataset_association_id', metadata)
Code Example #12
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    # Create the job_to_output_library_dataset table
    create_table(JobToOutputLibraryDataset_table)

    # Create the library_folder_id column
    # SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and an index in SQLite
    if migrate_engine.name != 'sqlite':
        col = Column("library_folder_id",
                     Integer,
                     ForeignKey('library_folder.id',
                                name='job_library_folder_id_fk'),
                     index=True)
    else:
        col = Column("library_folder_id", Integer, index=True)
    add_column(col, 'job', metadata, index_name='ix_job_library_folder_id')

    # Create the ix_dataset_state index
    add_index('ix_dataset_state', 'dataset', 'state', metadata)
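add_column in these examples wraps sqlalchemy-migrate's Column.create, which (as Code Example #14 shows directly) accepts an index_name for columns declared with index=True. A hypothetical minimal version matching the call sites above:

import logging

from sqlalchemy import Table

log = logging.getLogger(__name__)


def add_column(column, table, metadata=None, index_name=None):
    # Accept either a reflected Table or a table name plus MetaData to reflect it from.
    if not isinstance(table, Table):
        table = Table(table, metadata, autoload=True)
    try:
        if index_name:
            column.create(table, index_name=index_name)
        else:
            column.create(table)
    except Exception:
        log.exception("Adding column '%s' to table '%s' failed.", column.name, table.name)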
Code Example #13
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    add_index('ix_library_folder_name', 'library_folder', 'name', metadata)
    add_index('ix_library_dataset_dataset_association_name', 'library_dataset_dataset_association', 'name', metadata)
    add_index('ix_library_dataset_name', 'library_dataset', 'name', metadata)
Code Example #14
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()

    # Add 2 new columns to the galaxy_user table
    try:
        User_table = Table("galaxy_user", metadata, autoload=True)
    except NoSuchTableError:
        User_table = None
        log.debug("Failed loading table galaxy_user")
    if User_table is not None:
        col = Column('deleted', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_deleted')
        col = Column('purged', Boolean, index=True, default=False)
        add_column(col, User_table, index_name='ix_galaxy_user_purged')
    # Add 1 new column to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    if HistoryDatasetAssociation_table is not None:
        try:
            col = Column('copied_from_library_dataset_dataset_association_id',
                         Integer,
                         nullable=True)
            col.create(HistoryDatasetAssociation_table)
            assert col is HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id
        except Exception:
            log.exception(
                "Adding column 'copied_from_library_dataset_dataset_association_id' to history_dataset_association table failed."
            )
    # Add 1 new column to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    if MetadataFile_table is not None:
        try:
            col = Column('lda_id', Integer, index=True, nullable=True)
            col.create(MetadataFile_table,
                       index_name='ix_metadata_file_lda_id')
            assert col is MetadataFile_table.c.lda_id
        except Exception:
            log.exception(
                "Adding column 'lda_id' to metadata_file table failed.")
    # Add 1 new column to the stored_workflow table - changeset 2328
    try:
        StoredWorkflow_table = Table(
            "stored_workflow",
            metadata,
            Column("latest_workflow_id",
                   Integer,
                   ForeignKey("workflow.id",
                              use_alter=True,
                              name='stored_workflow_latest_workflow_id_fk'),
                   index=True),
            autoload=True,
            extend_existing=True)
    except NoSuchTableError:
        StoredWorkflow_table = None
        log.debug("Failed loading table stored_workflow")
    if StoredWorkflow_table is not None:
        try:
            col = Column('importable', Boolean, default=False)
            col.create(StoredWorkflow_table)
            assert col is StoredWorkflow_table.c.importable
        except Exception:
            log.exception(
                "Adding column 'importable' to stored_workflow table failed.")
    # Create an index on the Job.state column - changeset 2192
    add_index('ix_job_state', 'job', 'state', metadata)
    # Add all of the new tables above
    metadata.create_all()
    # Add 1 foreign key constraint to the history_dataset_association table
    try:
        HistoryDatasetAssociation_table = Table("history_dataset_association",
                                                metadata,
                                                autoload=True)
    except NoSuchTableError:
        HistoryDatasetAssociation_table = None
        log.debug("Failed loading table history_dataset_association")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
        try:
            cons = ForeignKeyConstraint(
                [
                    HistoryDatasetAssociation_table.c.
                    copied_from_library_dataset_dataset_association_id
                ], [LibraryDatasetDatasetAssociation_table.c.id],
                name=
                'history_dataset_association_copied_from_library_dataset_da_fkey'
            )
            # Create the constraint
            cons.create()
        except Exception:
            log.exception(
                "Adding foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' to table 'history_dataset_association' failed."
            )
    # Add 1 foreign key constraint to the metadata_file table
    try:
        MetadataFile_table = Table("metadata_file", metadata, autoload=True)
    except NoSuchTableError:
        MetadataFile_table = None
        log.debug("Failed loading table metadata_file")
    try:
        LibraryDatasetDatasetAssociation_table = Table(
            "library_dataset_dataset_association", metadata, autoload=True)
    except NoSuchTableError:
        LibraryDatasetDatasetAssociation_table = None
        log.debug("Failed loading table library_dataset_dataset_association")
    if migrate_engine.name != 'sqlite':
        # Sqlite can't alter table add foreign key.
        if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
            try:
                cons = ForeignKeyConstraint(
                    [MetadataFile_table.c.lda_id],
                    [LibraryDatasetDatasetAssociation_table.c.id],
                    name='metadata_file_lda_id_fkey')
                # Create the constraint
                cons.create()
            except Exception:
                log.exception(
                    "Adding foreign key constraint 'metadata_file_lda_id_fkey' to table 'metadata_file' failed."
                )
    # Only seed roles and permissions if at least one user exists
    cmd = "SELECT * FROM galaxy_user;"
    users = migrate_engine.execute(cmd).fetchall()
    if users:
        cmd = "SELECT * FROM role;"
        roles = migrate_engine.execute(cmd).fetchall()
        if not roles:
            # Create private roles for each user - pass 1
            cmd = \
                "INSERT INTO role " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "email AS name," + \
                "email AS description," + \
                "'private' As type," + \
                "%s AS deleted " + \
                "FROM galaxy_user " + \
                "ORDER BY id;"
            cmd = cmd % (nextval(migrate_engine,
                                 'role'), localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 2
            if migrate_engine.name in ['postgres', 'postgresql', 'sqlite']:
                cmd = "UPDATE role SET description = 'Private role for ' || description;"
            elif migrate_engine.name == 'mysql':
                cmd = "UPDATE role SET description = CONCAT( 'Private role for ', description );"
            migrate_engine.execute(cmd)
            # Create private roles for each user - pass 3
            cmd = \
                "INSERT INTO user_role_association " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "role.id AS role_id," + \
                "%s AS create_time," + \
                "%s AS update_time " + \
                "FROM galaxy_user, role " + \
                "WHERE galaxy_user.email = role.name " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % (nextval(migrate_engine, 'user_role_association'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine))
            migrate_engine.execute(cmd)
            # Create default permissions for each user
            cmd = \
                "INSERT INTO default_user_permissions " + \
                "SELECT %s AS id," + \
                "galaxy_user.id AS user_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM galaxy_user " + \
                "JOIN user_role_association ON user_role_association.user_id = galaxy_user.id " + \
                "ORDER BY galaxy_user.id;"
            cmd = cmd % nextval(migrate_engine, 'default_user_permissions')
            migrate_engine.execute(cmd)
            # Create default history permissions for each active history associated with a user
            cmd = \
                "INSERT INTO default_history_permissions " + \
                "SELECT %s AS id," + \
                "history.id AS history_id," + \
                "'manage permissions' AS action," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE history.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(
                migrate_engine,
                'default_history_permissions'), engine_false(migrate_engine))
            migrate_engine.execute(cmd)
            # Create "manage permissions" dataset_permissions for all activate-able datasets
            cmd = \
                "INSERT INTO dataset_permissions " + \
                "SELECT %s AS id," + \
                "%s AS create_time," + \
                "%s AS update_time," + \
                "'manage permissions' AS action," + \
                "history_dataset_association.dataset_id AS dataset_id," + \
                "user_role_association.role_id AS role_id " + \
                "FROM history " + \
                "JOIN history_dataset_association ON history_dataset_association.history_id = history.id " + \
                "JOIN dataset ON history_dataset_association.dataset_id = dataset.id " + \
                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
                "WHERE dataset.purged = %s AND history.user_id IS NOT NULL;"
            cmd = cmd % (nextval(migrate_engine, 'dataset_permissions'),
                         localtimestamp(migrate_engine),
                         localtimestamp(migrate_engine),
                         engine_false(migrate_engine))
            migrate_engine.execute(cmd)
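The raw INSERTs in Code Example #14 depend on nextval and localtimestamp to abstract over how each dialect produces primary-key values and timestamps. Minimal sketches of what such helpers might look like, assuming PostgreSQL sequences follow the <table>_id_seq naming convention:

def nextval(migrate_engine, table, col='id'):
    # SQL expression for the next primary-key value of the given table.
    if migrate_engine.name in ('postgres', 'postgresql'):
        return "nextval('%s_%s_seq')" % (table, col)
    elif migrate_engine.name in ('mysql', 'sqlite'):
        return "null"  # inserting NULL lets AUTO_INCREMENT / rowid assign the value
    raise Exception("Unknown database engine: %s" % migrate_engine.name)


def localtimestamp(migrate_engine):
    # SQL expression for the current local timestamp in the target dialect.
    if migrate_engine.name in ('postgres', 'postgresql', 'mysql'):
        return "LOCALTIMESTAMP"
    elif migrate_engine.name == 'sqlite':
        return "current_date || ' ' || current_time"
    raise Exception("Unknown database engine: %s" % migrate_engine.name)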
Code Example #15
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    add_index('ix_job_job_runner_external_id', 'job', 'job_runner_external_id',
              metadata)