Example #1
def get_tool_dependencies_for_installed_repository(repository_id, status=None, exclude_status=None):
    if status is not None:
        return (
            database_contexts.install_context.query(install_model.ToolDependency)
            .filter(
                and_(
                    install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
                    install_model.ToolDependency.table.c.status == status,
                )
            )
            .all()
        )
    elif exclude_status is not None:
        return (
            database_contexts.install_context.query(install_model.ToolDependency)
            .filter(
                and_(
                    install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
                    install_model.ToolDependency.table.c.status != exclude_status,
                )
            )
            .all()
        )
    else:
        return (
            database_contexts.install_context.query(install_model.ToolDependency)
            .filter(install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id)
            .all()
        )
def purge_libraries(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
    # Purges deleted libraries whose update_time is older than the cutoff_time.
    # The dataset associations of each library are also marked as deleted.
    # The Purge Dataset method will purge each Dataset as necessary
    # library.purged == True simply means that it can no longer be undeleted
    # i.e. all associated LibraryDatasets/folders are marked as deleted
    library_count = 0
    start = time.time()
    if force_retry:
        libraries = app.sa_session.query(app.model.Library).filter(
            and_(app.model.Library.table.c.deleted == True, app.model.Library.table.c.update_time < cutoff_time)
        )
    else:
        libraries = app.sa_session.query(app.model.Library).filter(
            and_(
                app.model.Library.table.c.deleted == True,
                app.model.Library.table.c.purged == False,
                app.model.Library.table.c.update_time < cutoff_time,
            )
        )
    for library in libraries:
        _purge_folder(library.root_folder, app, remove_from_disk, info_only=info_only)
        if not info_only:
            print "Purging library id ", library.id
            library.purged = True
            app.sa_session.add(library)
            app.sa_session.flush()
        library_count += 1
    stop = time.time()
    print "# Purged %d libraries ." % library_count
    print "Elapsed time: ", stop - start
    print "##########################################"
def purge_folders(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
    # Purges deleted folders whose update_time is older than the cutoff_time.
    # The dataset associations of each folder are also marked as deleted.
    # The Purge Dataset method will purge each Dataset as necessary
    # libraryFolder.purged == True simply means that it can no longer be undeleted
    # i.e. all associated LibraryDatasets/folders are marked as deleted
    folder_count = 0
    start = time.time()
    if force_retry:
        folders = app.sa_session.query(app.model.LibraryFolder).filter(
            and_(
                app.model.LibraryFolder.table.c.deleted == True,
                app.model.LibraryFolder.table.c.update_time < cutoff_time,
            )
        )
    else:
        folders = app.sa_session.query(app.model.LibraryFolder).filter(
            and_(
                app.model.LibraryFolder.table.c.deleted == True,
                app.model.LibraryFolder.table.c.purged == False,
                app.model.LibraryFolder.table.c.update_time < cutoff_time,
            )
        )
    for folder in folders:
        _purge_folder(folder, app, remove_from_disk, info_only=info_only)
        folder_count += 1
    stop = time.time()
    print "# Purged %d folders." % folder_count
    print "Elapsed time: ", stop - start
    print "##########################################"
Example #4
def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, tool_shed_url, name, owner, changeset_revision ):
    sa_session = app.model.context.current
    tool_shed = common_util.clean_tool_shed_url( tool_shed_url )
    tool_shed_repository =  sa_session.query( app.model.ToolShedRepository ) \
                                      .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
                                                     app.model.ToolShedRepository.table.c.name == name,
                                                     app.model.ToolShedRepository.table.c.owner == owner,
                                                     app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
                                      .first()
    if tool_shed_repository:
        return tool_shed_repository
    # The tool_shed_repository must have been updated to a newer changeset revision than the one defined in the repository_dependencies.xml file,
    # so call the tool shed to get all appropriate newer changeset revisions.
    text = get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision )
    if text:
        changeset_revisions = listify( text )
        for changeset_revision in changeset_revisions:
            tool_shed_repository = sa_session.query( app.model.ToolShedRepository ) \
                                             .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
                                                            app.model.ToolShedRepository.table.c.name == name,
                                                            app.model.ToolShedRepository.table.c.owner == owner,
                                                            app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
                                             .first()
            if tool_shed_repository:
                return tool_shed_repository
    return None
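The fallback loop above iterates over listify(text); listify is assumed here to behave like a Galaxy utility that turns a comma-separated string of changeset revisions into a list. A rough, illustrative stand-in:
def listify(item):
    # Assumed behaviour: "rev1,rev2" -> ["rev1", "rev2"]; lists pass through unchanged.
    if not item:
        return []
    if isinstance(item, (list, tuple)):
        return list(item)
    return [part.strip() for part in str(item).split(',') if part.strip()]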
Example #5
def purge_datasets(app,
                   cutoff_time,
                   remove_from_disk,
                   info_only=False,
                   force_retry=False):
    # Purges deleted datasets whose update_time is older than cutoff_time.  Files may or may
    # not be removed from disk.
    dataset_count = 0
    disk_space = 0
    start = time.time()
    if force_retry:
        datasets = app.sa_session.query( app.model.Dataset ) \
                                 .filter( and_( app.model.Dataset.table.c.deleted==True,
                                                app.model.Dataset.table.c.purgable==True,
                                                app.model.Dataset.table.c.update_time < cutoff_time ) )
    else:
        datasets = app.sa_session.query( app.model.Dataset ) \
                                 .filter( and_( app.model.Dataset.table.c.deleted==True,
                                                app.model.Dataset.table.c.purgable==True,
                                                app.model.Dataset.table.c.purged==False,
                                                app.model.Dataset.table.c.update_time < cutoff_time ) )
    for dataset in datasets:
        file_size = dataset.file_size
        _purge_dataset(app, dataset, remove_from_disk, info_only=info_only)
        dataset_count += 1
        try:
            disk_space += file_size
        except:
            pass
    stop = time.time()
    print 'Purged %d datasets' % dataset_count
    if remove_from_disk:
        print 'Freed disk space: ', disk_space
    print "Elapsed time: ", stop - start
    print "##########################################"
Example #6
def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision(
        app, tool_shed_url, name, owner, changeset_revision):
    sa_session = app.model.context.current
    tool_shed = common_util.clean_tool_shed_url(tool_shed_url)
    tool_shed_repository =  sa_session.query( app.model.ToolShedRepository ) \
                                      .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
                                                     app.model.ToolShedRepository.table.c.name == name,
                                                     app.model.ToolShedRepository.table.c.owner == owner,
                                                     app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
                                      .first()
    if tool_shed_repository:
        return tool_shed_repository
    # The tool_shed_repository must have been updated to a newer changeset revision than the one defined in the repository_dependencies.xml file,
    # so call the tool shed to get all appropriate newer changeset revisions.
    text = get_updated_changeset_revisions_from_tool_shed(
        app, tool_shed_url, name, owner, changeset_revision)
    if text:
        changeset_revisions = listify(text)
        for changeset_revision in changeset_revisions:
            tool_shed_repository = sa_session.query( app.model.ToolShedRepository ) \
                                             .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
                                                            app.model.ToolShedRepository.table.c.name == name,
                                                            app.model.ToolShedRepository.table.c.owner == owner,
                                                            app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
                                             .first()
            if tool_shed_repository:
                return tool_shed_repository
    return None
Example #7
def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False, force_retry = False ):
    # Purges deleted datasets whose update_time is older than cutoff_time.  Files may or may
    # not be removed from disk.
    dataset_count = 0
    disk_space = 0
    start = time.time()
    if force_retry:
        datasets = app.sa_session.query( app.model.Dataset ) \
                                 .filter( and_( app.model.Dataset.table.c.deleted==True,
                                                app.model.Dataset.table.c.purgable==True,
                                                app.model.Dataset.table.c.update_time < cutoff_time ) )
    else:
        datasets = app.sa_session.query( app.model.Dataset ) \
                                 .filter( and_( app.model.Dataset.table.c.deleted==True,
                                                app.model.Dataset.table.c.purgable==True,
                                                app.model.Dataset.table.c.purged==False,
                                                app.model.Dataset.table.c.update_time < cutoff_time ) )
    for dataset in datasets:
        file_size = dataset.file_size
        _purge_dataset( app, dataset, remove_from_disk, info_only = info_only )
        dataset_count += 1
        try:
            disk_space += file_size
        except:
            pass
    stop = time.time()
    print 'Purged %d datasets' % dataset_count
    if remove_from_disk:
        print 'Freed disk space: ', disk_space
    print "Elapsed time: ", stop - start
    print "##########################################" 
Example #8
    def index( self, trans, deleted=False, owner=None, name=None, **kwd ):
        """
        GET /api/repositories

        :param deleted: True/False, displays repositories that are or are not set to deleted.
        :param owner: the owner's public username.
        :param name: the repository name.

        Displays a collection (list) of repositories.
        """
        # Example URL: http://localhost:9009/api/repositories
        repository_dicts = []
        deleted = util.asbool( deleted )
        clause_list = [ and_( trans.app.model.Repository.table.c.deprecated == False,
                              trans.app.model.Repository.table.c.deleted == deleted ) ]
        if owner is not None:
            clause_list.append( and_( trans.app.model.User.table.c.username == owner, 
                                      trans.app.model.Repository.table.c.user_id == trans.app.model.User.table.c.id ) )
        if name is not None:
            clause_list.append( trans.app.model.Repository.table.c.name == name )
        for repository in trans.sa_session.query( trans.app.model.Repository ) \
                                          .filter( *clause_list ) \
                                          .order_by( trans.app.model.Repository.table.c.name ):
            repository_dict = repository.to_dict( view='collection',
                                                  value_mapper=self.__get_value_mapper( trans ) )
            repository_dict[ 'url' ] = web.url_for( controller='repositories',
                                                    action='show',
                                                    id=trans.security.encode_id( repository.id ) )
            repository_dicts.append( repository_dict )
        return repository_dicts
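The index() method above collects conditions in clause_list and unpacks them into a single .filter() call, which SQLAlchemy treats as an implicit AND. A self-contained sketch of the same pattern against a toy model (table and column names here are illustrative, not Galaxy's):
from sqlalchemy import Boolean, Column, Integer, String, and_, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Repo(Base):
    __tablename__ = 'repo'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    owner = Column(String)
    deleted = Column(Boolean, default=False)

def find_repos(session, owner=None, name=None, deleted=False):
    # Build up the filter conditions dynamically, exactly as index() does above.
    clause_list = [Repo.deleted == deleted]
    if owner is not None:
        clause_list.append(Repo.owner == owner)
    if name is not None:
        clause_list.append(Repo.name == name)
    return session.query(Repo).filter(and_(*clause_list)).order_by(Repo.name).all()

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
print(find_repos(session, owner='devteam'))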
Example #9
    def index( self, trans, deleted=False, owner=None, name=None, **kwd ):
        """
        GET /api/repositories

        :param deleted: True/False, displays repositories that are or are not set to deleted.
        :param owner: the owner's public username.
        :param name: the repository name.

        Displays a collection (list) of repositories.
        """
        # Example URL: http://localhost:9009/api/repositories
        repository_dicts = []
        deleted = util.asbool( deleted )
        clause_list = [ and_( trans.app.model.Repository.table.c.deprecated == False,
                              trans.app.model.Repository.table.c.deleted == deleted ) ]
        if owner is not None:
            clause_list.append( and_( trans.app.model.User.table.c.username == owner,
                                      trans.app.model.Repository.table.c.user_id == trans.app.model.User.table.c.id ) )
        if name is not None:
            clause_list.append( trans.app.model.Repository.table.c.name == name )
        for repository in trans.sa_session.query( trans.app.model.Repository ) \
                                          .filter( *clause_list ) \
                                          .order_by( trans.app.model.Repository.table.c.name ):
            repository_dict = repository.to_dict( view='collection',
                                                  value_mapper=self.__get_value_mapper( trans ) )
            repository_dict[ 'category_ids' ] = \
                    [ trans.security.encode_id( x.category.id ) for x in repository.categories ]
            repository_dicts.append( repository_dict )
        return repository_dicts
Example #10
def purge_folders(app,
                  cutoff_time,
                  remove_from_disk,
                  info_only=False,
                  force_retry=False):
    # Purges deleted folders whose update_time is older than the cutoff_time.
    # The dataset associations of each folder are also marked as deleted.
    # The Purge Dataset method will purge each Dataset as necessary
    # libraryFolder.purged == True simply means that it can no longer be undeleted
    # i.e. all associated LibraryDatasets/folders are marked as deleted
    folder_count = 0
    start = time.time()
    if force_retry:
        folders = app.sa_session.query( app.model.LibraryFolder ) \
                                .filter( and_( app.model.LibraryFolder.table.c.deleted==True,
                                               app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
    else:
        folders = app.sa_session.query( app.model.LibraryFolder ) \
                                .filter( and_( app.model.LibraryFolder.table.c.deleted==True,
                                               app.model.LibraryFolder.table.c.purged==False,
                                               app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
    for folder in folders:
        _purge_folder(folder, app, remove_from_disk, info_only=info_only)
        folder_count += 1
    stop = time.time()
    print '# Purged %d folders.' % folder_count
    print "Elapsed time: ", stop - start
    print "##########################################"
Example #11
def delete_userless_histories(app,
                              cutoff_time,
                              info_only=False,
                              force_retry=False):
    # Deletes userless histories whose update_time value is older than the cutoff_time.
    # The purge history script will handle marking DatasetInstances as deleted.
    # Nothing is removed from disk yet.
    history_count = 0
    start = time.time()
    if force_retry:
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.user_id==None,
                                                 app.model.History.table.c.update_time < cutoff_time ) )
    else:
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.user_id==None,
                                                 app.model.History.table.c.deleted==False,
                                                 app.model.History.table.c.update_time < cutoff_time ) )
    for history in histories:
        if not info_only:
            print "Deleting history id ", history.id
            history.deleted = True
            app.sa_session.add(history)
            app.sa_session.flush()
        history_count += 1
    stop = time.time()
    print "Deleted %d histories" % history_count
    print "Elapsed time: ", stop - start
    print "##########################################"
Example #12
def delete_userless_histories(app, cutoff_time, info_only=False, force_retry=False):
    # Deletes userless histories whose update_time value is older than the cutoff_time.
    # The purge history script will handle marking DatasetInstances as deleted.
    # Nothing is removed from disk yet.
    history_count = 0
    start = time.time()
    if force_retry:
        histories = app.sa_session.query(app.model.History).filter(
            and_(app.model.History.table.c.user_id == None, app.model.History.table.c.update_time < cutoff_time)
        )
    else:
        histories = app.sa_session.query(app.model.History).filter(
            and_(
                app.model.History.table.c.user_id == None,
                app.model.History.table.c.deleted == False,
                app.model.History.table.c.update_time < cutoff_time,
            )
        )
    for history in histories:
        if not info_only:
            print "Deleting history id ", history.id
            history.deleted = True
            app.sa_session.add(history)
            app.sa_session.flush()
        history_count += 1
    stop = time.time()
    print "Deleted %d histories" % history_count
    print "Elapsed time: ", stop - start
    print "##########################################"
 def build_initial_query( self, trans, **kwd ):
     return trans.sa_session.query( model.Repository ) \
                            .filter( and_( model.Repository.table.c.deleted == False,
                                           model.Repository.table.c.deprecated == False ) ) \
                            .join( model.RepositoryMetadata.table ) \
                            .filter( and_( model.RepositoryMetadata.table.c.downloadable == True,
                                           model.RepositoryMetadata.table.c.includes_tools == True,
                                           model.RepositoryMetadata.table.c.tools_functionally_correct == False ) ) \
                            .join( model.User.table )
 def build_initial_query(self, trans, **kwd):
     return trans.sa_session.query( model.Repository ) \
                            .filter( and_( model.Repository.table.c.deleted == False,
                                           model.Repository.table.c.deprecated == False ) ) \
                            .join( model.RepositoryMetadata.table ) \
                            .filter( and_( model.RepositoryMetadata.table.c.downloadable == True,
                                           model.RepositoryMetadata.table.c.includes_tools == True,
                                           model.RepositoryMetadata.table.c.tools_functionally_correct == False ) ) \
                            .join( model.User.table )
Example #15
def purge_histories(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
    # Purges deleted histories whose update_time is older than the cutoff_time.
    # The dataset associations of each history are also marked as deleted.
    # The Purge Dataset method will purge each Dataset as necessary
    # history.purged == True simply means that it can no longer be undeleted
    # i.e. all associated datasets are marked as deleted
    history_count = 0
    start = time.time()
    if force_retry:
        histories = (
            app.sa_session.query(app.model.History)
            .filter(
                and_(app.model.History.table.c.deleted == True, app.model.History.table.c.update_time < cutoff_time)
            )
            .options(eagerload("datasets"))
        )
    else:
        histories = (
            app.sa_session.query(app.model.History)
            .filter(
                and_(
                    app.model.History.table.c.deleted == True,
                    app.model.History.table.c.purged == False,
                    app.model.History.table.c.update_time < cutoff_time,
                )
            )
            .options(eagerload("datasets"))
        )
    for history in histories:
        print ("### Processing history id %d (%s)" % (history.id, history.name)).encode("utf-8")
        for dataset_assoc in history.datasets:
            _purge_dataset_instance(
                dataset_assoc, app, remove_from_disk, info_only=info_only
            )  # mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
        if not info_only:
            # TODO: should the Delete DefaultHistoryPermissions be deleted here?  This was incorrectly
            # done in the _list_delete() method of the history controller, so copied it here.  Not sure
            # if we should ever delete info like this from the db though, so commented out for now...
            # for dhp in history.default_permissions:
            #    dhp.delete()
            print "Purging history id ", history.id
            history.purged = True
            app.sa_session.add(history)
            app.sa_session.flush()
        else:
            print "History id %d will be purged (without 'info_only' mode)" % history.id
        history_count += 1
    stop = time.time()
    print "Purged %d histories." % history_count
    print "Elapsed time: ", stop - start
    print "##########################################"
Example #16
    def list( self, trans, deleted=False ):
        """
        Return a list of libraries from the DB.

        :param  deleted: if True, show only ``deleted`` libraries, if False show only ``non-deleted``
        :type   deleted: boolean (optional)

        :returns: query that will emit all accessible libraries
        :rtype: sqlalchemy query
        """
        is_admin = trans.user_is_admin()
        query = trans.sa_session.query( trans.app.model.Library )

        if is_admin:
            if deleted is None:
                #  Flag is not specified, do not filter on it.
                pass
            elif deleted:
                query = query.filter( trans.app.model.Library.table.c.deleted == True ) 
            else:
                query = query.filter( trans.app.model.Library.table.c.deleted == False )
        else:
            #  Nonadmins can't see deleted libraries
            current_user_role_ids = [ role.id for role in trans.get_current_user_roles() ]
            library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
            restricted_library_ids = [ lp.library_id for lp in ( trans.sa_session.query( trans.model.LibraryPermissions )
                                                                 .filter( trans.model.LibraryPermissions.table.c.action == library_access_action )
                                                                 .distinct() ) ]
            accessible_restricted_library_ids = [ lp.library_id for lp in ( trans.sa_session.query( trans.model.LibraryPermissions )
                                                  .filter( and_( trans.model.LibraryPermissions.table.c.action == library_access_action,
                                                                 trans.model.LibraryPermissions.table.c.role_id.in_( current_user_role_ids ) ) ) ) ]
            query = query.filter( or_( not_( trans.model.Library.table.c.id.in_( restricted_library_ids ) ), trans.model.Library.table.c.id.in_( accessible_restricted_library_ids ) ) )
        return query
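The final filter in list() above makes a library visible when it either has no LIBRARY_ACCESS permission rows at all (public) or has one matching the current user's roles. A compact sketch of just that last step, assuming the two id lists were computed as shown:
from sqlalchemy import not_, or_

def filter_accessible_libraries(query, library_table, restricted_ids, accessible_ids):
    # Public libraries are those never restricted; restricted ones must appear
    # in the user's accessible list.
    return query.filter(
        or_(
            not_(library_table.c.id.in_(restricted_ids)),
            library_table.c.id.in_(accessible_ids),
        )
    )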
Example #17
def get_tool_dependency_by_name_type_repository(app, repository, name, type):
    sa_session = app.model.context.current
    return sa_session.query( app.model.ToolDependency ) \
                     .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
                                    app.model.ToolDependency.table.c.name == name,
                                    app.model.ToolDependency.table.c.type == type ) ) \
                     .first()
 def build_initial_query(self, trans, **kwd):
     user_id = trans.security.decode_id(kwd['id'])
     return trans.sa_session.query( model.RepositoryReview ) \
                            .filter( and_( model.RepositoryReview.table.c.deleted == False, \
                                           model.RepositoryReview.table.c.user_id == user_id ) ) \
                            .join( ( model.Repository.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
                            .filter( model.Repository.table.c.deprecated == False )
Example #19
def get_installed_repository_by_name_owner_changeset_revision(
        name, owner, changeset_revision):
    return database_contexts.install_context.query( install_model.ToolShedRepository ) \
                                            .filter( and_( install_model.ToolShedRepository.table.c.name == name,
                                                           install_model.ToolShedRepository.table.c.owner == owner,
                                                           install_model.ToolShedRepository.table.c.installed_changeset_revision == changeset_revision ) ) \
                                            .one()
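Unlike .first() used elsewhere in these examples, .one() raises when zero or multiple rows match. A hedged wrapper sketch around the lookup above; the exception classes are SQLAlchemy's, the wrapper name is illustrative:
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound

def find_installed_repository(name, owner, changeset_revision):
    try:
        return get_installed_repository_by_name_owner_changeset_revision(
            name, owner, changeset_revision)
    except NoResultFound:
        # No such repository is installed.
        return None
    except MultipleResultsFound:
        # Duplicate install records; surface this rather than guessing.
        raise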
Example #20
def get_component_review_by_repository_review_id_component_id(
        trans, repository_review_id, component_id):
    """Get a component_review from the database via repository_review_id and component_id."""
    return trans.sa_session.query( trans.model.ComponentReview ) \
                           .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ),
                                          trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \
                           .first()
Example #21
def get_reviews_by_repository_id_changeset_revision(trans, repository_id,
                                                    changeset_revision):
    """Get all repository_reviews from the database via repository id and changeset_revision."""
    return trans.sa_session.query( trans.model.RepositoryReview ) \
                           .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
                                          trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
                           .all()
Example #22
def should_set_do_not_test_flag(app, repository, changeset_revision):
    '''
    Returns True if:
    a) There are multiple downloadable revisions, and the provided changeset revision is not the most recent downloadable revision. In this case,
       the revision will never be updated with correct data, and re-testing it would be redundant.
    b) There are one or more downloadable revisions, and the provided changeset revision is the most recent downloadable revision. In this case, if 
       the repository is updated with test data or functional tests, the downloadable changeset revision that was tested will either be replaced
       with the new changeset revision, or a new downloadable changeset revision will be created, either of which will be automatically checked and
       flagged as appropriate. In the install and test script, this behavior is slightly different, since we do want to always run functional tests
       on the most recent downloadable changeset revision.
    '''
    metadata_records = app.sa_session.query( app.model.RepositoryMetadata ) \
                                     .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
                                                    app.model.RepositoryMetadata.table.c.repository_id == repository.id ) ) \
                                     .all()
    downloadable_revisions = [
        metadata_record.changeset_revision
        for metadata_record in metadata_records
    ]
    is_latest_revision = is_most_recent_downloadable_revision(
        app, repository, changeset_revision, downloadable_revisions)
    if len(downloadable_revisions) == 1:
        return True
    elif len(downloadable_revisions) > 1 and is_latest_revision:
        return True
    elif len(downloadable_revisions) > 1 and not is_latest_revision:
        return True
    else:
        return False
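Note that every branch above with a non-empty downloadable_revisions list returns True (the is_latest_revision value never changes the outcome), so the chain reduces to a single emptiness check, sketched below:
def _should_set_do_not_test_flag_equivalent(downloadable_revisions):
    # Behaviourally equivalent to the if/elif chain in should_set_do_not_test_flag().
    return len(downloadable_revisions) > 0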
Example #23
 def build_initial_query( self, trans, **kwd ):
     user_id = trans.security.decode_id( kwd[ 'id' ] )
     return trans.sa_session.query( model.RepositoryReview ) \
                            .filter( and_( model.RepositoryReview.table.c.deleted == False, \
                                           model.RepositoryReview.table.c.user_id == user_id ) ) \
                            .join( ( model.Repository.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
                            .filter( model.Repository.table.c.deprecated == False )
def should_set_do_not_test_flag( app, repository, changeset_revision ):
    '''
    Returns True if:
    a) There are multiple downloadable revisions, and the provided changeset revision is not the most recent downloadable revision. In this case,
       the revision will never be updated with correct data, and re-testing it would be redundant.
    b) There are one or more downloadable revisions, and the provided changeset revision is the most recent downloadable revision. In this case, if 
       the repository is updated with test data or functional tests, the downloadable changeset revision that was tested will either be replaced
       with the new changeset revision, or a new downloadable changeset revision will be created, either of which will be automatically checked and
       flagged as appropriate. In the install and test script, this behavior is slightly different, since we do want to always run functional tests
       on the most recent downloadable changeset revision.
    '''
    metadata_records = app.sa_session.query( app.model.RepositoryMetadata ) \
                                     .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
                                                    app.model.RepositoryMetadata.table.c.repository_id == repository.id ) ) \
                                     .all()
    downloadable_revisions = [ metadata_record.changeset_revision for metadata_record in metadata_records ]
    is_latest_revision = is_most_recent_downloadable_revision( app, repository, changeset_revision, downloadable_revisions )
    if len( downloadable_revisions ) == 1:
        return True
    elif len( downloadable_revisions ) > 1 and is_latest_revision:
        return True
    elif len( downloadable_revisions ) > 1 and not is_latest_revision:
        return True
    else:
        return False
def get_tool_dependency_by_name_version_type( app, name, version, type ):
    context = app.install_model.context
    return context.query( app.install_model.ToolDependency ) \
                     .filter( and_( app.install_model.ToolDependency.table.c.name == name,
                                    app.install_model.ToolDependency.table.c.version == version,
                                    app.install_model.ToolDependency.table.c.type == type ) ) \
                     .first()
Example #26
    def deleted_histories( self, trans, **kwd ):
        """
        The number of histories that were deleted more than the specified number of days ago, but have not yet been purged.
        Also included is the number of datasets associated with the histories.
        """
        params = util.Params( kwd )
        message = ''
        if params.deleted_histories_days:
            deleted_histories_days = int( params.deleted_histories_days )
            cutoff_time = datetime.utcnow() - timedelta( days=deleted_histories_days )
            history_count = 0
            dataset_count = 0
            disk_space = 0
            histories = trans.sa_session.query( model.History ) \
                                        .filter( and_( model.History.table.c.deleted == True,
                                                       model.History.table.c.purged == False,
                                                       model.History.table.c.update_time < cutoff_time ) ) \
                                        .options( eagerload( 'datasets' ) )

            for history in histories:
                for hda in history.datasets:
                    if not hda.dataset.purged:
                        dataset_count += 1
                        try:
                            disk_space += hda.dataset.file_size
                        except:
                            pass
                history_count += 1
            message = "%d histories ( including a total of %d datasets ) were deleted more than %d days ago, but have not yet been purged.  Disk space: " %( history_count, dataset_count, deleted_histories_days ) + str( disk_space )
        else:
            # No day count submitted; set a placeholder so the return below does not
            # reference deleted_histories_days before assignment.
            deleted_histories_days = ''
            message = "Enter the number of days."
        return str( deleted_histories_days ), message
Example #27
def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ):
    """Get a repository_review from the database via repository id, changeset_revision and user_id."""
    return trans.sa_session.query( trans.model.RepositoryReview ) \
                           .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
                                          trans.model.RepositoryReview.changeset_revision == changeset_revision,
                                          trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
                           .first()
Example #28
def get_component_review_by_repository_review_id_component_id( app, repository_review_id, component_id ):
    """Get a component_review from the database via repository_review_id and component_id."""
    sa_session = app.model.context.current
    return sa_session.query( app.model.ComponentReview ) \
                     .filter( and_( app.model.ComponentReview.table.c.repository_review_id == app.security.decode_id( repository_review_id ),
                                    app.model.ComponentReview.table.c.component_id == app.security.decode_id( component_id ) ) ) \
                     .first()
Example #29
    def deleted_histories( self, trans, **kwd ):
        """
        The number of histories that were deleted more than the specified number of days ago, but have not yet been purged.
        Also included is the number of datasets associated with the histories.
        """
        params = util.Params( kwd )
        message = ''
        if params.deleted_histories_days:
            deleted_histories_days = int( params.deleted_histories_days )
            cutoff_time = datetime.utcnow() - timedelta( days=deleted_histories_days )
            history_count = 0
            dataset_count = 0
            disk_space = 0
            histories = trans.sa_session.query( model.History ) \
                                        .filter( and_( model.History.table.c.deleted == True,
                                                       model.History.table.c.purged == False,
                                                       model.History.table.c.update_time < cutoff_time ) ) \
                                        .options( eagerload( 'datasets' ) )

            for history in histories:
                for hda in history.datasets:
                    if not hda.dataset.purged:
                        dataset_count += 1
                        try:
                            disk_space += hda.dataset.file_size
                        except:
                            pass
                history_count += 1
            message = "%d histories ( including a total of %d datasets ) were deleted more than %d days ago, but have not yet been purged, " \
                    "disk space: %s." % ( history_count, dataset_count, deleted_histories_days, nice_size( disk_space, True ) )
        else:
            # No day count submitted; set a placeholder so the return below does not
            # reference deleted_histories_days before assignment.
            deleted_histories_days = ''
            message = "Enter the number of days."
        return str( deleted_histories_days ), message
Example #30
def get_tool_dependency_by_name_type_repository(app, repository, name, type):
    context = app.install_model.context
    return context.query( app.install_model.ToolDependency ) \
                     .filter( and_( app.install_model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
                                    app.install_model.ToolDependency.table.c.name == name,
                                    app.install_model.ToolDependency.table.c.type == type ) ) \
                     .first()
Example #31
def get_tool_dependency_by_name_version_type(app, name, version, type):
    context = app.install_model.context
    return context.query( app.install_model.ToolDependency ) \
                     .filter( and_( app.install_model.ToolDependency.table.c.name == name,
                                    app.install_model.ToolDependency.table.c.version == version,
                                    app.install_model.ToolDependency.table.c.type == type ) ) \
                     .first()
Example #32
def purge_histories( app, cutoff_time, remove_from_disk, info_only = False ):
    # Purges deleted histories whose update_time is older than the cutoff_time.
    # The dataset associations of each history are also marked as deleted.
    # The Purge Dataset method will purge each Dataset as necessary
    # history.purged == True simply means that it can no longer be undeleted
    # i.e. all associated datasets are marked as deleted
    history_count = 0
    print '# The following datasets and associated deleted histories have been purged'
    start = time.clock()
    histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
                                app.model.History.table.c.purged==False,
                                app.model.History.table.c.update_time < cutoff_time ) ) \
                 .options( eagerload( 'datasets' ) ).all()
    for history in histories:
        for dataset_assoc in history.datasets:
            _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
        if not info_only:
            # TODO: should the Delete DefaultHistoryPermissions be deleted here?  This was incorrectly
            # done in the _list_delete() method of the history controller, so copied it here.  Not sure 
            # if we should ever delete info like this from the db though, so commented out for now...
            #for dhp in history.default_permissions:
            #    dhp.delete()
            history.purged = True
        print "%d" % history.id
        history_count += 1
    app.model.flush()
    stop = time.clock()
    print '# Purged %d histories.' % ( history_count ), '\n'
    print "Elapsed time: ", stop - start, "\n"
Example #33
def get_reviews_by_repository_id_changeset_revision( app, repository_id, changeset_revision ):
    """Get all repository_reviews from the database via repository id and changeset_revision."""
    sa_session = app.model.context.current
    return sa_session.query( app.model.RepositoryReview ) \
                     .filter( and_( app.model.RepositoryReview.repository_id == app.security.decode_id( repository_id ),
                                    app.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
                     .all()
Example #34
def get_installed_repository_by_name_owner_changeset_revision(
        name, owner, changeset_revision):
    return sa_session.query( model.ToolShedRepository ) \
                     .filter( and_( model.ToolShedRepository.table.c.name == name,
                                    model.ToolShedRepository.table.c.owner == owner,
                                    model.ToolShedRepository.table.c.installed_changeset_revision == changeset_revision ) ) \
                     .one()
def get_tool_dependency_by_name_type_repository( app, repository, name, type ):
    context = app.install_model.context
    return context.query( app.install_model.ToolDependency ) \
                     .filter( and_( app.install_model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
                                    app.install_model.ToolDependency.table.c.name == name,
                                    app.install_model.ToolDependency.table.c.type == type ) ) \
                     .first()
Example #36
 def apply_query_filter(self, trans, query, **kwd):
     current_user_role_ids = [role.id for role in trans.get_current_user_roles()]
     library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
     restricted_library_ids = [
         lp.library_id
         for lp in trans.sa_session.query(trans.model.LibraryPermissions)
         .filter(trans.model.LibraryPermissions.table.c.action == library_access_action)
         .distinct()
     ]
     accessible_restricted_library_ids = [
         lp.library_id
         for lp in trans.sa_session.query(trans.model.LibraryPermissions).filter(
             and_(
                 trans.model.LibraryPermissions.table.c.action == library_access_action,
                 trans.model.LibraryPermissions.table.c.role_id.in_(current_user_role_ids),
             )
         )
     ]
     if not trans.user:
         # Filter to get only public libraries, a library whose id
         # is not in restricted_library_ids is a public library
         return query.filter(not_(trans.model.Library.table.c.id.in_(restricted_library_ids)))
     else:
         # Filter to get libraries accessible by the current user, get both
         # public libraries and restricted libraries accessible by the current user.
         return query.filter(
             or_(
                 not_(trans.model.Library.table.c.id.in_(restricted_library_ids)),
                 trans.model.Library.table.c.id.in_(accessible_restricted_library_ids),
             )
         )
Example #37
def purge_histories(app,
                    cutoff_time,
                    remove_from_disk,
                    info_only=False,
                    force_retry=False):
    # Purges deleted histories whose update_time is older than the cutoff_time.
    # The dataset associations of each history are also marked as deleted.
    # The Purge Dataset method will purge each Dataset as necessary
    # history.purged == True simply means that it can no longer be undeleted
    # i.e. all associated datasets are marked as deleted
    history_count = 0
    start = time.time()
    if force_retry:
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.deleted==True,
                                                 app.model.History.table.c.update_time < cutoff_time ) ) \
                                  .options( eagerload( 'datasets' ) )
    else:
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.deleted==True,
                                                 app.model.History.table.c.purged==False,
                                                 app.model.History.table.c.update_time < cutoff_time ) ) \
                                  .options( eagerload( 'datasets' ) )
    for history in histories:
        print("### Processing history id %d (%s)" %
              (history.id, history.name)).encode('utf-8')
        for dataset_assoc in history.datasets:
            _purge_dataset_instance(
                dataset_assoc, app, remove_from_disk, info_only=info_only
            )  #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
        if not info_only:
            # TODO: should the Delete DefaultHistoryPermissions be deleted here?  This was incorrectly
            # done in the _list_delete() method of the history controller, so copied it here.  Not sure
            # if we should ever delete info like this from the db though, so commented out for now...
            #for dhp in history.default_permissions:
            #    dhp.delete()
            print "Purging history id ", history.id
            history.purged = True
            app.sa_session.add(history)
            app.sa_session.flush()
        else:
            print "History id %d will be purged (without 'info_only' mode)" % history.id
        history_count += 1
    stop = time.time()
    print 'Purged %d histories.' % history_count
    print "Elapsed time: ", stop - start
    print "##########################################"
Example #38
    def index( self, trans, **kwd ):
        """
        index( self, trans, **kwd )
        * GET /api/libraries:
            Returns a list of summary data for all libraries.

        :param  deleted: if True, show only ``deleted`` libraries, if False show only ``non-deleted``
        :type   deleted: boolean (optional)

        :returns:   list of dictionaries containing library information
        :rtype:     list

        .. seealso:: :attr:`galaxy.model.Library.dict_collection_visible_keys`

        """
        is_admin = trans.user_is_admin()
        query = trans.sa_session.query( trans.app.model.Library )
        deleted = kwd.get( 'deleted', 'missing' )
        try:
            if not is_admin:
                # non-admins can't see deleted libraries
                deleted = False
            else:
                deleted = util.asbool( deleted )
            if deleted:
                query = query.filter( trans.app.model.Library.table.c.deleted == True )
            else:
                query = query.filter( trans.app.model.Library.table.c.deleted == False )
        except ValueError:
            # given value wasn't true/false but the user is admin so we don't filter on this parameter at all
            pass

        if not is_admin:
            # non-admins can see only allowed and public libraries
            current_user_role_ids = [ role.id for role in trans.get_current_user_roles() ]
            library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
            restricted_library_ids = [ lp.library_id for lp in ( trans.sa_session.query( trans.model.LibraryPermissions )
                                                                 .filter( trans.model.LibraryPermissions.table.c.action == library_access_action )
                                                                 .distinct() ) ]
            accessible_restricted_library_ids = [ lp.library_id for lp in ( trans.sa_session.query( trans.model.LibraryPermissions )
                                                  .filter( and_( trans.model.LibraryPermissions.table.c.action == library_access_action,
                                                                 trans.model.LibraryPermissions.table.c.role_id.in_( current_user_role_ids ) ) ) ) ]
            query = query.filter( or_( not_( trans.model.Library.table.c.id.in_( restricted_library_ids ) ), trans.model.Library.table.c.id.in_( accessible_restricted_library_ids ) ) )
        libraries = []
        for library in query:
            item = library.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'root_folder_id': trans.security.encode_id } )
            if trans.app.security_agent.library_is_public( library, contents=False ):
                item[ 'public' ] = True
            current_user_roles = trans.get_current_user_roles()
            if not trans.user_is_admin():
                item['can_user_add'] = trans.app.security_agent.can_add_library_item( current_user_roles, library )
                item['can_user_modify'] = trans.app.security_agent.can_modify_library_item( current_user_roles, library )
                item['can_user_manage'] = trans.app.security_agent.can_manage_library_item( current_user_roles, library )
            else:
                item['can_user_add'] = True
                item['can_user_modify'] = True
                item['can_user_manage'] = True
            libraries.append( item )
        return libraries
Example #39
 def filter(self, trans, user, query, column_filter):
     if column_filter == 'All':
         return query
     return query.filter(
         and_(
             model.StoredWorkflow.table.c.user_id ==
             model.User.table.c.id,
             model.User.table.c.email == column_filter))
 def build_initial_query( self, trans, **kwd ):
     return trans.sa_session.query( model.Repository ) \
                            .filter( and_( model.Repository.table.c.deleted == False,
                                           model.Repository.table.c.deprecated == False ) ) \
                            .join( ( model.RepositoryReview.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
                            .join( ( model.User.table, model.User.table.c.id == model.Repository.table.c.user_id ) ) \
                            .outerjoin( ( model.ComponentReview.table, model.ComponentReview.table.c.repository_review_id == model.RepositoryReview.table.c.id ) ) \
                            .outerjoin( ( model.Component.table, model.Component.table.c.id == model.ComponentReview.table.c.component_id ) )
Example #41
def get_tool_dependencies_for_installed_repository(repository_id,
                                                   status=None,
                                                   exclude_status=None):
    if status is not None:
        return database_contexts.install_context.query( install_model.ToolDependency ) \
                                                .filter( and_( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
                                                               install_model.ToolDependency.table.c.status == status ) ) \
                                                .all()
    elif exclude_status is not None:
        return database_contexts.install_context.query( install_model.ToolDependency ) \
                                                .filter( and_( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
                                                               install_model.ToolDependency.table.c.status != exclude_status ) ) \
                                                .all()
    else:
        return database_contexts.install_context.query( install_model.ToolDependency ) \
                                                .filter( install_model.ToolDependency.table.c.tool_shed_repository_id == repository_id ) \
                                                .all()
Example #42
def get_review_by_repository_id_changeset_revision_user_id(
        trans, repository_id, changeset_revision, user_id):
    """Get a repository_review from the database via repository id, changeset_revision and user_id."""
    return trans.sa_session.query( trans.model.RepositoryReview ) \
                           .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
                                          trans.model.RepositoryReview.changeset_revision == changeset_revision,
                                          trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
                           .first()
 def build_initial_query(self, trans, **kwd):
     return trans.sa_session.query( model.Repository ) \
                            .filter( and_( model.Repository.table.c.deleted == False,
                                           model.Repository.table.c.deprecated == False ) ) \
                            .join( ( model.RepositoryReview.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
                            .join( ( model.User.table, model.User.table.c.id == model.Repository.table.c.user_id ) ) \
                            .outerjoin( ( model.ComponentReview.table, model.ComponentReview.table.c.repository_review_id == model.RepositoryReview.table.c.id ) ) \
                            .outerjoin( ( model.Component.table, model.Component.table.c.id == model.ComponentReview.table.c.component_id ) )
Example #44
 def dataset_info( self, trans, **kwd ):
     params = util.Params( kwd )
     message = ''
     dataset = trans.sa_session.query( model.Dataset ).get( trans.security.decode_id( kwd.get( 'id', '' ) ) )
     # Get all associated hdas and lddas that use the same disk file.
     associated_hdas = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
                                       .filter( and_( trans.model.HistoryDatasetAssociation.deleted == False,
                                                      trans.model.HistoryDatasetAssociation.dataset_id == dataset.id ) ) \
                                       .all()
     associated_lddas = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ) \
                                        .filter( and_( trans.model.LibraryDatasetDatasetAssociation.deleted == False,
                                                       trans.model.LibraryDatasetDatasetAssociation.dataset_id == dataset.id ) ) \
                                        .all()
     return trans.fill_template( '/webapps/reports/dataset_info.mako',
                                 dataset=dataset,
                                 associated_hdas=associated_hdas,
                                 associated_lddas=associated_lddas,
                                 message=message )
Example #45
 def dataset_info( self, trans, **kwd ):
     params = util.Params( kwd )
     message = ''
     dataset = trans.sa_session.query( model.Dataset ).get( trans.security.decode_id( kwd.get( 'id', '' ) ) )
     # Get all associated hdas and lddas that use the same disk file.
     associated_hdas = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
                                       .filter( and_( trans.model.HistoryDatasetAssociation.deleted == False,
                                                      trans.model.HistoryDatasetAssociation.dataset_id == dataset.id ) ) \
                                       .all()
     associated_lddas = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ) \
                                        .filter( and_( trans.model.LibraryDatasetDatasetAssociation.deleted == False,
                                                       trans.model.LibraryDatasetDatasetAssociation.dataset_id == dataset.id ) ) \
                                        .all()
     return trans.fill_template( '/webapps/reports/dataset_info.mako',
                                 dataset=dataset,
                                 associated_hdas=associated_hdas,
                                 associated_lddas=associated_lddas,
                                 message=message )
 def index( self, trans, **kwd ):
     """
     GET /api/repository_revisions
     Displays a collection (list) of repository revisions.
     """
     # Example URL: http://localhost:9009/api/repository_revisions
     repository_metadata_dicts = []
     # Build up an anded clause list of filters.
     clause_list = []
     # Filter by downloadable if received.
     downloadable = kwd.get( 'downloadable', None )
     if downloadable is not None:
         clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.asbool( downloadable ) )
     # Filter by malicious if received.
     malicious = kwd.get( 'malicious', None )
     if malicious is not None:
         clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.asbool( malicious ) )
     # Filter by tools_functionally_correct if received.
     tools_functionally_correct = kwd.get( 'tools_functionally_correct', None )
     if tools_functionally_correct is not None:
         clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.asbool( tools_functionally_correct ) )
     # Filter by missing_test_components if received.
     missing_test_components = kwd.get( 'missing_test_components', None )
     if missing_test_components is not None:
         clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.asbool( missing_test_components ) )
     # Filter by do_not_test if received.
     do_not_test = kwd.get( 'do_not_test', None )
     if do_not_test is not None:
         clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.asbool( do_not_test ) )
     # Filter by includes_tools if received.
     includes_tools = kwd.get( 'includes_tools', None )
     if includes_tools is not None:
         clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.asbool( includes_tools ) )
     # Filter by test_install_error if received.
     test_install_error = kwd.get( 'test_install_error', None )
     if test_install_error is not None:
         clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.asbool( test_install_error ) )
     # Filter by skip_tool_test if received.
     skip_tool_test = kwd.get( 'skip_tool_test', None )
     if skip_tool_test is not None:
         skip_tool_test = util.asbool( skip_tool_test )
         skipped_metadata_ids_subquery = select( [ trans.app.model.SkipToolTest.table.c.repository_metadata_id ] )
         if skip_tool_test:
             clause_list.append( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) )
         else:
             clause_list.append( not_( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) )
     for repository_metadata in trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
                                                .filter( and_( *clause_list ) ) \
                                                .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ):
         repository_metadata_dict = repository_metadata.to_dict( view='collection',
                                                                 value_mapper=self.__get_value_mapper( trans ) )
         repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
                                                          action='show',
                                                          id=trans.security.encode_id( repository_metadata.id ) )
         repository_metadata_dicts.append( repository_metadata_dict )
     return repository_metadata_dicts
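
The index() method above accumulates optional filter clauses in a list and applies them all at once with and_( *clause_list ). A minimal, self-contained sketch of that pattern follows; it assumes SQLAlchemy 1.4+ with an in-memory SQLite database and a toy RepositoryMetadata model, both of which are illustrative stand-ins rather than Galaxy's real classes:

# Sketch of the "anded clause list" filtering pattern, under the assumptions above.
from sqlalchemy import Boolean, Column, Integer, and_, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class RepositoryMetadata( Base ):
    __tablename__ = 'repository_metadata'
    id = Column( Integer, primary_key=True )
    downloadable = Column( Boolean, default=False )
    malicious = Column( Boolean, default=False )

engine = create_engine( 'sqlite:///:memory:' )
Base.metadata.create_all( engine )
sa_session = sessionmaker( bind=engine )()
sa_session.add_all( [ RepositoryMetadata( downloadable=True ),
                      RepositoryMetadata( downloadable=True, malicious=True ) ] )
sa_session.commit()

# Only append a clause when the caller actually supplied that filter.
requested_filters = dict( downloadable=True, malicious=False )
clause_list = []
for name, value in requested_filters.items():
    if value is not None:
        clause_list.append( getattr( RepositoryMetadata, name ) == value )

query = sa_session.query( RepositoryMetadata )
if clause_list:
    # and_() collapses however many clauses accumulated into a single conjunction.
    query = query.filter( and_( *clause_list ) )
print( query.count() )  # 1: downloadable and not malicious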
Example #47
 def disk_usage( self, trans, **kwd ):
     file_path = trans.app.config.file_path
     disk_usage = self.get_disk_usage( file_path )
     min_file_size = 2**32  # 4 GB
     file_size_str = nice_size( min_file_size )
     datasets = trans.sa_session.query( model.Dataset ) \
                                .filter( and_( model.Dataset.table.c.purged == False,
                                               model.Dataset.table.c.file_size > min_file_size ) ) \
                                .order_by( desc( model.Dataset.table.c.file_size ) )
     return file_path, disk_usage, datasets, file_size_str
Example #48
 def get_tool_version_association(self, parent_tool_version, tool_version):
     """
     Return a ToolVersionAssociation if one exists that associates the two received
     tool_versions. This function is called only from Galaxy.
     """
     context = self.app.install_model.context
     return context.query( self.app.install_model.ToolVersionAssociation ) \
                   .filter( and_( self.app.install_model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id,
                                  self.app.install_model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \
                   .first()
Example #49
 def filter( self, trans, user, query, column_filter ):
     if column_filter == 'All':
         return query
     # We are either filtering on a date like YYYY-MM-DD or on a month like YYYY-MM,
     # so we need to figure out which type of date we have
     if column_filter.count( '-' ) == 2:
         # We are filtering on a date like YYYY-MM-DD
         year, month, day = map( int, column_filter.split( "-" ) )
         start_date = date( year, month, day )
         end_date = start_date + timedelta( days=1 )
         return query.filter( and_( self.model_class.table.c.create_time >= start_date,
                                    self.model_class.table.c.create_time < end_date ) )
     if column_filter.count( '-' ) == 1:
         # We are filtering on a month like YYYY-MM
         year, month = map( int, column_filter.split( "-" ) )
         start_date = date( year, month, 1 )
         end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
         return query.filter( and_( self.model_class.table.c.create_time >= start_date,
                                    self.model_class.table.c.create_time < end_date ) )
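
The filter() method distinguishes a full date (YYYY-MM-DD) from a month (YYYY-MM) by counting hyphens and turns either into a half-open [start, end) window for the create_time comparison. A standalone sketch of just that date arithmetic, using only the standard library; the date_window helper is hypothetical and added here purely for illustration:

import calendar
from datetime import date, timedelta

def date_window( column_filter ):
    parts = list( map( int, column_filter.split( '-' ) ) )
    if len( parts ) == 3:
        # YYYY-MM-DD: a one-day window.
        start = date( *parts )
        end = start + timedelta( days=1 )
    elif len( parts ) == 2:
        # YYYY-MM: a window covering the whole month.
        year, month = parts
        start = date( year, month, 1 )
        end = start + timedelta( days=calendar.monthrange( year, month )[ 1 ] )
    else:
        raise ValueError( 'expected YYYY-MM-DD or YYYY-MM' )
    return start, end

print( date_window( '2014-02' ) )     # 2014-02-01 up to (but excluding) 2014-03-01
print( date_window( '2014-02-14' ) )  # 2014-02-14 up to (but excluding) 2014-02-15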
Example #50
 def disk_usage( self, trans, **kwd ):
     file_path = trans.app.config.file_path
     disk_usage = self.get_disk_usage( file_path )
     min_file_size = 2**32  # 4 GB
     file_size_str = nice_size( min_file_size )
     datasets = trans.sa_session.query( model.Dataset ) \
                                .filter( and_( model.Dataset.table.c.purged == False,
                                               model.Dataset.table.c.file_size > min_file_size ) ) \
                                .order_by( desc( model.Dataset.table.c.file_size ) )
     return file_path, disk_usage, datasets, file_size_str
 def load_proprietary_datatypes( self ):
     for tool_shed_repository in self.context.query( self.install_model.ToolShedRepository ) \
                                                .filter( and_( self.install_model.ToolShedRepository.table.c.includes_datatypes==True,
                                                               self.install_model.ToolShedRepository.table.c.deleted==False ) ) \
                                                .order_by( self.install_model.ToolShedRepository.table.c.id ):
         relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
         if relative_install_dir:
             installed_repository_dict = datatype_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
             if installed_repository_dict:
                 self.installed_repository_dicts.append( installed_repository_dict )
Example #52
 def load_proprietary_datatypes( self ):
     for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \
                                                .filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True,
                                                               self.model.ToolShedRepository.table.c.deleted==False ) ) \
                                                .order_by( self.model.ToolShedRepository.table.c.id ):
         relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
         if relative_install_dir:
             installed_repository_dict = tool_shed.util.datatype_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
             if installed_repository_dict:
                 self.installed_repository_dicts.append( installed_repository_dict )
 def get_tool_version_association( self, parent_tool_version, tool_version ):
     """
     Return a ToolVersionAssociation if one exists that associates the two received
     tool_versions. This function is called only from Galaxy.
     """
     context = self.app.install_model.context
     return context.query( self.app.install_model.ToolVersionAssociation ) \
                   .filter( and_( self.app.install_model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id,
                                  self.app.install_model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \
                   .first()
def deprecate_repositories( app, cutoff_time, days=14, info_only=False, verbose=False ):
    # This method will get a list of repositories that were created before cutoff_time, but have never
    # had any metadata records associated with them. Then it will iterate through that list and deprecate the
    # repositories, sending an email to each repository owner.
    dataset_count = 0
    disk_space = 0
    start = time.time()
    repository_ids_to_not_check = []
    # Get a unique list of repository ids from the repository_metadata table. Any repository whose id appears in this
    # table already has metadata, so it is not empty and will not be checked.
    metadata_records = sa.select( [ distinct( app.model.RepositoryMetadata.table.c.repository_id ) ],
                                  from_obj=app.model.RepositoryMetadata.table ) \
                         .execute()
    for metadata_record in metadata_records:
        repository_ids_to_not_check.append( metadata_record.repository_id )
    # Get the repositories that are a) not present in the above list, and b) older than the specified time.
    # This will yield a list of repositories that have been created more than n days ago, but never populated.
    repository_query = sa.select( [ app.model.Repository.table.c.id ],
                                  whereclause = and_( app.model.Repository.table.c.create_time < cutoff_time,
                                                      app.model.Repository.table.c.deprecated == False,
                                                      app.model.Repository.table.c.deleted == False,
                                                      not_( app.model.Repository.table.c.id.in_( repository_ids_to_not_check ) ) ),
                                  from_obj = [ app.model.Repository.table ] )
    query_result = repository_query.execute()
    repositories = []
    repositories_by_owner = {}
    repository_ids = [ row.id for row in query_result ]
    # Iterate through the list of repository ids for empty repositories and deprecate them unless info_only is set.
    for repository_id in repository_ids:
        repository = app.sa_session.query( app.model.Repository ) \
                               .filter( app.model.Repository.table.c.id == repository_id ) \
                               .one()
        owner = repository.user
        if info_only:
            print '# Repository %s owned by %s would have been deprecated, but info_only was set.' % ( repository.name, repository.user.username )
        else:
            if verbose:
                print '# Deprecating repository %s owned by %s.' % ( repository.name, owner.username )
            if owner.username not in repositories_by_owner:
                repositories_by_owner[ owner.username ] = dict( owner=owner, repositories=[] )
            repositories_by_owner[ owner.username ][ 'repositories' ].append( repository )
            repositories.append( repository )
    # Send an email to each repository owner, listing the repositories that were deprecated.
    for repository_owner in repositories_by_owner:
        for repository in repositories_by_owner[ repository_owner ][ 'repositories' ]:
            repository.deprecated = True
            app.sa_session.add( repository )
            app.sa_session.flush()
        owner = repositories_by_owner[ repository_owner ][ 'owner' ]
        send_mail_to_owner( app, repository.name, owner.username, owner.email, repositories_by_owner[ repository_owner ][ 'repositories' ], days )
    stop = time.time()
    print '# Deprecated %d repositories.' % len( repositories )
    print "# Elapsed time: ", stop - start
    print "####################################################################################"
Example #55
def get_review_by_repository_id_changeset_revision_user_id( app, repository_id, changeset_revision, user_id ):
    """
    Get a repository_review from the database via repository id, changeset_revision
    and user_id.
    """
    sa_session = app.model.context.current
    return sa_session.query( app.model.RepositoryReview ) \
                     .filter( and_( app.model.RepositoryReview.repository_id == app.security.decode_id( repository_id ),
                                    app.model.RepositoryReview.changeset_revision == changeset_revision,
                                    app.model.RepositoryReview.user_id == app.security.decode_id( user_id ) ) ) \
                     .first()
Example #56
 def get_private_user_role(self, user, auto_create=False):
     role = self.sa_session.query( self.model.Role ) \
                           .filter( and_( self.model.Role.table.c.name == user.email,
                                          self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \
                           .first()
     if not role:
         if auto_create:
             return self.create_private_user_role(user)
         else:
             return None
     return role