def import_workflows( self, trans, **kwd ):
        """
        POST /api/tool_shed_repositories/import_workflows

        Import all of the exported workflows contained in the specified installed tool shed repository into Galaxy.

        :param key: the API key of the Galaxy user with which the imported workflows will be associated.
        :param id: the encoded id of the ToolShedRepository object
        """
        api_key = kwd.get( 'key', None )
        if api_key is None:
            raise HTTPBadRequest( detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflow." )
        tool_shed_repository_id = kwd.get( 'id', '' )
        if not tool_shed_repository_id:
            raise HTTPBadRequest( detail="Missing required parameter 'id'." )
        repository = suc.get_tool_shed_repository_by_id( trans.app, tool_shed_repository_id )
        exported_workflows = json.loads( self.exported_workflows( trans, tool_shed_repository_id ) )
        imported_workflow_dicts = []
        for exported_workflow_dict in exported_workflows:
            workflow_name = exported_workflow_dict[ 'workflow_name' ]
            workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
            if status == 'error':
                log.debug( error_message )
            else:
                imported_workflow_dicts.append( workflow.to_dict( view='element' ) )
        return imported_workflow_dicts
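A minimal client-side sketch of driving the endpoint above with the requests library; the base URL, API key, and repository id below are placeholders, and the parameters mirror the docstring.

# Hypothetical usage sketch (not part of the controller above): import every
# workflow exported by an installed tool shed repository through the API.
import requests

GALAXY_URL = 'http://localhost:8080'    # assumed Galaxy base URL
API_KEY = 'your-galaxy-api-key'         # assumed Galaxy user API key
REPOSITORY_ID = 'f2db41e1fa331b3e'      # placeholder encoded ToolShedRepository id

response = requests.post(
    '%s/api/tool_shed_repositories/import_workflows' % GALAXY_URL,
    params=dict(key=API_KEY, id=REPOSITORY_ID))
for imported_workflow in response.json():
    print(imported_workflow.get('name'))
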
    def exported_workflows(self, trans, id, **kwd):
        """
        GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}/exported_workflows

        Display a list of dictionaries containing information about this tool shed repository's exported workflows.

        :param id: the encoded id of the ToolShedRepository object
        """
        # Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_workflows
        # Since exported workflows are dictionaries with very few attributes that differentiate them from each
        # other, we'll build the list based on the following dictionary of those few attributes.
        exported_workflows = []
        repository = suc.get_tool_shed_repository_by_id(trans.app, id)
        metadata = repository.metadata
        if metadata:
            exported_workflow_tups = metadata.get('workflows', [])
        else:
            exported_workflow_tups = []
        for index, exported_workflow_tup in enumerate(exported_workflow_tups):
            # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of
            # relative_path is the location on disk (relative to the root of the installed repository) where the
            # exported_workflow_dict file (.ga file) is located.
            exported_workflow_dict = exported_workflow_tup[1]
            annotation = exported_workflow_dict.get('annotation', '')
            format_version = exported_workflow_dict.get('format-version', '')
            workflow_name = exported_workflow_dict.get('name', '')
            # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its
            # location (i.e., index) in the list.
            display_dict = dict(index=index,
                                annotation=annotation,
                                format_version=format_version,
                                workflow_name=workflow_name)
            exported_workflows.append(display_dict)
        return exported_workflows
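A hedged sketch of calling this endpoint, reusing the example URL from the comment above; the API key is a placeholder and the sample entry only echoes the fields assembled in display_dict.

# Hypothetical usage sketch: list the exported workflows of an installed repository.
import requests

response = requests.get(
    'http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_workflows',
    params=dict(key='your-galaxy-api-key'))
# Each entry mirrors display_dict above, e.g.
# {'index': 0, 'annotation': '', 'format_version': '0.1', 'workflow_name': 'My workflow'}
for entry in response.json():
    print(entry['index'], entry['workflow_name'])
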
    def import_workflow( self, trans, payload, **kwd ):
        """
        POST /api/tool_shed_repositories/import_workflow

        Import the specified exported workflow contained in the specified installed tool shed repository into Galaxy.

        :param key: the API key of the Galaxy user with which the imported workflow will be associated.
        :param id: the encoded id of the ToolShedRepository object

        The following parameters are included in the payload.
        :param index: the index location of the workflow tuple in the list of exported workflows stored in the metadata for the specified repository
        """
        api_key = kwd.get( 'key', None )
        if api_key is None:
            raise HTTPBadRequest( detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflow." )
        tool_shed_repository_id = kwd.get( 'id', '' )
        if not tool_shed_repository_id:
            raise HTTPBadRequest( detail="Missing required parameter 'id'." )
        index = payload.get( 'index', None )
        if index is None:
            raise HTTPBadRequest( detail="Missing required parameter 'index'." )
        repository = suc.get_tool_shed_repository_by_id( trans.app, tool_shed_repository_id )
        exported_workflows = json.loads( self.exported_workflows( trans, tool_shed_repository_id ) )
        # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its location (i.e., index) in the list.
        exported_workflow = exported_workflows[ int( index ) ]
        workflow_name = exported_workflow[ 'workflow_name' ]
        workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
        if status == 'error':
            log.debug( error_message )
            return {}
        return workflow.to_dict( view='element' )
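A hedged sketch for importing a single workflow by its index, matching the 'index' payload parameter documented above; the URL, key, and repository id are placeholders.

# Hypothetical usage sketch: import the workflow at index 0 of the repository's
# exported workflows (the index comes from the exported_workflows listing).
import requests

response = requests.post(
    'http://localhost:8080/api/tool_shed_repositories/import_workflow',
    params=dict(key='your-galaxy-api-key', id='f2db41e1fa331b3e'),
    json=dict(index=0))
print(response.json().get('name'))
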
    def import_workflows(self, trans, **kwd):
        """
        POST /api/tool_shed_repositories/import_workflows

        Import all of the exported workflows contained in the specified installed tool shed repository into Galaxy.

        :param key: the API key of the Galaxy user with which the imported workflows will be associated.
        :param id: the encoded id of the ToolShedRepository object
        """
        api_key = kwd.get('key', None)
        if api_key is None:
            raise HTTPBadRequest(
                detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflow."
            )
        tool_shed_repository_id = kwd.get('id', '')
        if not tool_shed_repository_id:
            raise HTTPBadRequest(detail="Missing required parameter 'id'.")
        repository = suc.get_tool_shed_repository_by_id(
            trans.app, tool_shed_repository_id)
        exported_workflows = json.from_json_string(
            self.exported_workflows(trans, tool_shed_repository_id))
        imported_workflow_dicts = []
        for exported_workflow_dict in exported_workflows:
            workflow_name = exported_workflow_dict['workflow_name']
            workflow, status, error_message = workflow_util.import_workflow(
                trans, repository, workflow_name)
            if status == 'error':
                log.debug(error_message)
            else:
                imported_workflow_dicts.append(
                    workflow.to_dict(view='element'))
        return imported_workflow_dicts
def check_for_updates( app, model, repository_id=None ):
    message = ''
    status = 'ok'
    if repository_id is None:
        success_count = 0
        repository_names_not_updated = []
        updated_count = 0
        for repository in model.context.query( model.ToolShedRepository ) \
                                       .filter( model.ToolShedRepository.table.c.deleted == false() ):
            ok, updated = \
                check_or_update_tool_shed_status_for_installed_repository( app, repository )
            if ok:
                success_count += 1
            else:
                repository_names_not_updated.append( '<b>%s</b>' % escape( str( repository.name ) ) )
            if updated:
                updated_count += 1
        message = "Checked the status in the tool shed for %d repositories.  " % success_count
        message += "Updated the tool shed status for %d repositories.  " % updated_count
        if repository_names_not_updated:
            message += "Unable to retrieve status from the tool shed for the following repositories:\n"
            message += ", ".join( repository_names_not_updated )
    else:
        repository = suc.get_tool_shed_repository_by_id( app, repository_id )
        ok, updated = \
            check_or_update_tool_shed_status_for_installed_repository( app, repository )
        if ok:
            if updated:
                message = "The tool shed status for repository <b>%s</b> has been updated." % escape( str( repository.name ) )
            else:
                message = "The status has not changed in the tool shed for repository <b>%s</b>." % escape( str( repository.name ) )
        else:
            message = "Unable to retrieve status from the tool shed for repository <b>%s</b>." % escape( str( repository.name ) )
            status = 'error'
    return message, status
    def import_workflow( self, trans, payload, **kwd ):
        """
        POST /api/tool_shed_repositories/import_workflow

        Import the specified exported workflow contained in the specified installed tool shed repository into Galaxy.

        :param key: the API key of the Galaxy user with which the imported workflow will be associated.
        :param id: the encoded id of the ToolShedRepository object

        The following parameters are included in the payload.
        :param index: the index location of the workflow tuple in the list of exported workflows stored in the metadata for the specified repository
        """
        api_key = kwd.get( 'key', None )
        if api_key is None:
            raise HTTPBadRequest( detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflow." )
        tool_shed_repository_id = kwd.get( 'id', '' )
        if not tool_shed_repository_id:
            raise HTTPBadRequest( detail="Missing required parameter 'id'." )
        index = payload.get( 'index', None )
        if index is None:
            raise HTTPBadRequest( detail="Missing required parameter 'index'." )
        repository = suc.get_tool_shed_repository_by_id( trans.app, tool_shed_repository_id )
        exported_workflows = json.loads( self.exported_workflows( trans, tool_shed_repository_id ) )
        # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its location (i.e., index) in the list.
        exported_workflow = exported_workflows[ int( index ) ]
        workflow_name = exported_workflow[ 'workflow_name' ]
        workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
        if status == 'error':
            log.debug( error_message )
            return {}
        return workflow.to_dict( view='element' )
    def exported_workflows( self, trans, id, **kwd ):
        """
        GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}/exported_workflows

        Display a list of dictionaries containing information about this tool shed repository's exported workflows.

        :param id: the encoded id of the ToolShedRepository object
        """
        # Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_workflows
        # Since exported workflows are dictionaries with very few attributes that differentiate them from each
        # other, we'll build the list based on the following dictionary of those few attributes.
        exported_workflows = []
        repository = suc.get_tool_shed_repository_by_id( trans.app, id )
        metadata = repository.metadata
        if metadata:
            exported_workflow_tups = metadata.get( 'workflows', [] )
        else:
            exported_workflow_tups = []
        for index, exported_workflow_tup in enumerate( exported_workflow_tups ):
            # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of
            # relative_path is the location on disk (relative to the root of the installed repository) where the
            # exported_workflow_dict file (.ga file) is located.
            exported_workflow_dict = exported_workflow_tup[ 1 ]
            annotation = exported_workflow_dict.get( 'annotation', '' )
            format_version = exported_workflow_dict.get( 'format-version', '' )
            workflow_name = exported_workflow_dict.get( 'name', '' )
            # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its
            # location (i.e., index) in the list.
            display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name )
            exported_workflows.append( display_dict )
        return exported_workflows
Example #8
def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode( workflow_name )
    if trans.webapp.name == 'tool_shed':
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
        repository_id = trans.security.encode_id( repository_metadata.repository_id )
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = suc.get_tool_shed_repository_by_id( trans.app, repository_id )
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata[ 'workflows' ]:
        workflow_dict = workflow_tup[1]
        if workflow_dict[ 'name' ] == workflow_name:
            break
    if 'tools' in metadata:
        tools_metadata = metadata[ 'tools' ]
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
                                                          workflow_dict=workflow_dict,
                                                          tools_metadata=tools_metadata,
                                                          repository_id=repository_id,
                                                          changeset_revision=changeset_revision )
    workflow_canvas = WorkflowCanvas()
    canvas = workflow_canvas.canvas
    # Store px width for boxes of each step.
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
        tool_errors = module.type == 'tool' and not module.tool
        module_data_inputs = get_workflow_data_inputs( step, module )
        module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps )
        module_name = get_workflow_module_name( module, missing_tool_tups )
        workflow_canvas.populate_data_for_step(
            step,
            module_name,
            module_data_inputs,
            module_data_outputs,
            tool_errors=tool_errors
        )
    workflow_canvas.add_steps( highlight_errors=True )
    workflow_canvas.finish( )
    trans.response.set_content_type( "image/svg+xml" )
    return canvas.standalone_xml()
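To make the traversal above concrete, a sketch of the metadata['workflows'] structure the function expects, with the two call forms from the docstring shown as comments; every name and value here is a placeholder.

# Hypothetical sketch of the expected metadata: 'workflows' is a list of
# (relative path to the .ga file, exported workflow dict) tuples.
example_metadata = {
    'workflows': [
        ('workflows/my_workflow.ga',
         {'name': 'My workflow', 'annotation': '', 'format-version': '0.1'}),
    ],
}
# Called from the tool shed (repository_metadata_id set, repository_id None):
#     svg = generate_workflow_image(trans, encoded_workflow_name,
#                                   repository_metadata_id=encoded_metadata_id)
# Called from Galaxy (repository_id set, repository_metadata_id None):
#     svg = generate_workflow_image(trans, encoded_workflow_name,
#                                   repository_id=encoded_repository_id)
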
def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode( workflow_name )
    if trans.webapp.name == 'tool_shed':
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
        repository_id = trans.security.encode_id( repository_metadata.repository_id )
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = suc.get_tool_shed_repository_by_id( trans.app, repository_id )
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata[ 'workflows' ]:
        workflow_dict = workflow_tup[1]
        if workflow_dict[ 'name' ] == workflow_name:
            break
    if 'tools' in metadata:
        tools_metadata = metadata[ 'tools' ]
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
                                                          workflow_dict=workflow_dict,
                                                          tools_metadata=tools_metadata,
                                                          repository_id=repository_id,
                                                          changeset_revision=changeset_revision )
    workflow_canvas = WorkflowCanvas()
    canvas = workflow_canvas.canvas
    # Store px width for boxes of each step.
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
        tool_errors = module.type == 'tool' and not module.tool
        module_data_inputs = get_workflow_data_inputs( step, module )
        module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps )
        module_name = get_workflow_module_name( module, missing_tool_tups )
        workflow_canvas.populate_data_for_step(
            step,
            module_name,
            module_data_inputs,
            module_data_outputs,
            tool_errors=tool_errors
        )
    workflow_canvas.add_steps( highlight_errors=True )
    workflow_canvas.finish( )
    trans.response.set_content_type( "image/svg+xml" )
    return canvas.standalone_xml()
Example #10
 def display_tool_help_image_in_repository( self, trans, **kwd ):
     repository_id = kwd.get( 'repository_id', None )
     image_file = kwd.get( 'image_file', None )
     if repository_id and image_file:
         repository = suc.get_tool_shed_repository_by_id( trans, repository_id )
         repo_files_dir = os.path.join( repository.repo_files_directory( trans.app ) )
         default_path = os.path.abspath( os.path.join( repo_files_dir, 'static', 'images', image_file ) )
         if os.path.exists( default_path ):
             return open( default_path, 'r' )
         else:
             path_to_file = suc.get_absolute_path_to_file_in_repository( repo_files_dir, image_file )
             if os.path.exists( path_to_file ):
                 return open( path_to_file, 'r' )
     return None
    def show( self, trans, id, **kwd ):
        """
        GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}
        Display a dictionary containing information about a specified tool_shed_repository.

        :param id: the encoded id of the ToolShedRepository object
        """
        # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
        tool_shed_repository = suc.get_tool_shed_repository_by_id( trans.app, id )
        if tool_shed_repository is None:
            log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) )
            return {}
        tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) )
        tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
                                                          action='show',
                                                          id=trans.security.encode_id( tool_shed_repository.id ) )
        return tool_shed_repository_dict
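A hedged client-side sketch for this endpoint, reusing the example URL from the comment above; the API key is a placeholder and the printed fields are only illustrative.

# Hypothetical usage sketch: fetch the dictionary describing one installed repository.
import requests

response = requests.get(
    'http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e',
    params=dict(key='your-galaxy-api-key'))
repository_dict = response.json()
print(repository_dict.get('name'), repository_dict.get('status'))
print(repository_dict.get('url'))
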
Example #13
 def get_components_from_repository_dependency_for_installed_repository(
         self, repository_dependency):
     """
     Parse a repository dependency and return components necessary for proper display
     in Galaxy on the Manage repository page.
     """
     # Default prior_installation_required and only_if_compiling_contained_td to False.
     prior_installation_required = 'False'
     only_if_compiling_contained_td = 'False'
     if len(repository_dependency) == 6:
         # Metadata should have been reset on this installed repository, but it wasn't.
         tool_shed_repository_id = repository_dependency[4]
         installation_status = repository_dependency[5]
         tool_shed, name, owner, changeset_revision = repository_dependency[
             0:4]
         repository_dependency = [
             tool_shed, name, owner, changeset_revision,
             prior_installation_required, only_if_compiling_contained_td
         ]
     elif len(repository_dependency) == 7:
          # We have a repository dependency tuple that includes a prior_installation_required value but not an only_if_compiling_contained_td value.
         tool_shed_repository_id = repository_dependency[5]
         installation_status = repository_dependency[6]
         tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency[
             0:5]
         repository_dependency = \
             [ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
     elif len(repository_dependency) == 8:
         # We have a repository dependency tuple that includes both a prior_installation_required value
          # and an only_if_compiling_contained_td value.
         tool_shed_repository_id = repository_dependency[6]
         installation_status = repository_dependency[7]
         repository_dependency = repository_dependency[0:6]
     else:
         tool_shed_repository_id = None
         installation_status = 'unknown'
     if tool_shed_repository_id:
         tool_shed_repository = suc.get_tool_shed_repository_by_id(
             self.app, self.app.security.encode_id(tool_shed_repository_id))
         if tool_shed_repository:
             if tool_shed_repository.missing_repository_dependencies:
                 installation_status = '%s, missing repository dependencies' % installation_status
             elif tool_shed_repository.missing_tool_dependencies:
                 installation_status = '%s, missing tool dependencies' % installation_status
     return tool_shed_repository_id, installation_status, repository_dependency
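Illustrative, made-up tuples for the three lengths handled above, to make the index arithmetic concrete; the tool shed, repository name, owner, changeset, and ids are placeholders.

# Hypothetical repository dependency tuples matching the three branches above.
# 6-tuple: both flags missing (metadata not reset); [4] is the repository id, [5] the status.
dep_6 = ('toolshed.g2.bx.psu.edu', 'package_example_1_0', 'iuc', 'abc123def456',
         42, 'Installed')
# 7-tuple: adds prior_installation_required; [5] is the repository id, [6] the status.
dep_7 = ('toolshed.g2.bx.psu.edu', 'package_example_1_0', 'iuc', 'abc123def456',
         'False', 42, 'Installed')
# 8-tuple: adds both flags; [6] is the repository id, [7] the status.
dep_8 = ('toolshed.g2.bx.psu.edu', 'package_example_1_0', 'iuc', 'abc123def456',
         'False', 'False', 42, 'Installed')
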
Example #14
 def display_tool_help_image_in_repository(self, trans, **kwd):
     repository_id = kwd.get('repository_id', None)
     image_file = kwd.get('image_file', None)
     if repository_id and image_file:
         repository = suc.get_tool_shed_repository_by_id(
             trans, repository_id)
         repo_files_dir = os.path.join(
             repository.repo_files_directory(trans.app))
         default_path = os.path.abspath(
             os.path.join(repo_files_dir, 'static', 'images', image_file))
         if os.path.exists(default_path):
             return open(default_path, 'r')
         else:
             path_to_file = suc.get_absolute_path_to_file_in_repository(
                 repo_files_dir, image_file)
             if os.path.exists(path_to_file):
                 return open(path_to_file, 'r')
     return None
    def status( self, trans, id, **kwd ):
        """
        GET /api/tool_shed_repositories/{id}/status
        Display a dictionary containing information about a specified repository's installation
        status and a list of its dependencies and the status of each.

        :param id: the repository's encoded id
        """
        tool_shed_repository = suc.get_tool_shed_repository_by_id( self.app, id )
        if tool_shed_repository is None:
            log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) )
            return {}
        tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) )
        tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
                                                          action='show',
                                                          id=trans.security.encode_id( tool_shed_repository.id ) )
        tool_shed_repository_dict[ 'repository_dependencies' ] = self.__flatten_repository_dependency_list( trans, tool_shed_repository )
        return tool_shed_repository_dict
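A hedged sketch of polling this endpoint; the URL, key, and repository id are placeholders, and repository_dependencies is the flattened list added just above.

# Hypothetical usage sketch: poll an installed repository's installation status.
import requests

response = requests.get(
    'http://localhost:8080/api/tool_shed_repositories/f2db41e1fa331b3e/status',
    params=dict(key='your-galaxy-api-key'))
status_dict = response.json()
print(status_dict.get('status'))
for dependency in status_dict.get('repository_dependencies', []):
    print(dependency.get('name'), dependency.get('status'))
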
Example #16
    def show( self, trans, id, **kwd ):
        """
        GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}
        Display a dictionary containing information about a specified tool_shed_repository.

        :param id: the encoded id of the ToolShedRepository object
        """
        # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
        try:
            tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
            tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
            tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
                                                              action='show',
                                                              id=trans.security.encode_id( tool_shed_repository.id ) )
            return tool_shed_repository_dict
        except Exception as e:
            message = "Error in tool_shed_repositories API in index: " + str( e )
            log.error( message, exc_info=True )
            trans.response.status = 500
            return message
 def get_components_from_repository_dependency_for_installed_repository( self, repository_dependency ):
     """
     Parse a repository dependency and return components necessary for proper display
     in Galaxy on the Manage repository page.
     """
     # Default prior_installation_required and only_if_compiling_contained_td to False.
     prior_installation_required = 'False'
     only_if_compiling_contained_td = 'False'
     if len( repository_dependency ) == 6:
         # Metadata should have been reset on this installed repository, but it wasn't.
         tool_shed_repository_id = repository_dependency[ 4 ]
         installation_status = repository_dependency[ 5 ]
         tool_shed, name, owner, changeset_revision = repository_dependency[ 0:4 ]
         repository_dependency = [ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
     elif len( repository_dependency ) == 7:
          # We have a repository dependency tuple that includes a prior_installation_required value but not an only_if_compiling_contained_td value.
         tool_shed_repository_id = repository_dependency[ 5 ]
         installation_status = repository_dependency[ 6 ]
         tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency[ 0:5 ]
         repository_dependency = \
             [ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
     elif len( repository_dependency ) == 8:
         # We have a repository dependency tuple that includes both a prior_installation_required value
          # and an only_if_compiling_contained_td value.
         tool_shed_repository_id = repository_dependency[ 6 ]
         installation_status = repository_dependency[ 7 ]
         repository_dependency = repository_dependency[ 0:6 ]
     else:
         tool_shed_repository_id = None
         installation_status = 'unknown'
     if tool_shed_repository_id:
         tool_shed_repository = suc.get_tool_shed_repository_by_id( self.app,
                                                                    self.app.security.encode_id( tool_shed_repository_id ) )
         if tool_shed_repository:
             if tool_shed_repository.missing_repository_dependencies:
                 installation_status = '%s, missing repository dependencies' % installation_status
             elif tool_shed_repository.missing_tool_dependencies:
                 installation_status = '%s, missing tool dependencies' % installation_status
     return tool_shed_repository_id, installation_status, repository_dependency
def check_for_updates(app, model, repository_id=None):
    message = ''
    status = 'ok'
    if repository_id is None:
        success_count = 0
        repository_names_not_updated = []
        updated_count = 0
        for repository in model.context.query( model.ToolShedRepository ) \
                                       .filter( model.ToolShedRepository.table.c.deleted == false() ):
            ok, updated = \
                check_or_update_tool_shed_status_for_installed_repository( app, repository )
            if ok:
                success_count += 1
            else:
                repository_names_not_updated.append(
                    '<b>%s</b>' % escape(str(repository.name)))
            if updated:
                updated_count += 1
        message = "Checked the status in the tool shed for %d repositories.  " % success_count
        message += "Updated the tool shed status for %d repositories.  " % updated_count
        if repository_names_not_updated:
            message += "Unable to retrieve status from the tool shed for the following repositories:\n"
            message += ", ".join(repository_names_not_updated)
    else:
        repository = suc.get_tool_shed_repository_by_id(app, repository_id)
        ok, updated = \
            check_or_update_tool_shed_status_for_installed_repository( app, repository )
        if ok:
            if updated:
                message = "The tool shed status for repository <b>%s</b> has been updated." % escape(
                    str(repository.name))
            else:
                message = "The status has not changed in the tool shed for repository <b>%s</b>." % escape(
                    str(repository.name))
        else:
            message = "Unable to retrieve status from the tool shed for repository <b>%s</b>." % escape(
                str(repository.name))
            status = 'error'
    return message, status
    def status(self, trans, id, **kwd):
        """
        GET /api/tool_shed_repositories/{id}/status
        Display a dictionary containing information about a specified repository's installation
        status and a list of its dependencies and the status of each.

        :param id: the repository's encoded id
        """
        tool_shed_repository = suc.get_tool_shed_repository_by_id(self.app, id)
        if tool_shed_repository is None:
            log.debug(
                "Unable to locate tool_shed_repository record for id %s." %
                (str(id)))
            return {}
        tool_shed_repository_dict = tool_shed_repository.as_dict(
            value_mapper=self.__get_value_mapper(trans, tool_shed_repository))
        tool_shed_repository_dict['url'] = web.url_for(
            controller='tool_shed_repositories',
            action='show',
            id=trans.security.encode_id(tool_shed_repository.id))
        tool_shed_repository_dict['repository_dependencies'] = \
            self.__flatten_repository_dependency_list(trans, tool_shed_repository)
        return tool_shed_repository_dict
Example #20
def generate_workflow_image(trans,
                            workflow_name,
                            repository_metadata_id=None,
                            repository_id=None):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode(workflow_name)
    if trans.webapp.name == 'tool_shed':
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id(
            trans, repository_metadata_id)
        repository_id = trans.security.encode_id(
            repository_metadata.repository_id)
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = suc.get_tool_shed_repository_by_id(trans, repository_id)
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata['workflows']:
        workflow_dict = workflow_tup[1]
        if workflow_dict['name'] == workflow_name:
            break
    if 'tools' in metadata:
        tools_metadata = metadata['tools']
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict(
        trans=trans,
        workflow_dict=workflow_dict,
        tools_metadata=tools_metadata,
        repository_id=repository_id,
        changeset_revision=changeset_revision)
    data = []
    canvas = svgfig.canvas(
        style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
    text = svgfig.SVG("g")
    connectors = svgfig.SVG("g")
    boxes = svgfig.SVG("g")
    svgfig.Text.defaults["font-size"] = "10px"
    in_pos = {}
    out_pos = {}
    margin = 5
    # Spacing between input/outputs.
    line_px = 16
    # Store px width for boxes of each step.
    widths = {}
    max_width, max_x, max_y = 0, 0, 0
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step(trans, repository_id,
                                                   changeset_revision,
                                                   tools_metadata, step)
        tool_errors = module.type == 'tool' and not module.tool
        module_data_inputs = get_workflow_data_inputs(step, module)
        module_data_outputs = get_workflow_data_outputs(
            step, module, workflow.steps)
        step_dict = {
            'id': step.order_index,
            'data_inputs': module_data_inputs,
            'data_outputs': module_data_outputs,
            'position': step.position,
            'tool_errors': tool_errors
        }
        input_conn_dict = {}
        for conn in step.input_connections:
            input_conn_dict[conn.input_name] = dict(
                id=conn.output_step.order_index, output_name=conn.output_name)
        step_dict['input_connections'] = input_conn_dict
        data.append(step_dict)
        x, y = step.position['left'], step.position['top']
        count = 0
        module_name = get_workflow_module_name(module, missing_tool_tups)
        max_len = len(module_name) * 1.5
        text.append(
            svgfig.Text(x, y + 20, module_name, **{
                "font-size": "14px"
            }).SVG())
        y += 45
        for di in module_data_inputs:
            cur_y = y + count * line_px
            if step.order_index not in in_pos:
                in_pos[step.order_index] = {}
            in_pos[step.order_index][di['name']] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, di['label']).SVG())
            count += 1
            max_len = max(max_len, len(di['label']))
        if len(module.get_data_inputs()) > 0:
            y += 15
        for do in module_data_outputs:
            cur_y = y + count * line_px
            if step.order_index not in out_pos:
                out_pos[step.order_index] = {}
            out_pos[step.order_index][do['name']] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, do['name']).SVG())
            count += 1
            max_len = max(max_len, len(do['name']))
        widths[step.order_index] = max_len * 5.5
        max_x = max(max_x, step.position['left'])
        max_y = max(max_y, step.position['top'])
        max_width = max(max_width, widths[step.order_index])
    for step_dict in data:
        tool_unavailable = step_dict['tool_errors']
        width = widths[step_dict['id']]
        x, y = step_dict['position']['left'], step_dict['position']['top']
        # Only highlight missing tools if displaying in the tool shed.
        if trans.webapp.name == 'tool_shed' and tool_unavailable:
            fill = "#EBBCB2"
        else:
            fill = "#EBD9B2"
        boxes.append(
            svgfig.Rect(x - margin, y, x + width - margin, y + 30,
                        fill=fill).SVG())
        box_height = (len(step_dict['data_inputs']) +
                      len(step_dict['data_outputs'])) * line_px + margin
        # Draw separator line.
        if len(step_dict['data_inputs']) > 0:
            box_height += 15
            sep_y = y + len(step_dict['data_inputs']) * line_px + 40
            text.append(
                svgfig.Line(x - margin, sep_y, x + width - margin,
                            sep_y).SVG())
        # Define an input/output box.
        boxes.append(
            svgfig.Rect(x - margin,
                        y + 30,
                        x + width - margin,
                        y + 30 + box_height,
                        fill="#ffffff").SVG())
        for conn, output_dict in step_dict['input_connections'].iteritems():
            in_coords = in_pos[step_dict['id']][conn]
            # out_pos_index will be a step number like 1, 2, 3...
            out_pos_index = output_dict['id']
            # out_pos_name will be a string like 'o', 'o2', etc.
            out_pos_name = output_dict['output_name']
            if out_pos_index in out_pos:
                # out_conn_index_dict will be something like:
                # 7: {'o': (824.5, 618)}
                out_conn_index_dict = out_pos[out_pos_index]
                if out_pos_name in out_conn_index_dict:
                    out_conn_pos = out_pos[out_pos_index][out_pos_name]
                else:
                    # Take any key / value pair available in out_conn_index_dict.
                    # A problem will result if the dictionary is empty.
                    if out_conn_index_dict.keys():
                        key = out_conn_index_dict.keys()[0]
                        out_conn_pos = out_pos[out_pos_index][key]
            adjusted = (out_conn_pos[0] + widths[output_dict['id']],
                        out_conn_pos[1])
            text.append(
                svgfig.SVG("circle",
                           cx=out_conn_pos[0] + widths[output_dict['id']] -
                           margin,
                           cy=out_conn_pos[1] - margin,
                           r=5,
                           fill="#ffffff"))
            connectors.append(
                svgfig.Line(adjusted[0],
                            adjusted[1] - margin,
                            in_coords[0] - 10,
                            in_coords[1],
                            arrow_end="true").SVG())
    canvas.append(connectors)
    canvas.append(boxes)
    canvas.append(text)
    width, height = (max_x + max_width + 50), max_y + 300
    canvas['width'] = "%s px" % width
    canvas['height'] = "%s px" % height
    canvas['viewBox'] = "0 0 %s %s" % (width, height)
    trans.response.set_content_type("image/svg+xml")
    return canvas.standalone_xml()
Example #21
def generate_workflow_image(trans, workflow_name, repository_metadata_id=None, repository_id=None):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode(workflow_name)
    if trans.webapp.name == "tool_shed":
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id(trans, repository_metadata_id)
        repository_id = trans.security.encode_id(repository_metadata.repository_id)
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = suc.get_tool_shed_repository_by_id(trans, repository_id)
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata["workflows"]:
        workflow_dict = workflow_tup[1]
        if workflow_dict["name"] == workflow_name:
            break
    if "tools" in metadata:
        tools_metadata = metadata["tools"]
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict(
        trans=trans,
        workflow_dict=workflow_dict,
        tools_metadata=tools_metadata,
        repository_id=repository_id,
        changeset_revision=changeset_revision,
    )
    data = []
    canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
    text = svgfig.SVG("g")
    connectors = svgfig.SVG("g")
    boxes = svgfig.SVG("g")
    svgfig.Text.defaults["font-size"] = "10px"
    in_pos = {}
    out_pos = {}
    margin = 5
    # Spacing between input/outputs.
    line_px = 16
    # Store px width for boxes of each step.
    widths = {}
    max_width, max_x, max_y = 0, 0, 0
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step(trans, repository_id, changeset_revision, tools_metadata, step)
        tool_errors = module.type == "tool" and not module.tool
        module_data_inputs = get_workflow_data_inputs(step, module)
        module_data_outputs = get_workflow_data_outputs(step, module, workflow.steps)
        step_dict = {
            "id": step.order_index,
            "data_inputs": module_data_inputs,
            "data_outputs": module_data_outputs,
            "position": step.position,
            "tool_errors": tool_errors,
        }
        input_conn_dict = {}
        for conn in step.input_connections:
            input_conn_dict[conn.input_name] = dict(id=conn.output_step.order_index, output_name=conn.output_name)
        step_dict["input_connections"] = input_conn_dict
        data.append(step_dict)
        x, y = step.position["left"], step.position["top"]
        count = 0
        module_name = get_workflow_module_name(module, missing_tool_tups)
        max_len = len(module_name) * 1.5
        text.append(svgfig.Text(x, y + 20, module_name, **{"font-size": "14px"}).SVG())
        y += 45
        for di in module_data_inputs:
            cur_y = y + count * line_px
            if step.order_index not in in_pos:
                in_pos[step.order_index] = {}
            in_pos[step.order_index][di["name"]] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, di["label"]).SVG())
            count += 1
            max_len = max(max_len, len(di["label"]))
        if len(module.get_data_inputs()) > 0:
            y += 15
        for do in module_data_outputs:
            cur_y = y + count * line_px
            if step.order_index not in out_pos:
                out_pos[step.order_index] = {}
            out_pos[step.order_index][do["name"]] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, do["name"]).SVG())
            count += 1
            max_len = max(max_len, len(do["name"]))
        widths[step.order_index] = max_len * 5.5
        max_x = max(max_x, step.position["left"])
        max_y = max(max_y, step.position["top"])
        max_width = max(max_width, widths[step.order_index])
    for step_dict in data:
        tool_unavailable = step_dict["tool_errors"]
        width = widths[step_dict["id"]]
        x, y = step_dict["position"]["left"], step_dict["position"]["top"]
        # Only highlight missing tools if displaying in the tool shed.
        if trans.webapp.name == "tool_shed" and tool_unavailable:
            fill = "#EBBCB2"
        else:
            fill = "#EBD9B2"
        boxes.append(svgfig.Rect(x - margin, y, x + width - margin, y + 30, fill=fill).SVG())
        box_height = (len(step_dict["data_inputs"]) + len(step_dict["data_outputs"])) * line_px + margin
        # Draw separator line.
        if len(step_dict["data_inputs"]) > 0:
            box_height += 15
            sep_y = y + len(step_dict["data_inputs"]) * line_px + 40
            text.append(svgfig.Line(x - margin, sep_y, x + width - margin, sep_y).SVG())
        # Define an input/output box.
        boxes.append(svgfig.Rect(x - margin, y + 30, x + width - margin, y + 30 + box_height, fill="#ffffff").SVG())
        for conn, output_dict in step_dict["input_connections"].iteritems():
            in_coords = in_pos[step_dict["id"]][conn]
            # out_pos_index will be a step number like 1, 2, 3...
            out_pos_index = output_dict["id"]
            # out_pos_name will be a string like 'o', 'o2', etc.
            out_pos_name = output_dict["output_name"]
            if out_pos_index in out_pos:
                # out_conn_index_dict will be something like:
                # 7: {'o': (824.5, 618)}
                out_conn_index_dict = out_pos[out_pos_index]
                if out_pos_name in out_conn_index_dict:
                    out_conn_pos = out_pos[out_pos_index][out_pos_name]
                else:
                    # Take any key / value pair available in out_conn_index_dict.
                    # A problem will result if the dictionary is empty.
                    if out_conn_index_dict.keys():
                        key = out_conn_index_dict.keys()[0]
                        out_conn_pos = out_pos[out_pos_index][key]
            adjusted = (out_conn_pos[0] + widths[output_dict["id"]], out_conn_pos[1])
            text.append(
                svgfig.SVG(
                    "circle",
                    cx=out_conn_pos[0] + widths[output_dict["id"]] - margin,
                    cy=out_conn_pos[1] - margin,
                    r=5,
                    fill="#ffffff",
                )
            )
            connectors.append(
                svgfig.Line(adjusted[0], adjusted[1] - margin, in_coords[0] - 10, in_coords[1], arrow_end="true").SVG()
            )
    canvas.append(connectors)
    canvas.append(boxes)
    canvas.append(text)
    width, height = (max_x + max_width + 50), max_y + 300
    canvas["width"] = "%s px" % width
    canvas["height"] = "%s px" % height
    canvas["viewBox"] = "0 0 %s %s" % (width, height)
    trans.response.set_content_type("image/svg+xml")
    return canvas.standalone_xml()