Example #1
def get_repository_dependencies(app, tool_shed_url, repository_name,
                                repository_owner, changeset_revision):
    repository_dependencies_dict = {}
    tool_shed_accessible = True
    params = dict(name=repository_name,
                  owner=repository_owner,
                  changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_repository_dependencies']
    try:
        raw_text = util.url_get(
            tool_shed_url,
            password_mgr=app.tool_shed_registry.url_auth(tool_shed_url),
            pathspec=pathspec,
            params=params)
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        log.warning(
            "The URL\n%s\nraised the exception:\n%s\n",
            util.build_url(tool_shed_url, pathspec=pathspec, params=params), e)
    if tool_shed_accessible:
        if len(raw_text) > 2:
            encoded_text = json.loads(raw_text)
            repository_dependencies_dict = encoding_util.tool_shed_decode(
                encoded_text)
    return tool_shed_accessible, repository_dependencies_dict
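A minimal usage sketch of the function above; `app` stands in for an initialized Galaxy application object with a configured tool_shed_registry, and the repository coordinates are illustrative:

# Minimal sketch; `app` and the repository coordinates are placeholders.
accessible, dependencies = get_repository_dependencies(
    app,
    tool_shed_url='https://toolshed.g2.bx.psu.edu',
    repository_name='add_column',
    repository_owner='test',
    changeset_revision='3a08cc21466f')
if accessible:
    for key, dependency in dependencies.items():
        print('%s -> %s' % (key, dependency))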
Example #2
 def check_for_tool_dependencies( self, trans, migration_stage ):
     # Get the 000x_tools.xml file associated with migration_stage.
     tools_xml_file_path = os.path.abspath( os.path.join( trans.app.config.root, 'scripts', 'migrate_tools', '%04d_tools.xml' % migration_stage ) )
     tree = galaxy.util.parse_xml( tools_xml_file_path )
     root = tree.getroot()
     tool_shed = root.get( 'name' )
     tool_shed_url = self.get_tool_shed_url_from_tools_xml_file_path( trans, tool_shed )
     repo_name_dependency_tups = []
     if tool_shed_url:
         for elem in root:
             if elem.tag == 'repository':
                 tool_dependencies = []
                 tool_dependencies_dict = {}
                 repository_name = elem.get( 'name' )
                 changeset_revision = elem.get( 'changeset_revision' )
                 url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
                     ( tool_shed_url, repository_name, changeset_revision )
                 text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
                 if text:
                     tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                     for dependency_key, requirements_dict in tool_dependencies_dict.items():
                         tool_dependency_name = requirements_dict[ 'name' ]
                         tool_dependency_version = requirements_dict[ 'version' ]
                         tool_dependency_type = requirements_dict[ 'type' ]
                         tool_dependency_readme = requirements_dict.get( 'readme', '' )
                         tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
                 repo_name_dependency_tups.append( ( repository_name, tool_dependencies ) )
     return repo_name_dependency_tups
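For context, a hedged sketch of the 000x_tools.xml layout this method parses; the tag and attribute names are inferred from the .get() calls above, and the concrete values are illustrative:

# Hedged sketch of the migration file layout the parser above expects;
# tag and attribute names are inferred from the code, values are made up.
from xml.etree import ElementTree

SAMPLE_TOOLS_XML = """<?xml version="1.0"?>
<toolshed name="toolshed.g2.bx.psu.edu">
    <repository name="add_column" changeset_revision="3a08cc21466f">
        <tool file="add_column.xml" />
    </repository>
</toolshed>"""

root = ElementTree.fromstring(SAMPLE_TOOLS_XML)
assert root.get('name') == 'toolshed.g2.bx.psu.edu'
assert [elem.tag for elem in root] == ['repository']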
Example #3
def get_tool_dependencies(app, tool_shed_url, repository_name,
                          repository_owner, changeset_revision):
    tool_dependencies = []
    tool_shed_accessible = True
    params = dict(name=repository_name,
                  owner=repository_owner,
                  changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_tool_dependencies']
    try:
        text = util.url_get(
            tool_shed_url,
            password_mgr=app.tool_shed_registry.url_auth(tool_shed_url),
            pathspec=pathspec,
            params=params)
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        log.warning(
            "The URL\n%s\nraised the exception:\n%s\n",
            util.build_url(tool_shed_url, pathspec=pathspec, params=params), e)
    if tool_shed_accessible:
        if text:
            tool_dependencies_dict = encoding_util.tool_shed_decode(text)
            for requirements_dict in tool_dependencies_dict.values():
                tool_dependency_name = requirements_dict['name']
                tool_dependency_version = requirements_dict['version']
                tool_dependency_type = requirements_dict['type']
                tool_dependencies.append(
                    (tool_dependency_name, tool_dependency_version,
                     tool_dependency_type))
    return tool_shed_accessible, tool_dependencies
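The returned list holds (name, version, type) tuples; a minimal consumption sketch, with `app` and the coordinates again as placeholders:

# Minimal sketch; `app` and the repository coordinates are placeholders.
accessible, tool_dependencies = get_tool_dependencies(
    app, 'https://toolshed.g2.bx.psu.edu', 'add_column', 'test', '3a08cc21466f')
if accessible:
    for name, version, dependency_type in tool_dependencies:
        print('%s %s (%s)' % (name, version, dependency_type))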
Example #4
 def check_for_tool_dependencies( self, trans, migration_stage ):
     # Get the 000x_tools.xml file associated with migration_stage.
     tools_xml_file_path = os.path.abspath( os.path.join( trans.app.config.root, 'scripts', 'migrate_tools', '%04d_tools.xml' % migration_stage ) )
     tree = galaxy.util.parse_xml( tools_xml_file_path )
     root = tree.getroot()
     tool_shed = root.get( 'name' )
     shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed )
     repo_name_dependency_tups = []
     if shed_url:
         for elem in root:
             if elem.tag == 'repository':
                 tool_dependencies = []
                 tool_dependencies_dict = {}
                 repository_name = elem.get( 'name' )
                 changeset_revision = elem.get( 'changeset_revision' )
                 params = dict( name=repository_name, owner='devteam', changeset_revision=changeset_revision )
                 pathspec = [ 'repository', 'get_tool_dependencies' ]
                 text = url_get( shed_url, password_mgr=self.app.tool_shed_registry.url_auth( shed_url ), pathspec=pathspec, params=params )
                 if text:
                     tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                     for dependency_key, requirements_dict in tool_dependencies_dict.items():
                         tool_dependency_name = requirements_dict[ 'name' ]
                         tool_dependency_version = requirements_dict[ 'version' ]
                         tool_dependency_type = requirements_dict[ 'type' ]
                         tool_dependency_readme = requirements_dict.get( 'readme', '' )
                         tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
                 repo_name_dependency_tups.append( ( repository_name, tool_dependencies ) )
     return repo_name_dependency_tups
Example #5
 def get_repository_dependencies_for_installed_tool_shed_repository(
         self, app, repository):
     """
     Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
     for the received repository which is installed into Galaxy.  This method is called only from Galaxy.
     """
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(
         app, str(repository.tool_shed))
     params = dict(name=str(repository.name),
                   owner=str(repository.owner),
                   changeset_revision=str(repository.changeset_revision))
     pathspec = ['repository', 'get_repository_dependencies']
     try:
         raw_text = url_get(
             tool_shed_url,
             password_mgr=app.tool_shed_registry.url_auth(tool_shed_url),
             pathspec=pathspec,
             params=params)
     except Exception as e:
         log.error(
             "The URL\n%s\nraised the exception:\n%s\n",
             build_url(tool_shed_url, pathspec=pathspec, params=params),
             str(e))
         return ''
     if len(raw_text) > 2:
         encoded_text = json.loads(raw_text)
         text = encoding_util.tool_shed_decode(encoded_text)
     else:
         text = ''
     return text
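The tool_shed_decode calls throughout these examples reverse tool_shed_encode from the same encoding_util module; a minimal round-trip sketch (the import path varies across Galaxy versions and is an assumption here):

# Round-trip sketch; the import path is version-dependent.
from tool_shed.util import encoding_util

payload = {'name': 'add_column', 'owner': 'test'}
encoded = encoding_util.tool_shed_encode(payload)
assert encoding_util.tool_shed_decode(encoded) == payload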
Example #6
def generate_workflow_image(trans, workflow_name, repository_metadata_id=None, repository_id=None):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode(workflow_name)
    if trans.webapp.name == 'tool_shed':
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id(trans.app, repository_metadata_id)
        repository_id = trans.security.encode_id(repository_metadata.repository_id)
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = repository_util.get_tool_shed_repository_by_id(trans.app, repository_id)
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata['workflows']:
        workflow_dict = workflow_tup[1]
        if workflow_dict['name'] == workflow_name:
            break
    if 'tools' in metadata:
        tools_metadata = metadata['tools']
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict(trans=trans,
                                                         workflow_dict=workflow_dict,
                                                         tools_metadata=tools_metadata,
                                                         repository_id=repository_id,
                                                         changeset_revision=changeset_revision)
    workflow_canvas = WorkflowCanvas()
    canvas = workflow_canvas.canvas
    # Store px width for boxes of each step.
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step(trans, repository_id, changeset_revision, tools_metadata, step)
        tool_errors = module.type == 'tool' and not module.tool
        module_data_inputs = get_workflow_data_inputs(step, module)
        module_data_outputs = get_workflow_data_outputs(step, module, workflow.steps)
        module_name = get_workflow_module_name(module, missing_tool_tups)
        workflow_canvas.populate_data_for_step(
            step,
            module_name,
            module_data_inputs,
            module_data_outputs,
            tool_errors=tool_errors
        )
    workflow_canvas.add_steps(highlight_errors=True)
    workflow_canvas.finish()
    trans.response.set_content_type("image/svg+xml")
    return canvas.tostring()
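Callers pass workflow_name in encoded form, since the function decodes it on entry; a hedged sketch of a Galaxy-side call, where `trans` and `repository_id` come from the surrounding controller and encoding_util is assumed to expose tool_shed_encode:

# Hedged sketch; `trans` and `repository_id` are assumed controller context.
encoded_name = encoding_util.tool_shed_encode('Workflow constructed from history')
svg = generate_workflow_image(trans, encoded_name, repository_id=repository_id)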
Example #7
def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
    # Get the 000x_tools.xml file associated with the current migrate_tools version number.
    tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
    # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
    migrated_tool_configs_dict = odict()
    tree, error_message = xml_util.parse_xml( tools_xml_file_path )
    if tree is None:
        return False, odict()
    root = tree.getroot()
    tool_shed = root.get( 'name' )
    tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
    # The default behavior is that the tool shed is down.
    tool_shed_accessible = False
    missing_tool_configs_dict = odict()
    if tool_shed_url:
        for elem in root:
            if elem.tag == 'repository':
                tool_dependencies = []
                tool_dependencies_dict = {}
                repository_name = elem.get( 'name' )
                changeset_revision = elem.get( 'changeset_revision' )
                url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
                    ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
                try:
                    text = tool_shed_get( app, tool_shed_url, url )
                    tool_shed_accessible = True
                except Exception as e:
                    # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
                    tool_shed_accessible = False
                    print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
                if tool_shed_accessible:
                    if text:
                        tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                        for dependency_key, requirements_dict in tool_dependencies_dict.items():
                            tool_dependency_name = requirements_dict[ 'name' ]
                            tool_dependency_version = requirements_dict[ 'version' ]
                            tool_dependency_type = requirements_dict[ 'type' ]
                            tool_dependency_readme = requirements_dict.get( 'readme', '' )
                            tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
                    for tool_elem in elem.findall( 'tool' ):
                        migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
        if tool_shed_accessible:
            # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
            for tool_panel_config in tool_panel_configs:
                tree, error_message = xml_util.parse_xml( tool_panel_config )
                if tree:
                    root = tree.getroot()
                    for elem in root:
                        if elem.tag == 'tool':
                            missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
                        elif elem.tag == 'section':
                            for section_elem in elem:
                                if section_elem.tag == 'tool':
                                    missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
    return tool_shed_accessible, missing_tool_configs_dict
Example #8
def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode( workflow_name )
    if trans.webapp.name == 'tool_shed':
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
        repository_id = trans.security.encode_id( repository_metadata.repository_id )
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = suc.get_tool_shed_repository_by_id( trans.app, repository_id )
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata[ 'workflows' ]:
        workflow_dict = workflow_tup[1]
        if workflow_dict[ 'name' ] == workflow_name:
            break
    if 'tools' in metadata:
        tools_metadata = metadata[ 'tools' ]
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
                                                          workflow_dict=workflow_dict,
                                                          tools_metadata=tools_metadata,
                                                          repository_id=repository_id,
                                                          changeset_revision=changeset_revision )
    workflow_canvas = WorkflowCanvas()
    canvas = workflow_canvas.canvas
    # Store px width for boxes of each step.
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
        tool_errors = module.type == 'tool' and not module.tool
        module_data_inputs = get_workflow_data_inputs( step, module )
        module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps )
        module_name = get_workflow_module_name( module, missing_tool_tups )
        workflow_canvas.populate_data_for_step(
            step,
            module_name,
            module_data_inputs,
            module_data_outputs,
            tool_errors=tool_errors
        )
    workflow_canvas.add_steps( highlight_errors=True )
    workflow_canvas.finish( )
    trans.response.set_content_type( "image/svg+xml" )
    return canvas.tostring()
Example #9
def get_repository_dependencies(app, tool_shed_url, repository_name, repository_owner, changeset_revision):
    repository_dependencies_dict = {}
    tool_shed_accessible = True
    params = dict(name=repository_name, owner=repository_owner, changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_repository_dependencies']
    try:
        raw_text = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        log.warning("The URL\n%s\nraised the exception:\n%s\n", util.build_url(tool_shed_url, pathspec=pathspec, params=params), e)
    if tool_shed_accessible:
        if len(raw_text) > 2:
            encoded_text = json.loads(raw_text)
            repository_dependencies_dict = encoding_util.tool_shed_decode(encoded_text)
    return tool_shed_accessible, repository_dependencies_dict
Example #10
 def get_update_to_changeset_revision_and_ctx_rev( self, repository ):
     """Return the changeset revision hash to which the repository can be updated."""
     changeset_revision_dict = {}
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( repository.tool_shed ) )
     params = dict( name=str( repository.name ),
                    owner=str( repository.owner ),
                    changeset_revision=str( repository.installed_changeset_revision ) )
     pathspec = [ 'repository', 'get_changeset_revision_and_ctx_rev' ]
     try:
         encoded_update_dict = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
         if encoded_update_dict:
             update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
             includes_data_managers = update_dict.get( 'includes_data_managers', False )
             includes_datatypes = update_dict.get( 'includes_datatypes', False )
             includes_tools = update_dict.get( 'includes_tools', False )
             includes_tools_for_display_in_tool_panel = update_dict.get( 'includes_tools_for_display_in_tool_panel', False )
             includes_tool_dependencies = update_dict.get( 'includes_tool_dependencies', False )
             includes_workflows = update_dict.get( 'includes_workflows', False )
             has_repository_dependencies = update_dict.get( 'has_repository_dependencies', False )
             has_repository_dependencies_only_if_compiling_contained_td = update_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
             changeset_revision = update_dict.get( 'changeset_revision', None )
             ctx_rev = update_dict.get( 'ctx_rev', None )
         changeset_revision_dict[ 'includes_data_managers' ] = includes_data_managers
         changeset_revision_dict[ 'includes_datatypes' ] = includes_datatypes
         changeset_revision_dict[ 'includes_tools' ] = includes_tools
         changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = includes_tools_for_display_in_tool_panel
         changeset_revision_dict[ 'includes_tool_dependencies' ] = includes_tool_dependencies
         changeset_revision_dict[ 'includes_workflows' ] = includes_workflows
         changeset_revision_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
         changeset_revision_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = has_repository_dependencies_only_if_compiling_contained_td
         changeset_revision_dict[ 'changeset_revision' ] = changeset_revision
         changeset_revision_dict[ 'ctx_rev' ] = ctx_rev
     except Exception as e:
         log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
         changeset_revision_dict[ 'includes_data_managers' ] = False
         changeset_revision_dict[ 'includes_datatypes' ] = False
         changeset_revision_dict[ 'includes_tools' ] = False
         changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = False
         changeset_revision_dict[ 'includes_tool_dependencies' ] = False
         changeset_revision_dict[ 'includes_workflows' ] = False
         changeset_revision_dict[ 'has_repository_dependencies' ] = False
         changeset_revision_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = False
         changeset_revision_dict[ 'changeset_revision' ] = None
         changeset_revision_dict[ 'ctx_rev' ] = None
     return changeset_revision_dict
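A minimal sketch of consuming the returned dictionary; `irm` stands in for whatever object exposes this method and `repository` for an installed repository record:

# Minimal sketch; `irm` and `repository` are placeholders.
changeset_revision_dict = irm.get_update_to_changeset_revision_and_ctx_rev(repository)
if changeset_revision_dict['changeset_revision'] is not None:
    print('Update available: %s (ctx rev %s)' %
          (changeset_revision_dict['changeset_revision'],
           changeset_revision_dict['ctx_rev']))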
Example #11
 def get_update_to_changeset_revision_and_ctx_rev( self, repository ):
     """Return the changeset revision hash to which the repository can be updated."""
     changeset_revision_dict = {}
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( repository.tool_shed ) )
     params = dict( name=str( repository.name ),
                    owner=str( repository.owner ),
                    changeset_revision=str( repository.installed_changeset_revision ) )
     pathspec = [ 'repository', 'get_changeset_revision_and_ctx_rev' ]
     try:
         encoded_update_dict = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
         if encoded_update_dict:
             update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
             includes_data_managers = update_dict.get( 'includes_data_managers', False )
             includes_datatypes = update_dict.get( 'includes_datatypes', False )
             includes_tools = update_dict.get( 'includes_tools', False )
             includes_tools_for_display_in_tool_panel = update_dict.get( 'includes_tools_for_display_in_tool_panel', False )
             includes_tool_dependencies = update_dict.get( 'includes_tool_dependencies', False )
             includes_workflows = update_dict.get( 'includes_workflows', False )
             has_repository_dependencies = update_dict.get( 'has_repository_dependencies', False )
             has_repository_dependencies_only_if_compiling_contained_td = update_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
             changeset_revision = update_dict.get( 'changeset_revision', None )
             ctx_rev = update_dict.get( 'ctx_rev', None )
         changeset_revision_dict[ 'includes_data_managers' ] = includes_data_managers
         changeset_revision_dict[ 'includes_datatypes' ] = includes_datatypes
         changeset_revision_dict[ 'includes_tools' ] = includes_tools
         changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = includes_tools_for_display_in_tool_panel
         changeset_revision_dict[ 'includes_tool_dependencies' ] = includes_tool_dependencies
         changeset_revision_dict[ 'includes_workflows' ] = includes_workflows
         changeset_revision_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
         changeset_revision_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = has_repository_dependencies_only_if_compiling_contained_td
         changeset_revision_dict[ 'changeset_revision' ] = changeset_revision
         changeset_revision_dict[ 'ctx_rev' ] = ctx_rev
     except Exception as e:
         log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
         changeset_revision_dict[ 'includes_data_managers' ] = False
         changeset_revision_dict[ 'includes_datatypes' ] = False
         changeset_revision_dict[ 'includes_tools' ] = False
         changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = False
         changeset_revision_dict[ 'includes_tool_dependencies' ] = False
         changeset_revision_dict[ 'includes_workflows' ] = False
         changeset_revision_dict[ 'has_repository_dependencies' ] = False
         changeset_revision_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = False
         changeset_revision_dict[ 'changeset_revision' ] = None
         changeset_revision_dict[ 'ctx_rev' ] = None
     return changeset_revision_dict
Example #12
def get_tool_dependencies(app, tool_shed_url, repository_name, repository_owner, changeset_revision):
    tool_dependencies = []
    tool_shed_accessible = True
    params = dict(name=repository_name, owner=repository_owner, changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_tool_dependencies']
    try:
        text = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        log.warning("The URL\n%s\nraised the exception:\n%s\n", util.build_url(tool_shed_url, pathspec=pathspec, params=params), e)
    if tool_shed_accessible:
        if text:
            tool_dependencies_dict = encoding_util.tool_shed_decode(text)
            for requirements_dict in tool_dependencies_dict.values():
                tool_dependency_name = requirements_dict['name']
                tool_dependency_version = requirements_dict['version']
                tool_dependency_type = requirements_dict['type']
                tool_dependencies.append((tool_dependency_name, tool_dependency_version, tool_dependency_type))
    return tool_shed_accessible, tool_dependencies
Example #13
 def get_repository_dependencies_for_installed_tool_shed_repository(self, app, repository):
     """
     Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
     for the received repository which is installed into Galaxy.  This method is called only from Galaxy.
     """
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, str(repository.tool_shed))
     params = dict(name=str(repository.name),
                   owner=str(repository.owner),
                   changeset_revision=str(repository.changeset_revision))
     pathspec = ['repository', 'get_repository_dependencies']
     try:
         raw_text = url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
     except Exception as e:
         log.error("The URL\n%s\nraised the exception:\n%s\n", build_url(tool_shed_url, pathspec=pathspec, params=params), str(e))
         return ''
     if len(raw_text) > 2:
         encoded_text = json.loads(raw_text)
         text = encoding_util.tool_shed_decode(encoded_text)
     else:
         text = ''
     return text
Example #14
 def check_for_tool_dependencies(self, trans, migration_stage):
     # Get the 000x_tools.xml file associated with migration_stage.
     tools_xml_file_path = os.path.abspath(
         os.path.join(trans.app.config.root, 'scripts', 'migrate_tools',
                      '%04d_tools.xml' % migration_stage))
     tree = galaxy.util.parse_xml(tools_xml_file_path)
     root = tree.getroot()
     tool_shed = root.get('name')
     tool_shed_url = self.get_tool_shed_url_from_tools_xml_file_path(
         trans, tool_shed)
     repo_name_dependency_tups = []
     if tool_shed_url:
         for elem in root:
             if elem.tag == 'repository':
                 tool_dependencies = []
                 tool_dependencies_dict = {}
                 repository_name = elem.get('name')
                 changeset_revision = elem.get('changeset_revision')
                 url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
                      (tool_shed_url, repository_name, changeset_revision)
                 text = common_util.tool_shed_get(trans.app, tool_shed_url,
                                                  url)
                 if text:
                     tool_dependencies_dict = encoding_util.tool_shed_decode(
                         text)
                      for dependency_key, requirements_dict in tool_dependencies_dict.items():
                          tool_dependency_name = requirements_dict['name']
                          tool_dependency_version = requirements_dict['version']
                          tool_dependency_type = requirements_dict['type']
                          tool_dependency_readme = requirements_dict.get('readme', '')
                          tool_dependencies.append(
                              (tool_dependency_name, tool_dependency_version,
                               tool_dependency_type, tool_dependency_readme))
                 repo_name_dependency_tups.append(
                     (repository_name, tool_dependencies))
     return repo_name_dependency_tups
Example #15
 def check_for_tool_dependencies(self, trans, migration_stage):
     # Get the 000x_tools.xml file associated with migration_stage.
     tools_xml_file_path = os.path.abspath(
         os.path.join(trans.app.config.root, "scripts", "migrate_tools", "%04d_tools.xml" % migration_stage)
     )
     tree = galaxy.util.parse_xml(tools_xml_file_path)
     root = tree.getroot()
     tool_shed = root.get("name")
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, tool_shed)
     repo_name_dependency_tups = []
     if tool_shed_url:
         for elem in root:
             if elem.tag == "repository":
                 tool_dependencies = []
                 tool_dependencies_dict = {}
                 repository_name = elem.get("name")
                 changeset_revision = elem.get("changeset_revision")
                 params = dict(name=repository_name, owner="devteam", changeset_revision=changeset_revision)
                 pathspec = ["repository", "get_tool_dependencies"]
                 text = common_util.tool_shed_get(trans.app, tool_shed_url, pathspec=pathspec, params=params)
                 if text:
                     tool_dependencies_dict = encoding_util.tool_shed_decode(text)
                     for dependency_key, requirements_dict in tool_dependencies_dict.items():
                         tool_dependency_name = requirements_dict["name"]
                         tool_dependency_version = requirements_dict["version"]
                         tool_dependency_type = requirements_dict["type"]
                         tool_dependency_readme = requirements_dict.get("readme", "")
                         tool_dependencies.append(
                             (
                                 tool_dependency_name,
                                 tool_dependency_version,
                                 tool_dependency_type,
                                 tool_dependency_readme,
                             )
                         )
                 repo_name_dependency_tups.append((repository_name, tool_dependencies))
     return repo_name_dependency_tups
Example #16
def validate_capsule( trans, **kwd ):
    """Inspect the uploaded capsule's manifest and it's contained files to ensure it is a valid repository capsule."""
    capsule_dict = {}
    capsule_dict.update( kwd )
    encoded_file_path = capsule_dict.get( 'encoded_file_path', '' )
    file_path = encoding_util.tool_shed_decode( encoded_file_path )
    # The capsule must contain a valid XML file named export_info.xml.
    export_info_file_path = os.path.join( file_path, 'export_info.xml' )
    export_info_tree, error_message = xml_util.parse_xml( export_info_file_path )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    # The capsule must contain a valid XML file named manifest.xml.
    manifest_file_path = os.path.join( file_path, 'manifest.xml' )
    # Validate the capsule manifest by inspecting name, owner, changeset_revision and type information contained within
    # each <repository> tag set.
    repository_info_dicts, error_message = get_repository_info_from_manifest( manifest_file_path )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    # Validate the capsule manifest by ensuring all <repository> tag sets contain a valid <archive> sub-element.
    archives, error_message = get_archives_from_manifest( manifest_file_path )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    # Validate the capsule manifest by ensuring each defined archive file name exists within the capsule.
    error_message = verify_archives_in_capsule( file_path, archives )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    capsule_dict[ 'status' ] = 'ok'
    return capsule_dict
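The four identical error branches above could be collapsed with a small helper; a sketch under that assumption (the helper name is hypothetical):

def _capsule_error(capsule_dict, error_message):
    # Hypothetical helper: flag the capsule invalid and hand the dict back,
    # so each validation step becomes
    # `return _capsule_error(capsule_dict, error_message)`.
    capsule_dict['error_message'] = error_message
    capsule_dict['status'] = 'error'
    return capsule_dict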
Example #17
    def install(self, trans, **kwd):
        """
        POST /api/tool_shed_repositories/install
        Initiate the installation of a repository.

        :param install_resolver_dependencies: True to install resolvable dependencies.
        :param install_tool_dependencies: True to install tool dependencies.
        :param install_repository_dependencies: True to install repository dependencies.
        :param tool_panel_section_id: The unique identifier for an existing tool panel section
        :param new_tool_panel_section_label: Create a new tool panel section with this label
        :param shed_tool_conf: The shed tool config file to use for this installation
        :param tool_shed_url: The URL for the toolshed whence this repository is being installed
        :param changeset: The changeset to update to after cloning the repository
        """
        irm = InstallRepositoryManager(self.app)
        tool_shed_url = kwd.get('tool_shed_url', None)
        repositories = json.loads(kwd.get('repositories', '[]'))
        repo_info_dict = self.__get_repo_info_dict(trans, repositories,
                                                   tool_shed_url)
        includes_tools = False
        includes_tools_for_display_in_tool_panel = False
        has_repository_dependencies = False
        includes_tool_dependencies = False
        install_resolver_dependencies = util.asbool(
            kwd.get('install_resolver_dependencies', False))
        for encoded_repo_info_dict in repo_info_dict.get(
                'repo_info_dicts', []):
            decoded_repo_info_dict = encoding_util.tool_shed_decode(
                encoded_repo_info_dict)
            if not includes_tools:
                includes_tools = util.string_as_bool(
                    decoded_repo_info_dict.get('includes_tools', False))
            if not includes_tools_for_display_in_tool_panel:
                includes_tools_for_display_in_tool_panel = \
                    util.string_as_bool(decoded_repo_info_dict.get('includes_tools_for_display_in_tool_panel', False))
            if not has_repository_dependencies:
                has_repository_dependencies = util.string_as_bool(
                    repo_info_dict.get('has_repository_dependencies', False))
            if not includes_tool_dependencies:
                includes_tool_dependencies = util.string_as_bool(
                    repo_info_dict.get('includes_tool_dependencies', False))
        encoded_repo_info_dicts = util.listify(
            repo_info_dict.get('repo_info_dicts', []))
        repo_info_dicts = [
            encoding_util.tool_shed_decode(encoded_repo_info_dict)
            for encoded_repo_info_dict in encoded_repo_info_dicts
        ]
        tool_panel_section_id = kwd.get('tool_panel_section_id', None)
        new_tool_panel_section_label = kwd.get('new_tool_panel_section', None)
        tool_panel_section_mapping = json.loads(
            kwd.get('tool_panel_section', '{}'))
        install_tool_dependencies = util.asbool(
            kwd.get('install_tool_dependencies', False))
        install_repository_dependencies = util.asbool(
            kwd.get('install_repository_dependencies', False))
        shed_tool_conf = kwd.get('shed_tool_conf', None)
        tool_path = suc.get_tool_path_by_shed_tool_conf_filename(
            self.app, shed_tool_conf)
        installation_dict = dict(
            install_repository_dependencies=install_repository_dependencies,
            new_tool_panel_section_label=new_tool_panel_section_label,
            no_changes_checked=False,
            repo_info_dicts=repo_info_dicts,
            tool_panel_section_id=tool_panel_section_id,
            tool_path=tool_path,
            tool_shed_url=tool_shed_url)
        new_repositories, tool_panel_keys, repo_info_dicts, filtered_repos = irm.handle_tool_shed_repositories(
            installation_dict)
        if new_repositories:
            installation_dict = dict(
                created_or_updated_tool_shed_repositories=new_repositories,
                filtered_repo_info_dicts=filtered_repos,
                has_repository_dependencies=has_repository_dependencies,
                includes_tool_dependencies=includes_tool_dependencies,
                includes_tools=includes_tools,
                includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
                install_repository_dependencies=install_repository_dependencies,
                install_tool_dependencies=install_tool_dependencies,
                message='',
                new_tool_panel_section_label=new_tool_panel_section_label,
                tool_panel_section_mapping=tool_panel_section_mapping,
                install_resolver_dependencies=install_resolver_dependencies,
                shed_tool_conf=shed_tool_conf,
                status='ok',
                tool_panel_section_id=tool_panel_section_id,
                tool_panel_section_keys=tool_panel_keys,
                tool_path=tool_path,
                tool_shed_url=tool_shed_url)
            encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
                irm.initiate_repository_installation(installation_dict)
            return json.dumps(
                dict(
                    operation='install',
                    api=True,
                    install_resolver_dependencies=install_resolver_dependencies,
                    install_tool_dependencies=install_tool_dependencies,
                    encoded_kwd=encoded_kwd,
                    reinstalling=False,
                    tool_shed_repository_ids=json.dumps(
                        [repo[0] for repo in repositories]),
                    repositories=[
                        trans.security.encode_id(repo.id)
                        for repo in new_repositories
                    ]))
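A hedged sketch of exercising this endpoint over HTTP; the server URL and API key are illustrative, the parameter names follow the docstring, and the shape of repositories is inferred from the repo[0] indexing above:

# Hedged sketch; URL, key, and the exact `repositories` shape are assumptions.
import json
import requests

response = requests.post(
    'http://localhost:8080/api/tool_shed_repositories/install',
    data={'key': 'YOUR_API_KEY',
          'tool_shed_url': 'https://toolshed.g2.bx.psu.edu',
          'repositories': json.dumps([['add_column', 'test', '3a08cc21466f']]),
          'install_tool_dependencies': True})
print(response.json())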
Example #18
def import_repository_archive( trans, repository, repository_archive_dict ):
    """Import a repository archive contained within a repository capsule."""
    archive_file_name = repository_archive_dict.get( 'archive_file_name', None )
    capsule_file_name = repository_archive_dict[ 'capsule_file_name' ]
    encoded_file_path = repository_archive_dict[ 'encoded_file_path' ]
    file_path = encoding_util.tool_shed_decode( encoded_file_path )
    results_dict = dict( ok=True, error_message='' )
    archive_file_path = os.path.join( file_path, archive_file_name )
    archive = tarfile.open( archive_file_path, 'r:*' )
    repo_dir = repository.repo_path( trans.app )
    repo = hg.repository( suc.get_configured_ui(), repo_dir )
    undesirable_dirs_removed = 0
    undesirable_files_removed = 0
    ok, error_message = commit_util.check_archive( repository, archive )
    if ok:
        full_path = os.path.abspath( repo_dir )
        filenames_in_archive = []
        for tarinfo_obj in archive.getmembers():
            # Check files and directories in the archive.
            ok = os.path.basename( tarinfo_obj.name ) not in commit_util.UNDESIRABLE_FILES
            if ok:
                for file_path_item in tarinfo_obj.name.split( '/' ):
                    if file_path_item in commit_util.UNDESIRABLE_DIRS:
                        undesirable_dirs_removed += 1
                        error_message = 'Import failed: invalid file path <b>%s</b> in archive <b>%s</b>' % \
                            ( str( file_path_item ), str( archive_file_name ) )
                        results_dict[ 'ok' ] = False
                        results_dict[ 'error_message' ] += error_message
                        return results_dict
                filenames_in_archive.append( tarinfo_obj.name )
            else:
                undesirable_files_removed += 1
        # Extract the uploaded archive to the repository root.
        archive.extractall( path=full_path )
        archive.close()
        for filename in filenames_in_archive:
            uploaded_file_name = os.path.join( full_path, filename )
            if os.path.split( uploaded_file_name )[ -1 ] == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
                altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans,
                                                                                                           uploaded_file_name,
                                                                                                           unpopulate=False )
                if error_message:
                    results_dict[ 'ok' ] = False
                    results_dict[ 'error_message' ] += error_message
                if altered:
                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                    shutil.move( tmp_filename, uploaded_file_name )
            elif os.path.split( uploaded_file_name )[ -1 ] == suc.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
                altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
                if error_message:
                    results_dict[ 'ok' ] = False
                    results_dict[ 'error_message' ] += error_message
                if altered:
                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                    shutil.move( tmp_filename, uploaded_file_name )
        commit_message = 'Imported from capsule %s' % str( capsule_file_name )
        # Send email notification to those that have registered to receive alerts for new repositories in this Tool Shed.
        new_repo_alert = True
        # Since the repository is new, the following must be False.
        remove_repo_files_not_in_tar = False
        ok, error_message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
            commit_util.handle_directory_changes( trans,
                                                  repository,
                                                  full_path,
                                                  filenames_in_archive,
                                                  remove_repo_files_not_in_tar,
                                                  new_repo_alert,
                                                  commit_message,
                                                  undesirable_dirs_removed,
                                                  undesirable_files_removed )
        try:
            metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str )
        except Exception as e:
            log.debug( "Error setting metadata on repository %s created from imported archive %s: %s" % \
                ( str( repository.name ), str( archive_file_name ), str( e ) ) )
        results_dict[ 'ok' ] = ok
        results_dict[ 'error_message' ] += error_message
    return results_dict
Example #19
    def install( self, trans, **kwd ):
        """
        POST /api/tool_shed_repositories/install
        Initiate the installation of a repository.

        :param install_resolver_dependencies: True to install resolvable dependencies.
        :param install_tool_dependencies: True to install tool dependencies.
        :param install_repository_dependencies: True to install repository dependencies.
        :param tool_panel_section_id: The unique identifier for an existing tool panel section
        :param new_tool_panel_section_label: Create a new tool panel section with this label
        :param shed_tool_conf: The shed tool config file to use for this installation
        :param tool_shed_url: The URL for the toolshed whence this repository is being installed
        :param changeset: The changeset to update to after cloning the repository
        """
        irm = InstallRepositoryManager( self.app )
        tool_shed_url = kwd.get( 'tool_shed_url', None )
        repositories = json.loads( kwd.get( 'repositories', '[]' ) )
        repo_info_dict = self.__get_repo_info_dict( trans, repositories, tool_shed_url )
        includes_tools = False
        includes_tools_for_display_in_tool_panel = False
        has_repository_dependencies = False
        includes_tool_dependencies = False
        install_resolver_dependencies = util.asbool( kwd.get( 'install_resolver_dependencies', False ) )
        for encoded_repo_info_dict in repo_info_dict.get( 'repo_info_dicts', [] ):
            decoded_repo_info_dict = encoding_util.tool_shed_decode( encoded_repo_info_dict )
            if not includes_tools:
                includes_tools = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools', False ) )
            if not includes_tools_for_display_in_tool_panel:
                includes_tools_for_display_in_tool_panel = \
                    util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
            if not has_repository_dependencies:
                has_repository_dependencies = util.string_as_bool( repo_info_dict.get( 'has_repository_dependencies', False ) )
            if not includes_tool_dependencies:
                includes_tool_dependencies = util.string_as_bool( repo_info_dict.get( 'includes_tool_dependencies', False ) )
        encoded_repo_info_dicts = util.listify( repo_info_dict.get( 'repo_info_dicts', [] ) )
        repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
        tool_panel_section_id = kwd.get( 'tool_panel_section_id', None )
        new_tool_panel_section_label = kwd.get( 'new_tool_panel_section', None )
        tool_panel_section_mapping = json.loads( kwd.get( 'tool_panel_section', '{}' ) )
        install_tool_dependencies = util.asbool( kwd.get( 'install_tool_dependencies', False ) )
        install_repository_dependencies = util.asbool( kwd.get( 'install_repository_dependencies', False ) )
        shed_tool_conf = kwd.get( 'shed_tool_conf', None )
        tool_path = suc.get_tool_path_by_shed_tool_conf_filename( self.app, shed_tool_conf )
        installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
                                  new_tool_panel_section_label=new_tool_panel_section_label,
                                  no_changes_checked=False,
                                  repo_info_dicts=repo_info_dicts,
                                  tool_panel_section_id=tool_panel_section_id,
                                  tool_path=tool_path,
                                  tool_shed_url=tool_shed_url )
        new_repositories, tool_panel_keys, repo_info_dicts, filtered_repos = irm.handle_tool_shed_repositories( installation_dict )
        if new_repositories:
            installation_dict = dict( created_or_updated_tool_shed_repositories=new_repositories,
                                      filtered_repo_info_dicts=filtered_repos,
                                      has_repository_dependencies=has_repository_dependencies,
                                      includes_tool_dependencies=includes_tool_dependencies,
                                      includes_tools=includes_tools,
                                      includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
                                      install_repository_dependencies=install_repository_dependencies,
                                      install_tool_dependencies=install_tool_dependencies,
                                      message='',
                                      new_tool_panel_section_label=new_tool_panel_section_label,
                                      tool_panel_section_mapping=tool_panel_section_mapping,
                                      install_resolver_dependencies=install_resolver_dependencies,
                                      shed_tool_conf=shed_tool_conf,
                                      status='ok',
                                      tool_panel_section_id=tool_panel_section_id,
                                      tool_panel_section_keys=tool_panel_keys,
                                      tool_path=tool_path,
                                      tool_shed_url=tool_shed_url )
            encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
                irm.initiate_repository_installation( installation_dict )
            return json.dumps( dict( operation='install',
                                     api=True,
                                     install_resolver_dependencies=install_resolver_dependencies,
                                     install_tool_dependencies=install_tool_dependencies,
                                     encoded_kwd=encoded_kwd,
                                     reinstalling=False,
                                     tool_shed_repository_ids=json.dumps( [ repo[0] for repo in repositories ] ),
                                     repositories=[ trans.security.encode_id( repo.id ) for repo in new_repositories ] ) )
Example #20
class RepositoriesController(BaseAPIController):
    """RESTful controller for interactions with repositories in the Tool Shed."""
    @web.expose_api
    def add_repository_registry_entry(self, trans, payload, **kwd):
        """
        POST /api/repositories/add_repository_registry_entry
        Adds appropriate entries to the repository registry for the repository defined by the received name and owner.

        :param key: the user's API key
        
        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed containing the Repository
        :param name (required): the name of the Repository
        :param owner (required): the owner of the Repository
        """
        response_dict = {}
        if not trans.user_is_admin():
            response_dict['status'] = 'error'
            response_dict['message'] = "You are not authorized to add entries to this Tool Shed's repository registry."
            return response_dict
        tool_shed_url = payload.get('tool_shed_url', '')
        if not tool_shed_url:
            raise HTTPBadRequest(
                detail="Missing required parameter 'tool_shed_url'.")
        tool_shed_url = tool_shed_url.rstrip('/')
        name = payload.get('name', '')
        if not name:
            raise HTTPBadRequest(detail="Missing required parameter 'name'.")
        owner = payload.get('owner', '')
        if not owner:
            raise HTTPBadRequest(detail="Missing required parameter 'owner'.")
        repository = suc.get_repository_by_name_and_owner(
            trans.app, name, owner)
        if repository is None:
            error_message = 'Cannot locate repository with name %s and owner %s.' % (
                str(name), str(owner))
            log.debug(error_message)
            response_dict['status'] = 'error'
            response_dict['message'] = error_message
            return response_dict
        # Update the repository registry.
        trans.app.repository_registry.add_entry(repository)
        response_dict['status'] = 'ok'
        response_dict['message'] = 'Entries for repository %s owned by %s have been added to the Tool Shed repository registry.' % (name, owner)
        return response_dict
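    # A hedged sketch of calling the endpoint above; the Tool Shed URL and
    # API key are placeholders, and the payload fields follow the docstring:
    #
    #     import requests
    #     response = requests.post(
    #         'https://toolshed.g2.bx.psu.edu/api/repositories/add_repository_registry_entry',
    #         params={'key': 'YOUR_API_KEY'},
    #         json={'tool_shed_url': 'https://toolshed.g2.bx.psu.edu',
    #               'name': 'add_column',
    #               'owner': 'test'})
    #     print(response.json())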

    @web.expose_api_anonymous
    def get_ordered_installable_revisions(self, trans, name, owner, **kwd):
        """
        GET /api/repositories/get_ordered_installable_revisions

        :param name: the name of the Repository
        :param owner: the owner of the Repository

        Returns the ordered list of changeset revision hash strings that are associated with installable revisions.
        As in the changelog, the list is ordered oldest to newest.
        """
        # Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_column&owner=test
        if name and owner:
            # Get the repository information.
            repository = suc.get_repository_by_name_and_owner(
                trans.app, name, owner)
            if repository is None:
                error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
                error_message += "cannot locate repository %s owned by %s." % (
                    str(name), str(owner))
                log.debug(error_message)
                return []
            repo = hg_util.get_repo_for_repository(trans.app,
                                                   repository=repository,
                                                   repo_path=None,
                                                   create=False)
            ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions(
                repository, repo, downloadable=True)
            return ordered_installable_revisions
        else:
            error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
            error_message += "invalid name %s or owner %s received." % (
                str(name), str(owner))
            log.debug(error_message)
            return []
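    # Because the route is exposed anonymously, a plain GET suffices; a
    # minimal sketch mirroring the example URL in the comment above:
    #
    #     import requests
    #     revisions = requests.get(
    #         'http://localhost:9009/api/repositories/get_ordered_installable_revisions',
    #         params={'name': 'add_column', 'owner': 'test'}).json()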

    @web.expose_api_anonymous
    def get_repository_revision_install_info(self, trans, name, owner,
                                             changeset_revision, **kwd):
        """
        GET /api/repositories/get_repository_revision_install_info

        :param name: the name of the Repository
        :param owner: the owner of the Repository
        :param changeset_revision: the changeset_revision of the RepositoryMetadata object associated with the Repository

        Returns a list of the following dictionaries::
        - a dictionary defining the Repository.  For example:
        {
            "deleted": false,
            "deprecated": false,
            "description": "add_column hello",
            "id": "f9cad7b01a472135",
            "long_description": "add_column hello",
            "name": "add_column",
            "owner": "test",
            "private": false,
            "times_downloaded": 6,
            "url": "/api/repositories/f9cad7b01a472135",
            "user_id": "f9cad7b01a472135"
        }
        - a dictionary defining the Repository revision (RepositoryMetadata).  For example:
        {
            "changeset_revision": "3a08cc21466f",
            "downloadable": true,
            "has_repository_dependencies": false,
            "has_repository_dependencies_only_if_compiling_contained_td": false,
            "id": "f9cad7b01a472135",
            "includes_datatypes": false,
            "includes_tool_dependencies": false,
            "includes_tools": true,
            "includes_tools_for_display_in_tool_panel": true,
            "includes_workflows": false,
            "malicious": false,
            "repository_id": "f9cad7b01a472135",
            "url": "/api/repository_revisions/f9cad7b01a472135"
        }
        - a dictionary including the additional information required to install the repository.  For example:
        {
            "add_column": [
                "add_column hello",
                "http://test@localhost:9009/repos/test/add_column",
                "3a08cc21466f",
                "1",
                "test",
                {},
                {}
            ]
        }
        """
        # Example URL:
        # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr>
        if name and owner and changeset_revision:
            # Get the repository information.
            repository = suc.get_repository_by_name_and_owner(
                trans.app, name, owner)
            if repository is None:
                log.debug('Cannot locate repository %s owned by %s' %
                          (str(name), str(owner)))
                return {}, {}, {}
            encoded_repository_id = trans.security.encode_id(repository.id)
            repository_dict = repository.to_dict(
                view='element', value_mapper=self.__get_value_mapper(trans))
            repository_dict['url'] = web.url_for(controller='repositories',
                                                 action='show',
                                                 id=encoded_repository_id)
            # Get the repository_metadata information.
            repository_metadata = suc.get_repository_metadata_by_changeset_revision(
                trans.app, encoded_repository_id, changeset_revision)
            if repository_metadata is None:
                # The changeset_revision column in the repository_metadata table has been updated with a new
                # value, so find the changeset_revision to which we need to update.
                repo = hg_util.get_repo_for_repository(trans.app,
                                                       repository=repository,
                                                       repo_path=None,
                                                       create=False)
                new_changeset_revision = suc.get_next_downloadable_changeset_revision(
                    repository, repo, changeset_revision)
                repository_metadata = suc.get_repository_metadata_by_changeset_revision(
                    trans.app, encoded_repository_id, new_changeset_revision)
                changeset_revision = new_changeset_revision
            if repository_metadata is not None:
                encoded_repository_metadata_id = trans.security.encode_id(
                    repository_metadata.id)
                repository_metadata_dict = repository_metadata.to_dict(
                    view='collection',
                    value_mapper=self.__get_value_mapper(trans))
                repository_metadata_dict['url'] = web.url_for(
                    controller='repository_revisions',
                    action='show',
                    id=encoded_repository_metadata_id)
                # Get the repo_info_dict for installing the repository.
                repo_info_dict, \
                includes_tools, \
                includes_tool_dependencies, \
                includes_tools_for_display_in_tool_panel, \
                has_repository_dependencies, \
                has_repository_dependencies_only_if_compiling_contained_td = \
                    repository_util.get_repo_info_dict(
                        trans.app, trans.user, encoded_repository_id, changeset_revision)
                return repository_dict, repository_metadata_dict, repo_info_dict
            else:
                log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" % \
                    ( str( repository.id ), str( changeset_revision ) ) )
                return repository_dict, {}, {}
        else:
            debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: "
            debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." % \
                ( str( name ), str( owner ), str( changeset_revision ) )
            log.debug(debug_msg)
            return {}, {}, {}
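
A matching client-side sketch for the endpoint above; the three dictionaries are returned as a
JSON array, so they can be unpacked positionally (host and coordinates are placeholders):

import requests

url = 'http://localhost:9009/api/repositories/get_repository_revision_install_info'
params = {'name': 'add_column', 'owner': 'test', 'changeset_revision': '3a08cc21466f'}
repository_dict, metadata_dict, repo_info_dict = requests.get(url, params=params).json()
print(repository_dict['name'], metadata_dict['changeset_revision'])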

    def __get_value_mapper(self, trans):
        value_mapper = {
            'id': trans.security.encode_id,
            'repository_id': trans.security.encode_id,
            'user_id': trans.security.encode_id
        }
        return value_mapper

    @web.expose_api
    def import_capsule(self, trans, payload, **kwd):
        """
        POST /api/repositories/new/import_capsule
        Import a repository capsule into the Tool Shed.

        :param key: the user's API key

        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed into which the capsule should be imported.
        :param capsule_file_name (required): the name of the capsule file.
        """
        # Get the information about the capsule to be imported from the payload.
        tool_shed_url = payload.get('tool_shed_url', '')
        if not tool_shed_url:
            raise HTTPBadRequest(
                detail="Missing required parameter 'tool_shed_url'.")
        capsule_file_name = payload.get('capsule_file_name', '')
        if not capsule_file_name:
            raise HTTPBadRequest(
                detail="Missing required parameter 'capsule_file_name'.")
        capsule_file_path = os.path.abspath(capsule_file_name)
        capsule_dict = dict(error_message='',
                            encoded_file_path=None,
                            status='ok',
                            tar_archive=None,
                            uploaded_file=None,
                            capsule_file_name=None)
        if os.path.getsize(capsule_file_path) == 0:
            log.debug('Your capsule file %s is empty.' %
                      str(capsule_file_name))
            return {}
        try:
            # Open for reading with transparent compression.
            tar_archive = tarfile.open(capsule_file_path, 'r:*')
        except tarfile.ReadError as e:
            log.debug('Error opening capsule file %s: %s' %
                      (str(capsule_file_name), str(e)))
            return {}
        irm = capsule_manager.ImportRepositoryManager(trans.app,
                                                      trans.request.host,
                                                      trans.user,
                                                      trans.user_is_admin())
        capsule_dict['tar_archive'] = tar_archive
        capsule_dict['capsule_file_name'] = capsule_file_name
        capsule_dict = irm.extract_capsule_files(**capsule_dict)
        capsule_dict = irm.validate_capsule(**capsule_dict)
        status = capsule_dict.get('status', 'error')
        if status == 'error':
            log.debug('The capsule contents are invalid and cannot be imported:<br/>%s' %
                      str(capsule_dict.get('error_message', '')))
            return {}
        encoded_file_path = capsule_dict.get('encoded_file_path', None)
        if encoded_file_path is None:
            log.debug(
                'The capsule_dict %s is missing the required encoded_file_path entry.'
                % str(capsule_dict))
            return {}
        file_path = encoding_util.tool_shed_decode(encoded_file_path)
        export_info_file_path = os.path.join(file_path, 'export_info.xml')
        export_info_dict = irm.get_export_info_dict(export_info_file_path)
        manifest_file_path = os.path.join(file_path, 'manifest.xml')
        # The manifest.xml file has already been validated, so no error_message should be returned here.
        repository_info_dicts, error_message = irm.get_repository_info_from_manifest(
            manifest_file_path)
        # Determine the status for each exported repository archive contained within the capsule.
        repository_status_info_dicts = irm.get_repository_status_from_tool_shed(
            repository_info_dicts)
        # Generate a list of repository name / import results message tuples for display after the capsule is imported.
        import_results_tups = []
        # Only create repositories that do not yet exist and that the current user is authorized to create.  The
        # status will be None for repositories that fall into the intersection of these 2 categories.
        for repository_status_info_dict in repository_status_info_dicts:
            # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
            repository_status_info_dict[
                'capsule_file_name'] = capsule_file_name
            repository_status_info_dict[
                'encoded_file_path'] = encoded_file_path
            import_results_tups = irm.create_repository_and_import_archive(
                repository_status_info_dict, import_results_tups)
        irm.check_status_and_reset_downloadable(import_results_tups)
        basic_util.remove_dir(file_path)
        # NOTE: the order of installation is defined in import_results_tups, but order will be lost
        # when transferred to return_dict.
        return_dict = {}
        for import_results_tup in import_results_tups:
            ok, name_owner, message = import_results_tup
            name, owner = name_owner
            key = 'Archive of repository "%s" owned by "%s"' % (str(name),
                                                                str(owner))
            val = message.replace('<b>', '"').replace('</b>', '"')
            return_dict[key] = val
        return return_dict
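
The 'r:*' mode used when opening the capsule asks tarfile to auto-detect compression. A
self-contained sketch of the same open-and-validate pattern, with a hypothetical capsule path:

import os
import tarfile

capsule_file_path = os.path.abspath('my_capsule.tar.gz')  # placeholder capsule file
if os.path.getsize(capsule_file_path) == 0:
    print('The capsule file is empty.')
else:
    try:
        # 'r:*' opens for reading with transparent gzip/bz2 (or no) compression.
        with tarfile.open(capsule_file_path, 'r:*') as tar_archive:
            print(tar_archive.getnames())
    except tarfile.ReadError as e:
        print('Not a readable tar archive: %s' % e)
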
Exemplo n.º 21
0
def get_repository_dependencies(app, tool_shed_url, repository_name,
                                repository_owner, changeset_revision):
    repository_dependencies_dict = {}
    tool_shed_accessible = True
    params = dict(name=repository_name,
                  owner=repository_owner,
                  changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_repository_dependencies']
    try:
        raw_text = tool_shed_get(app,
                                 tool_shed_url,
                                 pathspec=pathspec,
                                 params=params)
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        print("The URL\n%s\nraised the exception:\n%s\n" % (url_join(
            tool_shed_url, pathspec=pathspec, params=params), str(e)))
    if tool_shed_accessible:
        if len(raw_text) > 2:
            encoded_text = json.loads(raw_text)
            repository_dependencies_dict = encoding_util.tool_shed_decode(
                encoded_text)
    return tool_shed_accessible, repository_dependencies_dict


def get_protocol_from_tool_shed_url(tool_shed_url):
    """Return the protocol from the received tool_shed_url if it exists."""
    try:
        if tool_shed_url.find('://') > 0:
            return tool_shed_url.split('://')[0].lower()
    except Exception as e:
        # We receive a lot of calls here where the tool_shed_url is None.  The container_util uses
        # that value when creating a header row.  If the tool_shed_url is not None, we have a problem.
        if tool_shed_url is not None:
            log.exception(
                "Handled exception getting the protocol from Tool Shed URL %s:\n%s"
                % (str(tool_shed_url), str(e)))
        # Default to HTTP protocol.
        return 'http'
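
For comparison, a minimal standalone sketch of the same protocol extraction built on the standard
library's URL parser (urlparse lives in urllib.parse on Python 3):

from urlparse import urlparse  # Python 2; use urllib.parse on Python 3

def get_protocol(tool_shed_url, default='http'):
    # urlparse yields an empty scheme for empty or schemeless URLs.
    scheme = urlparse(tool_shed_url or '').scheme
    return scheme.lower() if scheme else default

print(get_protocol('https://toolshed.g2.bx.psu.edu'))  # https
print(get_protocol(None))                              # http
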
Exemplo n.º 22
0
def get_repository_dependencies( app, tool_shed_url, repository_name, repository_owner, changeset_revision ):
    repository_dependencies_dict = {}
    tool_shed_accessible = True
    params = dict( name=repository_name, owner=repository_owner, changeset_revision=changeset_revision )
    pathspec = [ 'repository', 'get_repository_dependencies' ]
    try:
        raw_text = tool_shed_get( app, tool_shed_url, pathspec=pathspec, params=params )
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        log.warn( "The URL\n%s\nraised the exception:\n%s\n", url_join( tool_shed_url, pathspec=pathspec, params=params ), e )
    if tool_shed_accessible:
        # Skip trivial responses of two characters or fewer (e.g. an empty JSON dict).
        if len( raw_text ) > 2:
            encoded_text = json.loads( raw_text )
            repository_dependencies_dict = encoding_util.tool_shed_decode( encoded_text )
    return tool_shed_accessible, repository_dependencies_dict


def get_protocol_from_tool_shed_url( tool_shed_url ):
    """Return the protocol from the received tool_shed_url if it exists."""
    try:
        if tool_shed_url.find( '://' ) > 0:
            return tool_shed_url.split( '://' )[0].lower()
    except Exception as e:
        # We receive a lot of calls here where the tool_shed_url is None.  The container_util uses
        # that value when creating a header row.  If the tool_shed_url is not None, we have a problem.
        if tool_shed_url is not None:
            log.exception( "Handled exception getting the protocol from Tool Shed URL %s:\n%s", str( tool_shed_url ), e )
        # Default to HTTP protocol.
        return 'http'
Exemplo n.º 23
0
 def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
     """
     Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
      them to the list.  All repository_dependency entries in each of the received repo_info_dicts include
     all required repositories, so only one pass through this method is required to retrieve all repository
     dependencies.
     """
     all_required_repo_info_dict = {}
     all_repo_info_dicts = []
     if repo_info_dicts:
         # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
         # shed to discover repository ids.
         required_repository_tups = []
         for repo_info_dict in repo_info_dicts:
             if repo_info_dict not in all_repo_info_dicts:
                 all_repo_info_dicts.append( repo_info_dict )
             for repository_name, repo_info_tup in repo_info_dict.items():
                 description, \
                     repository_clone_url, \
                     changeset_revision, \
                     ctx_rev, \
                     repository_owner, \
                     repository_dependencies, \
                     tool_dependencies = \
                     suc.get_repo_info_tuple_contents( repo_info_tup )
                 if repository_dependencies:
                     for key, val in repository_dependencies.items():
                         if key in [ 'root_key', 'description' ]:
                             continue
                         repository_components_tuple = container_util.get_components_from_key( key )
                         components_list = suc.extract_components_from_tuple( repository_components_tuple )
                         # Skip listing a repository dependency if it is required only to compile a tool dependency
                         # defined for the dependent repository since in this case, the repository dependency is really
                         # a dependency of the dependent repository's contained tool dependency, and only if that
                         # tool dependency requires compilation.
                         # For backward compatibility to the 12/20/12 Galaxy release.
                         prior_installation_required = 'False'
                         only_if_compiling_contained_td = 'False'
                         if len( components_list ) == 4:
                             prior_installation_required = 'False'
                             only_if_compiling_contained_td = 'False'
                         elif len( components_list ) == 5:
                             prior_installation_required = components_list[ 4 ]
                             only_if_compiling_contained_td = 'False'
                         if not asbool( only_if_compiling_contained_td ):
                             if components_list not in required_repository_tups:
                                 required_repository_tups.append( components_list )
                         for components_list in val:
                             try:
                                 only_if_compiling_contained_td = components_list[ 5 ]
                              except IndexError:
                                 only_if_compiling_contained_td = 'False'
                             # Skip listing a repository dependency if it is required only to compile a tool dependency
                             # defined for the dependent repository (see above comment).
                             if not asbool( only_if_compiling_contained_td ):
                                 if components_list not in required_repository_tups:
                                     required_repository_tups.append( components_list )
                 else:
                     # We have a single repository with no dependencies.
                     components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                     required_repository_tups.append( components_list )
             if required_repository_tups:
                 # The value of required_repository_tups is a list of tuples, so we need to encode it.
                 encoded_required_repository_tups = []
                 for required_repository_tup in required_repository_tups:
                     # Convert every item in required_repository_tup to a string.
                     required_repository_tup = [ str( item ) for item in required_repository_tup ]
                     encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
                 encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
                 encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
                 if suc.is_tool_shed_client( self.app ):
                     # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
                 url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
                  # Work around urllib2.urlopen not handling a 307 redirect nicely when the urllib2.Request has data provided.
                 url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
                 request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
                 response = urllib2.urlopen( request ).read()
                 if response:
                     try:
                         required_repo_info_dict = json.loads( response )
                      except Exception as e:
                         log.exception( e )
                         return all_repo_info_dicts
                     required_repo_info_dicts = []
                     for k, v in required_repo_info_dict.items():
                         if k == 'repo_info_dicts':
                             encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                             for encoded_dict_str in encoded_dict_strings:
                                 decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                                 required_repo_info_dicts.append( decoded_dict )
                         else:
                             if k not in all_required_repo_info_dict:
                                 all_required_repo_info_dict[ k ] = v
                             else:
                                 if v and not all_required_repo_info_dict[ k ]:
                                     all_required_repo_info_dict[ k ] = v
                         if required_repo_info_dicts:
                             for required_repo_info_dict in required_repo_info_dicts:
                                 # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                                 # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                                 # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                                 # lists of discovered repository dependencies, but these lists will be empty in the
                                 # required_repo_info_dict since dependency discovery has not yet been performed for these
                                 # dictionaries.
                                 required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
                                 all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
                                 if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                                     all_repo_info_dicts.append( required_repo_info_dict )
                      all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
      return all_required_repo_info_dict
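
The wire format assembled above joins the items of each tuple with encoding_util.encoding_sep and
the tuples with encoding_util.encoding_sep2 before encoding the whole string. A standalone sketch
of that round trip, with separator values of '__esep__' and '__esepii__' assumed here for
illustration (the authoritative constants live in encoding_util) and placeholder repository tuples:

# Assumed separator constants; placeholder repository tuples.
ENCODING_SEP = '__esep__'
ENCODING_SEP2 = '__esepii__'
required_repository_tups = [
    ['http://localhost:9009', 'add_column', 'test', '3a08cc21466f'],
    ['http://localhost:9009', 'convert_chars', 'test', '0123456789ab'],
]
encoded = ENCODING_SEP2.join(
    ENCODING_SEP.join(str(item) for item in tup) for tup in required_repository_tups)
decoded = [chunk.split(ENCODING_SEP) for chunk in encoded.split(ENCODING_SEP2)]
assert decoded == required_repository_tups
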
Exemplo n.º 24
0
def generate_workflow_image(trans, workflow_name, repository_metadata_id=None, repository_id=None):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode(workflow_name)
    if trans.webapp.name == "tool_shed":
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id(trans, repository_metadata_id)
        repository_id = trans.security.encode_id(repository_metadata.repository_id)
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = suc.get_tool_shed_repository_by_id(trans, repository_id)
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata["workflows"]:
        workflow_dict = workflow_tup[1]
        if workflow_dict["name"] == workflow_name:
            break
    if "tools" in metadata:
        tools_metadata = metadata["tools"]
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict(
        trans=trans,
        workflow_dict=workflow_dict,
        tools_metadata=tools_metadata,
        repository_id=repository_id,
        changeset_revision=changeset_revision,
    )
    data = []
    canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
    text = svgfig.SVG("g")
    connectors = svgfig.SVG("g")
    boxes = svgfig.SVG("g")
    svgfig.Text.defaults["font-size"] = "10px"
    in_pos = {}
    out_pos = {}
    margin = 5
    # Spacing between input/outputs.
    line_px = 16
    # Store px width for boxes of each step.
    widths = {}
    max_width, max_x, max_y = 0, 0, 0
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step(trans, repository_id, changeset_revision, tools_metadata, step)
        tool_errors = module.type == "tool" and not module.tool
        module_data_inputs = get_workflow_data_inputs(step, module)
        module_data_outputs = get_workflow_data_outputs(step, module, workflow.steps)
        step_dict = {
            "id": step.order_index,
            "data_inputs": module_data_inputs,
            "data_outputs": module_data_outputs,
            "position": step.position,
            "tool_errors": tool_errors,
        }
        input_conn_dict = {}
        for conn in step.input_connections:
            input_conn_dict[conn.input_name] = dict(id=conn.output_step.order_index, output_name=conn.output_name)
        step_dict["input_connections"] = input_conn_dict
        data.append(step_dict)
        x, y = step.position["left"], step.position["top"]
        count = 0
        module_name = get_workflow_module_name(module, missing_tool_tups)
        max_len = len(module_name) * 1.5
        text.append(svgfig.Text(x, y + 20, module_name, **{"font-size": "14px"}).SVG())
        y += 45
        for di in module_data_inputs:
            cur_y = y + count * line_px
            if step.order_index not in in_pos:
                in_pos[step.order_index] = {}
            in_pos[step.order_index][di["name"]] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, di["label"]).SVG())
            count += 1
            max_len = max(max_len, len(di["label"]))
        if len(module.get_data_inputs()) > 0:
            y += 15
        for do in module_data_outputs:
            cur_y = y + count * line_px
            if step.order_index not in out_pos:
                out_pos[step.order_index] = {}
            out_pos[step.order_index][do["name"]] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, do["name"]).SVG())
            count += 1
            max_len = max(max_len, len(do["name"]))
        widths[step.order_index] = max_len * 5.5
        max_x = max(max_x, step.position["left"])
        max_y = max(max_y, step.position["top"])
        max_width = max(max_width, widths[step.order_index])
    for step_dict in data:
        tool_unavailable = step_dict["tool_errors"]
        width = widths[step_dict["id"]]
        x, y = step_dict["position"]["left"], step_dict["position"]["top"]
        # Only highlight missing tools if displaying in the tool shed.
        if trans.webapp.name == "tool_shed" and tool_unavailable:
            fill = "#EBBCB2"
        else:
            fill = "#EBD9B2"
        boxes.append(svgfig.Rect(x - margin, y, x + width - margin, y + 30, fill=fill).SVG())
        box_height = (len(step_dict["data_inputs"]) + len(step_dict["data_outputs"])) * line_px + margin
        # Draw separator line.
        if len(step_dict["data_inputs"]) > 0:
            box_height += 15
            sep_y = y + len(step_dict["data_inputs"]) * line_px + 40
            text.append(svgfig.Line(x - margin, sep_y, x + width - margin, sep_y).SVG())
        # Define an input/output box.
        boxes.append(svgfig.Rect(x - margin, y + 30, x + width - margin, y + 30 + box_height, fill="#ffffff").SVG())
        for conn, output_dict in step_dict["input_connections"].items():
            in_coords = in_pos[step_dict["id"]][conn]
            # out_pos_index will be a step number like 1, 2, 3...
            out_pos_index = output_dict["id"]
            # out_pos_name will be a string like 'o', 'o2', etc.
            out_pos_name = output_dict["output_name"]
            if out_pos_index in out_pos:
                # out_conn_index_dict will be something like:
                # 7: {'o': (824.5, 618)}
                out_conn_index_dict = out_pos[out_pos_index]
                if out_pos_name in out_conn_index_dict:
                    out_conn_pos = out_pos[out_pos_index][out_pos_name]
                else:
                    # Take any key / value pair available in out_conn_index_dict.
                    # A problem will result if the dictionary is empty.
                    if out_conn_index_dict:
                        key = next(iter(out_conn_index_dict))
                        out_conn_pos = out_pos[out_pos_index][key]
            adjusted = (out_conn_pos[0] + widths[output_dict["id"]], out_conn_pos[1])
            text.append(
                svgfig.SVG(
                    "circle",
                    cx=out_conn_pos[0] + widths[output_dict["id"]] - margin,
                    cy=out_conn_pos[1] - margin,
                    r=5,
                    fill="#ffffff",
                )
            )
            connectors.append(
                svgfig.Line(adjusted[0], adjusted[1] - margin, in_coords[0] - 10, in_coords[1], arrow_end="true").SVG()
            )
    canvas.append(connectors)
    canvas.append(boxes)
    canvas.append(text)
    width, height = (max_x + max_width + 50), max_y + 300
    canvas["width"] = "%s px" % width
    canvas["height"] = "%s px" % height
    canvas["viewBox"] = "0 0 %s %s" % (width, height)
    trans.response.set_content_type("image/svg+xml")
    return canvas.standalone_xml()
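
Everything drawn above is composed from a handful of svgfig primitives. A minimal standalone
sketch of the same pattern, one labeled step box on a canvas, using only the calls that appear in
the function above:

import svgfig

canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; text-anchor:left")
boxes = svgfig.SVG("g")
text = svgfig.SVG("g")
x, y, width = 50, 50, 120
boxes.append(svgfig.Rect(x, y, x + width, y + 30, fill="#EBD9B2").SVG())
text.append(svgfig.Text(x + 5, y + 20, "Example step", **{"font-size": "14px"}).SVG())
canvas.append(boxes)
canvas.append(text)
canvas["width"], canvas["height"] = "200 px", "120 px"
canvas["viewBox"] = "0 0 200 120"
print(canvas.standalone_xml())
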
Exemplo n.º 25
0
def get_required_repo_info_dicts( trans, tool_shed_url, repo_info_dicts ):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list.  All
    repository_dependencies entries in each of the received repo_info_dicts include all required repositories, so only one pass through
    this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( repo_info_dict )
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tup )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        repository_components_tuple = container_util.get_components_from_key( key )
                        components_list = suc.extract_components_from_tuple( repository_components_tuple )
                        # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository since
                        # in this case, the repository dependency is really a dependency of the dependent repository's contained tool dependency, and only if
                        # that tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release.
                        prior_installation_required = 'False'
                        only_if_compiling_contained_td = 'False'
                        if len( components_list ) == 4:
                            prior_installation_required = 'False'
                            only_if_compiling_contained_td = 'False'
                        elif len( components_list ) == 5:
                            prior_installation_required = components_list[ 4 ]
                            only_if_compiling_contained_td = 'False'
                        if not util.asbool( only_if_compiling_contained_td ):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append( components_list )
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[ 5 ]
                            except IndexError:
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository
                            # (see above comment).
                            if not util.asbool( only_if_compiling_contained_td ):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append( components_list )
                else:
                    # We have a single repository with no dependencies.
                    components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                    required_repository_tups.append( components_list )
            if required_repository_tups:
                # The value of required_repository_tups is a list of tuples, so we need to encode it.
                encoded_required_repository_tups = []
                for required_repository_tup in required_repository_tups:
                    # Convert every item in required_repository_tup to a string.
                    required_repository_tup = [ str( item ) for item in required_repository_tup ]
                    encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
                encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
                encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
                url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
                request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
                response = urllib2.urlopen( request ).read()
                if response:
                    try:
                        required_repo_info_dict = json.from_json_string( response )
                    except Exception as e:
                        log.exception( e )
                        return all_repo_info_dicts
                    required_repo_info_dicts = []
                    for k, v in required_repo_info_dict.items():
                        if k == 'repo_info_dicts':
                            encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                            for encoded_dict_str in encoded_dict_strings:
                                decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                                required_repo_info_dicts.append( decoded_dict )
                        else:
                            if k not in all_required_repo_info_dict:
                                all_required_repo_info_dict[ k ] = v
                            else:
                                if v and not all_required_repo_info_dict[ k ]:
                                    all_required_repo_info_dict[ k ] = v
                        if required_repo_info_dicts:
                            for required_repo_info_dict in required_repo_info_dicts:
                                if required_repo_info_dict not in all_repo_info_dicts:
                                    all_repo_info_dicts.append( required_repo_info_dict )
                    all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
    return all_required_repo_info_dict
Exemplo n.º 26
0
 def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
     """
     Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
      them to the list.  All repository_dependency entries in each of the received repo_info_dicts include
     all required repositories, so only one pass through this method is required to retrieve all repository
     dependencies.
     """
     all_required_repo_info_dict = {}
     all_repo_info_dicts = []
     if repo_info_dicts:
         # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
         # shed to discover repository ids.
         required_repository_tups = []
         for repo_info_dict in repo_info_dicts:
             if repo_info_dict not in all_repo_info_dicts:
                 all_repo_info_dicts.append( repo_info_dict )
             for repository_name, repo_info_tup in repo_info_dict.items():
                 description, \
                     repository_clone_url, \
                     changeset_revision, \
                     ctx_rev, \
                     repository_owner, \
                     repository_dependencies, \
                     tool_dependencies = \
                     suc.get_repo_info_tuple_contents( repo_info_tup )
                 if repository_dependencies:
                     for key, val in repository_dependencies.items():
                         if key in [ 'root_key', 'description' ]:
                             continue
                         repository_components_tuple = container_util.get_components_from_key( key )
                         components_list = suc.extract_components_from_tuple( repository_components_tuple )
                         # Skip listing a repository dependency if it is required only to compile a tool dependency
                         # defined for the dependent repository since in this case, the repository dependency is really
                         # a dependency of the dependent repository's contained tool dependency, and only if that
                         # tool dependency requires compilation.
                         # For backward compatibility to the 12/20/12 Galaxy release.
                         only_if_compiling_contained_td = 'False'
                         if len( components_list ) == 4:
                             only_if_compiling_contained_td = 'False'
                         elif len( components_list ) == 5:
                             only_if_compiling_contained_td = 'False'
                         if not asbool( only_if_compiling_contained_td ):
                             if components_list not in required_repository_tups:
                                 required_repository_tups.append( components_list )
                         for components_list in val:
                             try:
                                 only_if_compiling_contained_td = components_list[ 5 ]
                              except IndexError:
                                 only_if_compiling_contained_td = 'False'
                             # Skip listing a repository dependency if it is required only to compile a tool dependency
                             # defined for the dependent repository (see above comment).
                             if not asbool( only_if_compiling_contained_td ):
                                 if components_list not in required_repository_tups:
                                     required_repository_tups.append( components_list )
                 else:
                     # We have a single repository with no dependencies.
                     components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                     required_repository_tups.append( components_list )
             if required_repository_tups:
                 # The value of required_repository_tups is a list of tuples, so we need to encode it.
                 encoded_required_repository_tups = []
                 for required_repository_tup in required_repository_tups:
                     # Convert every item in required_repository_tup to a string.
                     required_repository_tup = [ str( item ) for item in required_repository_tup ]
                     encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
                 encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
                 encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
                 if suc.is_tool_shed_client( self.app ):
                     # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
                 pathspec = [ 'repository', 'get_required_repo_info_dict' ]
                 url = common_util.url_join( tool_shed_url, pathspec=pathspec )
                  # Work around urllib2.urlopen not handling a 307 redirect nicely when the urllib2.Request has data provided.
                 url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
                 request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
                 response = urllib2.urlopen( request ).read()
                 if response:
                     try:
                         required_repo_info_dict = json.loads( response )
                      except Exception as e:
                         log.exception( e )
                         return all_repo_info_dicts
                     required_repo_info_dicts = []
                     for k, v in required_repo_info_dict.items():
                         if k == 'repo_info_dicts':
                             encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                             for encoded_dict_str in encoded_dict_strings:
                                 decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                                 required_repo_info_dicts.append( decoded_dict )
                         else:
                             if k not in all_required_repo_info_dict:
                                 all_required_repo_info_dict[ k ] = v
                             else:
                                 if v and not all_required_repo_info_dict[ k ]:
                                     all_required_repo_info_dict[ k ] = v
                         if required_repo_info_dicts:
                             for required_repo_info_dict in required_repo_info_dicts:
                                 # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                                 # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                                 # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                                 # lists of discovered repository dependencies, but these lists will be empty in the
                                 # required_repo_info_dict since dependency discovery has not yet been performed for these
                                 # dictionaries.
                                 required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
                                 all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
                                 if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                                     all_repo_info_dicts.append( required_repo_info_dict )
                      all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
      return all_required_repo_info_dict
Exemplo n.º 27
0
class RepositoryDependencyInstallManager( object ):

    def __init__( self, app ):
        self.app = app

    def build_repository_dependency_relationships( self, repo_info_dicts, tool_shed_repositories ):
        """
        Build relationships between installed tool shed repositories and other installed
        tool shed repositories upon which they depend.  These relationships are defined in
        the repository_dependencies entry for each dictionary in the received list of repo_info_dicts.
        Each of these dictionaries is associated with a repository in the received tool_shed_repositories
        list.
        """
        install_model = self.app.install_model
        log.debug( "Building repository dependency relationships..." )
        for repo_info_dict in repo_info_dicts:
            for name, repo_info_tuple in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tuple )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        d_repository = None
                        repository_components_tuple = container_util.get_components_from_key( key )
                        components_list = suc.extract_components_from_tuple( repository_components_tuple )
                        d_toolshed, d_name, d_owner, d_changeset_revision = components_list[ 0:4 ]
                        for tsr in tool_shed_repositories:
                            # Get the tool_shed_repository defined by name, owner and changeset_revision.  This is
                            # the repository that will be dependent upon each of the tool shed repositories contained in
                            # val.  We'll need to check tool_shed_repository.tool_shed as well if/when repository dependencies
                            # across tool sheds is supported.
                            if tsr.name == d_name and tsr.owner == d_owner and tsr.changeset_revision == d_changeset_revision:
                                d_repository = tsr
                                break
                        if d_repository is None:
                            # The dependent repository is not in the received list so look in the database.
                            d_repository = self.get_or_create_tool_shed_repository( d_toolshed,
                                                                                    d_name,
                                                                                    d_owner,
                                                                                    d_changeset_revision )
                        # Process each repository_dependency defined for the current dependent repository.
                        for repository_dependency_components_list in val:
                            required_repository = None
                            rd_toolshed, \
                                rd_name, \
                                rd_owner, \
                                rd_changeset_revision, \
                                rd_prior_installation_required, \
                                rd_only_if_compiling_contained_td = \
                                common_util.parse_repository_dependency_tuple( repository_dependency_components_list )
                            # Get the tool_shed_repository defined by rd_name, rd_owner and rd_changeset_revision.  This
                            # is the repository that will be required by the current d_repository.
                            # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported.
                            for tsr in tool_shed_repositories:
                                if tsr.name == rd_name and tsr.owner == rd_owner and tsr.changeset_revision == rd_changeset_revision:
                                    required_repository = tsr
                                    break
                            if required_repository is None:
                                # The required repository is not in the received list so look in the database.
                                required_repository = self.get_or_create_tool_shed_repository( rd_toolshed,
                                                                                               rd_name,
                                                                                               rd_owner,
                                                                                               rd_changeset_revision )
                            # Ensure there is a repository_dependency relationship between d_repository and required_repository.
                            rrda = None
                            for rd in d_repository.repository_dependencies:
                                if rd.id == required_repository.id:
                                    rrda = rd
                                    break
                            if not rrda:
                                # Make sure required_repository is in the repository_dependency table.
                                repository_dependency = self.get_repository_dependency_by_repository_id( install_model,
                                                                                                         required_repository.id )
                                if not repository_dependency:
                                    log.debug( 'Creating new repository_dependency record for installed revision %s of repository: %s owned by %s.' %
                                               ( str( required_repository.installed_changeset_revision ),
                                                 str( required_repository.name ),
                                                 str( required_repository.owner ) ) )
                                    repository_dependency = install_model.RepositoryDependency( tool_shed_repository_id=required_repository.id )
                                    install_model.context.add( repository_dependency )
                                    install_model.context.flush()
                                # Build the relationship between the d_repository and the required_repository.
                                rrda = install_model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=d_repository.id,
                                                                                                repository_dependency_id=repository_dependency.id )
                                install_model.context.add( rrda )
                                install_model.context.flush()

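The nested loops above rescan tool_shed_repositories for every dependency. When the list is long,
the same lookup can be done in constant time with a dictionary keyed by (name, owner,
changeset_revision); a sketch of that variation with simplified stand-in repository objects:

from collections import namedtuple

Repo = namedtuple('Repo', ['name', 'owner', 'changeset_revision'])
tool_shed_repositories = [Repo('add_column', 'test', '3a08cc21466f')]
# Build the index once, then reuse it for every dependency lookup.
repos_by_key = {(r.name, r.owner, r.changeset_revision): r for r in tool_shed_repositories}
print(repos_by_key.get(('add_column', 'test', '3a08cc21466f')))
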
    def create_repository_dependency_objects( self, tool_path, tool_shed_url, repo_info_dicts, install_repository_dependencies=False,
                                              no_changes_checked=False, tool_panel_section_id=None, new_tool_panel_section_label=None ):
        """
        Discover all repository dependencies and make sure all tool_shed_repository and
        associated repository_dependency records exist as well as the dependency relationships
        between installed repositories.  This method is called when uninstalled repositories
        are being reinstalled.  If the user elected to install repository dependencies, all
        items in the all_repo_info_dicts list will be processed.  However, if repository
        dependencies are not to be installed, only those items contained in the received
        repo_info_dicts list will be processed.
        """
        install_model = self.app.install_model
        log.debug( "Creating repository dependency objects..." )
        # The following list will be maintained within this method to contain all created
        # or updated tool shed repositories, including repository dependencies that may not
        # be installed.
        all_created_or_updated_tool_shed_repositories = []
        # There will be a one-to-one mapping between items in 3 lists:
        # created_or_updated_tool_shed_repositories, tool_panel_section_keys
        # and filtered_repo_info_dicts.  The 3 lists will filter out repository
        # dependencies that are not to be installed.
        created_or_updated_tool_shed_repositories = []
        tool_panel_section_keys = []
        # Repositories will be filtered (e.g., if already installed, if elected
        # to not be installed, etc), so filter the associated repo_info_dicts accordingly.
        filtered_repo_info_dicts = []
        # Discover all repository dependencies and retrieve information for installing
        # them.  Even if the user elected to not install repository dependencies we have
        # to make sure all repository dependency objects exist so that the appropriate
        # repository dependency relationships can be built.
        all_required_repo_info_dict = self.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
        all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
        if not all_repo_info_dicts:
            # No repository dependencies were discovered so process the received repositories.
            all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
        for repo_info_dict in all_repo_info_dicts:
            # If the user elected to install repository dependencies, all items in the
            # all_repo_info_dicts list will be processed.  However, if repository dependencies
            # are not to be installed, only those items contained in the received repo_info_dicts
            # list will be processed but the all_repo_info_dicts list will be used to create all
            # defined repository dependency relationships.
            if self.is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
                for name, repo_info_tuple in repo_info_dict.items():
                    can_update_db_record = False
                    description, \
                        repository_clone_url, \
                        changeset_revision, \
                        ctx_rev, \
                        repository_owner, \
                        repository_dependencies, \
                        tool_dependencies = \
                        suc.get_repo_info_tuple_contents( repo_info_tuple )
                    # See if the repository has an existing record in the database.
                    repository_db_record, installed_changeset_revision = \
                        suc.repository_was_previously_installed( self.app, tool_shed_url, name, repo_info_tuple, from_tip=False )
                    if repository_db_record:
                        if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.INSTALLED,
                                                            install_model.ToolShedRepository.installation_status.CLONING,
                                                            install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
                                                            install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
                                                            install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
                                                            install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
                            debug_msg = "Skipping installation of revision %s of repository '%s' because it was installed " % \
                                ( str( changeset_revision ), str( repository_db_record.name ) )
                            debug_msg += "with the (possibly updated) revision %s and its current installation status is '%s'." % \
                                ( str( installed_changeset_revision ), str( repository_db_record.status ) )
                            log.debug( debug_msg )
                            can_update_db_record = False
                        else:
                            if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.ERROR,
                                                                install_model.ToolShedRepository.installation_status.NEW,
                                                                install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
                                # The current tool shed repository is not currently installed, so we can update its
                                # record in the database.
                                name = repository_db_record.name
                                installed_changeset_revision = repository_db_record.installed_changeset_revision
                                can_update_db_record = True
                            elif repository_db_record.status in [ install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
                                # The current tool shed repository is deactivated, so updating its database record
                                # is not necessary - just activate it.
                                log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) )
                                self.app.installed_repository_manager.activate_repository( repository_db_record )
                                # No additional updates to the database record are necessary.
                                can_update_db_record = False
                            elif repository_db_record.status not in [ install_model.ToolShedRepository.installation_status.NEW ]:
                                # Set changeset_revision here so suc.create_or_update_tool_shed_repository will find
                                # the previously installed and uninstalled repository instead of creating a new record.
                                changeset_revision = repository_db_record.installed_changeset_revision
                                self.reset_previously_installed_repository( repository_db_record )
                                can_update_db_record = True
                    else:
                        # No record exists in the database for the repository currently being processed.
                        installed_changeset_revision = changeset_revision
                        can_update_db_record = True
                    if can_update_db_record:
                        # The database record for the tool shed repository currently being processed can be updated.
                        # Get the repository metadata to see where it was previously located in the tool panel.
                        tpm = tool_panel_manager.ToolPanelManager( self.app )
                        if repository_db_record and repository_db_record.metadata:
                            _, tool_panel_section_key = \
                                tpm.handle_tool_panel_selection( toolbox=self.app.toolbox,
                                                                 metadata=repository_db_record.metadata,
                                                                 no_changes_checked=no_changes_checked,
                                                                 tool_panel_section_id=tool_panel_section_id,
                                                                 new_tool_panel_section_label=new_tool_panel_section_label )
                        else:
                            # We're installing a new tool shed repository that does not yet have a database record.
                            tool_panel_section_key, _ = \
                                tpm.handle_tool_panel_section( self.app.toolbox,
                                                               tool_panel_section_id=tool_panel_section_id,
                                                               new_tool_panel_section_label=new_tool_panel_section_label )
                        tool_shed_repository = \
                            suc.create_or_update_tool_shed_repository( app=self.app,
                                                                       name=name,
                                                                       description=description,
                                                                       installed_changeset_revision=installed_changeset_revision,
                                                                       ctx_rev=ctx_rev,
                                                                       repository_clone_url=repository_clone_url,
                                                                       metadata_dict={},
                                                                       status=install_model.ToolShedRepository.installation_status.NEW,
                                                                       current_changeset_revision=changeset_revision,
                                                                       owner=repository_owner,
                                                                       dist_to_shed=False )
                        if tool_shed_repository not in all_created_or_updated_tool_shed_repositories:
                            all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
                        # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if
                        # it is supposed to be installed.
                        if install_repository_dependencies or self.is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
                            if tool_shed_repository not in created_or_updated_tool_shed_repositories:
                                # Keep the one-to-one mapping between items in 3 lists.
                                created_or_updated_tool_shed_repositories.append( tool_shed_repository )
                                tool_panel_section_keys.append( tool_panel_section_key )
                                filtered_repo_info_dicts.append( repo_info_dict )
        # Build repository dependency relationships even if the user chose to not install repository dependencies.
        self.build_repository_dependency_relationships( all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
        return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts

    def get_or_create_tool_shed_repository( self, tool_shed, name, owner, changeset_revision ):
        """
        Return a tool shed repository database record defined by the combination of
        tool shed, repository name, repository owner and changeset_revision or
        installed_changeset_revision.  A new tool shed repository record will be
        created if one is not located.
        """
        install_model = self.app.install_model
        # We store the port in the database.
        tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
        # This method is used only in Galaxy, not the tool shed.
        repository = suc.get_repository_for_dependency_relationship( self.app, tool_shed, name, owner, changeset_revision )
        if not repository:
            tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed )
            repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
            ctx_rev = suc.get_ctx_rev( self.app, tool_shed_url, name, owner, changeset_revision )
            repository = suc.create_or_update_tool_shed_repository( app=self.app,
                                                                    name=name,
                                                                    description=None,
                                                                    installed_changeset_revision=changeset_revision,
                                                                    ctx_rev=ctx_rev,
                                                                    repository_clone_url=repository_clone_url,
                                                                    metadata_dict={},
                                                                    status=install_model.ToolShedRepository.installation_status.NEW,
                                                                    current_changeset_revision=None,
                                                                    owner=owner,
                                                                    dist_to_shed=False )
        return repository

    def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ):
        """
        Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
        for the received repository which is installed into Galaxy.  This method is called only from Galaxy.
        """
        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) )
        params = dict( name=str( repository.name ),
                       owner=str( repository.owner ),
                       changeset_revision=str( repository.changeset_revision ) )
        pathspec = [ 'repository', 'get_repository_dependencies' ]
        try:
            raw_text = common_util.tool_shed_get( app, tool_shed_url, pathspec=pathspec, params=params )
        except Exception as e:
            log.warning( "The URL\n%s\nraised the exception:\n%s\n",
                         common_util.url_join( tool_shed_url, pathspec=pathspec, params=params ), e )
            return ''
        if len( raw_text ) > 2:
            encoded_text = json.loads( raw_text )
            text = encoding_util.tool_shed_decode( encoded_text )
        else:
            text = ''
        return text
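The guard on len( raw_text ) > 2 above reflects the double-encoding convention these snippets share: the Tool Shed answers with a JSON string whose payload is itself tool-shed-encoded, so a two-character response such as '""' carries no data. A minimal sketch of that decode step, with the decoder passed in rather than imported so the snippet stands alone:

import json

def decode_tool_shed_response(raw_text, tool_shed_decode):
    # Anything of length 2 or less (e.g. '""') is an empty payload.
    if len(raw_text) > 2:
        encoded_text = json.loads(raw_text)
        return tool_shed_decode(encoded_text)
    return ''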
Example No. 28
     message = 'Error opening file %s: %s' % ( str( capsule_file_name ), str( e ) )
     log.error( message, exc_info=True )
     trans.response.status = 500
     return message
 capsule_dict[ 'tar_archive' ] = tar_archive
 capsule_dict[ 'capsule_file_name' ] = capsule_file_name
 capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
 capsule_dict = import_util.validate_capsule( trans, **capsule_dict )
 status = capsule_dict.get( 'status', 'error' )
 if status == 'error':
     message = 'The capsule contents are invalid and cannot be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) )
     log.error( message, exc_info=True )
     trans.response.status = 500
     return message
 encoded_file_path = capsule_dict.get( 'encoded_file_path', None )
 file_path = encoding_util.tool_shed_decode( encoded_file_path )
 export_info_file_path = os.path.join( file_path, 'export_info.xml' )
 export_info_dict = import_util.get_export_info_dict( export_info_file_path )
 manifest_file_path = os.path.join( file_path, 'manifest.xml' )
 # The manifest.xml file has already been validated, so no error_message should be returned here.
 repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path )
 # Determine the status for each exported repository archive contained within the capsule.
 repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts )
 # Generate a list of repository name / import results message tuples for display after the capsule is imported.
 import_results_tups = []
 # Only create repositories that do not yet exist and that the current user is authorized to create.  The
 # status will be None for repositories that fall into the intersection of these 2 categories.
 for repository_status_info_dict in repository_status_info_dicts:
     # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
     repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
     repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
Example No. 29
def generate_workflow_image(trans,
                            workflow_name,
                            repository_metadata_id=None,
                            repository_id=None):
    """
    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode(workflow_name)
    if trans.webapp.name == 'tool_shed':
        # We're in the tool shed.
        repository_metadata = metadata_util.get_repository_metadata_by_id(
            trans, repository_metadata_id)
        repository_id = trans.security.encode_id(
            repository_metadata.repository_id)
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # We're in Galaxy.
        repository = suc.get_tool_shed_repository_by_id(trans, repository_id)
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    for workflow_tup in metadata['workflows']:
        workflow_dict = workflow_tup[1]
        if workflow_dict['name'] == workflow_name:
            break
    if 'tools' in metadata:
        tools_metadata = metadata['tools']
    else:
        tools_metadata = []
    workflow, missing_tool_tups = get_workflow_from_dict(
        trans=trans,
        workflow_dict=workflow_dict,
        tools_metadata=tools_metadata,
        repository_id=repository_id,
        changeset_revision=changeset_revision)
    data = []
    canvas = svgfig.canvas(
        style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
    text = svgfig.SVG("g")
    connectors = svgfig.SVG("g")
    boxes = svgfig.SVG("g")
    svgfig.Text.defaults["font-size"] = "10px"
    in_pos = {}
    out_pos = {}
    margin = 5
    # Spacing between input/outputs.
    line_px = 16
    # Store px width for boxes of each step.
    widths = {}
    max_width, max_x, max_y = 0, 0, 0
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step(trans, repository_id,
                                                   changeset_revision,
                                                   tools_metadata, step)
        tool_errors = module.type == 'tool' and not module.tool
        module_data_inputs = get_workflow_data_inputs(step, module)
        module_data_outputs = get_workflow_data_outputs(
            step, module, workflow.steps)
        step_dict = {
            'id': step.order_index,
            'data_inputs': module_data_inputs,
            'data_outputs': module_data_outputs,
            'position': step.position,
            'tool_errors': tool_errors
        }
        input_conn_dict = {}
        for conn in step.input_connections:
            input_conn_dict[conn.input_name] = dict(
                id=conn.output_step.order_index, output_name=conn.output_name)
        step_dict['input_connections'] = input_conn_dict
        data.append(step_dict)
        x, y = step.position['left'], step.position['top']
        count = 0
        module_name = get_workflow_module_name(module, missing_tool_tups)
        max_len = len(module_name) * 1.5
        text.append(
            svgfig.Text(x, y + 20, module_name, **{
                "font-size": "14px"
            }).SVG())
        y += 45
        for di in module_data_inputs:
            cur_y = y + count * line_px
            if step.order_index not in in_pos:
                in_pos[step.order_index] = {}
            in_pos[step.order_index][di['name']] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, di['label']).SVG())
            count += 1
            max_len = max(max_len, len(di['label']))
        if len(module.get_data_inputs()) > 0:
            y += 15
        for do in module_data_outputs:
            cur_y = y + count * line_px
            if step.order_index not in out_pos:
                out_pos[step.order_index] = {}
            out_pos[step.order_index][do['name']] = (x, cur_y)
            text.append(svgfig.Text(x, cur_y, do['name']).SVG())
            count += 1
            max_len = max(max_len, len(do['name']))
        widths[step.order_index] = max_len * 5.5
        max_x = max(max_x, step.position['left'])
        max_y = max(max_y, step.position['top'])
        max_width = max(max_width, widths[step.order_index])
    for step_dict in data:
        tool_unavailable = step_dict['tool_errors']
        width = widths[step_dict['id']]
        x, y = step_dict['position']['left'], step_dict['position']['top']
        # Only highlight missing tools if displaying in the tool shed.
        if trans.webapp.name == 'tool_shed' and tool_unavailable:
            fill = "#EBBCB2"
        else:
            fill = "#EBD9B2"
        boxes.append(
            svgfig.Rect(x - margin, y, x + width - margin, y + 30,
                        fill=fill).SVG())
        box_height = (len(step_dict['data_inputs']) +
                      len(step_dict['data_outputs'])) * line_px + margin
        # Draw separator line.
        if len(step_dict['data_inputs']) > 0:
            box_height += 15
            sep_y = y + len(step_dict['data_inputs']) * line_px + 40
            text.append(
                svgfig.Line(x - margin, sep_y, x + width - margin,
                            sep_y).SVG())
        # Define an input/output box.
        boxes.append(
            svgfig.Rect(x - margin,
                        y + 30,
                        x + width - margin,
                        y + 30 + box_height,
                        fill="#ffffff").SVG())
        for conn, output_dict in step_dict['input_connections'].items():
            in_coords = in_pos[step_dict['id']][conn]
            # out_pos_index will be a step number like 1, 2, 3...
            out_pos_index = output_dict['id']
            # out_pos_name will be a string like 'o', 'o2', etc.
            out_pos_name = output_dict['output_name']
            if out_pos_index in out_pos:
                # out_conn_index_dict will be something like:
                # 7: {'o': (824.5, 618)}
                out_conn_index_dict = out_pos[out_pos_index]
                if out_pos_name in out_conn_index_dict:
                    out_conn_pos = out_pos[out_pos_index][out_pos_name]
                else:
                    # Take any key / value pair available in out_conn_index_dict.
                    # A problem will result if the dictionary is empty.
                    if out_conn_index_dict:
                        key = next(iter(out_conn_index_dict))
                        out_conn_pos = out_pos[out_pos_index][key]
            adjusted = (out_conn_pos[0] + widths[output_dict['id']],
                        out_conn_pos[1])
            text.append(
                svgfig.SVG("circle",
                           cx=out_conn_pos[0] + widths[output_dict['id']] -
                           margin,
                           cy=out_conn_pos[1] - margin,
                           r=5,
                           fill="#ffffff"))
            connectors.append(
                svgfig.Line(adjusted[0],
                            adjusted[1] - margin,
                            in_coords[0] - 10,
                            in_coords[1],
                            arrow_end="true").SVG())
    canvas.append(connectors)
    canvas.append(boxes)
    canvas.append(text)
    width, height = (max_x + max_width + 50), max_y + 300
    canvas['width'] = "%s px" % width
    canvas['height'] = "%s px" % height
    canvas['viewBox'] = "0 0 %s %s" % (width, height)
    trans.response.set_content_type("image/svg+xml")
    return canvas.standalone_xml()
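For orientation, here is a minimal, self-contained sketch of the svgfig layering pattern used above: boxes and text are accumulated in separate "g" groups, appended to a canvas in z-order, and serialized to standalone XML. Coordinates and labels are illustrative only, and it assumes the svgfig module imported by this code is available.

import svgfig

canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px")
boxes = svgfig.SVG("g")
text = svgfig.SVG("g")
# One step box with a white input/output panel below it.
boxes.append(svgfig.Rect(10, 10, 160, 40, fill="#EBD9B2").SVG())
boxes.append(svgfig.Rect(10, 40, 160, 70, fill="#ffffff").SVG())
text.append(svgfig.Text(15, 30, "Example step", **{"font-size": "14px"}).SVG())
canvas.append(boxes)
canvas.append(text)
canvas["width"], canvas["height"] = "200 px", "100 px"
print(canvas.standalone_xml())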
Example No. 30
    def import_capsule(self, trans, payload, **kwd):
        """
        POST /api/repositories/new/import_capsule
        Import a repository capsule into the Tool Shed.

        :param key: the user's API key

        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed into which the capsule should be imported.
        :param capsule_file_name (required): the name of the capsule file.
        """
        # Get the information about the capsule to be imported from the payload.
        tool_shed_url = payload.get("tool_shed_url", "")
        if not tool_shed_url:
            raise HTTPBadRequest(detail="Missing required parameter 'tool_shed_url'.")
        capsule_file_name = payload.get("capsule_file_name", "")
        if not capsule_file_name:
            raise HTTPBadRequest(detail="Missing required parameter 'capsule_file_name'.")
        capsule_file_path = os.path.abspath(capsule_file_name)
        capsule_dict = dict(
            error_message="",
            encoded_file_path=None,
            status="ok",
            tar_archive=None,
            uploaded_file=None,
            capsule_file_name=None,
        )
        if os.path.getsize(capsule_file_path) == 0:
            log.debug("Your capsule file %s is empty." % str(capsule_file_name))
            return {}
        try:
            # Open for reading with transparent compression.
            tar_archive = tarfile.open(capsule_file_path, "r:*")
        except tarfile.ReadError as e:
            log.debug("Error opening capsule file %s: %s" % (str(capsule_file_name), str(e)))
            return {}
        irm = capsule_manager.ImportRepositoryManager(self.app, trans.request.host, trans.user, trans.user_is_admin())
        capsule_dict["tar_archive"] = tar_archive
        capsule_dict["capsule_file_name"] = capsule_file_name
        capsule_dict = irm.extract_capsule_files(**capsule_dict)
        capsule_dict = irm.validate_capsule(**capsule_dict)
        status = capsule_dict.get("status", "error")
        if status == "error":
            log.debug(
                "The capsule contents are invalid and cannot be imported:<br/>%s"
                % str(capsule_dict.get("error_message", ""))
            )
            return {}
        encoded_file_path = capsule_dict.get("encoded_file_path", None)
        if encoded_file_path is None:
            log.debug("The capsule_dict %s is missing the required encoded_file_path entry." % str(capsule_dict))
            return {}
        file_path = encoding_util.tool_shed_decode(encoded_file_path)
        manifest_file_path = os.path.join(file_path, "manifest.xml")
        # The manifest.xml file has already been validated, so no error_message should be returned here.
        repository_info_dicts, error_message = irm.get_repository_info_from_manifest(manifest_file_path)
        # Determine the status for each exported repository archive contained within the capsule.
        repository_status_info_dicts = irm.get_repository_status_from_tool_shed(repository_info_dicts)
        # Generate a list of repository name / import results message tuples for display after the capsule is imported.
        import_results_tups = []
        # Only create repositories that do not yet exist and that the current user is authorized to create.  The
        # status will be None for repositories that fall into the intersection of these 2 categories.
        for repository_status_info_dict in repository_status_info_dicts:
            # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
            repository_status_info_dict["capsule_file_name"] = capsule_file_name
            repository_status_info_dict["encoded_file_path"] = encoded_file_path
            import_results_tups = irm.create_repository_and_import_archive(
                repository_status_info_dict, import_results_tups
            )
        irm.check_status_and_reset_downloadable(import_results_tups)
        basic_util.remove_dir(file_path)
        # NOTE: the order of installation is defined in import_results_tups, but order will be lost
        # when transferred to return_dict.
        return_dict = {}
        for import_results_tup in import_results_tups:
            ok, name_owner, message = import_results_tup
            name, owner = name_owner
            key = 'Archive of repository "%s" owned by "%s"' % (str(name), str(owner))
            val = message.replace("<b>", '"').replace("</b>", '"')
            return_dict[key] = val
        return return_dict
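A hypothetical client-side call for the endpoint above; the host, API key and capsule file name are placeholders, and sending the payload as JSON with the key as a query parameter is an assumption based on the docstring and the usual Galaxy API conventions, not something this code confirms.

import requests

payload = {
    "tool_shed_url": "https://toolshed.example.org",      # placeholder host
    "capsule_file_name": "my_capsule.tar.gz",             # placeholder file
}
response = requests.post(
    "https://toolshed.example.org/api/repositories/new/import_capsule",
    params={"key": "YOUR_API_KEY"},  # the user's API key, per the docstring
    json=payload,
)
# On success the body maps each archived repository to its import message.
print(response.json())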
Example No. 31
 def get_required_repo_info_dicts(self, tool_shed_url, repo_info_dicts):
     """
     Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
     them to the list.  All repository_dependency entries in each of the received repo_info_dicts includes
     all required repositories, so only one pass through this method is required to retrieve all repository
     dependencies.
     """
     all_required_repo_info_dict = {}
     all_repo_info_dicts = []
     if repo_info_dicts:
         # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
         # shed to discover repository ids.
         required_repository_tups = []
         for repo_info_dict in repo_info_dicts:
             if repo_info_dict not in all_repo_info_dicts:
                 all_repo_info_dicts.append(repo_info_dict)
             for repository_name, repo_info_tup in repo_info_dict.items():
                 description, \
                     repository_clone_url, \
                     changeset_revision, \
                     ctx_rev, \
                     repository_owner, \
                     repository_dependencies, \
                     tool_dependencies = \
                     repository_util.get_repo_info_tuple_contents(repo_info_tup)
                 if repository_dependencies:
                     for key, val in repository_dependencies.items():
                         if key in ['root_key', 'description']:
                             continue
                         repository_components_tuple = container_util.get_components_from_key(key)
                         components_list = repository_util.extract_components_from_tuple(repository_components_tuple)
                         # Skip listing a repository dependency if it is required only to compile a tool dependency
                         # defined for the dependent repository since in this case, the repository dependency is really
                         # a dependency of the dependent repository's contained tool dependency, and only if that
                         # tool dependency requires compilation.
                         # For backward compatibility to the 12/20/12 Galaxy release.
                         only_if_compiling_contained_td = 'False'
                         if len(components_list) == 4:
                             only_if_compiling_contained_td = 'False'
                         elif len(components_list) == 5:
                             only_if_compiling_contained_td = 'False'
                         if not asbool(only_if_compiling_contained_td):
                             if components_list not in required_repository_tups:
                                 required_repository_tups.append(components_list)
                         for components_list in val:
                             try:
                                 only_if_compiling_contained_td = components_list[5]
                              except IndexError:
                                 only_if_compiling_contained_td = 'False'
                             # Skip listing a repository dependency if it is required only to compile a tool dependency
                             # defined for the dependent repository (see above comment).
                             if not asbool(only_if_compiling_contained_td):
                                 if components_list not in required_repository_tups:
                                     required_repository_tups.append(components_list)
                 else:
                     # We have a single repository with no dependencies.
                     components_list = [tool_shed_url, repository_name, repository_owner, changeset_revision]
                     required_repository_tups.append(components_list)
             if required_repository_tups:
                 # The value of required_repository_tups is a list of tuples, so we need to encode it.
                 encoded_required_repository_tups = []
                 for required_repository_tup in required_repository_tups:
                     # Convert every item in required_repository_tup to a string.
                     required_repository_tup = [str(item) for item in required_repository_tup]
                     encoded_required_repository_tups.append(encoding_util.encoding_sep.join(required_repository_tup))
                 encoded_required_repository_str = encoding_util.encoding_sep2.join(encoded_required_repository_tups)
                 encoded_required_repository_str = encoding_util.tool_shed_encode(encoded_required_repository_str)
                 if repository_util.is_tool_shed_client(self.app):
                     # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_shed_url)
                 pathspec = ['repository', 'get_required_repo_info_dict']
                 url = build_url(tool_shed_url, pathspec=pathspec)
                 # Fix for handling 307 redirect not being handled nicely by urlopen() when the Request() has data provided
                 url = _urlopen(url).geturl()
                 response = _urlopen(url, urlencode(dict(encoded_str=encoded_required_repository_str))).read()
                 if response:
                     try:
                         required_repo_info_dict = json.loads(response)
                     except Exception as e:
                         log.exception(e)
                         return all_repo_info_dicts
                     required_repo_info_dicts = []
                     for k, v in required_repo_info_dict.items():
                         if k == 'repo_info_dicts':
                             encoded_dict_strings = required_repo_info_dict['repo_info_dicts']
                             for encoded_dict_str in encoded_dict_strings:
                                 decoded_dict = encoding_util.tool_shed_decode(encoded_dict_str)
                                 required_repo_info_dicts.append(decoded_dict)
                         else:
                             if k not in all_required_repo_info_dict:
                                 all_required_repo_info_dict[k] = v
                             else:
                                 if v and not all_required_repo_info_dict[k]:
                                     all_required_repo_info_dict[k] = v
                         if required_repo_info_dicts:
                             for required_repo_info_dict in required_repo_info_dicts:
                                 # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                                 # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                                 # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                                 # lists of discovered repository dependencies, but these lists will be empty in the
                                 # required_repo_info_dict since dependency discovery has not yet been performed for these
                                 # dictionaries.
                                 required_repo_info_dict_key = next(iter(required_repo_info_dict))
                                 all_repo_info_dicts_keys = [next(iter(d)) for d in all_repo_info_dicts]
                                 if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                                     all_repo_info_dicts.append(required_repo_info_dict)
                                 else:
                                     # required_repo_info_dict_key corresponds to the repo name.
                                     # A single install transaction might require the installation of 2 or more repos
                                     # with the same repo name but different owners or versions.
                                     # Therefore, if required_repo_info_dict_key is already in all_repo_info_dicts,
                                     # check that the tool id is already present. If it is not, we are dealing with the same repo name,
                                     # but a different owner/changeset revision or version and we add the repo to the list of repos to be installed.
                                     tool_id = required_repo_info_dict[required_repo_info_dict_key][1]
                                     is_present = False
                                     for repo_info_dict in all_repo_info_dicts:
                                         for k, v in repo_info_dict.items():
                                             if required_repo_info_dict_key == k:
                                                 if tool_id == v[1]:
                                                     is_present = True
                                                     break
                                     if not is_present:
                                         all_repo_info_dicts.append(required_repo_info_dict)
                     all_required_repo_info_dict['all_repo_info_dicts'] = all_repo_info_dicts
     return all_required_repo_info_dict
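The two-level separator encoding used above is easy to round-trip in isolation. The separator strings below are assumptions standing in for encoding_util.encoding_sep and encoding_util.encoding_sep2, and the final tool_shed_encode() signing step is omitted:

ENCODING_SEP = "__esep__"      # assumed value; joins the fields of one tuple
ENCODING_SEP2 = "__esepii__"   # assumed value; joins the encoded tuples

required_repository_tups = [
    ["https://toolshed.example.org", "samtools", "devteam", "abc123", "False"],
    ["https://toolshed.example.org", "bwa", "devteam", "def456", "False"],
]
encoded = ENCODING_SEP2.join(
    ENCODING_SEP.join(str(item) for item in tup) for tup in required_repository_tups
)
decoded = [chunk.split(ENCODING_SEP) for chunk in encoded.split(ENCODING_SEP2)]
assert decoded == required_repository_tups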
Example No. 32
 def get_required_repo_info_dicts(self, tool_shed_url, repo_info_dicts):
     """
     Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
     them to the list.  All repository_dependency entries in each of the received repo_info_dicts includes
     all required repositories, so only one pass through this method is required to retrieve all repository
     dependencies.
     """
     all_required_repo_info_dict = {}
     all_repo_info_dicts = []
     if repo_info_dicts:
         # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
         # shed to discover repository ids.
         required_repository_tups = []
         for repo_info_dict in repo_info_dicts:
             if repo_info_dict not in all_repo_info_dicts:
                 all_repo_info_dicts.append(repo_info_dict)
             for repository_name, repo_info_tup in repo_info_dict.items():
                 description, \
                     repository_clone_url, \
                     changeset_revision, \
                     ctx_rev, \
                     repository_owner, \
                     repository_dependencies, \
                     tool_dependencies = \
                     repository_util.get_repo_info_tuple_contents(repo_info_tup)
                 if repository_dependencies:
                     for key, val in repository_dependencies.items():
                         if key in ['root_key', 'description']:
                             continue
                         repository_components_tuple = container_util.get_components_from_key(
                             key)
                         components_list = repository_util.extract_components_from_tuple(
                             repository_components_tuple)
                         # Skip listing a repository dependency if it is required only to compile a tool dependency
                         # defined for the dependent repository since in this case, the repository dependency is really
                         # a dependency of the dependent repository's contained tool dependency, and only if that
                         # tool dependency requires compilation.
                         # For backward compatibility to the 12/20/12 Galaxy release.
                         only_if_compiling_contained_td = 'False'
                         if len(components_list) == 4:
                             only_if_compiling_contained_td = 'False'
                         elif len(components_list) == 5:
                             only_if_compiling_contained_td = 'False'
                         if not asbool(only_if_compiling_contained_td):
                             if components_list not in required_repository_tups:
                                 required_repository_tups.append(
                                     components_list)
                         for components_list in val:
                             try:
                                 only_if_compiling_contained_td = components_list[
                                     5]
                             except IndexError:
                                 only_if_compiling_contained_td = 'False'
                             # Skip listing a repository dependency if it is required only to compile a tool dependency
                             # defined for the dependent repository (see above comment).
                             if not asbool(only_if_compiling_contained_td):
                                 if components_list not in required_repository_tups:
                                     required_repository_tups.append(
                                         components_list)
                 else:
                     # We have a single repository with no dependencies.
                     components_list = [
                         tool_shed_url, repository_name, repository_owner,
                         changeset_revision
                     ]
                     required_repository_tups.append(components_list)
             if required_repository_tups:
                 # The value of required_repository_tups is a list of tuples, so we need to encode it.
                 encoded_required_repository_tups = []
                 for required_repository_tup in required_repository_tups:
                     # Convert every item in required_repository_tup to a string.
                     required_repository_tup = [
                         str(item) for item in required_repository_tup
                     ]
                     encoded_required_repository_tups.append(
                         encoding_util.encoding_sep.join(
                             required_repository_tup))
                 encoded_required_repository_str = encoding_util.encoding_sep2.join(
                     encoded_required_repository_tups)
                 encoded_required_repository_str = encoding_util.tool_shed_encode(
                     encoded_required_repository_str)
                 if repository_util.is_tool_shed_client(self.app):
                     # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(
                         self.app, tool_shed_url)
                 pathspec = ['repository', 'get_required_repo_info_dict']
                 url = build_url(tool_shed_url, pathspec=pathspec)
                 # Fix for handling 307 redirect not being handled nicely by urlopen() when the Request() has data provided
                 try:
                     url = _urlopen(url).geturl()
                 except HTTPError as e:
                     if e.code == 502:
                         pass
                     else:
                         raise
                 payload = urlencode(
                     dict(encoded_str=encoded_required_repository_str))
                 response = _urlopen(url, payload).read()
                 if response:
                     try:
                         required_repo_info_dict = json.loads(
                             unicodify(response))
                     except Exception as e:
                         log.exception(e)
                         return all_repo_info_dicts
                     required_repo_info_dicts = []
                     for k, v in required_repo_info_dict.items():
                         if k == 'repo_info_dicts':
                             encoded_dict_strings = required_repo_info_dict[
                                 'repo_info_dicts']
                             for encoded_dict_str in encoded_dict_strings:
                                 decoded_dict = encoding_util.tool_shed_decode(
                                     encoded_dict_str)
                                 required_repo_info_dicts.append(
                                     decoded_dict)
                         else:
                             if k not in all_required_repo_info_dict:
                                 all_required_repo_info_dict[k] = v
                             else:
                                 if v and not all_required_repo_info_dict[k]:
                                     all_required_repo_info_dict[k] = v
                         if required_repo_info_dicts:
                             for required_repo_info_dict in required_repo_info_dicts:
                                 # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                                 # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                                 # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                                 # lists of discovered repository dependencies, but these lists will be empty in the
                                 # required_repo_info_dict since dependency discovery has not yet been performed for these
                                 # dictionaries.
                                 required_repo_info_dict_key = next(
                                     iter(required_repo_info_dict))
                                 all_repo_info_dicts_keys = [
                                     next(iter(d))
                                     for d in all_repo_info_dicts
                                 ]
                                 if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                                     all_repo_info_dicts.append(
                                         required_repo_info_dict)
                                 else:
                                     # required_repo_info_dict_key corresponds to the repo name.
                                     # A single install transaction might require the installation of 2 or more repos
                                     # with the same repo name but different owners or versions.
                                     # Therefore, if required_repo_info_dict_key is already in all_repo_info_dicts,
                                     # check that the tool id is already present. If it is not, we are dealing with the same repo name,
                                     # but a different owner/changeset revision or version and we add the repo to the list of repos to be installed.
                                     tool_id = required_repo_info_dict[
                                         required_repo_info_dict_key][1]
                                     is_present = False
                                     for repo_info_dict in all_repo_info_dicts:
                                         for k, v in repo_info_dict.items():
                                             if required_repo_info_dict_key == k:
                                                 if tool_id == v[1]:
                                                     is_present = True
                                                     break
                                     if not is_present:
                                         all_repo_info_dicts.append(
                                             required_repo_info_dict)
                     all_required_repo_info_dict[
                         'all_repo_info_dicts'] = all_repo_info_dicts
     return all_required_repo_info_dict
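The GET-before-POST dance above exists because urllib refuses to replay a request body across a 307 redirect: a bare GET resolves the final URL first, and the form data is then posted to the resolved location. A standalone sketch of the same workaround, using the standard library directly rather than the _urlopen wrapper:

from urllib.parse import urlencode
from urllib.request import urlopen

def post_following_redirect(url, fields):
    # GET first: urlopen() follows the 307 and geturl() reports the final URL.
    resolved_url = urlopen(url).geturl()
    # urlopen() issues a POST whenever a data argument is supplied.
    data = urlencode(fields).encode("utf-8")
    return urlopen(resolved_url, data).read()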
Example No. 33
def get_required_repo_info_dicts( trans, tool_shed_url, repo_info_dicts ):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list.  All
    repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
    this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( repo_info_dict )
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tup )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        try:
                            toolshed, name, owner, changeset_revision, prior_installation_required = container_util.get_components_from_key( key )
                            components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
                        except ValueError:
                            # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required to False in the caller.
                            toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
                            components_list = [ toolshed, name, owner, changeset_revision ]
                        if components_list not in required_repository_tups:
                            required_repository_tups.append( components_list )
                        for components_list in val:
                            if components_list not in required_repository_tups:
                                required_repository_tups.append( components_list )
                else:
                    # We have a single repository with no dependencies.
                    components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision, 'False' ]
                    required_repository_tups.append( components_list )
            if required_repository_tups:
                # The value of required_repository_tups is a list of tuples, so we need to encode it.
                encoded_required_repository_tups = []
                for required_repository_tup in required_repository_tups:
                    # Convert every item in required_repository_tup to a string.
                    required_repository_tup = [ str( item ) for item in required_repository_tup ]
                    encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
                encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
                encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
                url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
                request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
                response = urllib2.urlopen( request ).read()
                if response:
                    try:
                        required_repo_info_dict = json.from_json_string( response )
                    except Exception, e:
                        log.exception( e )
                        return all_repo_info_dicts
                    required_repo_info_dicts = []
                    for k, v in required_repo_info_dict.items():
                        if k == 'repo_info_dicts':
                            encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                            for encoded_dict_str in encoded_dict_strings:
                                decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                                required_repo_info_dicts.append( decoded_dict )
                        else:
                            if k not in all_required_repo_info_dict:
                                all_required_repo_info_dict[ k ] = v
                            else:
                                if v and not all_required_repo_info_dict[ k ]:
                                    all_required_repo_info_dict[ k ] = v
                        if required_repo_info_dicts:
                            for required_repo_info_dict in required_repo_info_dicts:
                                if required_repo_info_dict not in all_repo_info_dicts:
                                    all_repo_info_dicts.append( required_repo_info_dict )
                    all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
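The ValueError fallback above distinguishes the five-field dependency keys written by current Tool Sheds from the four-field keys that predate the 12/20/12 Galaxy release. Isolated as a helper, with field names taken from the unpacking in the example:

def parse_repository_dependency_key(key_fields):
    # key_fields is the tuple produced by container_util.get_components_from_key().
    try:
        toolshed, name, owner, changeset_revision, prior_installation_required = key_fields
    except ValueError:
        # Pre-12/20/12 keys have no prior_installation_required field.
        toolshed, name, owner, changeset_revision = key_fields
        prior_installation_required = 'False'
    return [toolshed, name, owner, changeset_revision, prior_installation_required]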