def get_latest_installable_revision(self, trans, payload, **kwd):
    """
    POST /api/tool_shed_repositories/get_latest_installable_revision
    Get the latest installable revision of a specified repository from a specified Tool Shed.

    :param key: the current Galaxy admin user's API key

    The following parameters are included in the payload.
    :param tool_shed_url (required): the base URL of the Tool Shed from which to retrieve the Repository revision.
    :param name (required): the name of the Repository
    :param owner (required): the owner of the Repository

    :returns: on success, the raw Tool Shed response listing the ordered
        installable revisions; on failure, a dict with 'status' and 'error' keys.
    """
    # Get the information about the repository to be installed from the payload.
    tool_shed_url, name, owner = self.__parse_repository_from_payload(payload)
    # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
    if not trans.user_is_admin():
        raise exceptions.AdminRequiredException(
            'You are not authorized to request the latest installable revision for a repository in this Galaxy instance.'
        )
    params = '?name=%s&owner=%s' % (name, owner)
    url = common_util.url_join(
        tool_shed_url,
        'api/repositories/get_ordered_installable_revisions%s' % params)
    try:
        raw_text = common_util.tool_shed_get(trans.app, tool_shed_url, url)
    except Exception as e:  # was Python-2-only "except Exception, e"
        message = "Error attempting to retrieve the latest installable revision from tool shed %s for repository %s owned by %s: %s" % \
            (str(tool_shed_url), str(name), str(owner), str(e))
        log.debug(message)
        return dict(status='error', error=message)
    # Previously the fetched response was silently discarded; return it so the
    # caller actually receives the Tool Shed's answer.
    return raw_text
    def get_latest_installable_revision( self, trans, payload, **kwd ):
        """
        POST /api/tool_shed_repositories/get_latest_installable_revision
        Get the latest installable revision of a specified repository from a specified Tool Shed.

        :param key: the current Galaxy admin user's API key

        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed from which to retrieve the Repository revision.
        :param name (required): the name of the Repository
        :param owner (required): the owner of the Repository
        """
        # Get the information about the repository to be installed from the payload.
        tool_shed_url, name, owner = self.__parse_repository_from_payload( payload )
        # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
        if not trans.user_is_admin():
            raise exceptions.AdminRequiredException( 'You are not authorized to request the latest installable revision for a repository in this Galaxy instance.' )
        params = '?name=%s&owner=%s' % ( name, owner )
        url = common_util.url_join( tool_shed_url,
                                    'api/repositories/get_ordered_installable_revisions%s' % params )
        try:
            raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
        except Exception, e:
            message = "Error attempting to retrieve the latest installable revision from tool shed %s for repository %s owned by %s: %s" % \
                ( str( tool_shed_url ), str( name ), str( owner ), str( e ) )
            log.debug( message )
            return dict( status='error', error=message )
# Example 3 (score: 0)
def get_api_url(base, parts=None, params=None):
    """Compose and return a URL for the Tool Shed API.

    :param base: base URL of the Tool Shed
    :param parts: optional list of path components; 'api' is moved/inserted at the front
    :param params: optional query parameters, passed through to url_join
    """
    # Copy instead of mutating: the original used a mutable default ([]) and
    # mutated it (and any caller-supplied list) in place via pop/insert.
    parts = list(parts) if parts else []
    if 'api' in parts and parts.index('api') != 0:
        parts.pop(parts.index('api'))
        parts.insert(0, 'api')
    elif 'api' not in parts:
        parts.insert(0, 'api')
    url = common_util.url_join(base, pathspec=parts, params=params)
    return url
# Example 4 (score: 0)
def get_api_url(base, parts=None, params=None):
    """Compose and return a URL for the Tool Shed API.

    :param base: base URL of the Tool Shed
    :param parts: optional list of path components; "api" is moved/inserted at the front
    :param params: optional query parameters, passed through to url_join
    """
    # Copy instead of mutating: the original used a mutable default ([]) and
    # mutated it (and any caller-supplied list) in place via pop/insert.
    parts = list(parts) if parts else []
    if "api" in parts and parts.index("api") != 0:
        parts.pop(parts.index("api"))
        parts.insert(0, "api")
    elif "api" not in parts:
        parts.insert(0, "api")
    url = common_util.url_join(base, pathspec=parts, params=params)
    return url
 def get_repository_dependencies_for_installed_tool_shed_repository(self, app, repository):
     """
     Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
     for the received repository which is installed into Galaxy.  This method is called only from Galaxy.

     :param app: the Galaxy application object
     :param repository: an installed tool shed repository record
     :returns: the raw tool shed response text, or '' if the request failed
     """
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, str(repository.tool_shed))
     params = dict(name=str(repository.name),
                   owner=str(repository.owner),
                   changeset_revision=str(repository.changeset_revision))
     pathspec = ['repository', 'get_repository_dependencies']
     try:
         raw_text = common_util.tool_shed_get(app, tool_shed_url, pathspec=pathspec, params=params)
     except Exception as e:  # was Python-2-only "except Exception, e"
         # Log instead of printing to stdout (matches the logging style used elsewhere in this file).
         log.error("The URL\n%s\nraised the exception:\n%s\n",
                   common_util.url_join(tool_shed_url, pathspec=pathspec, params=params), str(e))
         return ''
     # Previously the fetched text was discarded; return it on success.
     return raw_text
 def get_repository_dependencies_for_installed_tool_shed_repository(self, app, repository):
     """
     Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
     for the received repository which is installed into Galaxy.  This method is called only from Galaxy.

     :param app: the Galaxy application object
     :param repository: an installed tool shed repository record
     :returns: the raw tool shed response text, or '' if the request failed
     """
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, str(repository.tool_shed))
     params = dict(name=str(repository.name),
                   owner=str(repository.owner),
                   changeset_revision=str(repository.changeset_revision))
     pathspec = ['repository', 'get_repository_dependencies']
     try:
         raw_text = common_util.tool_shed_get(app, tool_shed_url, pathspec=pathspec, params=params)
     except Exception as e:  # was Python-2-only "except Exception, e"
         log.error("The URL\n%s\nraised the exception:\n%s\n",
                   common_util.url_join(tool_shed_url, pathspec=pathspec, params=params), str(e))
         return ''
     # Previously the fetched text was discarded; return it on success.
     return raw_text
# Example 7 (score: 0)
def get_api_url( base, parts=None, params=None ):
    """Compose and return a URL for the Tool Shed API.

    :param base: base URL of the Tool Shed
    :param parts: optional list of path components; 'api' is moved/inserted at the front
    :param params: dict/sequence of query parameters, or an already-encoded query string
    :returns: the composed URL (the original computed it and never returned it)
    """
    # Python 3 location of urlencode; the original used Python 2's urllib.urlencode.
    from urllib.parse import urlencode
    # Copy instead of mutating: the original used a mutable default ([]) and
    # mutated it (and any caller-supplied list) in place via pop/insert.
    parts = list( parts ) if parts else []
    if 'api' in parts and parts.index( 'api' ) != 0:
        parts.pop( parts.index( 'api' ) )
        parts.insert( 0, 'api' )
    elif 'api' not in parts:
        parts.insert( 0, 'api' )
    url = common_util.url_join( base, *parts )
    if params is not None:
        try:
            query_string = urlencode( params )
        except Exception:
            # The value of params must already be a string.
            query_string = params
        url += '?%s' % query_string
    # Bug fix: the original function had no return statement.
    return url
# Example 8 (score: 0)
def get_api_url(base, parts=None, params=None):
    """Compose and return a URL for the Tool Shed API.

    :param base: base URL of the Tool Shed
    :param parts: optional list of path components; 'api' is moved/inserted at the front
    :param params: dict/sequence of query parameters, or an already-encoded query string
    :returns: the composed URL (the original computed it and never returned it)
    """
    # Python 3 location of urlencode; the original used Python 2's urllib.urlencode.
    from urllib.parse import urlencode
    # Copy instead of mutating: the original used a mutable default ([]) and
    # mutated it (and any caller-supplied list) in place via pop/insert.
    parts = list(parts) if parts else []
    if 'api' in parts and parts.index('api') != 0:
        parts.pop(parts.index('api'))
        parts.insert(0, 'api')
    elif 'api' not in parts:
        parts.insert(0, 'api')
    url = common_util.url_join(base, *parts)
    if params is not None:
        try:
            query_string = urlencode(params)
        except Exception:
            # The value of params must already be a string.
            query_string = params
        url += '?%s' % query_string
    # Bug fix: the original function had no return statement.
    return url
 def get_repository_dependencies_for_installed_tool_shed_repository(self, app, repository):
     """
     Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
     for the received repository which is installed into Galaxy.  This method is called only from Galaxy.

     :param app: the Galaxy application object
     :param repository: an installed tool shed repository record
     :returns: the raw tool shed response text, or '' if the request failed
     """
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, str(repository.tool_shed))
     params = '?name=%s&owner=%s&changeset_revision=%s' % (str(repository.name),
                                                           str(repository.owner),
                                                           str(repository.changeset_revision))
     url = common_util.url_join(tool_shed_url,
                                'repository/get_repository_dependencies%s' % params)
     try:
         raw_text = common_util.tool_shed_get(app, tool_shed_url, url)
     except Exception as e:  # was Python-2-only "except Exception, e"
         # Log instead of the Python 2 print statement used by the original.
         log.error("The URL\n%s\nraised the exception:\n%s\n", url, str(e))
         return ''
     # Previously the fetched text was discarded; return it on success.
     return raw_text
def get_readme_files_dict_for_display(app, tool_shed_url, repo_info_dict):
    """
    Return a dictionary of README files contained in the single repository being installed so they can be displayed on the tool panel section
    selection page.

    :param app: the Galaxy application object
    :param tool_shed_url: base URL of the Tool Shed hosting the repository
    :param repo_info_dict: single-entry dict mapping the repository name to its repo_info_tuple
    """
    # The dict holds exactly one entry; fetch its key without subscripting
    # dict.keys() (keys()[0] raises TypeError on Python 3).
    name = next(iter(repo_info_dict))
    repo_info_tuple = repo_info_dict[name]
    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
        suc.get_repo_info_tuple_contents(repo_info_tuple)
    # Handle changing HTTP protocols over time.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    params = '?name=%s&owner=%s&changeset_revision=%s' % (name, repository_owner, changeset_revision)
    url = common_util.url_join(tool_shed_url,
                               'repository/get_readme_files%s' % params)
    raw_text = common_util.tool_shed_get(app, tool_shed_url, url)
    readme_files_dict = json.from_json_string(raw_text)
    return readme_files_dict
 def get_update_to_changeset_revision_and_ctx_rev(self, repository):
     """Return the changeset revision hash to which the repository can be updated.

     Queries the repository's tool shed and returns a dictionary describing the
     available update: content flags (includes_tools, includes_workflows, ...)
     plus 'changeset_revision' and 'ctx_rev'.  On any error — or when the tool
     shed returns nothing — every flag is False and both revisions are None.
     """
     # Start from safe defaults.  The original left these keys' source names
     # unbound when the tool shed returned an empty response (UnboundLocalError),
     # relying on the broad except to paper over it.
     changeset_revision_dict = {
         'includes_data_managers': False,
         'includes_datatypes': False,
         'includes_tools': False,
         'includes_tools_for_display_in_tool_panel': False,
         'includes_tool_dependencies': False,
         'includes_workflows': False,
         'has_repository_dependencies': False,
         'has_repository_dependencies_only_if_compiling_contained_td': False,
         'changeset_revision': None,
         'ctx_rev': None,
     }
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, str(repository.tool_shed))
     params = '?name=%s&owner=%s&changeset_revision=%s' % (str(repository.name),
                                                           str(repository.owner),
                                                           str(repository.installed_changeset_revision))
     url = common_util.url_join(tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev%s' % params)
     try:
         encoded_update_dict = common_util.tool_shed_get(self.app, tool_shed_url, url)
         if encoded_update_dict:
             update_dict = encoding_util.tool_shed_decode(encoded_update_dict)
             # Copy each boolean flag from the decoded response; absent keys keep False.
             for key in ('includes_data_managers',
                         'includes_datatypes',
                         'includes_tools',
                         'includes_tools_for_display_in_tool_panel',
                         'includes_tool_dependencies',
                         'includes_workflows',
                         'has_repository_dependencies',
                         'has_repository_dependencies_only_if_compiling_contained_td'):
                 changeset_revision_dict[key] = update_dict.get(key, False)
             changeset_revision_dict['changeset_revision'] = update_dict.get('changeset_revision', None)
             changeset_revision_dict['ctx_rev'] = update_dict.get('ctx_rev', None)
     except Exception as e:  # was Python-2-only "except Exception, e"
         log.debug("Error getting change set revision for update from the tool shed for repository '%s': %s" % (repository.name, str(e)))
         # The defaults initialized above already represent the error state.
     # Bug fix: the original never returned the populated dictionary.
     return changeset_revision_dict
 def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
     """
     Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
     them to the list.  All repository_dependency entries in each of the received repo_info_dicts includes
     all required repositories, so only one pass through this method is required to retrieve all repository
     dependencies.
     """
     # NOTE(review): this method uses Python-2-only constructs (urllib2,
     # "except Exception, e", dict.keys()[ 0 ] subscripting) and will not run
     # under Python 3 as written.
     # Accumulators: every extra key/value returned by the tool shed, plus the
     # flat list of repo_info_dicts (received + discovered dependencies).
     all_required_repo_info_dict = {}
     all_repo_info_dicts = []
     if repo_info_dicts:
         # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
         # shed to discover repository ids.
         required_repository_tups = []
         for repo_info_dict in repo_info_dicts:
             if repo_info_dict not in all_repo_info_dicts:
                 all_repo_info_dicts.append( repo_info_dict )
             for repository_name, repo_info_tup in repo_info_dict.items():
                 # Unpack the standard repo_info_tuple for this repository.
                 description, \
                     repository_clone_url, \
                     changeset_revision, \
                     ctx_rev, \
                     repository_owner, \
                     repository_dependencies, \
                     tool_dependencies = \
                     suc.get_repo_info_tuple_contents( repo_info_tup )
                 if repository_dependencies:
                     for key, val in repository_dependencies.items():
                         # 'root_key' / 'description' are metadata entries, not dependencies.
                         if key in [ 'root_key', 'description' ]:
                             continue
                         repository_components_tuple = container_util.get_components_from_key( key )
                         components_list = suc.extract_components_from_tuple( repository_components_tuple )
                         # Skip listing a repository dependency if it is required only to compile a tool dependency
                         # defined for the dependent repository since in this case, the repository dependency is really
                         # a dependency of the dependent repository's contained tool dependency, and only if that
                         # tool dependency requires compilation.
                         # For backward compatibility to the 12/20/12 Galaxy release.
                         prior_installation_required = 'False'
                         only_if_compiling_contained_td = 'False'
                         # Older keys have 4 components (no flags); 5 components carry
                         # prior_installation_required; 6 would also carry the compile flag.
                         if len( components_list ) == 4:
                             prior_installation_required = 'False'
                             only_if_compiling_contained_td = 'False'
                         elif len( components_list ) == 5:
                             prior_installation_required = components_list[ 4 ]
                             only_if_compiling_contained_td = 'False'
                         if not asbool( only_if_compiling_contained_td ):
                             if components_list not in required_repository_tups:
                                 required_repository_tups.append( components_list )
                         # val is a list of dependency component lists; note this rebinds
                         # the loop variable components_list used above.
                         for components_list in val:
                             try:
                                 only_if_compiling_contained_td = components_list[ 5 ]
                             except:
                                 # NOTE(review): bare except — index 5 missing is the expected
                                 # case, but this also hides any other error.
                                 only_if_compiling_contained_td = 'False'
                             # Skip listing a repository dependency if it is required only to compile a tool dependency
                             # defined for the dependent repository (see above comment).
                             if not asbool( only_if_compiling_contained_td ):
                                 if components_list not in required_repository_tups:
                                     required_repository_tups.append( components_list )
                 else:
                     # We have a single repository with no dependencies.
                     components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                     required_repository_tups.append( components_list )
             # NOTE(review): this block is inside the outer for-loop, so the tool
             # shed is queried once per incoming repo_info_dict — confirm whether
             # it was meant to run once, after the loop.
             if required_repository_tups:
                 # The value of required_repository_tups is a list of tuples, so we need to encode it.
                 encoded_required_repository_tups = []
                 for required_repository_tup in required_repository_tups:
                     # Convert every item in required_repository_tup to a string.
                     required_repository_tup = [ str( item ) for item in required_repository_tup ]
                     encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
                 encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
                 encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
                 if suc.is_tool_shed_client( self.app ):
                     # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
                 url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
                 # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
                 url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
                 request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
                 response = urllib2.urlopen( request ).read()
                 if response:
                     try:
                         required_repo_info_dict = json.loads( response )
                     except Exception, e:
                         # Unparseable response: give up and return what we have so far.
                         log.exception( e )
                         return all_repo_info_dicts
                     required_repo_info_dicts = []
                     for k, v in required_repo_info_dict.items():
                         if k == 'repo_info_dicts':
                             # Each entry is a tool-shed-encoded repo_info_dict; decode them all.
                             encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                             for encoded_dict_str in encoded_dict_strings:
                                 decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                                 required_repo_info_dicts.append( decoded_dict )
                         else:
                             # Keep the first truthy value seen for every other key.
                             if k not in all_required_repo_info_dict:
                                 all_required_repo_info_dict[ k ] = v
                             else:
                                 if v and not all_required_repo_info_dict[ k ]:
                                     all_required_repo_info_dict[ k ] = v
                         if required_repo_info_dicts:
                             for required_repo_info_dict in required_repo_info_dicts:
                                 # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                                 # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                                 # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                                 # lists of discovered repository dependencies, but these lists will be empty in the
                                 # required_repo_info_dict since dependency discovery has not yet been performed for these
                                 # dictionaries.
                                 required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
                                 all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
                                 if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                                     all_repo_info_dicts.append( required_repo_info_dict )
                     all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
     # NOTE(review): the method falls off the end without a return statement
     # (implicitly None) even though all_required_repo_info_dict is fully
     # populated — presumably "return all_required_repo_info_dict" was intended;
     # confirm against callers before changing.
 def populate_containers_dict_from_repository_metadata(
         self,
         tool_shed_url,
         tool_path,
         repository,
         reinstalling=False,
         required_repo_info_dicts=None):
     """
     Retrieve necessary information from the received repository's metadata to populate the
     containers_dict for display.  This method is called only from Galaxy (not the tool shed)
     when displaying repository dependencies for installed repositories and when displaying
     them for uninstalled repositories that are being reinstalled.
     """
     metadata = repository.metadata
     if not metadata:
         # No metadata at all: every container is empty.
         return dict(datatypes=None,
                     invalid_tools=None,
                     readme_files_dict=None,
                     repository_dependencies=None,
                     tool_dependencies=None,
                     valid_tools=None,
                     workflows=None)
     # Proprietary datatypes and invalid tools come straight from the metadata.
     datatypes = metadata.get('datatypes', None)
     invalid_tools = metadata.get('invalid_tools', None)
     # README files: request them from the tool shed when reinstalling (or when
     # the repository is not in a locally-resolved state); otherwise build the
     # dict from the local repository files.
     readme_files_dict = None
     if repository.has_readme_files:
         local_states = [self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED,
                         self.app.install_model.ToolShedRepository.installation_status.INSTALLED]
         if reinstalling or repository.status not in local_states:
             tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_shed_url)
             query = '?name=%s&owner=%s&changeset_revision=%s' % (str(repository.name),
                                                                  str(repository.owner),
                                                                  str(repository.installed_changeset_revision))
             readme_url = common_util.url_join(tool_shed_url,
                                               'repository/get_readme_files%s' % query)
             raw_text = common_util.tool_shed_get(self.app, tool_shed_url, readme_url)
             readme_files_dict = json.loads(raw_text)
         else:
             readme_files_dict = readme_util.build_readme_files_dict(self.app,
                                                                     repository,
                                                                     repository.changeset_revision,
                                                                     repository.metadata,
                                                                     tool_path)
     # Repository dependencies, split into installed vs. missing.
     installed_repository_dependencies, missing_repository_dependencies = \
         self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies(repository)
     # Tool dependencies; fold in invalid ones so they are displayed as missing.
     repository_tool_dependencies = metadata.get('tool_dependencies', None)
     repository_invalid_tool_dependencies = metadata.get('invalid_tool_dependencies', None)
     if repository_invalid_tool_dependencies is not None:
         if repository_tool_dependencies is None:
             repository_tool_dependencies = {}
         repository_tool_dependencies.update(repository_invalid_tool_dependencies)
     repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
         self.get_installed_and_missing_tool_dependencies_for_installed_repository(repository,
                                                                                   repository_tool_dependencies)
     if reinstalling:
         installed_tool_dependencies, missing_tool_dependencies = \
             self.populate_tool_dependencies_dicts(tool_shed_url,
                                                   tool_path,
                                                   repository_installed_tool_dependencies,
                                                   repository_missing_tool_dependencies,
                                                   required_repo_info_dicts)
     else:
         installed_tool_dependencies = repository_installed_tool_dependencies
         missing_tool_dependencies = repository_missing_tool_dependencies
     # Valid tools and workflows.
     valid_tools = metadata.get('tools', None)
     workflows = metadata.get('workflows', None)
     # Data Managers: valid, invalid, and any associated error messages.
     valid_data_managers = None
     invalid_data_managers = None
     data_managers_errors = None
     if 'data_manager' in metadata:
         data_manager_metadata = metadata['data_manager']
         valid_data_managers = data_manager_metadata.get('data_managers', None)
         invalid_data_managers = data_manager_metadata.get('invalid_data_managers', None)
         data_managers_errors = data_manager_metadata.get('messages', None)
     gucm = GalaxyUtilityContainerManager(self.app)
     return gucm.build_repository_containers(repository=repository,
                                             datatypes=datatypes,
                                             invalid_tools=invalid_tools,
                                             missing_repository_dependencies=missing_repository_dependencies,
                                             missing_tool_dependencies=missing_tool_dependencies,
                                             readme_files_dict=readme_files_dict,
                                             repository_dependencies=installed_repository_dependencies,
                                             tool_dependencies=installed_tool_dependencies,
                                             valid_tools=valid_tools,
                                             workflows=workflows,
                                             valid_data_managers=valid_data_managers,
                                             invalid_data_managers=invalid_data_managers,
                                             data_managers_errors=data_managers_errors,
                                             new_install=False,
                                             reinstalling=reinstalling)
# Example 14 (score: 0)
def build_citable_url(host, repository):
    """Return the canonical citable URL for *repository* hosted at *host*."""
    # URL layout: <host>/view/<owner username>/<repository name>
    path_parts = ['view', repository.user.username, repository.name]
    return url_join(host, pathspec=path_parts)
 def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
     """
     Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
     them to the list.  All repository_dependency entries in each of the received repo_info_dicts includes
     all required repositories, so only one pass through this method is required to retrieve all repository
     dependencies.
     """
     all_required_repo_info_dict = {}
     all_repo_info_dicts = []
     if repo_info_dicts:
         # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
         # shed to discover repository ids.
         required_repository_tups = []
         for repo_info_dict in repo_info_dicts:
             if repo_info_dict not in all_repo_info_dicts:
                 all_repo_info_dicts.append( repo_info_dict )
             for repository_name, repo_info_tup in repo_info_dict.items():
                 description, \
                     repository_clone_url, \
                     changeset_revision, \
                     ctx_rev, \
                     repository_owner, \
                     repository_dependencies, \
                     tool_dependencies = \
                     suc.get_repo_info_tuple_contents( repo_info_tup )
                 if repository_dependencies:
                     for key, val in repository_dependencies.items():
                         if key in [ 'root_key', 'description' ]:
                             continue
                         repository_components_tuple = container_util.get_components_from_key( key )
                         components_list = suc.extract_components_from_tuple( repository_components_tuple )
                         # Skip listing a repository dependency if it is required only to compile a tool dependency
                         # defined for the dependent repository since in this case, the repository dependency is really
                         # a dependency of the dependent repository's contained tool dependency, and only if that
                         # tool dependency requires compilation.
                         # For backward compatibility to the 12/20/12 Galaxy release.
                         only_if_compiling_contained_td = 'False'
                         if len( components_list ) == 4:
                             only_if_compiling_contained_td = 'False'
                         elif len( components_list ) == 5:
                             only_if_compiling_contained_td = 'False'
                         if not asbool( only_if_compiling_contained_td ):
                             if components_list not in required_repository_tups:
                                 required_repository_tups.append( components_list )
                         for components_list in val:
                             try:
                                 only_if_compiling_contained_td = components_list[ 5 ]
                             except:
                                 only_if_compiling_contained_td = 'False'
                             # Skip listing a repository dependency if it is required only to compile a tool dependency
                             # defined for the dependent repository (see above comment).
                             if not asbool( only_if_compiling_contained_td ):
                                 if components_list not in required_repository_tups:
                                     required_repository_tups.append( components_list )
                 else:
                     # We have a single repository with no dependencies.
                     components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                     required_repository_tups.append( components_list )
             if required_repository_tups:
                 # The value of required_repository_tups is a list of tuples, so we need to encode it.
                 encoded_required_repository_tups = []
                 for required_repository_tup in required_repository_tups:
                     # Convert every item in required_repository_tup to a string.
                     required_repository_tup = [ str( item ) for item in required_repository_tup ]
                     encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
                 encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
                 encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
                 if suc.is_tool_shed_client( self.app ):
                     # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
                 pathspec = [ 'repository', 'get_required_repo_info_dict' ]
                 url = common_util.url_join( tool_shed_url, pathspec=pathspec )
                 # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
                 url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
                 request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
                 response = urllib2.urlopen( request ).read()
                 if response:
                     try:
                         required_repo_info_dict = json.loads( response )
                     except Exception, e:
                         log.exception( e )
                         return all_repo_info_dicts
                     required_repo_info_dicts = []
                     for k, v in required_repo_info_dict.items():
                         if k == 'repo_info_dicts':
                             encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                             for encoded_dict_str in encoded_dict_strings:
                                 decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                                 required_repo_info_dicts.append( decoded_dict )
                         else:
                             if k not in all_required_repo_info_dict:
                                 all_required_repo_info_dict[ k ] = v
                             else:
                                 if v and not all_required_repo_info_dict[ k ]:
                                     all_required_repo_info_dict[ k ] = v
                         if required_repo_info_dicts:
                             for required_repo_info_dict in required_repo_info_dicts:
                                 # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                                 # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                                 # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                                 # lists of discovered repository dependencies, but these lists will be empty in the
                                 # required_repo_info_dict since dependency discovery has not yet been performed for these
                                 # dictionaries.
                                 required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
                                 all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
                                 if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                                     all_repo_info_dicts.append( required_repo_info_dict )
                     all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
Beispiel #16
0
 def get_update_to_changeset_revision_and_ctx_rev(self, repository):
     """Return the changeset revision hash to which the repository can be updated."""
     changeset_revision_dict = {}
     tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(
         self.app, str(repository.tool_shed))
     params = '?name=%s&owner=%s&changeset_revision=%s' % (
         str(repository.name), str(repository.owner),
         str(repository.installed_changeset_revision))
     url = common_util.url_join(
         tool_shed_url,
         'repository/get_changeset_revision_and_ctx_rev%s' % params)
     try:
         encoded_update_dict = common_util.tool_shed_get(
             self.app, tool_shed_url, url)
         if encoded_update_dict:
             update_dict = encoding_util.tool_shed_decode(
                 encoded_update_dict)
             includes_data_managers = update_dict.get(
                 'includes_data_managers', False)
             includes_datatypes = update_dict.get('includes_datatypes',
                                                  False)
             includes_tools = update_dict.get('includes_tools', False)
             includes_tools_for_display_in_tool_panel = update_dict.get(
                 'includes_tools_for_display_in_tool_panel', False)
             includes_tool_dependencies = update_dict.get(
                 'includes_tool_dependencies', False)
             includes_workflows = update_dict.get('includes_workflows',
                                                  False)
             has_repository_dependencies = update_dict.get(
                 'has_repository_dependencies', False)
             has_repository_dependencies_only_if_compiling_contained_td = update_dict.get(
                 'has_repository_dependencies_only_if_compiling_contained_td',
                 False)
             changeset_revision = update_dict.get('changeset_revision',
                                                  None)
             ctx_rev = update_dict.get('ctx_rev', None)
         changeset_revision_dict[
             'includes_data_managers'] = includes_data_managers
         changeset_revision_dict['includes_datatypes'] = includes_datatypes
         changeset_revision_dict['includes_tools'] = includes_tools
         changeset_revision_dict[
             'includes_tools_for_display_in_tool_panel'] = includes_tools_for_display_in_tool_panel
         changeset_revision_dict[
             'includes_tool_dependencies'] = includes_tool_dependencies
         changeset_revision_dict['includes_workflows'] = includes_workflows
         changeset_revision_dict[
             'has_repository_dependencies'] = has_repository_dependencies
         changeset_revision_dict[
             'has_repository_dependencies_only_if_compiling_contained_td'] = has_repository_dependencies_only_if_compiling_contained_td
         changeset_revision_dict['changeset_revision'] = changeset_revision
         changeset_revision_dict['ctx_rev'] = ctx_rev
     except Exception, e:
         log.debug(
             "Error getting change set revision for update from the tool shed for repository '%s': %s"
             % (repository.name, str(e)))
         changeset_revision_dict['includes_data_managers'] = False
         changeset_revision_dict['includes_datatypes'] = False
         changeset_revision_dict['includes_tools'] = False
         changeset_revision_dict[
             'includes_tools_for_display_in_tool_panel'] = False
         changeset_revision_dict['includes_tool_dependencies'] = False
         changeset_revision_dict['includes_workflows'] = False
         changeset_revision_dict['has_repository_dependencies'] = False
         changeset_revision_dict[
             'has_repository_dependencies_only_if_compiling_contained_td'] = False
         changeset_revision_dict['changeset_revision'] = None
         changeset_revision_dict['ctx_rev'] = None
Beispiel #17
0
 def populate_containers_dict_from_repository_metadata( self, tool_shed_url, tool_path, repository, reinstalling=False,
                                                        required_repo_info_dicts=None ):
     """
     Retrieve necessary information from the received repository's metadata to populate the
     containers_dict for display.  This method is called only from Galaxy (not the tool shed)
     when displaying repository dependencies for installed repositories and when displaying
     them for uninstalled repositories that are being reinstalled.
     """
     metadata = repository.metadata
     if not metadata:
         # No metadata: return an empty container layout.
         return dict(datatypes=None,
                     invalid_tools=None,
                     readme_files_dict=None,
                     repository_dependencies=None,
                     tool_dependencies=None,
                     valid_tools=None,
                     workflows=None)
     # Proprietary datatypes and tools that failed to load.
     datatypes = metadata.get('datatypes')
     invalid_tools = metadata.get('invalid_tools')
     # README files: build them from the local repository when it is deactivated or
     # installed and we are not reinstalling; otherwise fetch them from the tool shed.
     readme_files_dict = None
     if repository.has_readme_files:
         installation_status = self.app.install_model.ToolShedRepository.installation_status
         if not reinstalling and repository.status in [installation_status.DEACTIVATED,
                                                       installation_status.INSTALLED]:
             readme_files_dict = readme_util.build_readme_files_dict(self.app,
                                                                     repository,
                                                                     repository.changeset_revision,
                                                                     repository.metadata, tool_path)
         else:
             # Since we're reinstalling, we need to send a request to the tool shed to get the README files.
             tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_shed_url)
             params = '?name=%s&owner=%s&changeset_revision=%s' % (str(repository.name),
                                                                   str(repository.owner),
                                                                   str(repository.installed_changeset_revision))
             url = common_util.url_join(tool_shed_url,
                                        'repository/get_readme_files%s' % params)
             raw_text = common_util.tool_shed_get(self.app, tool_shed_url, url)
             readme_files_dict = json.loads(raw_text)
     # Repository dependencies, split into installed and missing.
     installed_repository_dependencies, missing_repository_dependencies = \
         self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies(repository)
     # Tool dependencies; fold invalid ones in so they display as missing.
     repository_tool_dependencies = metadata.get('tool_dependencies')
     repository_invalid_tool_dependencies = metadata.get('invalid_tool_dependencies')
     if repository_invalid_tool_dependencies is not None:
         if repository_tool_dependencies is None:
             repository_tool_dependencies = {}
         repository_tool_dependencies.update(repository_invalid_tool_dependencies)
     repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
         self.get_installed_and_missing_tool_dependencies_for_installed_repository(repository,
                                                                                   repository_tool_dependencies)
     if reinstalling:
         installed_tool_dependencies, missing_tool_dependencies = \
             self.populate_tool_dependencies_dicts(tool_shed_url,
                                                   tool_path,
                                                   repository_installed_tool_dependencies,
                                                   repository_missing_tool_dependencies,
                                                   required_repo_info_dicts)
     else:
         installed_tool_dependencies = repository_installed_tool_dependencies
         missing_tool_dependencies = repository_missing_tool_dependencies
     # Valid tools and workflows.
     valid_tools = metadata.get('tools')
     workflows = metadata.get('workflows')
     # Data Managers.
     valid_data_managers = None
     invalid_data_managers = None
     data_managers_errors = None
     if 'data_manager' in metadata:
         data_manager_metadata = metadata['data_manager']
         valid_data_managers = data_manager_metadata.get('data_managers')
         invalid_data_managers = data_manager_metadata.get('invalid_data_managers')
         data_managers_errors = data_manager_metadata.get('messages')
     gucm = GalaxyUtilityContainerManager(self.app)
     return gucm.build_repository_containers(repository=repository,
                                             datatypes=datatypes,
                                             invalid_tools=invalid_tools,
                                             missing_repository_dependencies=missing_repository_dependencies,
                                             missing_tool_dependencies=missing_tool_dependencies,
                                             readme_files_dict=readme_files_dict,
                                             repository_dependencies=installed_repository_dependencies,
                                             tool_dependencies=installed_tool_dependencies,
                                             valid_tools=valid_tools,
                                             workflows=workflows,
                                             valid_data_managers=valid_data_managers,
                                             invalid_data_managers=invalid_data_managers,
                                             data_managers_errors=data_managers_errors,
                                             new_install=False,
                                             reinstalling=reinstalling)
def build_citable_url( host, repository ):
    """Return the citable 'view' URL for the received repository on the given host."""
    owner_name = repository.user.username
    return url_join( host, pathspec=[ 'view', owner_name, repository.name ] )