def get_or_create_tool_shed_repository( self, tool_shed, name, owner, changeset_revision ):
    """
    Return a tool shed repository database record defined by the combination of
    tool shed, repository name, repository owner and changeset_revision or
    installed_changeset_revision.  A new tool shed repository record will be
    created if one is not located.
    """
    install_model = self.app.install_model
    # We store the port in the database.
    tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
    # This method is used only in Galaxy, not the tool shed.
    repository = suc.get_repository_for_dependency_relationship( self.app, tool_shed, name, owner, changeset_revision )
    if not repository:
        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed )
        # NOTE(review): os.path.join is used to assemble a URL here; this yields
        # forward slashes on POSIX but backslashes on Windows -- confirm intended.
        repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
        ctx_rev = suc.get_ctx_rev( self.app, tool_shed_url, name, owner, changeset_revision )
        # No record found: create one in the NEW (not yet installed) state.
        repository = suc.create_or_update_tool_shed_repository( app=self.app,
                                                                name=name,
                                                                description=None,
                                                                installed_changeset_revision=changeset_revision,
                                                                ctx_rev=ctx_rev,
                                                                repository_clone_url=repository_clone_url,
                                                                metadata_dict={},
                                                                status=install_model.ToolShedRepository.installation_status.NEW,
                                                                current_changeset_revision=None,
                                                                owner=owner,
                                                                dist_to_shed=False )
    return repository
def create_temporary_tool_dependencies_config( self, tool_shed_url, name, owner, changeset_revision ):
    """
    Make a call to the tool shed to get the required repository's tool_dependencies.xml file.

    Returns the path of a temporary file holding the retrieved contents; the caller
    is responsible for removing it.  Raises an Exception when any required parameter
    is None or the tool shed returns no content.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
    if tool_shed_url is None or name is None or owner is None or changeset_revision is None:
        message = "Unable to retrieve required tool_dependencies.xml file from the Tool Shed because one or more of the "
        message += "following required parameters is None: tool_shed_url: %s, name: %s, owner: %s, changeset_revision: %s " % \
            ( str( tool_shed_url ), str( name ), str( owner ), str( changeset_revision ) )
        raise Exception( message )
    params = dict( name=name, owner=owner, changeset_revision=changeset_revision )
    pathspec = [ 'repository', 'get_tool_dependencies_config_contents' ]
    text = url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
    if text:
        # Write the contents to a temporary file on disk so it can be reloaded and parsed.
        # Fix: the previous code closed a NamedTemporaryFile (deleting it) and then
        # re-created the same path, which was racy; delete=False keeps the file alive.
        # Fix: pick the write mode from the payload type -- writing str in 'wb' mode
        # raises TypeError on Python 3.
        mode = 'w' if isinstance( text, str ) else 'wb'
        with tempfile.NamedTemporaryFile( mode=mode, prefix="tmp-toolshed-cttdc", delete=False ) as fh:
            tmp_filename = fh.name
            fh.write( text )
        return tmp_filename
    # Empty response: the revision defines no retrievable tool dependencies.
    # (The old trailing "return None" after this raise was unreachable and is removed.)
    message = "Unable to retrieve required tool_dependencies.xml file from the Tool Shed for revision "
    message += "%s of installed repository %s owned by %s." % ( str( changeset_revision ), str( name ), str( owner ) )
    raise Exception( message )
def category(self, trans, **kwd):
    """
    GET /api/tool_shed/category

    Display a list of repositories in the selected category.

    :param tool_shed_url: the url of the toolshed to get repositories from
    :param category_id: the category to get repositories from
    """
    tool_shed_url = urlunquote(kwd.get('tool_shed_url', ''))
    category_id = kwd.get('category_id', '')
    params = dict(installable=True)
    # Resolve the (possibly protocol-less) shed URL against the registry.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, tool_shed_url)
    url = util.build_url(tool_shed_url, pathspec=['api', 'categories', category_id, 'repositories'], params=params)
    repositories = []
    return_json = json.loads(util.url_get(url))
    for repository in return_json['repositories']:
        # Rewrite each repository's URL to point back at this Galaxy proxy endpoint.
        api_url = web.url_for(controller='api/tool_shed',
                              action='repository',
                              tool_shed_url=urlquote(tool_shed_url),
                              repository_id=repository['id'],
                              qualified=True)
        repository['url'] = api_url
        repositories.append(repository)
    return_json['repositories'] = repositories
    return return_json
def get_or_create_tool_shed_repository( self, tool_shed, name, owner, changeset_revision ):
    """
    Locate the tool shed repository record matching the given tool shed,
    name, owner and (installed_)changeset_revision.  When no record exists,
    a new one is created in the NEW installation state and returned.
    """
    # Only the host (and port) are stored in the database, so drop the protocol.
    shed_without_protocol = common_util.remove_protocol_from_tool_shed_url( tool_shed )
    # Galaxy-side lookup; this method is never invoked from the tool shed itself.
    repository = suc.get_repository_for_dependency_relationship( self.app,
                                                                 shed_without_protocol,
                                                                 name,
                                                                 owner,
                                                                 changeset_revision )
    if repository:
        return repository
    resolved_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, shed_without_protocol )
    clone_url = os.path.join( resolved_shed_url, 'repos', owner, name )
    ctx_rev = suc.get_ctx_rev( self.app, resolved_shed_url, name, owner, changeset_revision )
    status_new = self.app.install_model.ToolShedRepository.installation_status.NEW
    return suc.create_or_update_tool_shed_repository( app=self.app,
                                                      name=name,
                                                      description=None,
                                                      installed_changeset_revision=changeset_revision,
                                                      ctx_rev=ctx_rev,
                                                      repository_clone_url=clone_url,
                                                      metadata_dict={},
                                                      status=status_new,
                                                      current_changeset_revision=None,
                                                      owner=owner,
                                                      dist_to_shed=False )
def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository):
    """
    Send a request to the appropriate tool shed to retrieve the dictionary of
    repository dependencies defined for the received repository which is
    installed into Galaxy.  This method is called only from Galaxy.

    Returns the decoded dependency dictionary, or '' on request failure or
    an effectively empty response.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str(repository.tool_shed))
    params = dict(name=str(repository.name), owner=str(repository.owner), changeset_revision=str(repository.changeset_revision))
    pathspec = ['repository', 'get_repository_dependencies']
    try:
        raw_text = url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
    except Exception as e:
        log.error( "The URL\n%s\nraised the exception:\n%s\n", build_url(tool_shed_url, pathspec=pathspec, params=params), str(e))
        return ''
    if len(raw_text) > 2:
        # Anything longer than an empty JSON container carries an encoded payload.
        encoded_text = json.loads(raw_text)
        text = encoding_util.tool_shed_decode(encoded_text)
    else:
        text = ''
    return text
def from_workflow_step( Class, trans, step, **kwds ):
    """
    Build a ToolModule from a saved workflow step, collecting user-facing
    messages when the exact tool version or originating tool shed is no
    longer available.
    """
    # Resolve the stored tool id against the toolbox (handles migrated/renamed ids).
    tool_id = trans.app.toolbox.get_tool_id( step.tool_id ) or step.tool_id
    tool_version = step.tool_version
    module = super( ToolModule, Class ).from_workflow_step( trans, step, tool_id=tool_id, tool_version=tool_version )
    module.workflow_outputs = step.workflow_outputs
    module.post_job_actions = {}
    for pja in step.post_job_actions:
        module.post_job_actions[pja.action_type] = pja
    if module.tool:
        message = ""
        if step.tool_id != module.tool_id:
            # This means the exact version of the tool is not installed. We inform the user.
            old_tool_shed = step.tool_id.split( "/repos/" )[0]
            if old_tool_shed not in tool_id:
                # Only display the following warning if the tool comes from a different tool shed
                old_tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, old_tool_shed )
                if not old_tool_shed_url:
                    # a tool from a different tool_shed has been found, but the original tool shed has been deactivated
                    old_tool_shed_url = "http://" + old_tool_shed  # let's just assume it's either http, or a http is forwarded to https.
                old_url = old_tool_shed_url + "/view/%s/%s/" % (module.tool.repository_owner, module.tool.repository_name)
                new_url = module.tool.tool_shed_repository.get_sharable_url( module.tool.app ) + '/%s/' % module.tool.tool_shed_repository.changeset_revision
                new_tool_shed_url = new_url.split( "/view" )[0]
                message += "The tool \'%s\', version %s by the owner %s installed from <a href=\"%s\" target=\"_blank\">%s</a> is not available. " % (module.tool.name, tool_version, module.tool.repository_owner, old_url, old_tool_shed_url)
                message += "A derivation of this tool installed from <a href=\"%s\" target=\"_blank\">%s</a> will be used instead. " % (new_url, new_tool_shed_url)
        if step.tool_version and (step.tool_version != module.tool.version):
            message += "<span title=\"tool id '%s'\">Using version '%s' instead of version '%s' specified in this workflow. \n" % (tool_id, module.tool.version, step.tool_version)
        if message:
            log.debug(message)
            module.version_changes.append(message)
    return module
def show(self, trans, **kwd):
    """
    GET /api/tool_shed/contents

    Display a list of categories in the selected toolshed.

    :param tool_shed_url: the url of the toolshed to get categories from
    """
    tool_shed_url = urlunquote(kwd.get('tool_shed_url', ''))
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url)
    url = util.build_url(tool_shed_url, pathspec=['api', 'categories'])
    categories = []
    try:
        for category in json.loads(util.url_get(url)):
            # Rewrite each category URL to point back at this Galaxy proxy endpoint.
            api_url = web.url_for(controller='api/tool_shed',
                                  action='category',
                                  tool_shed_url=urlquote(tool_shed_url),
                                  category_id=category['id'],
                                  qualified=True)
            category['url'] = api_url
            categories.append(category)
    except Exception:
        # Any failure talking to / parsing the shed is surfaced as a 404.
        raise exceptions.ObjectNotFound("Tool Shed %s is not responding." % tool_shed_url)
    return categories
def from_workflow_step(Class, trans, step, **kwds):
    """
    Build a ToolModule from a saved workflow step, recording version-change
    messages when the exact tool/version is unavailable and warning when the
    tool is missing entirely.
    """
    # Resolve the stored tool id against the toolbox (handles migrated/renamed ids).
    tool_id = trans.app.toolbox.get_tool_id(step.tool_id) or step.tool_id
    tool_version = step.tool_version
    module = super(ToolModule, Class).from_workflow_step(trans, step, tool_id=tool_id, tool_version=tool_version)
    module.workflow_outputs = step.workflow_outputs
    module.post_job_actions = {}
    for pja in step.post_job_actions:
        module.post_job_actions[pja.action_type] = pja
    if module.tool:
        message = ""
        if step.tool_id != module.tool_id:
            # This means the exact version of the tool is not installed. We inform the user.
            old_tool_shed = step.tool_id.split("/repos/")[0]
            if old_tool_shed not in tool_id:
                # Only display the following warning if the tool comes from a different tool shed
                old_tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, old_tool_shed)
                if not old_tool_shed_url:
                    # a tool from a different tool_shed has been found, but the original tool shed has been deactivated
                    old_tool_shed_url = "http://" + old_tool_shed  # let's just assume it's either http, or a http is forwarded to https.
                old_url = old_tool_shed_url + "/view/%s/%s/" % (module.tool.repository_owner, module.tool.repository_name)
                new_url = module.tool.sharable_url + '/%s/' % module.tool.changeset_revision
                new_tool_shed_url = new_url.split("/view")[0]
                message += "The tool \'%s\', version %s by the owner %s installed from <a href=\"%s\" target=\"_blank\">%s</a> is not available. " % (module.tool.name, tool_version, module.tool.repository_owner, old_url, old_tool_shed_url)
                message += "A derivation of this tool installed from <a href=\"%s\" target=\"_blank\">%s</a> will be used instead. " % (new_url, new_tool_shed_url)
        if step.tool_version and (step.tool_version != module.tool.version):
            message += "<span title=\"tool id '%s'\">Using version '%s' instead of version '%s' specified in this workflow. \n" % (tool_id, module.tool.version, step.tool_version)
        if message:
            log.debug(message)
            module.version_changes.append(message)
    else:
        log.warning("The tool '%s' is missing. Cannot build workflow module." % tool_id)
    return module
def check_for_tool_dependencies( self, trans, migration_stage ):
    """
    Inspect the 000x_tools.xml file for *migration_stage* and, for each repository
    it lists, ask the tool shed for the repository's defined tool dependencies.

    Returns a list of ( repository_name, [ ( name, version, type, readme ), ... ] )
    tuples; empty when the tool shed is not registered.
    """
    # Get the 000x_tools.xml file associated with migration_stage.
    tools_xml_file_path = os.path.abspath( os.path.join( trans.app.config.root, 'scripts', 'migrate_tools', '%04d_tools.xml' % migration_stage ) )
    tree = galaxy.util.parse_xml( tools_xml_file_path )
    root = tree.getroot()
    tool_shed = root.get( 'name' )
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed )
    repo_name_dependency_tups = []
    if shed_url:
        for elem in root:
            if elem.tag == 'repository':
                tool_dependencies = []
                tool_dependencies_dict = {}
                repository_name = elem.get( 'name' )
                changeset_revision = elem.get( 'changeset_revision' )
                # Migration-stage repositories are requested with the 'devteam' owner.
                params = dict( name=repository_name, owner='devteam', changeset_revision=changeset_revision )
                pathspec = [ 'repository', 'get_tool_dependencies' ]
                # NOTE(review): self.app is used here while trans.app is used above --
                # presumably the same application object; confirm.
                text = url_get( shed_url, password_mgr=self.app.tool_shed_registry.url_auth( shed_url ), pathspec=pathspec, params=params )
                if text:
                    tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                    for dependency_key, requirements_dict in tool_dependencies_dict.items():
                        tool_dependency_name = requirements_dict[ 'name' ]
                        tool_dependency_version = requirements_dict[ 'version' ]
                        tool_dependency_type = requirements_dict[ 'type' ]
                        tool_dependency_readme = requirements_dict.get( 'readme', '' )
                        tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
                repo_name_dependency_tups.append( ( repository_name, tool_dependencies ) )
    return repo_name_dependency_tups
def update_repository_record(self, repository, updated_metadata_dict, updated_changeset_revision, updated_ctx_rev):
    """
    Update a tool_shed_repository database record with new information retrieved from the
    Tool Shed.  This happens when updating an installed repository to a new changeset revision.
    """
    repository.metadata = updated_metadata_dict
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed)
    # Remove dependency relationships that no longer apply under the new metadata.
    suc.clean_dependency_relationships(self.app, updated_metadata_dict, repository, tool_shed_url)
    # Update the repository.changeset_revision column in the database.
    repository.changeset_revision = updated_changeset_revision
    repository.ctx_rev = updated_ctx_rev
    # Update the repository.tool_shed_status column in the database.
    tool_shed_status_dict = repository_util.get_tool_shed_status_for_installed_repository( self.app, repository)
    if tool_shed_status_dict:
        repository.tool_shed_status = tool_shed_status_dict
    else:
        repository.tool_shed_status = None
    self.app.install_model.context.add(repository)
    self.app.install_model.context.flush()
    # Refresh so the returned object reflects the state actually persisted.
    self.app.install_model.context.refresh(repository)
    return repository
def get_sharable_url( self, app ):
    """
    Return the public 'view' URL for this repository on its tool shed, or the
    unresolved registry value when the shed is not in the registry.
    """
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, self.tool_shed )
    if not shed_url:
        # The shed could not be resolved (e.g. deactivated); pass the value through.
        return shed_url
    # urljoin drops the final path component of a base that lacks a trailing
    # slash, so make sure one is present before joining.
    return urljoin( shed_url + '/', 'view/%s/%s' % ( self.owner, self.name ) )
def get_sharable_url( self, app ):
    """
    Return the public 'view' URL for this repository on its tool shed, or the
    unresolved registry value when the shed is not in the registry.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, self.tool_shed )
    if tool_shed_url:
        # Append a slash to the tool shed URL, because urlparse.urljoin will eliminate
        # the last part of a URL if it does not end with a forward slash.
        tool_shed_url = '%s/' % tool_shed_url
        return urljoin( tool_shed_url, 'view/%s/%s' % ( self.owner, self.name ) )
    return tool_shed_url
def get_repository_type_from_tool_shed(app, tool_shed_url, name, owner):
    """
    Ask the tool shed for the type of the repository identified by *name* and
    *owner*; returns the raw response body.
    """
    resolved_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    auth = app.tool_shed_registry.url_auth(resolved_url)
    return util.url_get(resolved_url,
                        password_mgr=auth,
                        pathspec=['repository', 'get_repository_type'],
                        params={'name': name, 'owner': owner})
def get_ctx_rev(app, tool_shed_url, name, owner, changeset_revision):
    """
    Ask the tool shed for the ctx_rev of the repository identified by the
    name / owner / changeset revision combination; returns the raw response.
    """
    resolved_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    query = {'name': name, 'owner': owner, 'changeset_revision': changeset_revision}
    return util.url_get(resolved_url,
                        password_mgr=app.tool_shed_registry.url_auth(resolved_url),
                        pathspec=['repository', 'get_ctx_rev'],
                        params=query)
def get_ctx_rev(app, tool_shed_url, name, owner, changeset_revision):
    """
    Send a request to the tool shed to retrieve the ctx_rev for a repository defined
    by the combination of a name, owner and changeset revision.
    """
    # Resolve the (possibly protocol-less) shed URL against the registry.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    params = dict(name=name, owner=owner, changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_ctx_rev']
    ctx_rev = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
    return ctx_rev
def get_repository_type_from_tool_shed(app, tool_shed_url, name, owner):
    """
    Send a request to the tool shed to retrieve the type for a repository defined
    by the combination of a name and owner.
    """
    # Resolve the (possibly protocol-less) shed URL against the registry.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    params = dict(name=name, owner=owner)
    pathspec = ['repository', 'get_repository_type']
    repository_type = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
    return repository_type
def get_tool_dependency_definition_metadata_from_tool_shed(app, tool_shed_url, name, owner):
    """
    Fetch from the tool shed the current metadata for the repository of type
    tool_dependency_definition identified by *name* and *owner*.
    """
    resolved_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    auth = app.tool_shed_registry.url_auth(resolved_url)
    return util.url_get(resolved_url,
                        password_mgr=auth,
                        pathspec=['repository', 'get_tool_dependency_definition_metadata'],
                        params={'name': name, 'owner': owner})
def get_tool_dependency_definition_metadata_from_tool_shed(app, tool_shed_url, name, owner):
    """
    Send a request to the tool shed to retrieve the current metadata for a repository
    of type tool_dependency_definition defined by the combination of a name and owner.
    """
    # Resolve the (possibly protocol-less) shed URL against the registry.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    params = dict(name=name, owner=owner)
    pathspec = ['repository', 'get_tool_dependency_definition_metadata']
    metadata = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
    return metadata
def get_updated_changeset_revisions_from_tool_shed(app, tool_shed_url, name, owner, changeset_revision):
    """
    Get all appropriate newer changeset revisions for the repository defined by
    the received tool_shed_url / name / owner combination.
    """
    resolved_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    if resolved_url is None or name is None or owner is None or changeset_revision is None:
        raise Exception(
            "Unable to get updated changeset revisions from the Tool Shed because one or more of the following "
            "required parameters is None: tool_shed_url: %s, name: %s, owner: %s, changeset_revision: %s " %
            (str(resolved_url), str(name), str(owner), str(changeset_revision)))
    query = dict(name=name, owner=owner, changeset_revision=changeset_revision)
    return util.url_get(resolved_url,
                        password_mgr=app.tool_shed_registry.url_auth(resolved_url),
                        pathspec=['repository', 'updated_changeset_revisions'],
                        params=query)
def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ):
    """
    Send a request to the appropriate tool shed to retrieve the dictionary of repository
    dependencies defined for the received repository which is installed into Galaxy.  This
    method is called only from Galaxy.

    Returns '' when the request fails.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) )
    params = dict( name=str( repository.name ),
                   owner=str( repository.owner ),
                   changeset_revision=str( repository.changeset_revision ) )
    pathspec = [ 'repository', 'get_repository_dependencies' ]
    try:
        raw_text = common_util.tool_shed_get( app, tool_shed_url, pathspec=pathspec, params=params )
    except Exception as e:
        # Fix: the old "except Exception, e" form is a SyntaxError on Python 3.
        log.error( "The URL\n%s\nraised the exception:\n%s\n",
                   common_util.url_join( tool_shed_url, pathspec=pathspec, params=params ),
                   str( e ) )
        return ''
    # Fix: previously the function fell off the end after a successful fetch and
    # returned None, discarding raw_text entirely.
    # NOTE(review): sibling implementations JSON-decode and tool_shed_decode the
    # payload; returning the raw response here -- confirm callers' expectations.
    return raw_text
def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ):
    """
    Send a request to the appropriate tool shed to retrieve the dictionary of repository
    dependencies defined for the received repository which is installed into Galaxy.  This
    method is called only from Galaxy.

    Returns '' when the request fails.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) )
    params = dict( name=str( repository.name ),
                   owner=str( repository.owner ),
                   changeset_revision=str( repository.changeset_revision ) )
    pathspec = [ 'repository', 'get_repository_dependencies' ]
    try:
        raw_text = common_util.tool_shed_get( app, tool_shed_url, pathspec=pathspec, params=params )
    except Exception as e:
        # Fix: "except Exception, e" and the print statement are Python 2 only syntax.
        print( "The URL\n%s\nraised the exception:\n%s\n" % ( common_util.url_join( tool_shed_url, pathspec=pathspec, params=params ), str( e ) ) )
        return ''
    # Fix: previously the function fell off the end after a successful fetch and
    # returned None, discarding raw_text entirely.
    # NOTE(review): sibling implementations JSON-decode and tool_shed_decode the
    # payload; returning the raw response here -- confirm callers' expectations.
    return raw_text
def shed_category( self, trans, **kwd ):
    """
    GET /api/tool_shed_repositories/shed_category

    Display a list of repositories in the selected category.

    :param tool_shed_url: the url of the toolshed to get repositories from
    :param category_id: the category to get repositories from
    """
    tool_shed_url = kwd.get( 'tool_shed_url', '' )
    category_id = kwd.get( 'category_id', '' )
    # Resolve the (possibly protocol-less) shed URL against the registry.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
    url = util.build_url( tool_shed_url, pathspec=[ 'api', 'categories', category_id, 'repositories' ] )
    # Proxy the remote tool shed's JSON response straight through.
    category = json.loads( util.url_get( url ) )
    return category
def get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision ):
    """
    Get all appropriate newer changeset revisions for the repository defined by
    the received tool_shed_url / name / owner combination.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
    if tool_shed_url is None or name is None or owner is None or changeset_revision is None:
        message = "Unable to get updated changeset revisions from the Tool Shed because one or more of the following "
        message += "required parameters is None: tool_shed_url: %s, name: %s, owner: %s, changeset_revision: %s " % \
            ( str( tool_shed_url ), str( name ), str( owner ), str( changeset_revision ) )
        raise Exception( message )
    params = dict( name=name, owner=owner, changeset_revision=changeset_revision )
    pathspec = [ 'repository', 'updated_changeset_revisions' ]
    text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
    return text
def load_from_element( self, elem, tool_path ):
    """
    Populate this data manager from a <data_manager> XML element, resolving the
    tool it wraps either from a direct tool_file path or from a nested <tool>
    element that references a tool shed repository.
    """
    assert elem.tag == 'data_manager', 'A data manager configuration must have a "data_manager" tag as the root. "%s" is present' % ( elem.tag )
    self.declared_id = elem.get( 'id', None )
    self.guid = elem.get( 'guid', None )
    path = elem.get( 'tool_file', None )
    self.version = elem.get( 'version', self.version )
    tool_shed_repository_id = None
    tool_guid = None
    if path is None:
        tool_elem = elem.find( 'tool' )
        assert tool_elem is not None, "Error loading tool for data manager. Make sure that a tool_file attribute or a tool tag set has been defined:\n%s" % ( util.xml_to_string( elem ) )
        path = tool_elem.get( "file", None )
        tool_guid = tool_elem.get( "guid", None )
        # need to determine repository info so that dependencies will work correctly
        tool_shed_url = tool_elem.find( 'tool_shed' ).text
        # Handle protocol changes.
        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.data_managers.app, tool_shed_url )
        # The protocol is not stored in the database.
        tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url )
        repository_name = tool_elem.find( 'repository_name' ).text
        repository_owner = tool_elem.find( 'repository_owner' ).text
        installed_changeset_revision = tool_elem.find( 'installed_changeset_revision' ).text
        self.tool_shed_repository_info_dict = dict( tool_shed=tool_shed,
                                                    name=repository_name,
                                                    owner=repository_owner,
                                                    installed_changeset_revision=installed_changeset_revision )
        tool_shed_repository = \
            suc.get_installed_repository( self.data_managers.app,
                                          tool_shed=tool_shed,
                                          name=repository_name,
                                          owner=repository_owner,
                                          installed_changeset_revision=installed_changeset_revision )
        if tool_shed_repository is None:
            log.warning( 'Could not determine tool shed repository from database. This should only ever happen when running tests.' )
            # we'll set tool_path manually here from shed_conf_file
            tool_shed_repository_id = None
            try:
                tool_path = util.parse_xml( elem.get( 'shed_conf_file' ) ).getroot().get( 'tool_path', tool_path )
            except Exception as e:
                # Fix: "except Exception, e" is a SyntaxError on Python 3.
                log.error( 'Error determining tool_path for Data Manager during testing: %s', e )
        else:
            tool_shed_repository_id = self.data_managers.app.security.encode_id( tool_shed_repository.id )
        # use shed_conf_file to determine tool_path
        shed_conf_file = elem.get( "shed_conf_file", None )
        if shed_conf_file:
            shed_conf = self.data_managers.app.toolbox.get_shed_config_dict_by_filename( shed_conf_file, None )
            if shed_conf:
                tool_path = shed_conf.get( "tool_path", tool_path )
def get_readme_files_dict_for_display(app, tool_shed_url, repo_info_dict):
    """
    Return a dictionary of README files contained in the single repository being installed so
    they can be displayed on the tool panel section selection page.
    """
    # repo_info_dict holds exactly one repository entry; grab its name.
    name = next(iter(repo_info_dict))
    repo_info_tuple = repo_info_dict[name]
    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
        repository_util.get_repo_info_tuple_contents(repo_info_tuple)
    # Handle changing HTTP protocols over time.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, tool_shed_url)
    params = dict(name=name, owner=repository_owner, changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_readme_files']
    # NOTE(review): this variant passes auth= where sibling code passes
    # password_mgr= -- confirm which keyword the url_get in scope here expects.
    raw_text = url_get(tool_shed_url, auth=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
    readme_files_dict = json.loads(raw_text)
    return readme_files_dict
def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ):
    """
    Send a request to the appropriate tool shed to retrieve the dictionary of repository
    dependencies defined for the received repository which is installed into Galaxy.  This
    method is called only from Galaxy.

    Returns '' when the request fails.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) )
    params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ),
                                                           str( repository.owner ),
                                                           str( repository.changeset_revision ) )
    url = common_util.url_join( tool_shed_url, 'repository/get_repository_dependencies%s' % params )
    try:
        raw_text = common_util.tool_shed_get( app, tool_shed_url, url )
    except Exception as e:
        # Fix: "except Exception, e" and the print statement are Python 2 only syntax.
        print( "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) ) )
        return ''
    # Fix: previously the function fell off the end after a successful fetch and
    # returned None, discarding raw_text entirely.
    # NOTE(review): sibling implementations JSON-decode and tool_shed_decode the
    # payload; returning the raw response here -- confirm callers' expectations.
    return raw_text
def get_readme_files_dict_for_display( app, tool_shed_url, repo_info_dict ):
    """
    Return a dictionary of README files contained in the single repository being installed so
    they can be displayed on the tool panel section selection page.
    """
    # repo_info_dict holds exactly one repository entry; grab its name.
    name = next(iter(repo_info_dict))
    repo_info_tuple = repo_info_dict[ name ]
    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
        repository_util.get_repo_info_tuple_contents( repo_info_tuple )
    # Handle changing HTTP protocols over time.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
    params = dict( name=name, owner=repository_owner, changeset_revision=changeset_revision )
    pathspec = [ 'repository', 'get_readme_files' ]
    raw_text = url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
    readme_files_dict = json.loads( raw_text )
    return readme_files_dict
def shed_categories( self, trans, **kwd ):
    """
    GET /api/tool_shed_repositories/shed_categories

    Display a list of categories in the selected toolshed.

    :param tool_shed_url: the url of the toolshed to get categories from
    """
    tool_shed_url = kwd.get( 'tool_shed_url', '' )
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
    url = util.build_url( tool_shed_url, pathspec=[ 'api', 'categories' ] )
    categories = json.loads( util.url_get( url ) )
    repositories = []
    url = util.build_url( tool_shed_url, pathspec=[ 'api', 'repositories' ] )
    for repo in json.loads( util.url_get( url ) ):
        # Present each repository as a value/label pair for UI selection.
        repositories.append( dict( value=repo[ 'id' ], label='%s/%s' % ( repo[ 'owner' ], repo[ 'name' ] ) ) )
    return { 'categories': categories,
             'repositories': repositories }
def get_readme_files_dict_for_display( app, tool_shed_url, repo_info_dict ):
    """
    Return a dictionary of README files contained in the single repository being installed so
    they can be displayed on the tool panel section selection page.
    """
    # Fix: dict.keys() is a non-subscriptable view on Python 3, so keys()[ 0 ]
    # raises TypeError; next(iter(...)) grabs the single repository name instead
    # (consistent with the sibling implementations of this function).
    name = next( iter( repo_info_dict ) )
    repo_info_tuple = repo_info_dict[ name ]
    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
        suc.get_repo_info_tuple_contents( repo_info_tuple )
    # Handle changing HTTP protocols over time.
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
    params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, repository_owner, changeset_revision )
    url = common_util.url_join( tool_shed_url, 'repository/get_readme_files%s' % params )
    raw_text = common_util.tool_shed_get( app, tool_shed_url, url )
    readme_files_dict = json.from_json_string( raw_text )
    return readme_files_dict
def get_update_to_changeset_revision_and_ctx_rev( self, repository ):
    """Return the changeset revision hash to which the repository can be updated."""
    # Flags that default to False both on error and when absent from the response.
    flag_keys = [ 'includes_data_managers',
                  'includes_datatypes',
                  'includes_tools',
                  'includes_tools_for_display_in_tool_panel',
                  'includes_tool_dependencies',
                  'includes_workflows',
                  'has_repository_dependencies',
                  'has_repository_dependencies_only_if_compiling_contained_td' ]
    changeset_revision_dict = {}
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( repository.tool_shed ) )
    params = dict( name=str( repository.name ),
                   owner=str( repository.owner ),
                   changeset_revision=str( repository.installed_changeset_revision ) )
    pathspec = [ 'repository', 'get_changeset_revision_and_ctx_rev' ]
    try:
        encoded_update_dict = util.url_get( tool_shed_url,
                                            password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ),
                                            pathspec=pathspec,
                                            params=params )
        if encoded_update_dict:
            update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
            for key in flag_keys:
                changeset_revision_dict[ key ] = update_dict.get( key, False )
            changeset_revision_dict[ 'changeset_revision' ] = update_dict.get( 'changeset_revision', None )
            changeset_revision_dict[ 'ctx_rev' ] = update_dict.get( 'ctx_rev', None )
    except Exception as e:
        log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
        # On any failure fall back to an all-False / None dictionary.
        for key in flag_keys:
            changeset_revision_dict[ key ] = False
        changeset_revision_dict[ 'changeset_revision' ] = None
        changeset_revision_dict[ 'ctx_rev' ] = None
    return changeset_revision_dict
def get_update_to_changeset_revision_and_ctx_rev( self, repository ):
    """
    Return a dictionary describing the changeset revision to which the received
    installed repository can be updated.

    The returned dictionary contains several boolean ``includes_*`` / ``has_*``
    flags plus ``changeset_revision`` and ``ctx_rev``.  On any error contacting
    or decoding the tool shed response, the flags are all False and the revision
    values are None.  If the tool shed returns an empty response, an empty
    dictionary is returned.

    :param repository: an installed tool_shed_repository database record
    :return: dict keyed by the update flags and revision identifiers
    """
    # Boolean-valued keys mirrored from the tool shed's response; the two
    # revision keys below default to None instead of False.
    flag_keys = [ 'includes_data_managers',
                  'includes_datatypes',
                  'includes_tools',
                  'includes_tools_for_display_in_tool_panel',
                  'includes_tool_dependencies',
                  'includes_workflows',
                  'has_repository_dependencies',
                  'has_repository_dependencies_only_if_compiling_contained_td' ]
    changeset_revision_dict = {}
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( repository.tool_shed ) )
    params = dict( name=str( repository.name ),
                   owner=str( repository.owner ),
                   changeset_revision=str( repository.installed_changeset_revision ) )
    pathspec = [ 'repository', 'get_changeset_revision_and_ctx_rev' ]
    try:
        encoded_update_dict = util.url_get( tool_shed_url,
                                            password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ),
                                            pathspec=pathspec,
                                            params=params )
        if encoded_update_dict:
            update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
            for key in flag_keys:
                changeset_revision_dict[ key ] = update_dict.get( key, False )
            changeset_revision_dict[ 'changeset_revision' ] = update_dict.get( 'changeset_revision', None )
            changeset_revision_dict[ 'ctx_rev' ] = update_dict.get( 'ctx_rev', None )
    except Exception as e:
        log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
        # Reset every key to its safe default so callers always see a full dict.
        for key in flag_keys:
            changeset_revision_dict[ key ] = False
        changeset_revision_dict[ 'changeset_revision' ] = None
        changeset_revision_dict[ 'ctx_rev' ] = None
    return changeset_revision_dict
def shed_category(self, trans, **kwd):
    """
    GET /api/tool_shed_repositories/shed_category

    Display a list of repositories in the selected category.

    :param tool_shed_url: the url of the toolshed to get repositories from
    :param category_id: the category to get repositories from
    """
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, kwd.get('tool_shed_url', ''))
    selected_category = kwd.get('category_id', '')
    # Ask the remote tool shed for the repositories belonging to the category.
    endpoint = util.build_url(shed_url, pathspec=['api', 'categories', selected_category, 'repositories'])
    return json.loads(util.url_get(endpoint))
def shed_categories(self, trans, **kwd):
    """
    GET /api/tool_shed_repositories/shed_categories

    Display a list of categories in the selected toolshed.

    :param tool_shed_url: the url of the toolshed to get categories from
    """
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, kwd.get('tool_shed_url', ''))
    # Fetch both the category listing and the full repository listing from the shed.
    categories = json.loads(util.url_get(util.build_url(shed_url, pathspec=['api', 'categories'])))
    repo_listing = json.loads(util.url_get(util.build_url(shed_url, pathspec=['api', 'repositories'])))
    # Present each repository as a value/label pair of id and "owner/name".
    repositories = [dict(value=repo['id'], label='%s/%s' % (repo['owner'], repo['name']))
                    for repo in repo_listing]
    return {'categories': categories, 'repositories': repositories}
def update_repository_record( self, repository, updated_metadata_dict, updated_changeset_revision, updated_ctx_rev ):
    """
    Update a tool_shed_repository database record with new information retrieved from the
    Tool Shed.  This happens when updating an installed repository to a new changeset revision.

    :param repository: the installed tool_shed_repository database record to update
    :param updated_metadata_dict: metadata for the new changeset revision
    :param updated_changeset_revision: the new changeset revision hash
    :param updated_ctx_rev: the new mercurial context revision
    :return: the updated (and refreshed) repository record
    """
    repository.metadata = updated_metadata_dict
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed )
    # Remove repository-dependency associations made obsolete by the new metadata.
    suc.clean_dependency_relationships( self.app, updated_metadata_dict, repository, shed_url )
    # Record the new revision identifiers.
    repository.changeset_revision = updated_changeset_revision
    repository.ctx_rev = updated_ctx_rev
    # Refresh the cached tool shed status; store None when the shed reports nothing.
    status_dict = suc.get_tool_shed_status_for_installed_repository( self.app, repository )
    repository.tool_shed_status = status_dict if status_dict else None
    context = self.app.install_model.context
    context.add( repository )
    context.flush()
    context.refresh( repository )
    return repository
def show(self, trans, **kwd):
    """
    GET /api/tool_shed/contents

    Display a list of categories in the selected toolshed.

    :param tool_shed_url: the url of the toolshed to get categories from
    """
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, urlunquote(kwd.get('tool_shed_url', '')))
    listing = json.loads(util.url_get(util.build_url(shed_url, pathspec=['api', 'categories'])))
    categories = []
    for entry in listing:
        # Annotate each category with a qualified local API URL for drilling down.
        entry['url'] = web.url_for(controller='api/tool_shed',
                                   action='category',
                                   tool_shed_url=urlquote(shed_url),
                                   category_id=entry['id'],
                                   qualified=True)
        categories.append(entry)
    return categories
def get_repository_dependencies_for_installed_tool_shed_repository(self, app, repository):
    """
    Send a request to the appropriate tool shed to retrieve the dictionary of repository
    dependencies defined for the received repository which is installed into Galaxy.  This
    method is called only from Galaxy.

    :param app: the Galaxy application
    :param repository: an installed tool_shed_repository database record
    :return: the decoded dependency dictionary, or '' on error / empty response
    """
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, str(repository.tool_shed))
    params = dict(name=str(repository.name),
                  owner=str(repository.owner),
                  changeset_revision=str(repository.changeset_revision))
    pathspec = ['repository', 'get_repository_dependencies']
    try:
        raw_text = url_get(shed_url,
                           password_mgr=app.tool_shed_registry.url_auth(shed_url),
                           pathspec=pathspec,
                           params=params)
    except Exception as e:
        log.error("The URL\n%s\nraised the exception:\n%s\n",
                  build_url(shed_url, pathspec=pathspec, params=params), str(e))
        return ''
    # A response of two characters or fewer (e.g. '{}') carries no dependency data.
    if len(raw_text) <= 2:
        return ''
    return encoding_util.tool_shed_decode(json.loads(raw_text))
def category(self, trans, **kwd):
    """
    GET /api/tool_shed/category

    Display a list of repositories in the selected category.

    :param tool_shed_url: the url of the toolshed to get repositories from
    :param category_id: the category to get repositories from
    :param sort_key: the field by which the repositories should be sorted
    :param sort_order: ascending or descending sort
    :param page: the page number to return
    """
    query_params = dict(installable=True,
                        sort_order=kwd.get('sort_order', 'asc'),
                        sort_key=kwd.get('sort_key', 'name'),
                        page=kwd.get('page', 1))
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, urlunquote(kwd.get('tool_shed_url', '')))
    endpoint = util.build_url(shed_url,
                              pathspec=['api', 'categories', kwd.get('category_id', ''), 'repositories'],
                              params=query_params)
    return_json = json.loads(util.url_get(endpoint))
    annotated = []
    for repo in return_json['repositories']:
        # Give each repository a qualified local API URL for fetching its details.
        repo['url'] = web.url_for(controller='api/tool_shed',
                                  action='repository',
                                  tool_shed_url=urlquote(shed_url),
                                  repository_id=repo['id'],
                                  qualified=True)
        annotated.append(repo)
    return_json['repositories'] = annotated
    return return_json
def repair_tool_shed_repository( self, repository, repo_info_dict ):
    """
    Attempt to bring the received installed repository back to a healthy state.

    :param repository: an installed tool_shed_repository database record
    :param repo_info_dict: the repo_info_dict for the repository
    :return: repair_dict mapping repository name -> error message(s) for anything
             that could not be repaired
    """
    def add_repair_dict_entry( repository_name, error_message ):
        # Accumulate per-repository error messages in the enclosing repair_dict.
        if repository_name in repair_dict:
            repair_dict[ repository_name ].append( error_message )
        else:
            repair_dict[ repository_name ] = [ error_message ]
        return repair_dict
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed )
    metadata = repository.metadata
    # The repository.metadata contains dependency information that corresponds to the current changeset revision,
    # which may be different from what is stored in the database.
    # If any of these repository-repository dependency associations is obsolete, clean_dependency_relationships removes them.
    suc.clean_dependency_relationships( self.app, metadata, repository, tool_shed_url )
    repair_dict = {}
    tpm = tool_panel_manager.ToolPanelManager( self.app )
    if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
        try:
            self.app.installed_repository_manager.activate_repository( repository )
        # Fixed: was Python-2-only "except Exception, e:" syntax, which is a
        # SyntaxError on Python 3 and inconsistent with the rest of the file.
        except Exception as e:
            error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
            log.debug( error_message )
            repair_dict[ repository.name ] = error_message
def check_for_tool_dependencies(self, trans, migration_stage):
    """
    Inspect the 000x_tools.xml file associated with migration_stage and return, for each
    repository element it defines, the tool dependencies reported by the tool shed.

    :param trans: the current transaction (provides trans.app)
    :param migration_stage: integer migration stage used to locate the xml file
    :return: list of (repository_name, tool_dependencies) tuples, where each
             tool_dependencies entry is a (name, version, type, readme) tuple
    """
    # Locate the 000x_tools.xml file associated with migration_stage.
    tools_xml_file_path = os.path.abspath(
        os.path.join(trans.app.config.root, "scripts", "migrate_tools", "%04d_tools.xml" % migration_stage))
    root = galaxy.util.parse_xml(tools_xml_file_path).getroot()
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, root.get("name"))
    repo_name_dependency_tups = []
    if not tool_shed_url:
        # The named tool shed is not registered; nothing to check.
        return repo_name_dependency_tups
    for elem in root:
        if elem.tag != "repository":
            continue
        repository_name = elem.get("name")
        params = dict(name=repository_name,
                      owner="devteam",
                      changeset_revision=elem.get("changeset_revision"))
        text = common_util.tool_shed_get(trans.app,
                                         tool_shed_url,
                                         pathspec=["repository", "get_tool_dependencies"],
                                         params=params)
        tool_dependencies = []
        if text:
            # Each decoded value describes one dependency requirement.
            for requirements_dict in encoding_util.tool_shed_decode(text).values():
                tool_dependencies.append((requirements_dict["name"],
                                          requirements_dict["version"],
                                          requirements_dict["type"],
                                          requirements_dict.get("readme", "")))
        repo_name_dependency_tups.append((repository_name, tool_dependencies))
    return repo_name_dependency_tups
def populate_containers_dict_from_repository_metadata( self, tool_shed_url, tool_path, repository, reinstalling=False, required_repo_info_dicts=None):
    """
    Retrieve necessary information from the received repository's metadata to populate the
    containers_dict for display.  This method is called only from Galaxy (not the tool shed)
    when displaying repository dependencies for installed repositories and when displaying
    them for uninstalled repositories that are being reinstalled.

    :param tool_shed_url: base URL of the tool shed the repository came from
    :param tool_path: local path under which the repository's tools are installed
    :param repository: an installed tool_shed_repository database record
    :param reinstalling: True when the repository is being reinstalled
    :param required_repo_info_dicts: repo_info_dicts for required repositories
                                     (consumed only on the reinstalling path)
    :return: a containers_dict for rendering the repository's dependency containers;
             all container values are None when the repository has no metadata
    """
    metadata = repository.metadata
    if metadata:
        # Handle proprietary datatypes.
        datatypes = metadata.get('datatypes', None)
        # Handle invalid tools.
        invalid_tools = metadata.get('invalid_tools', None)
        # Handle README files.
        if repository.has_readme_files:
            if reinstalling or repository.status not in \
                    [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED,
                      self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
                # Since we're reinstalling, we need to send a request to the tool shed to get the README files.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url)
                params = dict(name=str(repository.name),
                              owner=str(repository.owner),
                              changeset_revision=str( repository.installed_changeset_revision))
                pathspec = ['repository', 'get_readme_files']
                raw_text = common_util.tool_shed_get(self.app, tool_shed_url, pathspec=pathspec, params=params)
                readme_files_dict = json.loads(raw_text)
            else:
                # The repository is present locally, so build the README dict from disk.
                readme_files_dict = readme_util.build_readme_files_dict( self.app, repository, repository.changeset_revision, repository.metadata, tool_path)
        else:
            readme_files_dict = None
        # Handle repository dependencies.
        installed_repository_dependencies, missing_repository_dependencies = \
            self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository )
        # Handle the current repository's tool dependencies.
        repository_tool_dependencies = metadata.get( 'tool_dependencies', None)
        # Make sure to display missing tool dependencies as well.
        repository_invalid_tool_dependencies = metadata.get( 'invalid_tool_dependencies', None)
        if repository_invalid_tool_dependencies is not None:
            if repository_tool_dependencies is None:
                repository_tool_dependencies = {}
            # Fold invalid tool dependencies into the displayed set so they are not hidden.
            repository_tool_dependencies.update( repository_invalid_tool_dependencies)
        repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
            self.get_installed_and_missing_tool_dependencies_for_installed_repository( repository, repository_tool_dependencies )
        if reinstalling:
            # When reinstalling, recompute dependency state using the required repo_info_dicts.
            installed_tool_dependencies, missing_tool_dependencies = \
                self.populate_tool_dependencies_dicts( tool_shed_url,
                                                       tool_path,
                                                       repository_installed_tool_dependencies,
                                                       repository_missing_tool_dependencies,
                                                       required_repo_info_dicts )
        else:
            installed_tool_dependencies = repository_installed_tool_dependencies
            missing_tool_dependencies = repository_missing_tool_dependencies
        # Handle valid tools.
        valid_tools = metadata.get('tools', None)
        # Handle workflows.
        workflows = metadata.get('workflows', None)
        # Handle Data Managers
        valid_data_managers = None
        invalid_data_managers = None
        data_managers_errors = None
        if 'data_manager' in metadata:
            valid_data_managers = metadata['data_manager'].get( 'data_managers', None)
            invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None)
            data_managers_errors = metadata['data_manager'].get( 'messages', None)
        # Assemble everything into the display containers.
        gucm = GalaxyUtilityContainerManager(self.app)
        containers_dict = gucm.build_repository_containers( repository=repository,
                                                            datatypes=datatypes,
                                                            invalid_tools=invalid_tools,
                                                            missing_repository_dependencies=missing_repository_dependencies,
                                                            missing_tool_dependencies=missing_tool_dependencies,
                                                            readme_files_dict=readme_files_dict,
                                                            repository_dependencies=installed_repository_dependencies,
                                                            tool_dependencies=installed_tool_dependencies,
                                                            valid_tools=valid_tools,
                                                            workflows=workflows,
                                                            valid_data_managers=valid_data_managers,
                                                            invalid_data_managers=invalid_data_managers,
                                                            data_managers_errors=data_managers_errors,
                                                            new_install=False,
                                                            reinstalling=reinstalling)
    else:
        # No metadata: return a containers_dict with every container empty.
        containers_dict = dict(datatypes=None,
                               invalid_tools=None,
                               readme_files_dict=None,
                               repository_dependencies=None,
                               tool_dependencies=None,
                               valid_tools=None,
                               workflows=None)
    return containers_dict
def repair_tool_shed_repository( self, repository, repo_info_dict ):
    """
    Attempt to bring the received installed repository (and its tool dependencies) into a
    fully INSTALLED state, dispatching on the repository's current installation status.

    :param repository: an installed tool_shed_repository database record
    :param repo_info_dict: the repo_info_dict for the repository, used when reinstalling
    :return: repair_dict mapping repository name -> error message(s) for anything that
             could not be repaired; empty when the repair fully succeeded
    """
    def add_repair_dict_entry( repository_name, error_message ):
        # Accumulate per-repository error messages in the enclosing repair_dict.
        if repository_name in repair_dict:
            repair_dict[ repository_name ].append( error_message )
        else:
            repair_dict[ repository_name ] = [ error_message ]
        return repair_dict
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed )
    metadata = repository.metadata
    # The repository.metadata contains dependency information that corresponds to the current changeset revision,
    # which may be different from what is stored in the database
    # If any of these repository-repository dependency associations is obsolete, clean_dependency_relationships removes them.
    suc.clean_dependency_relationships(self.app, metadata, repository, tool_shed_url)
    repair_dict = {}
    tpm = tool_panel_manager.ToolPanelManager( self.app )
    if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
        # A deactivated repository only needs to be re-activated.
        try:
            self.app.installed_repository_manager.activate_repository( repository )
        except Exception as e:
            error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
            log.debug( error_message )
            repair_dict[ repository.name ] = error_message
    elif repository.status not in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
        # The repository is in some other non-installed state; reset it and reinstall.
        shed_tool_conf, tool_path, relative_install_dir = \
            suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
        # Reset the repository attributes to the New state for installation.
        if metadata:
            _, tool_panel_section_key = \
                tpm.handle_tool_panel_selection( self.app.toolbox,
                                                 metadata,
                                                 no_changes_checked=True,
                                                 tool_panel_section_id=None,
                                                 new_tool_panel_section_label=None )
        else:
            # The tools will be loaded outside of any sections in the tool panel.
            tool_panel_section_key = None
        repository_util.set_repository_attributes( self.app,
                                                   repository,
                                                   status=self.app.install_model.ToolShedRepository.installation_status.NEW,
                                                   error_message=None,
                                                   deleted=False,
                                                   uninstalled=False,
                                                   remove_from_disk=True )
        irm = install_manager.InstallRepositoryManager( self.app, tpm )
        irm.install_tool_shed_repository( repository,
                                          repo_info_dict,
                                          tool_panel_section_key,
                                          shed_tool_conf,
                                          tool_path,
                                          install_tool_dependencies=True,
                                          # Assuming repairs are only necessary toolshed packages
                                          install_resolver_dependencies=False,
                                          reinstalling=True )
        if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.ERROR ]:
            repair_dict = add_repair_dict_entry( repository.name, repository.error_message )
    else:
        irm = install_manager.InstallRepositoryManager( self.app, tpm )
        # We have an installed tool shed repository, so handle tool dependencies if necessary.
        if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata:
            work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itdep" )
            # Reset missing tool dependencies.
            for tool_dependency in repository.missing_tool_dependencies:
                if tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR,
                                               self.app.install_model.ToolDependency.installation_status.INSTALLING ]:
                    tool_dependency = \
                        tool_dependency_util.set_tool_dependency_attributes( self.app,
                                                                             tool_dependency=tool_dependency,
                                                                             status=self.app.install_model.ToolDependency.installation_status.UNINSTALLED )
            # Install tool dependencies.
            irm.update_tool_shed_repository_status( repository,
                                                    self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
            # Get the tool_dependencies.xml file from the repository.
            tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( self.app ) )
            itdm = install_manager.InstallToolDependencyManager( self.app )
            installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=repository,
                                                                                    tool_dependencies_config=tool_dependencies_config,
                                                                                    tool_dependencies=repository.tool_dependencies,
                                                                                    from_tool_migration_manager=False )
            # Record any dependency that still failed after the reinstall attempt.
            for installed_tool_dependency in installed_tool_dependencies:
                if installed_tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]:
                    repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message )
            basic_util.remove_dir( work_dir )
        irm.update_tool_shed_repository_status( repository,
                                                self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
    return repair_dict
def repository(self, trans, **kwd):
    """
    GET /api/tool_shed/repository

    Get details about the specified repository from its shed.

    :param repository_id: the tool_shed_repository_id
    :param repository_id: str

    :param tool_shed_url: the URL of the toolshed whence to retrieve repository details
    :param tool_shed_url: str

    :param tool_ids: (optional) comma-separated list of tool IDs
    :param tool_ids: str
    """
    # Per-changeset accumulators, keyed by changeset revision.
    tool_dependencies = dict()
    tools = dict()
    tool_shed_url = urlunquote(kwd.get('tool_shed_url', ''))
    log.debug(tool_shed_url)
    repository_id = kwd.get('repository_id', None)
    tool_ids = kwd.get('tool_ids', None)
    if tool_ids is not None:
        tool_ids = util.listify(tool_ids)
    # Build the tool panel section choices to return alongside the repository data.
    tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field(trans.app)
    tool_panel_section_dict = {'name': tool_panel_section_select_field.name,
                               'id': tool_panel_section_select_field.field_id,
                               'sections': []}
    for name, id, _ in tool_panel_section_select_field.options:
        tool_panel_section_dict['sections'].append(dict(id=id, name=name))
    repository_data = dict()
    if tool_ids is not None:
        # Look the repository up by the tools it provides instead of by id.
        if len(tool_shed_url) == 0:
            # By design, this list should always be from the same toolshed. If
            # this is ever not the case, this code will need to be updated.
            tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_ids[0].split('/')[0])
        found_repository = json.loads(util.url_get(tool_shed_url,
                                                   params=dict(tool_ids=','.join(tool_ids)),
                                                   pathspec=['api', 'repositories']))
        fr_first_key = next(iter(found_repository.keys()))
        repository_id = found_repository[fr_first_key]['repository_id']
        repository_data['current_changeset'] = found_repository['current_changeset']
        repository_data['repository'] = json.loads(util.url_get(tool_shed_url,
                                                                pathspec=['api', 'repositories', repository_id]))
        del found_repository['current_changeset']
        repository_data['tool_shed_url'] = tool_shed_url
    else:
        repository_data['repository'] = json.loads(util.url_get(tool_shed_url,
                                                                pathspec=['api', 'repositories', repository_id]))
    repository_data['repository']['metadata'] = json.loads(util.url_get(tool_shed_url,
                                                                        pathspec=['api', 'repositories', repository_id, 'metadata']))
    repository_data['shed_conf'] = tool_util.build_shed_tool_conf_select_field(trans.app).to_dict()
    repository_data['panel_section_dict'] = tool_panel_section_dict
    # Walk every changeset's metadata, collecting its tools and tool dependencies.
    for changeset, metadata in repository_data['repository']['metadata'].items():
        if changeset not in tool_dependencies:
            tool_dependencies[changeset] = []
        if metadata['includes_tools_for_display_in_tool_panel']:
            if changeset not in tools:
                tools[changeset] = []
            for tool_dict in metadata['tools']:
                tool_info = dict(clean=re.sub('[^a-zA-Z0-9]+', '_', tool_dict['name']).lower(),
                                 guid=tool_dict['guid'],
                                 name=tool_dict['name'],
                                 version=tool_dict['version'],
                                 description=tool_dict['description'])
                if tool_info not in tools[changeset]:
                    tools[changeset].append(tool_info)
            if metadata['has_repository_dependencies']:
                # Recursively gather tools provided by dependent repositories.
                for repository_dependency in metadata['repository_dependencies']:
                    tools[changeset] = self.__get_tools(repository_dependency, tools[changeset])
            repository_data['tools'] = tools
        for key, dependency_dict in metadata['tool_dependencies'].items():
            # Drop the bulky readme text before returning dependency info.
            if 'readme' in dependency_dict:
                del(dependency_dict['readme'])
            if dependency_dict not in tool_dependencies[changeset]:
                tool_dependencies[changeset].append(dependency_dict)
        if metadata['has_repository_dependencies']:
            # Recursively gather tool dependencies of dependent repositories.
            for repository_dependency in metadata['repository_dependencies']:
                tool_dependencies[changeset] = self.__get_tool_dependencies(repository_dependency, tool_dependencies[changeset])
    repository_data['tool_dependencies'] = tool_dependencies
    return repository_data
def load_from_element( self, elem, tool_path ):
    """
    Configure this data manager from a <data_manager> XML element.

    Loads the underlying tool (optionally resolving it through an installed tool shed
    repository), then parses each <data_table> child element to populate the
    data table / column / value-translation / move mappings on this instance.

    :param elem: the <data_manager> XML element
    :param tool_path: default base path for resolving the tool file (may be overridden
                      by shed_conf_file's tool_path)
    """
    assert elem.tag == 'data_manager', 'A data manager configuration must have a "data_manager" tag as the root. "%s" is present' % ( elem.tag )
    self.declared_id = elem.get( 'id', None )
    self.guid = elem.get( 'guid', None )
    path = elem.get( 'tool_file', None )
    self.version = elem.get( 'version', self.version )
    tool_shed_repository_id = None
    tool_guid = None
    if path is None:
        # No direct tool_file attribute: the tool must be described by a <tool> child
        # that references an installed tool shed repository.
        tool_elem = elem.find( 'tool' )
        assert tool_elem is not None, "Error loading tool for data manager. Make sure that a tool_file attribute or a tool tag set has been defined:\n%s" % ( util.xml_to_string( elem ) )
        path = tool_elem.get( "file", None )
        tool_guid = tool_elem.get( "guid", None )
        # need to determine repository info so that dependencies will work correctly
        tool_shed_url = tool_elem.find( 'tool_shed' ).text
        # Handle protocol changes.
        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.data_managers.app, tool_shed_url )
        # The protocol is not stored in the database.
        tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url )
        repository_name = tool_elem.find( 'repository_name' ).text
        repository_owner = tool_elem.find( 'repository_owner' ).text
        installed_changeset_revision = tool_elem.find( 'installed_changeset_revision' ).text
        self.tool_shed_repository_info_dict = dict( tool_shed=tool_shed,
                                                    name=repository_name,
                                                    owner=repository_owner,
                                                    installed_changeset_revision=installed_changeset_revision )
        tool_shed_repository = \
            repository_util.get_installed_repository( self.data_managers.app,
                                                      tool_shed=tool_shed,
                                                      name=repository_name,
                                                      owner=repository_owner,
                                                      installed_changeset_revision=installed_changeset_revision )
        if tool_shed_repository is None:
            log.warning( 'Could not determine tool shed repository from database. This should only ever happen when running tests.' )
            # we'll set tool_path manually here from shed_conf_file
            tool_shed_repository_id = None
            try:
                tool_path = util.parse_xml( elem.get( 'shed_conf_file' ) ).getroot().get( 'tool_path', tool_path )
            except Exception as e:
                log.error( 'Error determining tool_path for Data Manager during testing: %s', e )
        else:
            tool_shed_repository_id = self.data_managers.app.security.encode_id( tool_shed_repository.id )
        # use shed_conf_file to determine tool_path
        shed_conf_file = elem.get( "shed_conf_file", None )
        if shed_conf_file:
            shed_conf = self.data_managers.app.toolbox.get_shed_config_dict_by_filename( shed_conf_file, None )
            if shed_conf:
                tool_path = shed_conf.get( "tool_path", tool_path )
    assert path is not None, "A tool file path could not be determined:\n%s" % ( util.xml_to_string( elem ) )
    # Load the tool itself; name/description fall back to the tool's own values.
    self.load_tool( os.path.join( tool_path, path ),
                    guid=tool_guid,
                    data_manager_id=self.id,
                    tool_shed_repository_id=tool_shed_repository_id )
    self.name = elem.get( 'name', self.tool.name )
    self.description = elem.get( 'description', self.tool.description )
    self.undeclared_tables = util.asbool( elem.get( 'undeclared_tables', self.undeclared_tables ) )
    # Parse each declared data table and its output column mappings.
    for data_table_elem in elem.findall( 'data_table' ):
        data_table_name = data_table_elem.get( "name" )
        assert data_table_name is not None, "A name is required for a data table entry"
        if data_table_name not in self.data_tables:
            self.data_tables[ data_table_name ] = odict()
        output_elem = data_table_elem.find( 'output' )
        if output_elem is not None:
            for column_elem in output_elem.findall( 'column' ):
                column_name = column_elem.get( 'name', None )
                assert column_name is not None, "Name is required for column entry"
                # NOTE: "coumn" spelling is preserved from the original local variable name.
                data_table_coumn_name = column_elem.get( 'data_table_name', column_name )
                self.data_tables[ data_table_name ][ data_table_coumn_name ] = column_name
                output_ref = column_elem.get( 'output_ref', None )
                if output_ref is not None:
                    if data_table_name not in self.output_ref_by_data_table:
                        self.output_ref_by_data_table[ data_table_name ] = {}
                    self.output_ref_by_data_table[ data_table_name ][ data_table_coumn_name ] = output_ref
                # Collect any value translations (literal templates or named functions).
                value_translation_elems = column_elem.findall( 'value_translation' )
                if value_translation_elems is not None:
                    for value_translation_elem in value_translation_elems:
                        value_translation = value_translation_elem.text
                        if value_translation is not None:
                            value_translation_type = value_translation_elem.get( 'type', DEFAULT_VALUE_TRANSLATION_TYPE )
                            if data_table_name not in self.value_translation_by_data_table_column:
                                self.value_translation_by_data_table_column[ data_table_name ] = {}
                            if data_table_coumn_name not in self.value_translation_by_data_table_column[ data_table_name ]:
                                self.value_translation_by_data_table_column[ data_table_name ][ data_table_coumn_name ] = []
                            if value_translation_type == 'function':
                                # Resolve the named translation function from the registry.
                                if value_translation in VALUE_TRANSLATION_FUNCTIONS:
                                    value_translation = VALUE_TRANSLATION_FUNCTIONS[ value_translation ]
                                else:
                                    raise ValueError( "Unsupported value translation function: '%s'" % ( value_translation ) )
                            else:
                                assert value_translation_type == DEFAULT_VALUE_TRANSLATION_TYPE, ValueError( "Unsupported value translation type: '%s'" % ( value_translation_type ) )
                            self.value_translation_by_data_table_column[ data_table_name ][ data_table_coumn_name ].append( value_translation )
                # Collect file/directory move directives for this column.
                for move_elem in column_elem.findall( 'move' ):
                    move_type = move_elem.get( 'type', 'directory' )
                    relativize_symlinks = move_elem.get( 'relativize_symlinks', False )  # TODO: should we instead always relativize links?
                    source_elem = move_elem.find( 'source' )
                    if source_elem is None:
                        source_base = None
                        source_value = ''
                    else:
                        source_base = source_elem.get( 'base', None )
                        source_value = source_elem.text
                    target_elem = move_elem.find( 'target' )
                    if target_elem is None:
                        target_base = None
                        target_value = ''
                    else:
                        target_base = target_elem.get( 'base', None )
                        target_value = target_elem.text
                    if data_table_name not in self.move_by_data_table_column:
                        self.move_by_data_table_column[ data_table_name ] = {}
                    self.move_by_data_table_column[ data_table_name ][ data_table_coumn_name ] = \
                        dict( type=move_type,
                              source_base=source_base,
                              source_value=source_value,
                              target_base=target_base,
                              target_value=target_value,
                              relativize_symlinks=relativize_symlinks )
def __init__(self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies):
    """
    Check tool settings in tool_shed_install_config and install all repositories
    that are not already installed.  The tool panel configuration file is the received
    migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.

    :param app: the Galaxy application object (provides config, toolbox, security, etc.)
    :param latest_migration_script_number: integer identifying the migration stage being executed
    :param tool_shed_install_config: path to the XML file describing the repositories to install
    :param migrated_tools_config: path to the reserved migrated_tools_conf.xml tool panel config
    :param install_dependencies: if True, tool dependencies are installed as well
    :raises Exception: when install_dependencies is True but tool_dependency_dir is not configured
    """
    self.app = app
    self.toolbox = self.app.toolbox
    self.migrated_tools_config = migrated_tools_config
    # Initialize the ToolPanelManager.
    self.tpm = tool_panel_manager.ToolPanelManager(self.app)
    # If install_dependencies is True but tool_dependency_dir is not set, do not attempt
    # to install but print informative error message.
    if install_dependencies and app.config.tool_dependency_dir is None:
        message = 'You are attempting to install tool dependencies but do not have a value '
        message += 'for "tool_dependency_dir" set in your galaxy.ini file. Set this '
        message += 'location value to the path where you want tool dependencies installed and '
        message += 'rerun the migration script.'
        raise Exception(message)
    # Get the local non-shed related tool panel configs (there can be more than one, and the
    # default name is tool_conf.xml).
    self.proprietary_tool_confs = self.non_shed_tool_panel_configs
    self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems(latest_migration_script_number)
    # Set the location where the repositories will be installed by retrieving the tool_path
    # setting from migrated_tools_config.
    tree, error_message = xml_util.parse_xml(migrated_tools_config)
    if tree is None:
        log.error(error_message)
    else:
        root = tree.getroot()
        self.tool_path = root.get('tool_path')
        # BUG FIX: the original passed str(self.tool_path) as a stray positional
        # argument with no %s placeholder in the message, which makes the logging
        # module raise an internal "not all arguments converted" formatting error
        # and the path is never rendered.  Use lazy %-style formatting instead.
        log.debug("Repositories will be installed into configured tool_path location %s", self.tool_path)
    # Parse tool_shed_install_config to check each of the tools.
    self.tool_shed_install_config = tool_shed_install_config
    tree, error_message = xml_util.parse_xml(tool_shed_install_config)
    if tree is None:
        log.error(error_message)
    else:
        root = tree.getroot()
        defined_tool_shed_url = root.get('name')
        self.tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, defined_tool_shed_url)
        self.tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url(self.tool_shed_url)
        self.repository_owner = common_util.REPOSITORY_OWNER
        self.shed_config_dict = self.tpm.get_shed_tool_conf_dict(self.migrated_tools_config)
        # Since tool migration scripts can be executed any number of times, we need to
        # make sure the appropriate tools are defined in tool_conf.xml.  If no tools
        # associated with the migration stage are defined, no repositories will be installed
        # on disk.  The default behavior is that the tool shed is down.
        tool_shed_accessible = False
        tool_panel_configs = common_util.get_non_shed_tool_panel_configs(app)
        if tool_panel_configs:
            # The missing_tool_configs_dict contents are something like:
            # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
            tool_shed_accessible, missing_tool_configs_dict = \
                common_util.check_for_missing_tools(app, tool_panel_configs, latest_migration_script_number)
        else:
            # It doesn't matter if the tool shed is accessible since there are no migrated
            # tools defined in the local Galaxy instance, but we have to set the value of
            # tool_shed_accessible to True so that the value of migrate_tools.version can
            # be correctly set in the database.
            tool_shed_accessible = True
            missing_tool_configs_dict = odict()
        if tool_shed_accessible:
            if len(self.proprietary_tool_confs) == 1:
                plural = ''
                file_names = self.proprietary_tool_confs[0]
            else:
                plural = 's'
                file_names = ', '.join(self.proprietary_tool_confs)
            if missing_tool_configs_dict:
                for proprietary_tool_conf in self.proprietary_tool_confs:
                    # Create a backup of the tool configuration in the un-migrated state.
                    shutil.copy(proprietary_tool_conf, '%s-pre-stage-%04d' % (proprietary_tool_conf, latest_migration_script_number))
                for repository_elem in root:
                    # Make sure we have a valid repository tag.
                    if self.__is_valid_repository_tag(repository_elem):
                        # Get all repository dependencies for the repository defined by the
                        # current repository_elem.  Repository dependency definitions contained
                        # in tool shed repositories with migrated tools must never define a
                        # relationship to a repository dependency that contains a tool.  The
                        # repository dependency can only contain items that are not loaded into
                        # the Galaxy tool panel (e.g., tool dependency definitions, custom datatypes,
                        # etc).  This restriction must be followed down the entire dependency hierarchy.
                        name = repository_elem.get('name')
                        changeset_revision = repository_elem.get('changeset_revision')
                        tool_shed_accessible, repository_dependencies_dict = \
                            common_util.get_repository_dependencies(app,
                                                                    self.tool_shed_url,
                                                                    name,
                                                                    self.repository_owner,
                                                                    changeset_revision)
                        # Make sure all repository dependency records exist (as tool_shed_repository
                        # table rows) in the Galaxy database.
                        created_tool_shed_repositories = \
                            self.create_or_update_tool_shed_repository_records(name,
                                                                               changeset_revision,
                                                                               repository_dependencies_dict)
                        # Order the repositories for proper installation.  This process is similar to the
                        # process used when installing tool shed repositories, but does not handle managing
                        # tool panel sections and other components since repository dependency definitions
                        # contained in tool shed repositories with migrated tools must never define a relationship
                        # to a repository dependency that contains a tool.
                        ordered_tool_shed_repositories = \
                            self.order_repositories_for_installation(created_tool_shed_repositories,
                                                                    repository_dependencies_dict)
                        for tool_shed_repository in ordered_tool_shed_repositories:
                            is_repository_dependency = self.__is_repository_dependency(name, changeset_revision, tool_shed_repository)
                            self.install_repository(repository_elem,
                                                    tool_shed_repository,
                                                    install_dependencies,
                                                    is_repository_dependency=is_repository_dependency)
            else:
                message = "\nNo tools associated with migration stage %s are defined in your " % \
                    str(latest_migration_script_number)
                message += "file%s named %s,\nso no repositories will be installed on disk.\n" % \
                    (plural, file_names)
                log.info(message)
        else:
            message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % \
                str(latest_migration_script_number)
            message += "Try again later.\n"
            log.error(message)
def get_required_repo_info_dicts(self, tool_shed_url, repo_info_dicts):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a
    repo_info_dict for each of them to the list.  All repository_dependency entries in
    each of the received repo_info_dicts includes all required repositories, so only one
    pass through this method is required to retrieve all repository dependencies.

    :param tool_shed_url: base URL of the Tool Shed queried for dependency information
    :param repo_info_dicts: list of single-entry dicts keyed by repository name
    :return: dict whose 'all_repo_info_dicts' key holds the received dicts plus one
             dict per discovered repository dependency
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
        # shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append(repo_info_dict)
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    repository_util.get_repo_info_tuple_contents(repo_info_tup)
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in ['root_key', 'description']:
                            continue
                        repository_components_tuple = container_util.get_components_from_key(key)
                        components_list = repository_util.extract_components_from_tuple(repository_components_tuple)
                        # Skip listing a repository dependency if it is required only to compile a tool dependency
                        # defined for the dependent repository since in this case, the repository dependency is really
                        # a dependency of the dependent repository's contained tool dependency, and only if that
                        # tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release: 4- and 5-component
                        # keys predate the only_if_compiling_contained_td component and are treated as 'False'.
                        # (The original if/elif on len(components_list) assigned the same constant in every
                        # branch, so it has been collapsed into this single initialization.)
                        only_if_compiling_contained_td = 'False'
                        if not asbool(only_if_compiling_contained_td):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append(components_list)
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[5]
                            except IndexError:
                                # BUG FIX: was a bare except, which also swallowed
                                # KeyboardInterrupt/SystemExit; only a missing component
                                # (IndexError) is expected here.
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency
                            # defined for the dependent repository (see above comment).
                            if not asbool(only_if_compiling_contained_td):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append(components_list)
                else:
                    # We have a single repository with no dependencies.
                    components_list = [tool_shed_url, repository_name, repository_owner, changeset_revision]
                    required_repository_tups.append(components_list)
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [str(item) for item in required_repository_tup]
                encoded_required_repository_tups.append(encoding_util.encoding_sep.join(required_repository_tup))
            encoded_required_repository_str = encoding_util.encoding_sep2.join(encoded_required_repository_tups)
            encoded_required_repository_str = encoding_util.tool_shed_encode(encoded_required_repository_str)
            if repository_util.is_tool_shed_client(self.app):
                # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_shed_url)
            pathspec = ['repository', 'get_required_repo_info_dict']
            url = build_url(tool_shed_url, pathspec=pathspec)
            # Fix for handling 307 redirect not being handled nicely by urlopen() when the Request() has data provided
            url = _urlopen(url).geturl()
            response = _urlopen(url, urlencode(dict(encoded_str=encoded_required_repository_str))).read()
            if response:
                try:
                    required_repo_info_dict = json.loads(response)
                except Exception as e:
                    # A malformed Tool Shed response is logged; fall back to the
                    # dependency-free list rather than crashing the install.
                    log.exception(e)
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict['repo_info_dicts']
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode(encoded_dict_str)
                            required_repo_info_dicts.append(decoded_dict)
                    else:
                        # Merge scalar entries, preferring any truthy value already recorded.
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[k] = v
                        else:
                            if v and not all_required_repo_info_dict[k]:
                                all_required_repo_info_dict[k] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                        # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                        # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                        # lists of discovered repository dependencies, but these lists will be empty in the
                        # required_repo_info_dict since dependency discovery has not yet been performed for these
                        # dictionaries.
                        required_repo_info_dict_key = next(iter(required_repo_info_dict))
                        all_repo_info_dicts_keys = [next(iter(d)) for d in all_repo_info_dicts]
                        if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                            all_repo_info_dicts.append(required_repo_info_dict)
                        else:
                            # required_repo_info_dict_key corresponds to the repo name.
                            # A single install transaction might require the installation of 2 or more repos
                            # with the same repo name but different owners or versions.
                            # Therefore, if required_repo_info_dict_key is already in all_repo_info_dicts,
                            # check that the tool id is already present.  If it is not, we are dealing with the same repo name,
                            # but a different owner/changeset revision or version and we add the repo to the list of repos to be installed.
                            tool_id = required_repo_info_dict[required_repo_info_dict_key][1]
                            is_present = False
                            for repo_info_dict in all_repo_info_dicts:
                                for k, v in repo_info_dict.items():
                                    if required_repo_info_dict_key == k:
                                        if tool_id == v[1]:
                                            is_present = True
                                            break
                            if not is_present:
                                all_repo_info_dicts.append(required_repo_info_dict)
    all_required_repo_info_dict['all_repo_info_dicts'] = all_repo_info_dicts
    return all_required_repo_info_dict
def load_from_element(self, elem, tool_path):
    """
    Configure this data manager from a <data_manager> XML element.

    Resolves the underlying tool (either via a tool_file attribute or a nested
    <tool> tag set, possibly looking the tool up in a Tool Shed repository),
    loads it, then parses the <data_table>/<output>/<column> children to build
    the per-column output-ref, value-translation and move mappings.

    :param elem: the <data_manager> ElementTree element
    :param tool_path: base directory used to locate the tool file; may be
                      overridden by shed_conf_file or repository info below
    """
    assert elem.tag == 'data_manager', 'A data manager configuration must have a "data_manager" tag as the root. "%s" is present' % (elem.tag)
    self.declared_id = elem.get('id', None)
    self.guid = elem.get('guid', None)
    path = elem.get('tool_file', None)
    self.version = elem.get('version', self.version)
    tool_shed_repository_id = None
    tool_guid = None
    if path is None:
        # No tool_file attribute: the tool must be described by a nested <tool> tag set.
        tool_elem = elem.find('tool')
        assert tool_elem is not None, "Error loading tool for data manager. Make sure that a tool_file attribute or a tool tag set has been defined:\n%s" % (util.xml_to_string(elem))
        path = tool_elem.get("file", None)
        tool_guid = tool_elem.get("guid", None)
        # need to determine repository info so that dependencies will work correctly
        if hasattr(self.data_managers.app, 'tool_cache') and tool_guid in self.data_managers.app.tool_cache._tool_paths_by_id:
            # Fast path: the tool cache already knows this guid, so take the path
            # and repository info directly from the cached tool.
            path = self.data_managers.app.tool_cache._tool_paths_by_id[tool_guid]
            tool = self.data_managers.app.tool_cache.get_tool(path)
            tool_shed_repository = tool.tool_shed_repository
            self.tool_shed_repository_info_dict = dict(tool_shed=tool_shed_repository.tool_shed,
                                                       name=tool_shed_repository.name,
                                                       owner=tool_shed_repository.owner,
                                                       installed_changeset_revision=tool_shed_repository.installed_changeset_revision)
            tool_shed_repository_id = self.data_managers.app.security.encode_id(tool_shed_repository.id)
            # The cached path is absolute, so the tool_path prefix must be cleared.
            tool_path = ""
        else:
            # Slow path: resolve the repository from the <tool> child elements and the database.
            tool_shed_url = tool_elem.find('tool_shed').text
            # Handle protocol changes.
            tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.data_managers.app, tool_shed_url)
            # The protocol is not stored in the database.
            tool_shed = common_util.remove_protocol_from_tool_shed_url(tool_shed_url)
            repository_name = tool_elem.find('repository_name').text
            repository_owner = tool_elem.find('repository_owner').text
            installed_changeset_revision = tool_elem.find('installed_changeset_revision').text
            self.tool_shed_repository_info_dict = dict(tool_shed=tool_shed,
                                                       name=repository_name,
                                                       owner=repository_owner,
                                                       installed_changeset_revision=installed_changeset_revision)
            tool_shed_repository = \
                repository_util.get_installed_repository(self.data_managers.app,
                                                         tool_shed=tool_shed,
                                                         name=repository_name,
                                                         owner=repository_owner,
                                                         installed_changeset_revision=installed_changeset_revision)
            if tool_shed_repository is None:
                log.warning('Could not determine tool shed repository from database. This should only ever happen when running tests.')
                # we'll set tool_path manually here from shed_conf_file
                tool_shed_repository_id = None
                try:
                    tool_path = util.parse_xml(elem.get('shed_conf_file')).getroot().get('tool_path', tool_path)
                except Exception as e:
                    log.error('Error determining tool_path for Data Manager during testing: %s', e)
            else:
                tool_shed_repository_id = self.data_managers.app.security.encode_id(tool_shed_repository.id)
            # use shed_conf_file to determine tool_path
            shed_conf_file = elem.get("shed_conf_file", None)
            if shed_conf_file:
                shed_conf = self.data_managers.app.toolbox.get_shed_config_dict_by_filename(shed_conf_file, None)
                if shed_conf:
                    tool_path = shed_conf.get("tool_path", tool_path)
    assert path is not None, "A tool file path could not be determined:\n%s" % (util.xml_to_string(elem))
    self.load_tool(os.path.join(tool_path, path),
                   guid=tool_guid,
                   data_manager_id=self.id,
                   tool_shed_repository_id=tool_shed_repository_id)
    # Fall back to the loaded tool's own name/description when the element omits them.
    self.name = elem.get('name', self.tool.name)
    self.description = elem.get('description', self.tool.description)
    self.undeclared_tables = util.asbool(elem.get('undeclared_tables', self.undeclared_tables))
    # Build the output-column mappings for each declared data table.
    for data_table_elem in elem.findall('data_table'):
        data_table_name = data_table_elem.get("name")
        assert data_table_name is not None, "A name is required for a data table entry"
        if data_table_name not in self.data_tables:
            self.data_tables[data_table_name] = odict()
        output_elem = data_table_elem.find('output')
        if output_elem is not None:
            for column_elem in output_elem.findall('column'):
                column_name = column_elem.get('name', None)
                assert column_name is not None, "Name is required for column entry"
                # NOTE: 'coumn' is a long-standing typo in this local variable name;
                # it is used consistently below, so it is preserved here.
                data_table_coumn_name = column_elem.get('data_table_name', column_name)
                self.data_tables[data_table_name][data_table_coumn_name] = column_name
                output_ref = column_elem.get('output_ref', None)
                if output_ref is not None:
                    if data_table_name not in self.output_ref_by_data_table:
                        self.output_ref_by_data_table[data_table_name] = {}
                    self.output_ref_by_data_table[data_table_name][data_table_coumn_name] = output_ref
                # findall() always returns a list, so this None check is defensive only.
                value_translation_elems = column_elem.findall('value_translation')
                if value_translation_elems is not None:
                    for value_translation_elem in value_translation_elems:
                        value_translation = value_translation_elem.text
                        if value_translation is not None:
                            value_translation_type = value_translation_elem.get('type', DEFAULT_VALUE_TRANSLATION_TYPE)
                            if data_table_name not in self.value_translation_by_data_table_column:
                                self.value_translation_by_data_table_column[data_table_name] = {}
                            if data_table_coumn_name not in self.value_translation_by_data_table_column[data_table_name]:
                                self.value_translation_by_data_table_column[data_table_name][data_table_coumn_name] = []
                            if value_translation_type == 'function':
                                # A 'function' translation names a registered callable
                                # rather than a template string.
                                if value_translation in VALUE_TRANSLATION_FUNCTIONS:
                                    value_translation = VALUE_TRANSLATION_FUNCTIONS[value_translation]
                                else:
                                    raise ValueError("Unsupported value translation function: '%s'" % (value_translation))
                            else:
                                assert value_translation_type == DEFAULT_VALUE_TRANSLATION_TYPE, ValueError("Unsupported value translation type: '%s'" % (value_translation_type))
                            self.value_translation_by_data_table_column[data_table_name][data_table_coumn_name].append(value_translation)
                # <move> children describe how the output file/directory is relocated.
                for move_elem in column_elem.findall('move'):
                    move_type = move_elem.get('type', 'directory')
                    relativize_symlinks = move_elem.get('relativize_symlinks', False)  # TODO: should we instead always relativize links?
                    source_elem = move_elem.find('source')
                    if source_elem is None:
                        source_base = None
                        source_value = ''
                    else:
                        source_base = source_elem.get('base', None)
                        source_value = source_elem.text
                    target_elem = move_elem.find('target')
                    if target_elem is None:
                        target_base = None
                        target_value = ''
                    else:
                        target_base = target_elem.get('base', None)
                        target_value = target_elem.text
                    if data_table_name not in self.move_by_data_table_column:
                        self.move_by_data_table_column[data_table_name] = {}
                    # Only one <move> per column is retained: a later element overwrites this entry.
                    self.move_by_data_table_column[data_table_name][data_table_coumn_name] = \
                        dict(type=move_type,
                             source_base=source_base,
                             source_value=source_value,
                             target_base=target_base,
                             target_value=target_value,
                             relativize_symlinks=relativize_symlinks)
def get_required_repo_info_dicts(self, tool_shed_url, repo_info_dicts):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a
    repo_info_dict for each of them to the list.  All repository_dependency entries in
    each of the received repo_info_dicts includes all required repositories, so only one
    pass through this method is required to retrieve all repository dependencies.

    :param tool_shed_url: base URL of the Tool Shed queried for dependency information
    :param repo_info_dicts: list of single-entry dicts keyed by repository name
    :return: dict whose 'all_repo_info_dicts' key holds the received dicts plus one
             dict per discovered repository dependency
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
        # shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append(repo_info_dict)
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    repository_util.get_repo_info_tuple_contents(repo_info_tup)
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in ['root_key', 'description']:
                            continue
                        repository_components_tuple = container_util.get_components_from_key(key)
                        components_list = repository_util.extract_components_from_tuple(repository_components_tuple)
                        # Skip listing a repository dependency if it is required only to compile a tool dependency
                        # defined for the dependent repository since in this case, the repository dependency is really
                        # a dependency of the dependent repository's contained tool dependency, and only if that
                        # tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release: keys with 4 or 5
                        # components predate the only_if_compiling_contained_td component, so it defaults
                        # to 'False' in every branch below.
                        only_if_compiling_contained_td = 'False'
                        if len(components_list) == 4:
                            only_if_compiling_contained_td = 'False'
                        elif len(components_list) == 5:
                            only_if_compiling_contained_td = 'False'
                        if not asbool(only_if_compiling_contained_td):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append(components_list)
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[5]
                            except IndexError:
                                # Older component lists omit the only_if_compiling_contained_td entry.
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency
                            # defined for the dependent repository (see above comment).
                            if not asbool(only_if_compiling_contained_td):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append(components_list)
                else:
                    # We have a single repository with no dependencies.
                    components_list = [tool_shed_url, repository_name, repository_owner, changeset_revision]
                    required_repository_tups.append(components_list)
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [str(item) for item in required_repository_tup]
                encoded_required_repository_tups.append(encoding_util.encoding_sep.join(required_repository_tup))
            encoded_required_repository_str = encoding_util.encoding_sep2.join(encoded_required_repository_tups)
            encoded_required_repository_str = encoding_util.tool_shed_encode(encoded_required_repository_str)
            if repository_util.is_tool_shed_client(self.app):
                # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_shed_url)
            pathspec = ['repository', 'get_required_repo_info_dict']
            url = build_url(tool_shed_url, pathspec=pathspec)
            # Fix for handling 307 redirect not being handled nicely by urlopen() when the Request() has data provided
            try:
                url = _urlopen(url).geturl()
            except HTTPError as e:
                # A 502 from the redirect probe is tolerated; the original URL is used as-is.
                if e.code == 502:
                    pass
                else:
                    raise
            payload = urlencode(dict(encoded_str=encoded_required_repository_str))
            response = _urlopen(url, payload).read()
            if response:
                try:
                    required_repo_info_dict = json.loads(unicodify(response))
                except Exception as e:
                    # A malformed response is logged and the dependency-free list returned.
                    log.exception(e)
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict['repo_info_dicts']
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode(encoded_dict_str)
                            required_repo_info_dicts.append(decoded_dict)
                    else:
                        # Merge scalar entries, keeping any truthy value already recorded.
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[k] = v
                        else:
                            if v and not all_required_repo_info_dict[k]:
                                all_required_repo_info_dict[k] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                        # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                        # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                        # lists of discovered repository dependencies, but these lists will be empty in the
                        # required_repo_info_dict since dependency discovery has not yet been performed for these
                        # dictionaries.
                        required_repo_info_dict_key = next(iter(required_repo_info_dict))
                        all_repo_info_dicts_keys = [next(iter(d)) for d in all_repo_info_dicts]
                        if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                            all_repo_info_dicts.append(required_repo_info_dict)
                        else:
                            # required_repo_info_dict_key corresponds to the repo name.
                            # A single install transaction might require the installation of 2 or more repos
                            # with the same repo name but different owners or versions.
                            # Therefore, if required_repo_info_dict_key is already in all_repo_info_dicts,
                            # check that the tool id is already present.  If it is not, we are dealing with the same repo name,
                            # but a different owner/changeset revision or version and we add the repo to the list of repos to be installed.
                            tool_id = required_repo_info_dict[required_repo_info_dict_key][1]
                            is_present = False
                            for repo_info_dict in all_repo_info_dicts:
                                for k, v in repo_info_dict.items():
                                    if required_repo_info_dict_key == k:
                                        if tool_id == v[1]:
                                            is_present = True
                                            break
                            if not is_present:
                                all_repo_info_dicts.append(required_repo_info_dict)
    all_required_repo_info_dict['all_repo_info_dicts'] = all_repo_info_dicts
    return all_required_repo_info_dict
def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a
    repo_info_dict for each of them to the list.  All repository_dependency entries in
    each of the received repo_info_dicts includes all required repositories, so only one
    pass through this method is required to retrieve all repository dependencies.

    :param tool_shed_url: base URL of the Tool Shed queried for dependency information
    :param repo_info_dicts: list of single-entry dicts keyed by repository name
    :return: dict whose 'all_repo_info_dicts' key holds the received dicts plus one
             dict per discovered repository dependency
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
        # shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( repo_info_dict )
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tup )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        repository_components_tuple = container_util.get_components_from_key( key )
                        components_list = suc.extract_components_from_tuple( repository_components_tuple )
                        # Skip listing a repository dependency if it is required only to compile a tool dependency
                        # defined for the dependent repository since in this case, the repository dependency is really
                        # a dependency of the dependent repository's contained tool dependency, and only if that
                        # tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release.
                        prior_installation_required = 'False'
                        only_if_compiling_contained_td = 'False'
                        if len( components_list ) == 4:
                            prior_installation_required = 'False'
                            only_if_compiling_contained_td = 'False'
                        elif len( components_list ) == 5:
                            prior_installation_required = components_list[ 4 ]
                            only_if_compiling_contained_td = 'False'
                        if not asbool( only_if_compiling_contained_td ):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append( components_list )
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[ 5 ]
                            except IndexError:
                                # BUG FIX: was a bare except, which also swallowed
                                # KeyboardInterrupt/SystemExit; only a missing component
                                # (IndexError) is expected here.
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency
                            # defined for the dependent repository (see above comment).
                            if not asbool( only_if_compiling_contained_td ):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append( components_list )
                else:
                    # We have a single repository with no dependencies.
                    components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                    required_repository_tups.append( components_list )
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [ str( item ) for item in required_repository_tup ]
                encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
            encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
            encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
            if suc.is_tool_shed_client( self.app ):
                # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
            url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
            # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
            url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
            request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
            response = urllib2.urlopen( request ).read()
            if response:
                try:
                    required_repo_info_dict = json.loads( response )
                # BUG FIX: 'except Exception, e' is Python-2-only syntax; the 'as' form
                # is equivalent and valid on Python 2.6+ and 3.x.
                except Exception as e:
                    log.exception( e )
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                            required_repo_info_dicts.append( decoded_dict )
                    else:
                        # Merge scalar entries, keeping any truthy value already recorded.
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[ k ] = v
                        else:
                            if v and not all_required_repo_info_dict[ k ]:
                                all_required_repo_info_dict[ k ] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                        # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                        # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                        # lists of discovered repository dependencies, but these lists will be empty in the
                        # required_repo_info_dict since dependency discovery has not yet been performed for these
                        # dictionaries.
                        # Use list() around .keys() so the subscript works on Python 3 as well
                        # (dict.keys() no longer returns a list there).
                        required_repo_info_dict_key = list( required_repo_info_dict.keys() )[ 0 ]
                        all_repo_info_dicts_keys = [ list( d.keys() )[ 0 ] for d in all_repo_info_dicts ]
                        if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                            all_repo_info_dicts.append( required_repo_info_dict )
    # BUG FIX: the result dict was assembled but never returned, so callers always
    # received None; every other version of this method returns it.
    all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
    return all_required_repo_info_dict
def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ): """ Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All repository_dependency entries in each of the received repo_info_dicts includes all required repositories, so only one pass through this method is required to retrieve all repository dependencies. """ all_required_repo_info_dict = {} all_repo_info_dicts = [] if repo_info_dicts: # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool # shed to discover repository ids. required_repository_tups = [] for repo_info_dict in repo_info_dicts: if repo_info_dict not in all_repo_info_dicts: all_repo_info_dicts.append( repo_info_dict ) for repository_name, repo_info_tup in repo_info_dict.items(): description, \ repository_clone_url, \ changeset_revision, \ ctx_rev, \ repository_owner, \ repository_dependencies, \ tool_dependencies = \ suc.get_repo_info_tuple_contents( repo_info_tup ) if repository_dependencies: for key, val in repository_dependencies.items(): if key in [ 'root_key', 'description' ]: continue repository_components_tuple = container_util.get_components_from_key( key ) components_list = suc.extract_components_from_tuple( repository_components_tuple ) # Skip listing a repository dependency if it is required only to compile a tool dependency # defined for the dependent repository since in this case, the repository dependency is really # a dependency of the dependent repository's contained tool dependency, and only if that # tool dependency requires compilation. # For backward compatibility to the 12/20/12 Galaxy release. 
only_if_compiling_contained_td = 'False' if len( components_list ) == 4: only_if_compiling_contained_td = 'False' elif len( components_list ) == 5: only_if_compiling_contained_td = 'False' if not asbool( only_if_compiling_contained_td ): if components_list not in required_repository_tups: required_repository_tups.append( components_list ) for components_list in val: try: only_if_compiling_contained_td = components_list[ 5 ] except: only_if_compiling_contained_td = 'False' # Skip listing a repository dependency if it is required only to compile a tool dependency # defined for the dependent repository (see above comment). if not asbool( only_if_compiling_contained_td ): if components_list not in required_repository_tups: required_repository_tups.append( components_list ) else: # We have a single repository with no dependencies. components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ] required_repository_tups.append( components_list ) if required_repository_tups: # The value of required_repository_tups is a list of tuples, so we need to encode it. encoded_required_repository_tups = [] for required_repository_tup in required_repository_tups: # Convert every item in required_repository_tup to a string. required_repository_tup = [ str( item ) for item in required_repository_tup ] encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) ) encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups ) encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str ) if suc.is_tool_shed_client( self.app ): # Handle secure / insecure Tool Shed URL protocol changes and port changes. 
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url ) pathspec = [ 'repository', 'get_required_repo_info_dict' ] url = common_util.url_join( tool_shed_url, pathspec=pathspec ) # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided url = urllib2.urlopen( urllib2.Request( url ) ).geturl() request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) ) response = urllib2.urlopen( request ).read() if response: try: required_repo_info_dict = json.loads( response ) except Exception, e: log.exception( e ) return all_repo_info_dicts required_repo_info_dicts = [] for k, v in required_repo_info_dict.items(): if k == 'repo_info_dicts': encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ] for encoded_dict_str in encoded_dict_strings: decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str ) required_repo_info_dicts.append( decoded_dict ) else: if k not in all_required_repo_info_dict: all_required_repo_info_dict[ k ] = v else: if v and not all_required_repo_info_dict[ k ]: all_required_repo_info_dict[ k ] = v if required_repo_info_dicts: for required_repo_info_dict in required_repo_info_dicts: # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list # of dictionaries, each of which has a single entry. We'll check keys here rather than # the entire dictionary because a dictionary entry in all_repo_info_dicts will include # lists of discovered repository dependencies, but these lists will be empty in the # required_repo_info_dict since dependency discovery has not yet been performed for these # dictionaries. 
required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ] all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ] if required_repo_info_dict_key not in all_repo_info_dicts_keys: all_repo_info_dicts.append( required_repo_info_dict ) all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
def load_from_element(self, elem, tool_path):
    """
    Configure this data manager from a <data_manager> XML element.

    Reads the declared id, guid, version and the path to the data manager's tool file.  When no
    tool_file attribute is present a nested <tool> tag set is required; it identifies the tool
    shed repository (tool shed, name, owner, installed changeset revision) providing the tool so
    that dependencies will work correctly.  tool_path may be overridden by the shed_conf_file
    referenced by the element.
    """
    assert elem.tag == 'data_manager', 'A data manager configuration must have a "data_manager" tag as the root. "%s" is present' % ( elem.tag)
    self.declared_id = elem.get('id', None)
    self.guid = elem.get('guid', None)
    path = elem.get('tool_file', None)
    self.version = elem.get('version', self.version)
    tool_shed_repository_id = None
    tool_guid = None
    if path is None:
        # No tool_file attribute, so the tool must be described by a nested <tool> tag set.
        tool_elem = elem.find('tool')
        assert tool_elem is not None, "Error loading tool for data manager. Make sure that a tool_file attribute or a tool tag set has been defined:\n%s" % ( util.xml_to_string(elem))
        path = tool_elem.get("file", None)
        tool_guid = tool_elem.get("guid", None)
        # Need to determine repository info so that dependencies will work correctly.
        tool_shed_url = tool_elem.find('tool_shed').text
        # Handle protocol changes.
        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.data_managers.app, tool_shed_url)
        # The protocol is not stored in the database.
        tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url)
        repository_name = tool_elem.find('repository_name').text
        repository_owner = tool_elem.find('repository_owner').text
        installed_changeset_revision = tool_elem.find( 'installed_changeset_revision').text
        self.tool_shed_repository_info_dict = dict( tool_shed=tool_shed, name=repository_name, owner=repository_owner, installed_changeset_revision=installed_changeset_revision)
        tool_shed_repository = \
            suc.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.data_managers.app,
                                                                                          tool_shed,
                                                                                          repository_name,
                                                                                          repository_owner,
                                                                                          installed_changeset_revision )
        if tool_shed_repository is None:
            log.warning( 'Could not determine tool shed repository from database. This should only ever happen when running tests.' )
            # We'll set tool_path manually here from shed_conf_file.
            tool_shed_repository_id = None
            try:
                tool_path = util.parse_xml( elem.get('shed_conf_file')).getroot().get( 'tool_path', tool_path)
            except Exception, e:
                log.error( 'Error determining tool_path for Data Manager during testing: %s', e)
        else:
            tool_shed_repository_id = self.data_managers.app.security.encode_id( tool_shed_repository.id)
        # Use shed_conf_file to determine tool_path.
        shed_conf_file = elem.get("shed_conf_file", None)
        if shed_conf_file:
            shed_conf = self.data_managers.app.toolbox.get_shed_config_dict_by_filename( shed_conf_file, None)
            if shed_conf:
                tool_path = shed_conf.get("tool_path", tool_path)
def populate_containers_dict_from_repository_metadata( self, tool_shed_url, tool_path, repository, reinstalling=False, required_repo_info_dicts=None ):
    """
    Retrieve necessary information from the received repository's metadata to populate the
    containers_dict for display.  This method is called only from Galaxy (not the tool shed)
    when displaying repository dependencies for installed repositories and when displaying
    them for uninstalled repositories that are being reinstalled.

    Returns a containers_dict built by GalaxyUtilityContainerManager.build_repository_containers;
    when the repository has no metadata, a dictionary of None placeholders is returned instead.
    """
    metadata = repository.metadata
    if metadata:
        # Handle proprietary datatypes.
        datatypes = metadata.get( 'datatypes', None )
        # Handle invalid tools.
        invalid_tools = metadata.get( 'invalid_tools', None )
        # Handle README files.
        if repository.has_readme_files:
            if reinstalling or repository.status not in \
                [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED,
                  self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
                # We're reinstalling (or the repository is not in an installed / deactivated
                # state), so we need to send a request to the tool shed to get the README files.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
                params = dict( name=str( repository.name ),
                               owner=str( repository.owner ),
                               changeset_revision=str( repository.installed_changeset_revision ) )
                pathspec = [ 'repository', 'get_readme_files' ]
                raw_text = util.url_get( tool_shed_url,
                                         password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ),
                                         pathspec=pathspec,
                                         params=params )
                readme_files_dict = json.loads( raw_text )
            else:
                # The repository is installed locally, so build the README dict from disk.
                readme_files_dict = readme_util.build_readme_files_dict( self.app,
                                                                         repository,
                                                                         repository.changeset_revision,
                                                                         repository.metadata,
                                                                         tool_path )
        else:
            readme_files_dict = None
        # Handle repository dependencies.
        installed_repository_dependencies, missing_repository_dependencies = \
            self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository )
        # Handle the current repository's tool dependencies.
        repository_tool_dependencies = metadata.get( 'tool_dependencies', None )
        # Make sure to display missing tool dependencies as well.
        repository_invalid_tool_dependencies = metadata.get( 'invalid_tool_dependencies', None )
        if repository_invalid_tool_dependencies is not None:
            if repository_tool_dependencies is None:
                repository_tool_dependencies = {}
            repository_tool_dependencies.update( repository_invalid_tool_dependencies )
        repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
            self.get_installed_and_missing_tool_dependencies_for_installed_repository( repository,
                                                                                       repository_tool_dependencies )
        if reinstalling:
            # When reinstalling, tool dependency information must be merged with the
            # received required_repo_info_dicts.
            installed_tool_dependencies, missing_tool_dependencies = \
                self.populate_tool_dependencies_dicts( tool_shed_url,
                                                       tool_path,
                                                       repository_installed_tool_dependencies,
                                                       repository_missing_tool_dependencies,
                                                       required_repo_info_dicts )
        else:
            installed_tool_dependencies = repository_installed_tool_dependencies
            missing_tool_dependencies = repository_missing_tool_dependencies
        # Handle valid tools.
        valid_tools = metadata.get( 'tools', None )
        # Handle workflows.
        workflows = metadata.get( 'workflows', None )
        # Handle Data Managers.
        valid_data_managers = None
        invalid_data_managers = None
        data_managers_errors = None
        if 'data_manager' in metadata:
            valid_data_managers = metadata['data_manager'].get( 'data_managers', None )
            invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
            data_managers_errors = metadata['data_manager'].get( 'messages', None )
        gucm = GalaxyUtilityContainerManager( self.app )
        containers_dict = gucm.build_repository_containers( repository=repository,
                                                            datatypes=datatypes,
                                                            invalid_tools=invalid_tools,
                                                            missing_repository_dependencies=missing_repository_dependencies,
                                                            missing_tool_dependencies=missing_tool_dependencies,
                                                            readme_files_dict=readme_files_dict,
                                                            repository_dependencies=installed_repository_dependencies,
                                                            tool_dependencies=installed_tool_dependencies,
                                                            valid_tools=valid_tools,
                                                            workflows=workflows,
                                                            valid_data_managers=valid_data_managers,
                                                            invalid_data_managers=invalid_data_managers,
                                                            data_managers_errors=data_managers_errors,
                                                            new_install=False,
                                                            reinstalling=reinstalling )
    else:
        # No metadata is associated with the repository, so there is nothing to display.
        containers_dict = dict( datatypes=None,
                                invalid_tools=None,
                                readme_files_dict=None,
                                repository_dependencies=None,
                                tool_dependencies=None,
                                valid_tools=None,
                                workflows=None )
    return containers_dict
def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
    """
    Check tool settings in tool_shed_install_config and install all repositories
    that are not already installed.  The tool panel configuration file is the received
    migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
    """
    self.app = app
    self.toolbox = self.app.toolbox
    self.migrated_tools_config = migrated_tools_config
    # Initialize the ToolPanelManager.
    self.tpm = tool_panel_manager.ToolPanelManager( self.app )
    # If install_dependencies is True but tool_dependency_dir is not set, do not attempt
    # to install but print informative error message.
    if install_dependencies and app.config.tool_dependency_dir is None:
        message = 'You are attempting to install tool dependencies but do not have a value '
        message += 'for "tool_dependency_dir" set in your galaxy.ini file.  Set this '
        message += 'location value to the path where you want tool dependencies installed and '
        message += 'rerun the migration script.'
        raise Exception( message )
    # Get the local non-shed related tool panel configs (there can be more than one, and the
    # default name is tool_conf.xml).
    self.proprietary_tool_confs = self.non_shed_tool_panel_configs
    self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
    # Set the location where the repositories will be installed by retrieving the tool_path
    # setting from migrated_tools_config.
    tree, error_message = xml_util.parse_xml( migrated_tools_config )
    if tree is None:
        print error_message
    else:
        root = tree.getroot()
        self.tool_path = root.get( 'tool_path' )
        print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
        # Parse tool_shed_install_config to check each of the tools.
        self.tool_shed_install_config = tool_shed_install_config
        tree, error_message = xml_util.parse_xml( tool_shed_install_config )
        if tree is None:
            print error_message
        else:
            root = tree.getroot()
            defined_tool_shed_url = root.get( 'name' )
            self.tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, defined_tool_shed_url )
            self.tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed_url )
            self.repository_owner = common_util.REPOSITORY_OWNER
            index, self.shed_config_dict = self.tpm.get_shed_tool_conf_dict( self.migrated_tools_config )
            # Since tool migration scripts can be executed any number of times, we need to
            # make sure the appropriate tools are defined in tool_conf.xml.  If no tools
            # associated with the migration stage are defined, no repositories will be installed
            # on disk.  The default behavior is that the tool shed is down.
            tool_shed_accessible = False
            tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
            if tool_panel_configs:
                # The missing_tool_configs_dict contents are something like:
                # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
                tool_shed_accessible, missing_tool_configs_dict = \
                    common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
            else:
                # It doesn't matter if the tool shed is accessible since there are no migrated
                # tools defined in the local Galaxy instance, but we have to set the value of
                # tool_shed_accessible to True so that the value of migrate_tools.version can
                # be correctly set in the database.
                tool_shed_accessible = True
                missing_tool_configs_dict = odict()
            if tool_shed_accessible:
                if len( self.proprietary_tool_confs ) == 1:
                    plural = ''
                    file_names = self.proprietary_tool_confs[ 0 ]
                else:
                    plural = 's'
                    file_names = ', '.join( self.proprietary_tool_confs )
                if missing_tool_configs_dict:
                    for proprietary_tool_conf in self.proprietary_tool_confs:
                        # Create a backup of the tool configuration in the un-migrated state.
                        shutil.copy( proprietary_tool_conf, '%s-pre-stage-%04d' % ( proprietary_tool_conf,
                                                                                    latest_migration_script_number ) )
                    for repository_elem in root:
                        # Make sure we have a valid repository tag.
                        if self.__is_valid_repository_tag( repository_elem ):
                            # Get all repository dependencies for the repository defined by the
                            # current repository_elem.  Repository dependency definitions contained
                            # in tool shed repositories with migrated tools must never define a
                            # relationship to a repository dependency that contains a tool.  The
                            # repository dependency can only contain items that are not loaded into
                            # the Galaxy tool panel (e.g., tool dependency definitions, custom datatypes,
                            # etc).  This restriction must be followed down the entire dependency hierarchy.
                            name = repository_elem.get( 'name' )
                            changeset_revision = repository_elem.get( 'changeset_revision' )
                            tool_shed_accessible, repository_dependencies_dict = \
                                common_util.get_repository_dependencies( app,
                                                                         self.tool_shed_url,
                                                                         name,
                                                                         self.repository_owner,
                                                                         changeset_revision )
                            # Make sure all repository dependency records exist (as tool_shed_repository
                            # table rows) in the Galaxy database.
                            created_tool_shed_repositories = \
                                self.create_or_update_tool_shed_repository_records( name,
                                                                                    changeset_revision,
                                                                                    repository_dependencies_dict )
                            # Order the repositories for proper installation.  This process is similar to the
                            # process used when installing tool shed repositories, but does not handle managing
                            # tool panel sections and other components since repository dependency definitions
                            # contained in tool shed repositories with migrated tools must never define a relationship
                            # to a repository dependency that contains a tool.
                            ordered_tool_shed_repositories = \
                                self.order_repositories_for_installation( created_tool_shed_repositories,
                                                                          repository_dependencies_dict )
                            for tool_shed_repository in ordered_tool_shed_repositories:
                                is_repository_dependency = self.__is_repository_dependency( name,
                                                                                            changeset_revision,
                                                                                            tool_shed_repository )
                                self.install_repository( repository_elem,
                                                         tool_shed_repository,
                                                         install_dependencies,
                                                         is_repository_dependency=is_repository_dependency )
                else:
                    message = "\nNo tools associated with migration stage %s are defined in your " % \
                        str( latest_migration_script_number )
                    message += "file%s named %s,\nso no repositories will be installed on disk.\n" % \
                        ( plural, file_names )
                    print message
            else:
                message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % \
                    str( latest_migration_script_number )
                message += "Try again later.\n"
                print message
def repair_tool_shed_repository(self, repository, repo_info_dict):
    """
    Inspect the received installed tool shed repository and get it back into a fully
    installed state.

    Depending on the repository's current status this will activate a deactivated
    repository, (re)install a repository that is in an error or intermediate state, or
    install any missing tool dependencies of an already-installed repository.

    Returns a dictionary mapping repository names to lists of accumulated error
    messages (empty when the repair succeeded).
    """

    def add_repair_dict_entry(repository_name, error_message):
        # Accumulate error messages per repository name as a list.
        if repository_name in repair_dict:
            repair_dict[repository_name].append(error_message)
        else:
            repair_dict[repository_name] = [error_message]
        return repair_dict

    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed)
    metadata = repository.metadata
    # The repository.metadata contains dependency information that corresponds to the
    # current changeset revision, which may be different from what is stored in the
    # database.  If any of these repository-repository dependency associations is
    # obsolete, clean_dependency_relationships removes them.
    suc.clean_dependency_relationships(self.app, metadata, repository, tool_shed_url)
    repair_dict = {}
    tpm = tool_panel_manager.ToolPanelManager(self.app)
    if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
        # The repository is only deactivated; re-activating it is sufficient.
        try:
            self.app.installed_repository_manager.activate_repository(repository)
        except Exception as e:
            error_message = "Error activating repository %s: %s" % (repository.name, str(e))
            log.debug(error_message)
            # Route the error through the helper so repair_dict values are consistently
            # lists (previously a bare string was assigned here, unlike every other path).
            repair_dict = add_repair_dict_entry(repository.name, error_message)
    elif repository.status not in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
        # The repository is in an error or intermediate state, so reinstall it.
        shed_tool_conf, tool_path, relative_install_dir = \
            suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
        # Reset the repository attributes to the New state for installation.
        if metadata:
            _, tool_panel_section_key = \
                tpm.handle_tool_panel_selection( self.app.toolbox,
                                                 metadata,
                                                 no_changes_checked=True,
                                                 tool_panel_section_id=None,
                                                 new_tool_panel_section_label=None )
        else:
            # The tools will be loaded outside of any sections in the tool panel.
            tool_panel_section_key = None
        repository_util.set_repository_attributes( self.app,
                                                   repository,
                                                   status=self.app.install_model.ToolShedRepository.installation_status.NEW,
                                                   error_message=None,
                                                   deleted=False,
                                                   uninstalled=False,
                                                   remove_from_disk=True)
        irm = install_manager.InstallRepositoryManager(self.app, tpm)
        irm.install_tool_shed_repository( repository,
                                          repo_info_dict,
                                          tool_panel_section_key,
                                          shed_tool_conf,
                                          tool_path,
                                          install_tool_dependencies=True,
                                          install_resolver_dependencies=False,  # Assuming repairs are only necessary toolshed packages
                                          reinstalling=True)
        if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.ERROR ]:
            repair_dict = add_repair_dict_entry(repository.name, repository.error_message)
    else:
        irm = install_manager.InstallRepositoryManager(self.app, tpm)
        # We have an installed tool shed repository, so handle tool dependencies if necessary.
        if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata:
            work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-itdep")
            # Reset missing tool dependencies.
            for tool_dependency in repository.missing_tool_dependencies:
                if tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR,
                                               self.app.install_model.ToolDependency.installation_status.INSTALLING ]:
                    tool_dependency = \
                        tool_dependency_util.set_tool_dependency_attributes( self.app,
                                                                             tool_dependency=tool_dependency,
                                                                             status=self.app.install_model.ToolDependency.installation_status.UNINSTALLED )
            # Install tool dependencies.
            irm.update_tool_shed_repository_status( repository,
                                                    self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES)
            # Get the tool_dependencies.xml file from the repository.
            tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml',
                                                                     repository.repo_path(self.app))
            itdm = install_manager.InstallToolDependencyManager(self.app)
            installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=repository,
                                                                                    tool_dependencies_config=tool_dependencies_config,
                                                                                    tool_dependencies=repository.tool_dependencies,
                                                                                    from_tool_migration_manager=False)
            for installed_tool_dependency in installed_tool_dependencies:
                if installed_tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]:
                    repair_dict = add_repair_dict_entry(repository.name,
                                                        installed_tool_dependency.error_message)
            basic_util.remove_dir(work_dir)
            # Restore the repository's INSTALLED status now that dependency handling is done.
            irm.update_tool_shed_repository_status( repository,
                                                    self.app.install_model.ToolShedRepository.installation_status.INSTALLED)
    return repair_dict