def get_repository_dependencies(self, app, changeset, toolshed_url):
    """
    Return the recursive repository dependencies defined for this repository at the given
    changeset revision, or None when none are defined.
    """
    # Repositories of type tool_dependency_definition never have repository dependencies,
    # so it is fine if no repository_metadata record is returned here.  However, when a
    # readme upload (or some other change) updates the existing downloadable changeset
    # revision instead of creating a new one, we still want to be able to resolve the
    # repository dependencies.
    repository_metadata = metadata_util.get_current_repository_metadata_for_changeset_revision(app, self, changeset)
    if not repository_metadata:
        return None
    metadata = repository_metadata.metadata
    if not metadata:
        return None
    builder = relation_builder.RelationBuilder(app, self, repository_metadata, toolshed_url)
    dependencies = builder.get_repository_dependencies_for_changeset_revision()
    return dependencies if dependencies else None
def create_repo_info_dict(app, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
                          repository=None, repository_metadata=None, tool_dependencies=None, repository_dependencies=None):
    """
    Return a dictionary that includes all of the information needed to install a repository into a local
    Galaxy instance.  The dictionary will also contain the recursive list of repository dependencies defined
    for the repository, as well as the defined tool dependencies.

    This method is called from Galaxy under four scenarios:
    1. During the tool shed repository installation process via the tool shed's get_repository_information()
       method.  In this case both the received repository and repository_metadata will be objects, but
       tool_dependencies and repository_dependencies will be None.
    2. When getting updates for an installed repository where the updates include newly defined repository
       dependency definitions.  This scenario is similar to 1. above.  The tool shed's
       get_repository_information() method is the caller, and both the received repository and
       repository_metadata will be objects, but tool_dependencies and repository_dependencies will be None.
    3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
       updates available.  In this case, both repository and repository_metadata will be None, but
       tool_dependencies and repository_dependencies will be objects previously retrieved from the tool shed
       if the repository includes definitions for them.
    4. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with
       updates available.  In this case, this method is reached via the tool shed's
       get_updated_repository_information() method, and both repository and repository_metadata will be
       objects but tool_dependencies and repository_dependencies will be None.
    """
    repository = suc.get_repository_by_name_and_owner(app, repository_name, repository_owner)
    if app.name == 'tool_shed':
        # We're in the tool shed, so derive the metadata and dependency information locally.
        repository_metadata = suc.get_repository_metadata_by_changeset_revision(app,
                                                                                app.security.encode_id(repository.id),
                                                                                changeset_revision)
        if repository_metadata:
            metadata = repository_metadata.metadata
            if metadata:
                tool_shed_url = str(web.url_for('/', qualified=True)).rstrip('/')
                builder = relation_builder.RelationBuilder(app, repository, repository_metadata, tool_shed_url)
                # Get a dictionary of all repositories upon which the contents of the received repository depends.
                repository_dependencies = builder.get_repository_dependencies_for_changeset_revision()
                tool_dependencies = metadata.get('tool_dependencies', {})
        if tool_dependencies:
            # Stamp every tool dependency definition with the coordinates of the repository it came from.
            annotated_dependencies = {}
            for dependency_key, requirements_dict in tool_dependencies.items():
                if dependency_key == 'set_environment':
                    # set_environment entries are a list of dicts rather than a single dict.
                    for set_environment_dict in requirements_dict:
                        set_environment_dict['repository_name'] = repository_name
                        set_environment_dict['repository_owner'] = repository_owner
                        set_environment_dict['changeset_revision'] = changeset_revision
                    annotated_dependencies[dependency_key] = list(requirements_dict)
                else:
                    requirements_dict['repository_name'] = repository_name
                    requirements_dict['repository_owner'] = repository_owner
                    requirements_dict['changeset_revision'] = changeset_revision
                    annotated_dependencies[dependency_key] = requirements_dict
            tool_dependencies = annotated_dependencies
    # Cast unicode to string, with the exception of description, since it is free text and can contain
    # special characters.
    return {
        str(repository.name): (repository.description,
                               str(repository_clone_url),
                               str(changeset_revision),
                               str(ctx_rev),
                               str(repository_owner),
                               repository_dependencies,
                               tool_dependencies)
    }
def json(self, trans, **kwd):
    """
    GET /api/tools/json

    Return the tool form JSON (as a serialized string) for a tool in a repository.

    :param guid: the GUID of the tool
    :type guid: str
    :param tsr_id: the ID of the repository
    :type tsr_id: str
    :param changeset: the changeset at which to load the tool json
    :type changeset: str
    """
    guid = kwd.get('guid', None)
    tsr_id = kwd.get('tsr_id', None)
    changeset = kwd.get('changeset', None)
    if None in [changeset, tsr_id, guid]:
        message = 'Changeset, repository ID, and tool GUID are all required parameters.'
        trans.response.status = 400
        return {'status': 'error', 'message': message}
    tsucm = ToolShedUtilityContainerManager(trans.app)
    repository = repository_util.get_repository_in_tool_shed(self.app, tsr_id)
    repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed(repository.user, repository)
    repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision(trans.app, tsr_id, changeset)
    toolshed_base_url = str(web.url_for('/', qualified=True)).rstrip('/')
    rb = relation_builder.RelationBuilder(trans.app, repository, repository_metadata, toolshed_base_url)
    repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
    containers_dict = tsucm.build_repository_containers(repository, changeset, repository_dependencies,
                                                        repository_metadata)

    def _find_tool_by_guid(folders):
        # Return the first tool whose generated GUID matches, or None.  The previous
        # implementation's inner `break` only exited the inner loop, so the scan kept
        # iterating (and assigning tool.id on) every remaining folder after a match.
        for folder in folders:
            if hasattr(folder, 'valid_tools'):
                for tool in folder.valid_tools:
                    tool.id = tool.tool_id
                    if suc.generate_tool_guid(repository_clone_url, tool) == guid:
                        return tool
        return None

    found_tool = _find_tool_by_guid(containers_dict['valid_tools'].folders)
    if found_tool is None:
        message = f'Unable to find tool with guid {guid} in repository {repository.name}.'
        trans.response.status = 404
        return {'status': 'error', 'message': message}
    with ValidationContext.from_app(trans.app) as validation_context:
        tv = tool_validator.ToolValidator(validation_context)
        repository, tool, valid, message = tv.load_tool_from_changeset_revision(tsr_id, changeset,
                                                                                found_tool.tool_config)
        if message or not valid:
            status = 'error'
            return dict(message=message, status=status)
        tool_help = ''
        if tool.help:
            tool_help = tool.help.render(static_path=web.url_for('/static'),
                                         host_url=web.url_for('/', qualified=True))
            tool_help = util.unicodify(tool_help, 'utf-8')
        tool_dict = tool.to_dict(trans)
        tool_dict['inputs'] = {}
        tool.populate_model(trans, tool.inputs, {}, tool_dict['inputs'])
        tool_dict.update({
            'help': tool_help,
            'citations': bool(tool.citations),
            'requirements': [{'name': r.name, 'version': r.version} for r in tool.requirements],
            'state_inputs': params_to_strings(tool.inputs, {}, trans.app),
            'display': tool.display_interface,
            'action': web.url_for(tool.action),
            'method': tool.method,
            'enctype': tool.enctype
        })
        return json.dumps(tool_dict)