def validate_repository_name(app, name, user):
    """
    Validate whether the given name qualifies as a new TS repo name.

    Repository names must be unique for each user, must be at least two
    characters in length and must contain only lower-case letters, numbers,
    and the '_' character.  Returns an error message string, or '' when the
    name is acceptable.
    """
    if name in ('None', None, ''):
        return 'Enter the required repository name.'
    if name == 'repos':
        return "The term '%s' is a reserved word in the Tool Shed, so it cannot be used as a repository name." % name
    existing = get_repository_by_name_and_owner(app, name, user.username)
    if existing is not None:
        # Distinguish deleted from active repositories so the user knows why
        # the name is taken.
        if existing.deleted:
            message = 'You own a deleted repository named <b>%s</b>, please choose a different name.'
        else:
            message = "You already own a repository named <b>%s</b>, please choose a different name."
        return message % escape(name)
    if len(name) < 2:
        return "Repository names must be at least 2 characters in length."
    if len(name) > 80:
        return "Repository names cannot be more than 80 characters in length."
    if not VALID_REPOSITORYNAME_RE.match(name):
        return "Repository names must contain only lower-case letters, numbers and underscore."
    return ''
def get_dependencies_for_metadata_revision(app, metadata):
    """
    Return the repository_metadata records for the dependencies listed in the
    received metadata dict's 'repository_dependencies' entry, resolved to the
    next downloadable changeset revision of each required repository.
    Dependencies with no downloadable update are skipped.
    """
    encode_id = app.security.encode_id
    dependencies = []
    for _, name, owner, changeset, _, _ in metadata['repository_dependencies']:
        required = get_repository_by_name_and_owner(app, name, owner)
        next_changeset = get_next_downloadable_changeset_revision(app, required, changeset)
        if next_changeset is None:
            # No downloadable revision available for this dependency.
            continue
        entry = get_repository_metadata_by_changeset_revision(
            app, encode_id(required.id), next_changeset)
        dependencies.append(entry)
    return dependencies
def get_updated_changeset_revisions(app, name, owner, changeset_revision):
    """
    Return a string of comma-separated changeset revision hashes for all
    available updates to the received changeset revision for the repository
    defined by the received name and owner.  Returns '' when there are no
    updates.
    """
    repository = get_repository_by_name_and_owner(app, name, owner)
    # The upper bound changeset revision is excluded from the result.
    upper_bound = get_next_downloadable_changeset_revision(app, repository, changeset_revision)
    repo = get_repo_for_repository(app, repository=repository)
    hashes = [
        str(repo[changeset])
        for changeset in reversed_lower_upper_bounded_changelog(repo, changeset_revision, upper_bound)
        if changeset != upper_bound
    ]
    # ','.join of an empty list is already '', so a single join covers both cases.
    return ','.join(hashes)
def get_repository_dependency_tups_from_repository_metadata(
        app, repository_metadata, deprecated_only=False):
    """
    Return a list of tuples defining repository objects required by the
    received repository.  The returned list defines the entire repository
    dependency tree.  This method is called only from the Tool Shed.

    When deprecated_only is True, only dependencies whose repository is
    deprecated are included.
    """
    dependency_tups = []
    if repository_metadata is None:
        return dependency_tups
    metadata = repository_metadata.metadata
    if not metadata:
        return dependency_tups
    rd_dict = metadata.get('repository_dependencies', None)
    if rd_dict is None:
        return dependency_tups
    rd_tups = rd_dict.get('repository_dependencies', None)
    if rd_tups is None:
        return dependency_tups
    # Each entry is a repository dependency tuple like:
    # ['http://localhost:9009', 'package_samtools_0_1_18', 'devteam', 'ef37fc635cb9', 'False', 'False']
    for rd_tup in rd_tups:
        _, name, owner, _, _, _ = parse_repository_dependency_tuple(rd_tup)
        repository = get_repository_by_name_and_owner(app, name, owner)
        if not repository:
            log.debug(
                "Cannot locate repository %s owned by %s for inclusion in repository dependency tups." % (name, owner))
            continue
        # Include everything unless the caller restricted to deprecated repos.
        if not deprecated_only or repository.deprecated:
            dependency_tups.append(rd_tup)
    return dependency_tups
def create_repo_info_dict(app, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
                          repository=None, repository_metadata=None, tool_dependencies=None, repository_dependencies=None):
    """
    Return a dictionary that includes all of the information needed to install a repository into a local
    Galaxy instance.  The dictionary will also contain the recursive list of repository dependencies defined
    for the repository, as well as the defined tool dependencies.

    This method is called from Galaxy under four scenarios:
    1. During the tool shed repository installation process via the tool shed's get_repository_information()
    method.  In this case both the received repository and repository_metadata will be objects, but
    tool_dependencies and repository_dependencies will be None.
    2. When getting updates for an installed repository where the updates include newly defined repository
    dependency definitions.  This scenario is similar to 1. above. The tool shed's get_repository_information()
    method is the caller, and both the received repository and repository_metadata will be objects, but
    tool_dependencies and repository_dependencies will be None.
    3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
    updates available.  In this case, both repository and repository_metadata will be None, but tool_dependencies
    and repository_dependencies will be objects previously retrieved from the tool shed if the repository includes
    definitions for them.
    4. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
    available.  In this case, this method is reached via the tool shed's get_updated_repository_information()
    method, and both repository and repository_metadata will be objects but tool_dependencies and
    repository_dependencies will be None.
    """
    repo_info_dict = {}
    # NOTE(review): the received repository argument is unconditionally replaced by a fresh
    # lookup here, so the parameter is effectively ignored — confirm this is intentional.
    repository = get_repository_by_name_and_owner(app, repository_name, repository_owner)
    if app.name == 'tool_shed':
        # We're in the tool shed.
        repository_metadata = get_repository_metadata_by_changeset_revision(app,
                                                                            app.security.encode_id(repository.id),
                                                                            changeset_revision)
        if repository_metadata:
            metadata = repository_metadata.metadata
            if metadata:
                tool_shed_url = web.url_for('/', qualified=True).rstrip('/')
                rb = tool_shed.dependencies.repository.relation_builder.RelationBuilder(app, repository,
                                                                                        repository_metadata,
                                                                                        tool_shed_url)
                # Get a dictionary of all repositories upon which the contents of the received repository depends.
                repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
                tool_dependencies = metadata.get('tool_dependencies', {})
    if tool_dependencies:
        # Stamp each tool dependency requirement with the repository it belongs to, so the
        # installing Galaxy instance can associate dependencies with their source repository.
        new_tool_dependencies = {}
        for dependency_key, requirements_dict in tool_dependencies.items():
            if dependency_key in ['set_environment']:
                # 'set_environment' maps to a list of dicts rather than a single dict.
                new_set_environment_dict_list = []
                for set_environment_dict in requirements_dict:
                    set_environment_dict['repository_name'] = repository_name
                    set_environment_dict['repository_owner'] = repository_owner
                    set_environment_dict['changeset_revision'] = changeset_revision
                    new_set_environment_dict_list.append(set_environment_dict)
                new_tool_dependencies[dependency_key] = new_set_environment_dict_list
            else:
                requirements_dict['repository_name'] = repository_name
                requirements_dict['repository_owner'] = repository_owner
                requirements_dict['changeset_revision'] = changeset_revision
                new_tool_dependencies[dependency_key] = requirements_dict
        tool_dependencies = new_tool_dependencies
    # The tuple layout here is a cross-component contract with the Galaxy installer; do not reorder.
    repo_info_dict[repository.name] = (repository.description,
                                       repository_clone_url,
                                       changeset_revision,
                                       ctx_rev,
                                       repository_owner,
                                       repository_dependencies,
                                       tool_dependencies)
    return repo_info_dict