def find_new_deps_from_installed(galaxy_context, installed_repos, no_deps=False):
    """Return the requirements of installed_repos that nothing installed provides yet.

    Args:
        galaxy_context: context used to open the installed-repository db.
        installed_repos: iterable of installed repository objects; each must
            expose a ``.requirements`` iterable.
        no_deps: when True, skip dependency solving entirely and return [].

    Returns:
        Sorted list of requirement objects not satisfied by any installed
        repository (empty when no_deps is True).
    """
    if no_deps:
        return []

    # FIXME: Just return the single item list installed_repositories here
    deps_and_reqs_set = set()

    log.debug('finding new deps for installed repos: %s',
              [str(x) for x in installed_repos])

    # Collect requirements from every repo into a set. Loses ordering but
    # removes dupes; ordering is restored by the sort below.
    for installed_repository in installed_repos:
        deps_and_reqs_set.update(installed_repository.requirements)

    deps_and_reqs_list = sorted(deps_and_reqs_set)

    # Hoisted out of the loop: the db handle does not depend on the
    # requirement being checked (the original rebuilt it per iteration).
    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    unsolved_deps_reqs = []

    for dep_req in deps_and_reqs_list:
        log.debug('Checking if %s is provided by something installed', str(dep_req))

        # Search for an exact ns_n_v match
        already_installed = list(irdb.by_requirement(dep_req))
        log.debug('already_installed: %s', already_installed)

        # Any provider at all means this requirement is solved.
        # (The original tracked this with 'solved = solved or True'.)
        if already_installed:
            for provider in already_installed:
                log.debug('The dep_req %s is already provided by %s',
                          dep_req, provider)
            log.debug('skipping dep_req %s', dep_req)
            continue

        unsolved_deps_reqs.append(dep_req)

    log.debug('Found additional requirements: %s',
              pprint.pformat(unsolved_deps_reqs))

    return unsolved_deps_reqs
def info_repository_specs(galaxy_context,
                          api,
                          repository_spec_strings,
                          display_callback=None,
                          offline=None):
    """Display info for each requested repository spec.

    For every spec string: optionally look up and display the remote repo
    (when online), then display any locally installed repos whose label
    matches, and finally report the labels that matched nothing.

    Args:
        galaxy_context: context used to open the installed-repository db.
        api: Galaxy API client used for remote lookups.
        repository_spec_strings: iterable of 'namespace.name' style strings.
        display_callback: output sink; defaults to display.display_callback.
        offline: when truthy, skip all remote lookups.
    """
    online = not offline

    display_callback = display_callback or display.display_callback
    offline = offline or False

    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    labels_to_match = []
    all_labels_to_match = []

    for repository_spec_string in repository_spec_strings:
        galaxy_namespace, repository_name, content_name = \
            parse_repository_name(repository_spec_string)

        log.debug('showing info for repository spec: %s', repository_spec_string)

        # Fall back to the content name when no repo name was parsed.
        repository_name = repository_name or content_name

        if online:
            remote_data = api.lookup_repo_by_name(galaxy_namespace, repository_name)
            if remote_data:
                display_callback(_repr_remote_repo(remote_data))

        label_to_match = '%s.%s' % (galaxy_namespace, repository_name)
        all_labels_to_match.append(label_to_match)
        labels_to_match.append(label_to_match)

    # BUG FIX: removed a dead assignment that built a MatchRepositorySpec from
    # 'label_and_spec[1]' — labels_to_match holds plain strings, so that
    # indexed the second *character* of each label; it was also immediately
    # overwritten by the MatchLabels below, so dropping it preserves behavior.
    matcher = matchers.MatchLabels(labels_to_match)

    matched_repositories = irdb.select(repository_spec_match_filter=matcher)

    remote_data = False

    matched_labels = []
    for matched_repository in matched_repositories:
        display_callback(_repr_installed_repository(matched_repository))
        matched_labels.append(matched_repository.repository_spec.label)

    unmatched_labels = set(all_labels_to_match).difference(set(matched_labels))

    if unmatched_labels:
        display_callback('These repositories were not found:')
        for unmatched_label in sorted(unmatched_labels):
            display_callback(_repr_unmatched_label(unmatched_label))

    return
def installed_content_item_iterator(galaxy_context,
                                    namespace_match_filter=None,
                                    repository_spec_match_filter=None,
                                    content_item_match_filter=None,
                                    content_item_type=None):
    """Yield an info dict for each installed content item passing all filters.

    Each yielded dict has keys 'path', 'content_data',
    'installed_repository', and 'version'. All filters default to
    matchers.MatchAll(); content_item_type defaults to 'roles'.
    """
    # match_all works for all types
    namespace_match_filter = namespace_match_filter or matchers.MatchAll()
    repository_spec_match_filter = repository_spec_match_filter or matchers.MatchAll()
    content_item_match_filter = content_item_match_filter or matchers.MatchAll()

    content_item_type = content_item_type or 'roles'

    repo_db = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    selected_repos = repo_db.select(
        namespace_match_filter=namespace_match_filter,
        repository_spec_match_filter=repository_spec_match_filter)

    for repo in selected_repos:
        log.debug('Found repository "%s" at %s',
                  repo.repository_spec.label, repo.path)

        repo_full_path = repo.path

        # Re-apply the repository filter defensively before walking contents.
        if not repository_spec_match_filter(repo):
            log.debug('The repository_match_filter %s failed to match for %s',
                      repository_spec_match_filter, repo)
            continue

        log.debug('Looking for %s in repository at %s',
                  content_item_type, repo_full_path)

        for item in all_content_item_types_iterator(repo):
            log.debug('installed_content_item: %s', item)

            if not content_item_match_filter(item):
                log.debug('%s was not matched by content_match_filter: %s',
                          item, content_item_match_filter)
                continue

            # this is sort of the 'join' of installed_repository and installed_content
            yield {'path': item.path,
                   'content_data': item,
                   'installed_repository': repo,
                   'version': item.version,
                   }
def install_repositories_matching_repository_specs(
        galaxy_context,
        requirements_list,
        editable=False,
        namespace_override=None,
        display_callback=None,
        # TODO: error handling callback ?
        ignore_errors=False,
        no_deps=False,
        force_overwrite=False):
    '''Install a set of repositories specified by repository_specs if they are not already installed'''

    # log.debug('editable: %s', editable)
    log.debug('requirements_list: %s', requirements_list)

    _verify_requirements_repository_spec_have_namespaces(requirements_list)

    # FIXME: mv this filtering to it's own method
    # match any of the content specs for stuff we want to install
    # ie, see if it is already installed
    requested_repository_specs = [x.requirement_spec for x in requirements_list]
    repository_match_filter = matchers.MatchRepositorySpecNamespaceName(
        requested_repository_specs)

    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)
    already_installed_generator = irdb.select(
        repository_match_filter=repository_match_filter)

    # FIXME: if/when GalaxyContent and InstalledGalaxyContent are attr.ib based and frozen and hashable
    #        we can simplify this filter with set ops
    already_installed_repository_spec_set = set(
        [installed.repository_spec for installed in already_installed_generator])

    log.debug('already_installed_repository_spec_set: %s',
              already_installed_repository_spec_set)

    # BUG FIX: the original comprehension required
    # '... and not force_overwrite', which made the install list EMPTY
    # whenever --force was used — the opposite of the documented intent
    # ("filters out already installed repositories unless --force").
    # With force, install everything requested; otherwise skip what is
    # already installed.
    if force_overwrite:
        requirements_to_install = list(requirements_list)
    else:
        requirements_to_install = [
            req for req in requirements_list
            if req.requirement_spec not in already_installed_repository_spec_set
        ]

    log.debug('repository_specs_to_install: %s',
              pprint.pformat(requirements_to_install))

    return install_repositories(galaxy_context,
                                requirements_to_install,
                                display_callback=display_callback,
                                ignore_errors=ignore_errors,
                                no_deps=no_deps,
                                force_overwrite=force_overwrite)
def remove(galaxy_context, repository_match_filter=None, display_callback=None):
    """Remove every installed repository matched by repository_match_filter.

    Defaults to matchers.MatchNone(), so a caller must opt in to removals
    explicitly. Returns 0 on completion.
    """
    match_filter = repository_match_filter or matchers.MatchNone()

    installed_db = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)
    matched = installed_db.select(repository_match_filter=match_filter)

    for repo_to_remove in matched:
        log.debug('removing %s', repo_to_remove)
        remove_repository(repo_to_remove, display_callback=display_callback)

    return 0
def test_installed_repository_db_match_names(galaxy_context):
    """Smoke test: select installed repos by name filter and log each result."""
    name_filter = matchers.MatchNames(['foo.bar'])
    repo_db = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    # NOTE(review): other select() call sites in this codebase pass the filter
    # by keyword (repository_spec_match_filter=...); confirm a positional
    # filter argument is accepted by select()'s signature.
    for record in repo_db.select(name_filter):
        log.debug('x: %s', record)
def test_installed_repository_db(galaxy_context):
    """Smoke test: iterate everything an unfiltered select() yields."""
    repo_db = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)
    for record in repo_db.select():
        log.debug('x: %s', record)
def install(galaxy_context, fetcher, fetch_results, repository_spec,
            force_overwrite=False, display_callback=None):
    """extract the archive to the filesystem and write out install metadata.

    MUST be called after self.fetch().

    Loads the downloaded archive from fetch_results['archive_path'],
    extracts it under galaxy_context.content_path/<namespace>/<name>,
    cleans up the fetcher's temp files, then re-reads the installed
    repository back from disk and returns it.

    Args:
        galaxy_context: supplies content_path, the install root.
        fetcher: fetcher used for the download; only .cleanup() is used here.
        fetch_results: dict from the fetch step; must contain 'archive_path'.
        repository_spec: spec (namespace/name/label/fetch_method) being installed.
        force_overwrite: passed through to the archive install step.
        display_callback: optional output sink for progress messages.

    Returns:
        List of installed repository objects loaded from disk after install.

    Raises:
        exceptions.GalaxyClientError: when fetch_results has no archive_path.
    """
    log.debug('install: repository_spec=%s, force_overwrite=%s',
              repository_spec, force_overwrite)

    just_installed_spec_and_results = []

    # FIXME: really need to move the fetch step elsewhere and do it before,
    #        install should get pass a content_archive (or something more abstract)
    # TODO: some useful exceptions for 'cant find', 'cant read', 'cant write'
    archive_path = fetch_results.get('archive_path', None)

    # TODO: this could be pulled up a layer, after getting fetch_results but before install()
    if not archive_path:
        raise exceptions.GalaxyClientError('No valid content data found for...')

    log.debug("installing from %s", archive_path)

    repo_archive_ = repository_archive.load_archive(archive_path, repository_spec)
    log.debug('repo_archive_: %s', repo_archive_)
    log.debug('repo_archive_.info: %s', repo_archive_.info)

    # we strip off any higher-level directories for all of the files contained within
    # the tar file here. The default is 'github_repo-target'. Gerrit instances, on the other
    # hand, does not have a parent directory at all.

    # preparation for archive extraction: ensure the install root exists
    if not os.path.isdir(galaxy_context.content_path):
        log.debug('No content path (%s) found so creating it',
                  galaxy_context.content_path)
        os.makedirs(galaxy_context.content_path)

    # Build up all the info about where the repository will be installed to
    namespaced_repository_path = '%s/%s' % (repository_spec.namespace,
                                            repository_spec.name)
    install_info_path = os.path.join(galaxy_context.content_path,
                                     namespaced_repository_path,
                                     'meta/.galaxy_install_info')

    # extract_archive_to_dir depends on the repo_archive type, so ask it
    extract_archive_to_dir = os.path.join(galaxy_context.content_path,
                                          namespaced_repository_path)

    # 'editable' installs (symlink-style) are flagged by the fetch method
    editable = repository_spec.fetch_method == FetchMethods.EDITABLE

    destination_info = InstallDestinationInfo(
        destination_root_dir=galaxy_context.content_path,
        repository_spec=repository_spec,
        extract_archive_to_dir=extract_archive_to_dir,
        namespaced_repository_path=namespaced_repository_path,
        install_info_path=install_info_path,
        force_overwrite=force_overwrite,
        editable=editable)

    # A list of InstallationResults
    res = repository_archive.install(repo_archive_,
                                     repository_spec=repository_spec,
                                     destination_info=destination_info,
                                     display_callback=display_callback)
    just_installed_spec_and_results.append((repository_spec, res))

    if display_callback:
        display_callback("- The repository %s was successfully installed to %s" %
                         (repository_spec.label, galaxy_context.content_path))

    # rm any temp files created when getting the content archive
    # TODO: use some sort of callback?
    fetcher.cleanup()

    # We know the repo specs for the repos we asked to install, and the installation results,
    # so now use that info to find the just installed repos on disk and load them and return them.
    just_installed_repository_specs = [x[0] for x in just_installed_spec_and_results]

    # log.debug('just_installed_repository_specs: %s', just_installed_repository_specs)

    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    just_installed_repositories = []
    for just_installed_repository_spec in just_installed_repository_specs:
        just_installed_repository_gen = irdb.by_repository_spec(
            just_installed_repository_spec)

        # TODO: Eventually, we could make install.install return a generator and yield these results straigt
        #       from just_installed_repository_generator. The loop here is mostly for logging/feedback.
        # Should only get one answer here for now.
        for just_installed_repository in just_installed_repository_gen:
            log.debug('just_installed_repository is installed: %s',
                      pprint.pformat(attr.asdict(just_installed_repository)))

            just_installed_repositories.append(just_installed_repository)

    # log.debug('just_installed_repositories: %s', pprint.pformat(just_installed_repositories))

    return just_installed_repositories
def _list(galaxy_context,
          repository_spec_match_filter=None,
          list_content=False,
          display_callback=None):
    """List installed repositories (and optionally their content items).

    Builds a repo_list of dicts ({'content_items': ..., 'installed_repository': ...}),
    prints one line per repo via display_callback, and — when list_content is
    True — one line per content item. Returns repo_list for API use.

    NOTE(review): display_callback has no default here and is called
    unconditionally — callers appear responsible for passing one; confirm.
    """
    log.debug('list_content: %s', list_content)

    # Default to matching every installed repository.
    all_repository_match = repository_spec_match_filter or matchers.MatchAll()

    # We search for installed repos to list, and then display all the content in those installed repos
    icidb = installed_content_item_db.InstalledContentItemDatabase(galaxy_context)
    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    # accumulate for api return
    repo_list = []

    for installed_repository in irdb.select(
            repository_spec_match_filter=all_repository_match):
        log.debug('installed_repo: %s', installed_repository)

        # Bucket content items by their type (role, module, plugin, ...).
        content_items = collections.defaultdict(list)

        # Find all the content items for this particular repo
        repository_match = matchers.MatchRepositorySpec(
            [installed_repository.repository_spec])

        for content_info in icidb.select(
                repository_spec_match_filter=repository_match):
            content_dict = content_info.copy()

            content_item_type = content_dict['content_data'].content_item_type

            # revisit this output format once we get some feedback
            content_dict.update({
                'type': content_item_type,
                'name': content_dict['content_data'].name,
                # 'installed_repo_namespace': repo.namespace,
                # 'installed_repo_name': repo.name,
                # 'installed_repo_path': repo.path,
                # 'installed_repo_id': repo.repository_spec.label,
                'installed_repository': installed_repository,
            })

            content_items[content_item_type].append(content_dict)
            # content_item_list.append(content_dict)

        repo_dict = {'content_items': content_items,
                     'installed_repository': installed_repository}
        repo_list.append(repo_dict)

    # Display pass: one line per repo, then (optionally) per content item.
    for repo_item in repo_list:
        repo_msg = "repo={installed_repository.repository_spec.label}, type=repository, version={installed_repository.repository_spec.version}"
        display_callback(repo_msg.format(**repo_item))

        if not list_content:
            continue

        for content_item_type_key, content_items_data in repo_item['content_items'].items():
            content_msg = "repo={installed_repository.repository_spec.label}, type={type}, name={name}, " + \
                "version={installed_repository.repository_spec.version}"
            # content_msg = "    type={type}, name={name}, " + \
            #     "version={installed_repository.repository_spec.version}"
            # type_msg = "  {content_item_type}:"
            # display_callback(type_msg.format(content_item_type=content_item_type_key))

            log.debug('content_item: %s', content_items_data)
            log.debug('content_item_type_key: %s', content_item_type_key)

            for content_item_data in content_items_data:
                display_callback(content_msg.format(**content_item_data))
                # display_callback(msg.format(**content_dict))

    return repo_list
def install_repository(galaxy_context,
                       requirement_to_install,
                       display_callback=None,
                       # TODO: error handling callback ?
                       ignore_errors=False,
                       no_deps=False,
                       force_overwrite=False):
    '''This installs a single package by finding it, fetching it, verifying it and installing it.

    Flow: find metadata for the requirement -> resolve it to a concrete
    repository spec -> check for an already-installed match -> fetch the
    archive -> (with force_overwrite) remove existing installs -> install.

    Returns the list of installed repositories on success, [] when install
    raised (and ignore_errors suppressed it), or None when the find/fetch
    steps failed or the collection was already installed without --force.
    '''
    display_callback = display_callback or display.display_callback

    # INITIAL state
    # dep_requirements = []

    # TODO: we could do all the downloads first, then install them. Likely
    #       less error prone mid 'transaction'
    log.debug('Processing %r', requirement_to_install)

    # NOTE(review): both names are bound to the same requirement_spec object;
    # repository_spec_to_install is later used for fetch while
    # found_repository_spec is used for install — confirm this aliasing is
    # intentional rather than a leftover.
    repository_spec_to_install = requirement_to_install.requirement_spec
    requirement_spec_to_install = requirement_to_install.requirement_spec

    # else trans to ... FIND_FETCHER?

    # TODO: check if already installed and move to approriate state

    log.debug('About to find() requested requirement_spec_to_install: %s',
              requirement_spec_to_install)

    display_callback('', level='info')
    display_callback('Installing spec: %s' % requirement_spec_to_install.label,
                     level='info')

    # We dont have anything that matches the RequirementSpec installed
    fetcher = fetch_factory.get(galaxy_context=galaxy_context,
                                requirement_spec=requirement_spec_to_install)

    # if we fail to get a fetcher here, then to... FIND_FETCHER_FAILURE ?
    # could also move some of the logic in fetcher_factory to be driven from here
    # and make the steps of mapping repository spec -> fetcher method part of the
    # state machine. That might be a good place to support multiple galaxy servers
    # or preferring local content to remote content, etc.

    # FIND state
    # See if we can find metadata and/or download the archive before we try to
    # remove an installed version...
    try:
        find_results = install.find(fetcher)
    except exceptions.GalaxyError as e:
        log.debug('requirement_to_install %s failed to be met: %s',
                  requirement_to_install, e)
        log.warning('Unable to find metadata for %s: %s',
                    requirement_spec_to_install.label, e)
        # FIXME: raise dep error exception?
        raise_without_ignore(ignore_errors, e)
        # continue
        return None

    # TODO: make sure repository_spec version is correct and set

    # TODO: state transition, if find_results -> INSTALL
    #       if not, then FIND_FAILED

    # TODO/FIXME: We give find() a RequirementSpec, but find_results should have enough
    #             info to create a concrete RepositorySpec

    # TODO: if we want client side content whitelist/blacklist, or pinned versions,
    #       or rules to only update within some semver range (ie, only 'patch' level),
    #       we could hook rule validation stuff here.

    # TODO: build a new repository_spec based on what we actually fetched to feed to
    #       install etc. The fetcher.fetch() could return a datastructure needed to build
    #       the new one instead of doing it in verify()
    found_repository_spec = install.repository_spec_from_find_results(
        find_results, requirement_spec_to_install)

    log.debug('found_repository_spec: %s', found_repository_spec)

    display_callback(' Found: %s (for spec %s)' %
                     (found_repository_spec, requirement_spec_to_install.label))

    # See if the found collection spec is already installed and either warn or 'force_overwrite'
    # to remove existing first.

    # cheap 'update' is to consider anything already installed that matches the request repo_spec
    # as 'installed' and let force override that.

    # potential_repository_spec is a repo spec for the install candidate we potentially found.
    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    # log.debug('Checking to see if %s is already installed', requirement_spec_to_install)
    log.debug('Checking to see if a collection named %s is already installed',
              found_repository_spec.label)

    repository_spec_match_filter = matchers.MatchRepositorySpecNamespaceName(
        [found_repository_spec])

    # already_installed_iter = irdb.by_requirement_spec(requirement_spec_to_install)
    already_installed_iter = irdb.select(
        repository_spec_match_filter=repository_spec_match_filter)
    already_installed = sorted(list(already_installed_iter))

    log.debug('already_installed: %s', already_installed)

    # TODO: The already installed check above verifies that nothing that matches the requirement spec is installed,
    #       but just because the name+version required wasn't installed, that doesn't mean that name at a different
    #       version isn't installed.
    #       To catch that, also need to check if the irdb by name to see if anything with that name is installed.

    # repository_spec_to_install = found_repository_spec
    log.debug('About to download repository requested by %s: %s',
              requirement_spec_to_install, repository_spec_to_install)

    if find_results['custom'].get('collection_is_deprecated', False):
        display_callback("The collection '%s' is deprecated." %
                         (found_repository_spec.label), level='warning')

    # FETCH state
    try:
        fetch_results = install.fetch(fetcher,
                                      repository_spec=repository_spec_to_install,
                                      find_results=find_results)
        log.debug('fetch_results: %s', fetch_results)
        # fetch_results will include a 'archive_path' pointing to where the artifact
        # was saved to locally.
    except exceptions.GalaxyError as e:
        # fetch error probably should just go to a FAILED state, at least until
        # we have to implement retries
        log.warning('Unable to fetch %s: %s', repository_spec_to_install.name, e)
        raise_without_ignore(ignore_errors, e)
        # continue
        # FIXME: raise ?
        return None

    # FIXME: seems like we want to resolve deps before trying install
    # We need the role (or other content) deps from meta before installing
    # though, and sometimes (for galaxy case) we dont know that until we've downloaded
    # the file, which we dont do until somewhere in the begin of content.install (fetch).
    # We can get that from the galaxy API though.
    #
    # FIXME: exc handling

    # Remove the already installed version, via --force
    # Without --force, the first already-installed match aborts the install.
    for already_installed_repository in already_installed:
        repo_label = '%s,%s' % (already_installed_repository.repository_spec.label,
                                already_installed_repository.repository_spec.version)

        # bail if we are not overwriting already installed content
        if not force_overwrite:
            display_callback(' %s is already installed at %s' %
                             (repo_label, already_installed_repository.path),
                             level='warning')
            log.debug('A collection providing %s was already installed. In %s',
                      requirement_spec_to_install, already_installed)
            return None

        display_callback(' Removing: %s (previously installed to %s)' %
                         (repo_label, already_installed_repository.path),
                         level='info')
        log.debug('Removing already_installed %s', already_installed_repository)
        repository.remove(already_installed_repository)

    installed_repositories = []

    try:
        installed_repositories = install.install(galaxy_context,
                                                 fetcher,
                                                 fetch_results,
                                                 repository_spec=found_repository_spec,
                                                 force_overwrite=force_overwrite,
                                                 display_callback=display_callback)
    except exceptions.GalaxyError as e:
        msg = "- %s was NOT installed successfully: %s "
        display_callback(msg % (found_repository_spec, e), level='warning')
        log.warning(msg, found_repository_spec.label, str(e))
        raise_without_ignore(ignore_errors, e)
        return []

    if not installed_repositories:
        log.warning("- %s was NOT installed successfully.",
                    found_repository_spec.label)
        raise_without_ignore(ignore_errors)

    return installed_repositories
def _list(galaxy_context,
          repository_spec_match_filter=None,
          list_content=False,
          output_format=None,
          display_callback=None):
    """List installed repositories in the requested output format.

    Builds repo_list ({'content_items': ..., 'installed_repository': ...} per
    repo) and dispatches on output_format: LOCKFILE / LOCKFILE_FREEZE render a
    lockfile, FULLY_QUALIFIED and the HUMAN default delegate to display
    helpers. Returns repo_list for API use.
    """
    # Default to human-readable output.
    output_format = output_format or OutputFormat.HUMAN

    log.debug('list_content: %s', list_content)

    # Default to matching every installed repository.
    all_repository_match = repository_spec_match_filter or matchers.MatchAll()

    # We search for installed repos to list, and then display all the content in those installed repos
    icidb = installed_content_item_db.InstalledContentItemDatabase(galaxy_context)
    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    # accumulate for api return
    repo_list = []

    for installed_repository in irdb.select(
            repository_spec_match_filter=all_repository_match):
        log.debug('installed_repo: %s', installed_repository)

        # Bucket content items by their type (role, module, plugin, ...).
        content_items = collections.defaultdict(list)

        # Find all the content items for this particular repo
        repository_match = matchers.MatchRepositorySpec(
            [installed_repository.repository_spec])

        for content_info in icidb.select(
                repository_spec_match_filter=repository_match):
            content_dict = content_info.copy()

            content_item_type = content_dict['content_data'].content_item_type

            # revisit this output format once we get some feedback
            content_dict.update({
                'type': content_item_type,
                'name': content_dict['content_data'].name,
                'is_plugin': content_dict['content_data'].is_plugin,
                # 'installed_repo_namespace': repo.namespace,
                # 'installed_repo_name': repo.name,
                # 'installed_repo_path': repo.path,
                # 'installed_repo_id': repo.repository_spec.label,
                'installed_repository': installed_repository,
            })

            content_items[content_item_type].append(content_dict)
            # content_item_list.append(content_dict)

        repo_dict = {'content_items': content_items,
                     'installed_repository': installed_repository}
        repo_list.append(repo_dict)

    # Render pass: pick the presentation based on output_format.
    if output_format == OutputFormat.LOCKFILE:
        output = format_as_lockfile(repo_list, lockfile_freeze=False)
        display_callback(output)
    elif output_format == OutputFormat.LOCKFILE_FREEZE:
        output = format_as_lockfile(repo_list, lockfile_freeze=True)
        display_callback(output)
    elif output_format == OutputFormat.FULLY_QUALIFIED:
        display_fully_qualified(repo_list, list_content, display_callback)
    else:
        display_for_human(repo_list, list_content, display_callback)

    return repo_list
def install_repository(
        galaxy_context,
        requirement_to_install,
        display_callback=None,
        # TODO: error handling callback ?
        ignore_errors=False,
        no_deps=False,
        force_overwrite=False):
    '''This installs a single package by finding it, fetching it, verifying it and installing it.

    Flow: bail early if an installed match exists -> find metadata ->
    fetch the archive -> refresh the spec from what was actually fetched ->
    install. Returns the list of installed repositories on success, [] when
    the install step raised (and ignore_errors suppressed it), or None when
    already installed or when find/fetch failed.
    '''
    display_callback = display_callback or display.display_callback

    # INITIAL state
    # dep_requirements = []

    # TODO: we could do all the downloads first, then install them. Likely
    #       less error prone mid 'transaction'
    log.debug('Processing %s', requirement_to_install)

    repository_spec_to_install = requirement_to_install.requirement_spec

    # else trans to ... FIND_FETCHER?

    # TODO: check if already installed and move to approriate state

    log.debug('About to find() requested repository_spec_to_install: %s',
              repository_spec_to_install)

    # potential_repository_spec is a repo spec for the install candidate we potentially found.
    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    log.debug('Checking to see if %s is already installed',
              repository_spec_to_install)

    already_installed_iter = irdb.by_repository_spec(repository_spec_to_install)
    already_installed = sorted(list(already_installed_iter))

    log.debug('already_installed: %s', already_installed)

    # Already installed and no --force path here: warn and stop.
    if already_installed:
        for already_installed_repository in already_installed:
            display_callback(
                '%s is already installed at %s' %
                (already_installed_repository.repository_spec.label,
                 already_installed_repository.path),
                level='warning')
        log.debug('Stuff %s was already installed. In %s',
                  repository_spec_to_install, already_installed)

        return None

    fetcher = fetch_factory.get(galaxy_context=galaxy_context,
                                repository_spec=repository_spec_to_install)

    # if we fail to get a fetcher here, then to... FIND_FETCHER_FAILURE ?
    # could also move some of the logic in fetcher_factory to be driven from here
    # and make the steps of mapping repository spec -> fetcher method part of the
    # state machine. That might be a good place to support multiple galaxy servers
    # or preferring local content to remote content, etc.

    # FIND state
    # See if we can find metadata and/or download the archive before we try to
    # remove an installed version...
    try:
        find_results = install.find(fetcher)
    except exceptions.GalaxyError as e:
        log.warning('Unable to find metadata for %s: %s',
                    repository_spec_to_install.label, e)
        # FIXME: raise dep error exception?
        raise_without_ignore(ignore_errors, e)
        # continue
        return None

    # TODO: make sure repository_spec version is correct and set

    # TODO: state transition, if find_results -> INSTALL
    #       if not, then FIND_FAILED

    log.debug('About to download requested repository_spec_to_install: %s',
              repository_spec_to_install)

    # FETCH state
    try:
        fetch_results = install.fetch(fetcher,
                                      repository_spec=repository_spec_to_install,
                                      find_results=find_results)
        log.debug('fetch_results: %s', fetch_results)
        # fetch_results will include a 'archive_path' pointing to where the artifact
        # was saved to locally.
    except exceptions.GalaxyError as e:
        # fetch error probably should just go to a FAILED state, at least until
        # we have to implement retries
        log.warning('Unable to fetch %s: %s', repository_spec_to_install.name, e)
        raise_without_ignore(ignore_errors, e)
        # continue
        # FIXME: raise ?
        return None

    # TODO: if we want client side content whitelist/blacklist, or pinned versions,
    #       or rules to only update within some semver range (ie, only 'patch' level),
    #       we could hook rule validation stuff here.

    # TODO: build a new repository_spec based on what we actually fetched to feed to
    #       install etc. The fetcher.fetch() could return a datastructure needed to build
    #       the new one instead of doing it in verify()
    fetched_repository_spec = install.update_repository_spec(
        fetch_results, repository_spec_to_install)

    log.debug('fetched_repository_spec: %s', fetched_repository_spec)

    # FIXME: seems like we want to resolve deps before trying install
    # We need the role (or other content) deps from meta before installing
    # though, and sometimes (for galaxy case) we dont know that until we've downloaded
    # the file, which we dont do until somewhere in the begin of content.install (fetch).
    # We can get that from the galaxy API though.
    #
    # FIXME: exc handling

    installed_repositories = []

    try:
        installed_repositories = install.install(
            galaxy_context,
            fetcher,
            fetch_results,
            repository_spec=fetched_repository_spec,
            force_overwrite=force_overwrite,
            display_callback=display_callback)
    except exceptions.GalaxyError as e:
        msg = "- %s was NOT installed successfully: %s "
        display_callback(msg % (fetched_repository_spec.label, e),
                         level='warning')
        log.warning(msg, fetched_repository_spec.label, str(e))
        raise_without_ignore(ignore_errors, e)
        return []

    if not installed_repositories:
        log.warning("- %s was NOT installed successfully.",
                    fetched_repository_spec.label)
        raise_without_ignore(ignore_errors)

    return installed_repositories
def installed_content_iterator(galaxy_context,
                               namespace_match_filter=None,
                               repository_match_filter=None,
                               content_match_filter=None,
                               content_type=None):
    """Yield an info dict for each installed content item passing all filters.

    Each yielded dict has keys 'path', 'content_data' (an InstalledContent),
    'installed_repository', and 'version'. All filters default to
    matchers.MatchAll(); content_type defaults to 'roles'.
    """
    # match_all works for all types
    namespace_match_filter = namespace_match_filter or matchers.MatchAll()
    repository_match_filter = repository_match_filter or matchers.MatchAll()
    content_match_filter = content_match_filter or matchers.MatchAll()

    content_type = content_type or 'roles'

    installed_repo_db = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    # for namespace_full_path in namespace_paths_iterator:
    for installed_repository in installed_repo_db.select(
            namespace_match_filter=namespace_match_filter,
            repository_match_filter=repository_match_filter):
        log.debug('Found repo "%s" at %s',
                  installed_repository.content_spec.label,
                  installed_repository.path)
        installed_repository_full_path = installed_repository.path

        # Re-apply the repository filter defensively before walking contents.
        if not repository_match_filter(installed_repository):
            log.debug('The repo_match_filter %s failed to match for %s',
                      repository_match_filter, installed_repository)
            continue

        # since we will need a different iterator for each specific type of content, consult
        # a map of content_type->iterator_method however there is only a 'roles' iterator for now
        installed_repository_content_iterator_method = \
            installed_repository_content_iterator_map.get(content_type)

        # Unknown content_type: nothing to iterate for this repo.
        if installed_repository_content_iterator_method is None:
            continue

        installed_repository_content_iterator = \
            installed_repository_content_iterator_method(installed_repository_full_path)

        log.debug('Looking for %s in repo at %s',
                  content_type, installed_repository_full_path)

        for installed_content_full_path in installed_repository_content_iterator:
            repo_namespace = installed_repository.content_spec.namespace
            path_file = os.path.basename(installed_content_full_path)

            gr = InstalledContent(galaxy_context,
                                  path_file,
                                  namespace=repo_namespace,
                                  path=installed_content_full_path)

            log.debug('Found %s "%s" at %s', gr.content_type, gr.name,
                      installed_content_full_path)
            log.debug('gr.metadata: %s', gr.metadata)

            version = None
            # TODO: should probably sep the generator for getting the InstalledContent objects from the generator that
            #       creates the content_info returns instead of intertwining them
            # NOTE(review): if gr.metadata is truthy but gr.install_info is
            # None, the attribute access below raises AttributeError — confirm
            # install_info is always present when metadata is.
            if gr.metadata or gr.install_info:
                version = gr.install_info.version or "(unknown version)"

            if not content_match_filter(gr):
                log.debug('%s was not matched by content_match_filter: %s',
                          gr, content_match_filter)
                continue

            # this is sort of the 'join' of installed_repository and installed_content
            content_info = {'path': path_file,
                            'content_data': gr,
                            'installed_repository': installed_repository,
                            'version': version,
                            }

            yield content_info
def info_content_specs(galaxy_context,
                       api,
                       content_spec_strings,
                       display_callback=None,
                       offline=None):
    """Display info for each requested content spec.

    For every spec string: optionally look up and display the remote repo
    (when online), then display any locally installed repos whose
    ContentSpec matches, and finally report labels that matched nothing.
    """
    online = not offline

    display_callback = display_callback or display.display_callback

    # log.debug('base_content_path: %s', base_content_path)
    # content_path = galaxy_context.content_path

    offline = offline or False

    # icdb = installed_content_db.InstalledContentDatabase(galaxy_context)
    irdb = installed_repository_db.InstalledRepositoryDatabase(galaxy_context)

    # labels_to_match holds (label, ContentSpec) pairs; all_labels_to_match
    # holds just the label strings for the unmatched report at the end.
    labels_to_match = []
    all_labels_to_match = []

    for content_spec_string in content_spec_strings:
        galaxy_namespace, repo_name, content_name = parse_content_name(content_spec_string)

        log.debug('showing info for content spec: %s', content_spec_string)

        # Fall back to the content name when no repo name was parsed.
        repo_name = repo_name or content_name
        # log.debug('repo_name2=%s', repo_name)

        if online:
            # remote_data = api.lookup_content_by_name(galaxy_namespace, repo_name, content_name)
            remote_data = api.lookup_repo_by_name(galaxy_namespace, repo_name)
            if remote_data:
                display_callback(_repr_remote_repo(remote_data))

        label_to_match = '%s.%s' % (galaxy_namespace, repo_name)
        all_labels_to_match.append(label_to_match)

        labels_to_match.append((label_to_match,
                                ContentSpec(namespace=galaxy_namespace,
                                            name=repo_name)))

    # matcher = matchers.MatchNamespacesOrLabels([label_and_spec[0] for label_and_spec in labels_to_match])
    matcher = matchers.MatchContentSpec([label_and_spec[1] for label_and_spec in labels_to_match])

    matched_repos = irdb.select(repository_match_filter=matcher)

    # matched_contents = icdb.select(repository_match_filter=matcher)
    # log.debug('matched_contents: %s', list(matched_contents))
    # content_path = os.path.join(content_path, '%s.%s' % (galaxy_namespace, repo_name))

    remote_data = False

    matched_labels = []
    for matched_repo in matched_repos:
        display_callback(_repr_installed_repo(matched_repo))
        matched_labels.append(matched_repo.content_spec.label)

    # Anything requested but not matched locally is reported at the end.
    unmatched_labels = set(all_labels_to_match).difference(set(matched_labels))

    if unmatched_labels:
        display_callback('These repos were not found:')
        for unmatched_label in sorted(unmatched_labels):
            display_callback(_repr_unmatched_label(unmatched_label))

    return