def get_plugins(package_name, paths=None):
    """Find packages that are plugins of the given package.

    Args:
        package_name (str): Name of the package.
        paths (list of str): Paths to search for packages, defaults to
            `config.packages_path`.

    Returns:
        list of str: The packages that are plugins of the given package.
    """
    pkg = get_latest_package(package_name, paths=paths, error=True)
    if not pkg.has_plugins:
        return []

    it = iter_package_families(paths)
    package_names = set(x.name for x in it)

    bar = ProgressBar("Searching", len(package_names))

    plugin_pkgs = []
    for package_name_ in package_names:
        bar.next()
        if package_name_ == package_name:
            continue  # not a plugin of itself

        # note: no error=True here, so a family with no visible packages
        # yields None - skip it rather than crash on attribute access
        plugin_pkg = get_latest_package(package_name_, paths=paths)
        if plugin_pkg is None or not plugin_pkg.plugin_for:
            continue

        for plugin_for in plugin_pkg.plugin_for:
            if plugin_for == pkg.name:
                plugin_pkgs.append(package_name_)

    bar.finish()
    return plugin_pkgs
def test_intersects_resolve(self):
    """Test intersects with resolve object"""
    # build two fake resolved packages in an in-memory repository
    pkg_data = {
        "foo": {
            "1": {"name": "foo", "version": "1"},
        },
        "maya": {
            "2020.1": {"name": "maya", "version": "2020.1"},
        },
    }
    mem_path = "memory@%s" % hex(id(pkg_data))
    repo = package_repository_manager.get_repository(mem_path)
    repo.data = pkg_data

    # collect every variant of every package in the memory repo
    variants = []
    for family in iter_package_families(paths=[mem_path]):
        for package in family.iter_packages():
            variants.extend(package.iter_variants())

    resolve = VariantsBinding(variants)

    # exact-version and ranged requests against the resolved variants
    self.assertTrue(intersects(resolve.foo, "1"))
    self.assertFalse(intersects(resolve.foo, "0"))
    self.assertTrue(intersects(resolve.maya, "2019+"))
    self.assertFalse(intersects(resolve.maya, "<=2019"))
def test_pkg_iteration(self):
    """package iteration."""
    # union of every family's packages must equal the known full set
    collected = set()
    for fam in iter_package_families():
        collected |= _to_qnames(iter_packages(fam.name))
    self.assertEqual(collected, ALL_PACKAGES)

    self.assertEqual(_to_qnames(iter_packages('nada')), set(['nada']))

    self.assertEqual(
        _to_qnames(iter_packages('python')),
        set(['python-2.5.2', 'python-2.6.0', 'python-2.6.8',
             'python-2.7.0']))

    self.assertEqual(_to_qnames(iter_packages('pydad', "<3")),
                     set(['pydad-1', 'pydad-2']))

    # each package's parent family is consistent, and the family
    # yields that same package back when iterated
    for fam_name in ALL_FAMILIES:
        for package in iter_packages(fam_name):
            family = package.parent
            self.assertEqual(family.name, fam_name)
            self.assertTrue(package in family.iter_packages())
def profiles():
    """Return list of profiles

    This function is called asynchronously, and is suitable for making
    complex filesystem or database queries. Can also be a variable of
    type tuple or list

    Returns:
        list of str: Names of profile package families the current user
            may access.
    """
    # view: super set of profile `views` attributes
    # REZ_VIEW=show.ongoing;dev.pipeline;

    # User name based filtering
    # (TODO) But could be department name or anything else,
    #   and implementing roles as packages.
    user_roles = {getpass.getuser().lower()}

    accessible_profiles = []
    for pkg_family in iter_package_families(paths=__mongozark.profiles):
        # a family may exist with no visible packages; bare next() would
        # raise StopIteration in that case, so default to None and skip
        latest_version = next(pkg_family.iter_packages(), None)
        if latest_version is None:
            continue

        # profile is accessible if any of its declared roles matches
        required_roles = {r.lower()
                          for r in getattr(latest_version, "roles", [])}
        if required_roles & user_roles:
            accessible_profiles.append(pkg_family.name)

    return accessible_profiles
def reload(self):
    """Rebuild the table rows from all known package families."""
    self.logger.info('Reloading..')
    package_repository_manager.clear_caches()

    # de-duplicate family names, then order case-insensitively
    unique_names = {fam.name for fam in packages.iter_package_families()}
    family_names = sorted(unique_names, key=str.lower)

    self.setRowCount(len(family_names))
    for row, family_name in enumerate(family_names):
        self._make_row(row, family_name)

    self.logger.info(f'{len(family_names)} packages collected.')
def load_packages():
    """Load all packages so loading time doesn't impact solve times
    """
    print("Warming package cache...")

    families = list(iter_package_families(paths=[pkg_repo_dir]))
    total = len(families)

    for index, family in enumerate(families, 1):
        sys.stdout.write("\n[%d/%d]" % (index, total))

        for package in family.iter_packages():
            package.validate_data()

            # just ensures variant objects are created and cached
            for _ in package.iter_variants():
                pass

            sys.stdout.write('.')
            sys.stdout.flush()

    print('')
def iter_families(self, location=None):
    """Iter package families

    Note that same family may get yielded multiple times since they
    exists in multiple locations, e.g. from 'install' and 'release'.

    :param location: One single package path to look for. Loop over all
        paths (`packages_path`) if not given.
    :type location: str or None
    :return: An iterator that yields `PkgFamily` objects
    :rtype: collections.Iterator[PkgFamily]
    """
    search_paths = [location] if location else self._paths

    for family in iter_package_families(paths=search_paths):
        # prefix location with repository type so repositories other
        # than 'filesystem' (e.g. 'memory') are distinguishable
        repo_location = "{}@{}".format(
            family.repository.name(), family.resource.location)

        yield PkgFamily(
            name=family.name,
            location=repo_location,
        )
def get_reverse_dependency_tree(package_name, depth=None, paths=None,
                                build_requires=False,
                                private_build_requires=False):
    """Find packages that depend on the given package.

    This is a reverse dependency lookup. A tree is constructed, showing what
    packages depend on the given package, with an optional depth limit. A
    resolve does not occur. Only the latest version of each package is used,
    and requirements from all variants of that package are used.

    Args:
        package_name (str): Name of the package depended on.
        depth (int): Tree depth limit, unlimited if None.
        paths (list of str): paths to search for packages, defaults to
            `config.packages_path`.
        build_requires (bool): If True, includes packages' build_requires.
        private_build_requires (bool): If True, include `package_name`'s
            private_build_requires.

    Returns:
        A 2-tuple:
        - (list of list of str): Lists of package names, where each list is a
          single depth in the tree. The first list is always [`package_name`].
        - `pygraph.digraph` object, where nodes are package names, and
          `package_name` is always the leaf node.
    """
    # depth 0 of the tree is always the target package itself
    pkgs_list = [[package_name]]
    g = digraph()
    g.add_node(package_name)

    # build reverse lookup
    it = iter_package_families(paths)
    package_names = set(x.name for x in it)
    if package_name not in package_names:
        raise PackageFamilyNotFoundError("No such package family %r"
                                         % package_name)

    # depth == 0 means "no traversal at all" - just the root
    if depth == 0:
        return pkgs_list, g

    bar = ProgressBar("Searching", len(package_names))
    # lookup[required_name] -> set of package names that require it
    lookup = defaultdict(set)

    for i, package_name_ in enumerate(package_names):
        it = iter_packages(name=package_name_, paths=paths)
        packages = list(it)
        if not packages:
            # empty family; note the progress bar is not advanced here
            continue

        # only the latest version of each family is considered
        pkg = max(packages, key=lambda x: x.version)

        # gather requirements across every variant of the package;
        # private_build_requires is only applied to `package_name` itself
        requires = []
        for variant in pkg.iter_variants():
            pbr = (private_build_requires and pkg.name == package_name)
            requires += variant.get_requires(
                build_requires=build_requires,
                private_build_requires=pbr)

        # conflict requests (e.g. '!foo') are not dependencies
        for req in requires:
            if not req.conflict:
                lookup[req.name].add(package_name_)

        bar.next()

    bar.finish()

    # perform traversal
    n = 0
    consumed = set([package_name])   # nodes already placed in the tree
    working_set = set([package_name])

    # shared visual attributes for every non-root graph node
    node_color = "#F6F6F6"
    node_fontsize = 10
    node_attrs = [("fillcolor", node_color),
                  ("style", "filled"),
                  ("fontsize", node_fontsize)]

    # breadth-first walk up the reverse-dependency lookup, one tree
    # level per iteration, bounded by `depth` when given
    while working_set and (depth is None or n < depth):
        working_set_ = set()

        for child in working_set:
            # packages that require `child` and aren't already in the tree
            parents = lookup[child] - consumed
            working_set_.update(parents)
            consumed.update(parents)

            for parent in parents:
                g.add_node(parent, attrs=node_attrs)
                # edge direction: dependent -> dependency
                g.add_edge((parent, child))

        if working_set_:
            pkgs_list.append(sorted(list(working_set_)))

        working_set = working_set_
        n += 1

    return pkgs_list, g
def search(self, resources_request=None):
    """Search for resources.

    Args:
        resources_request (str): Resource to search, glob-style patterns
            are supported. If None, returns all matching resource types.

    Returns:
        2-tuple:
        - str: resource type (family, package, variant);
        - List of `ResourceSearchResult`: Matching resources. Will be in
          alphabetical order if families, and version ascending for
          packages or variants.
    """
    # Find matching package families
    name_pattern, version_range = self._parse_request(resources_request)

    family_names = set(
        x.name for x in iter_package_families(paths=self.package_paths)
        if fnmatch.fnmatch(x.name, name_pattern))

    family_names = sorted(family_names)

    # determine what type of resource we're searching for; an explicit
    # version range or a single-family match implies a package search
    if self.resource_type:
        resource_type = self.resource_type
    elif version_range or len(family_names) == 1:
        resource_type = "package"
    else:
        resource_type = "family"

    if not family_names:
        return resource_type, []

    # return list of family names (validation is n/a in this case)
    if resource_type == "family":
        results = [ResourceSearchResult(x, "family")
                   for x in family_names]
        return "family", results

    results = []

    # iterate over packages/variants
    for name in family_names:
        it = iter_packages(name, version_range, paths=self.package_paths)
        packages = sorted(it, key=lambda x: x.version)

        # when only the latest version is wanted, keep the tail of the
        # version-ascending sort
        if self.latest and packages:
            packages = [packages[-1]]

        for package in packages:
            # validate and check time (accessing timestamp may cause
            # validation fail)
            try:
                if package.timestamp:
                    if self.after_time and package.timestamp < self.after_time:
                        continue
                    if self.before_time and package.timestamp >= self.before_time:
                        continue

                if self.validate:
                    package.validate_data()

            except ResourceContentError as e:
                # in package searches a broken package is still reported,
                # carrying the error text; otherwise it is skipped
                if resource_type == "package":
                    result = ResourceSearchResult(package, "package", str(e))
                    results.append(result)
                continue

            if resource_type == "package":
                result = ResourceSearchResult(package, "package")
                results.append(result)
                continue

            # iterate variants
            try:
                for variant in package.iter_variants():
                    if self.validate:
                        try:
                            variant.validate_data()
                        except ResourceContentError as e:
                            # report the broken variant with its error
                            result = ResourceSearchResult(
                                variant, "variant", str(e))
                            results.append(result)
                            continue

                    result = ResourceSearchResult(variant, "variant")
                    results.append(result)

            except ResourceContentError:
                # this may happen if 'variants' in package is malformed
                continue

    return resource_type, results
def test_fam_iteration(self):
    """package family iteration."""
    # every known family must be yielded, and nothing else
    self.assertEqual(_to_names(iter_package_families()), ALL_FAMILIES)
def command(opts, parser, extra_arg_groups=None):
    """Entry point for the memcached CLI tool.

    Dispatches on mutually-exclusive option flags: --poll, --flush,
    --warm, --reset-stats, --stats; with no flag set, prints a stats
    summary table for each memcached server.
    """
    from rez.config import config
    from rez.packages import iter_package_families, iter_packages
    from rez.utils.yaml import dump_yaml
    from rez.utils.memcached import Client
    from rez.utils.formatting import columnise, readable_time_duration, \
        readable_memory_size
    import sys

    memcache_client = Client(servers=config.memcached_uri,
                             debug=config.debug_memcache)

    # Client is falsy when no servers are configured
    if not memcache_client:
        print("memcaching is not enabled.", file=sys.stderr)
        sys.exit(1)

    if opts.poll:
        poll(memcache_client, opts.interval)
        return

    if opts.flush:
        memcache_client.flush(hard=True)
        print("memcached servers are flushed.")
        return

    if opts.warm:
        # touch every non-local package definition so it lands in memcache
        seen = set()
        paths = config.nonlocal_packages_path

        for family in iter_package_families(paths=paths):
            # same family may appear in several paths; warm it only once
            if family.name in seen:
                continue

            for package in iter_packages(family.name, paths=paths):
                if opts.verbose:
                    print("warming: %s" % package.qualified_name)

                # forces package definition load, which puts in memcache
                _ = package.data  # noqa

            seen.add(family.name)

        print("memcached servers are warmed.")
        return

    if opts.reset_stats:
        memcache_client.reset_stats()
        print("memcached servers are stat reset.")
        return

    def _fail():
        # common exit path when servers do not answer a stats query
        print("memcached servers are not responding.", file=sys.stderr)
        sys.exit(1)

    stats = memcache_client.get_stats()

    if opts.stats:
        # raw stats dump as yaml
        if stats:
            txt = dump_yaml(stats)
            print(txt)
        else:
            _fail()
        return

    # print stats summary
    if not stats:
        _fail()

    rows = [["CACHE SERVER", "UPTIME", "HITS", "MISSES", "HIT RATIO",
             "MEMORY", "USED"],
            ["------------", "------", "----", "------", "---------",
             "------", "----"]]

    for server_id, stats_dict in stats:
        # server_id is '<uri> <details...>'; keep just the uri
        server_uri = server_id.split()[0]

        uptime = int(stats_dict.get("uptime", 0))
        hits = int(stats_dict.get("get_hits", 0))
        misses = int(stats_dict.get("get_misses", 0))
        memory = int(stats_dict.get("limit_maxbytes", 0))
        used = int(stats_dict.get("bytes", 0))

        # max(..., 1) guards the zero-denominator case
        hit_ratio = float(hits) / max(hits + misses, 1)
        hit_percent = int(hit_ratio * 100.0)
        used_ratio = float(used) / max(memory, 1)
        used_percent = int(used_ratio * 100.0)

        row = (server_uri,
               readable_time_duration(uptime),
               str(hits),
               str(misses),
               "%d%%" % hit_percent,
               readable_memory_size(memory),
               "%s (%d%%)" % (readable_memory_size(used), used_percent))

        rows.append(row)

    print('\n'.join(columnise(rows)))