def get_plugins(package_name, paths=None):
    """Find packages that are plugins of the given package.

    Args:
        package_name (str): Name of the package.
        paths (list of str): Paths to search for packages, defaults to
            `config.packages_path`.

    Returns:
        list of str: The packages that are plugins of the given package.
    """
    pkg = get_latest_package(package_name, paths=paths, error=True)
    if not pkg.has_plugins:
        return []

    it = iter_package_families(paths)
    package_names = set(x.name for x in it)

    bar = ProgressBar("Searching", len(package_names))
    plugin_pkgs = []

    for package_name_ in package_names:
        bar.next()
        if package_name_ == package_name:
            continue  # not a plugin of itself

        plugin_pkg = get_latest_package(package_name_, paths=paths)

        # BUGFIX: without error=True, get_latest_package can return None
        # (e.g. an empty/ignored family); guard before attribute access.
        if plugin_pkg is None or not plugin_pkg.plugin_for:
            continue
        for plugin_for in plugin_pkg.plugin_for:
            if plugin_for == pkg.name:
                plugin_pkgs.append(package_name_)

    bar.finish()
    return plugin_pkgs
def test_2(self):
    """package iteration."""
    # the union of packages across every discoverable family must equal
    # the full fixture set
    seen = set()
    for family in iter_package_families():
        seen.update(_to_qnames(iter_packages(family.name)))
    self.assertEqual(seen, ALL_PACKAGES)

    # spot-check individual families
    self.assertEqual(_to_qnames(iter_packages('nada')), set(['nada']))
    self.assertEqual(
        _to_qnames(iter_packages('python')),
        set(['python-2.5.2', 'python-2.6.0',
             'python-2.6.8', 'python-2.7.0']))

    # version-range filtering
    self.assertEqual(
        _to_qnames(iter_packages('pydad', "<3")),
        set(['pydad-1', 'pydad-2']))

    # each package's parent family must round-trip back to the package
    for fam_name in ALL_FAMILIES:
        for package in iter_packages(fam_name):
            parent = package.parent
            self.assertEqual(parent.name, fam_name)
            self.assertTrue(package in parent.iter_packages())
def test_2(self):
    """package iteration."""
    # the union of packages across every discoverable family must equal
    # the full fixture set
    all_packages = set()
    all_fams = iter_package_families()
    for fam in all_fams:
        packages = _to_qnames(iter_packages(fam.name))
        all_packages.update(packages)
    self.assertEqual(all_packages, ALL_PACKAGES)

    # family whose only package carries no version suffix
    res = _to_qnames(iter_packages('nada'))
    self.assertEqual(res, set(['nada']))

    # family with multiple versions - all must be returned
    res = _to_qnames(iter_packages('python'))
    self.assertEqual(res, set(['python-2.5.2', 'python-2.6.0',
                               'python-2.6.8', 'python-2.7.0']))

    # second arg is a version range - only matching versions returned
    res = _to_qnames(iter_packages('pydad', "<3"))
    self.assertEqual(res, set(['pydad-1', 'pydad-2']))

    # each package's parent family must round-trip back to the package
    for fam_name in ALL_FAMILIES:
        for package in iter_packages(fam_name):
            family = package.parent
            self.assertEqual(family.name, fam_name)
            it = family.iter_packages()
            self.assertTrue(package in it)
def iter_latest_packages(paths=None, packages=None):
    """Get one package from every Rez package family.

    Args:
        paths (list[str], optional): The directories to search for Rez
            package families. Default: :attr:`rez.config.config.packages_path`.
        packages (set[str], optional): If this parameter is given a value,
            any Rez package families that are discovered in `paths` will be
            filtered by the list of Rez package family names in this
            parameter.

    Yields:
        :class:`rez.packages_.Package`: The latest version of every package
        in the current environment.
    """
    names = sorted(
        set(family.name
            for family in packages_.iter_package_families(paths=paths)))

    if packages:
        names = [name for name in names if name in packages]

    for name in names:
        package = packages_.get_latest_package(name, paths=paths)

        if not package:
            # BUGFIX: the two implicitly-concatenated string literals were
            # missing a separating space, producing "...returned
            # None.The package...". Lazy %-args keep formatting off the
            # hot path when the warning is suppressed.
            _LOGGER.warning(
                'Package family "%s" was found but `get_latest_package` '
                "returned None. The package is probably damaged.",
                name,
            )
            continue

        yield package
def test_1(self):
    """package family iteration."""
    # every discoverable family name must match the fixture list exactly
    family_names = _to_names(iter_package_families())
    self.assertEqual(family_names, ALL_FAMILIES)
def command(opts, parser, extra_arg_groups=None):
    """Entry point for the memcached management subcommand.

    Dispatches on the mutually-exclusive option flags (poll / flush / warm /
    reset-stats / stats); with no flag, prints a per-server stats summary
    table.
    """
    from rez.config import config
    from rez.packages_ import iter_package_families, iter_packages
    from rez.utils.yaml import dump_yaml
    from rez.utils.memcached import Client
    from rez.utils.formatting import columnise, readable_time_duration, \
        readable_memory_size
    import sys

    client = Client(servers=config.memcached_uri,
                    debug=config.debug_memcache)
    if not client:
        print("memcaching is not enabled.", file=sys.stderr)
        sys.exit(1)

    if opts.poll:
        poll(client, opts.interval)
        return

    if opts.flush:
        client.flush(hard=True)
        print("memcached servers are flushed.")
        return

    if opts.warm:
        warmed = set()
        paths = config.nonlocal_packages_path
        for family in iter_package_families(paths=paths):
            if family.name in warmed:
                continue
            for package in iter_packages(family.name, paths=paths):
                if opts.verbose:
                    print("warming: %s" % package.qualified_name)
                # forces package definition load, which puts in memcache
                _ = package.data  # noqa
            warmed.add(family.name)
        print("memcached servers are warmed.")
        return

    if opts.reset_stats:
        client.reset_stats()
        print("memcached servers are stat reset.")
        return

    def _fail():
        print("memcached servers are not responding.", file=sys.stderr)
        sys.exit(1)

    stats = client.get_stats()

    if opts.stats:
        if not stats:
            _fail()
        print(dump_yaml(stats))
        return

    # print stats summary
    if not stats:
        _fail()

    rows = [
        ["CACHE SERVER", "UPTIME", "HITS", "MISSES", "HIT RATIO",
         "MEMORY", "USED"],
        ["------------", "------", "----", "------", "---------",
         "------", "----"],
    ]

    for server_id, server_stats in stats:
        uri = server_id.split()[0]
        uptime = int(server_stats.get("uptime", 0))
        hits = int(server_stats.get("get_hits", 0))
        misses = int(server_stats.get("get_misses", 0))
        memory = int(server_stats.get("limit_maxbytes", 0))
        used = int(server_stats.get("bytes", 0))

        # guard against division by zero on fresh/empty servers
        hit_percent = int(float(hits) / max(hits + misses, 1) * 100.0)
        used_percent = int(float(used) / max(memory, 1) * 100.0)

        rows.append((uri,
                     readable_time_duration(uptime),
                     str(hits),
                     str(misses),
                     "%d%%" % hit_percent,
                     readable_memory_size(memory),
                     "%s (%d%%)" % (readable_memory_size(used),
                                    used_percent)))

    print('\n'.join(columnise(rows)))
def get_reverse_dependency_tree(package_name, depth=None, paths=None,
                                build_requires=False,
                                private_build_requires=False):
    """Find packages that depend on the given package.

    This is a reverse dependency lookup. A tree is constructed, showing what
    packages depend on the given package, with an optional depth limit. A
    resolve does not occur. Only the latest version of each package is used,
    and requirements from all variants of that package are used.

    Args:
        package_name (str): Name of the package depended on.
        depth (int): Tree depth limit, unlimited if None.
        paths (list of str): paths to search for packages, defaults to
            `config.packages_path`.
        build_requires (bool): If True, includes packages' build_requires.
        private_build_requires (bool): If True, include `package_name`'s
            private_build_requires.

    Returns:
        A 2-tuple:
        - (list of list of str): Lists of package names, where each list is a
          single depth in the tree. The first list is always [`package_name`].
        - `pygraph.digraph` object, where nodes are package names, and
          `package_name` is always the leaf node.
    """
    pkgs_list = [[package_name]]
    g = digraph()
    g.add_node(package_name)

    # build reverse lookup
    it = iter_package_families(paths)
    package_names = set(x.name for x in it)
    if package_name not in package_names:
        raise PackageFamilyNotFoundError(
            "No such package family %r" % package_name)

    if depth == 0:
        return pkgs_list, g

    bar = ProgressBar("Searching", len(package_names))
    lookup = defaultdict(set)

    for package_name_ in package_names:
        bar.next()
        packages = list(iter_packages(name=package_name_, paths=paths))
        if not packages:
            continue

        pkg = max(packages, key=lambda x: x.version)
        requires = set(pkg.requires or [])
        for req_list in (pkg.variants or []):
            requires.update(req_list)

        # BUGFIX: build_requires / private_build_requires were accepted and
        # documented, but previously never consulted in the body.
        # NOTE(review): assumes the package object exposes `build_requires` /
        # `private_build_requires` request lists - confirm against the
        # package schema.
        if build_requires:
            requires.update(pkg.build_requires or [])
        # per the docstring, only `package_name`'s own private build
        # requirements participate in the lookup
        if private_build_requires and pkg.name == package_name:
            requires.update(pkg.private_build_requires or [])

        for req in requires:
            if not req.conflict:
                lookup[req.name].add(package_name_)

    # perform traversal
    bar.finish()
    n = 0
    consumed = set([package_name])
    working_set = set([package_name])

    node_color = "#F6F6F6"
    node_fontsize = 10
    node_attrs = [("fillcolor", node_color),
                  ("style", "filled"),
                  ("fontsize", node_fontsize)]

    while working_set and (depth is None or n < depth):
        working_set_ = set()
        for child in working_set:
            # parents are packages that require `child` and have not yet
            # appeared at a shallower depth
            parents = lookup[child] - consumed
            working_set_.update(parents)
            consumed.update(parents)
            for parent in parents:
                g.add_node(parent, attrs=node_attrs)
                g.add_edge((parent, child))

        if working_set_:
            pkgs_list.append(sorted(list(working_set_)))
        working_set = working_set_
        n += 1

    return pkgs_list, g
def search(self, resources_request=None):
    """Search for resources.

    Args:
        resources_request (str): Resource to search, glob-style patterns
            are supported. If None, returns all matching resource types.

    Returns:
        2-tuple:
        - str: resource type (family, package, variant);
        - List of `ResourceSearchResult`: Matching resources. Will be in
          alphabetical order if families, and version ascending for
          packages or variants.
    """
    name_pattern, version_range = self._parse_request(resources_request)

    # find matching package families
    matching = set()
    for fam in iter_package_families(paths=self.package_paths):
        if fnmatch.fnmatch(fam.name, name_pattern):
            matching.add(fam.name)
    family_names = sorted(matching)

    # determine what type of resource we're searching for
    if self.resource_type:
        resource_type = self.resource_type
    elif version_range or len(family_names) == 1:
        resource_type = "package"
    else:
        resource_type = "family"

    if not family_names:
        return resource_type, []

    # return list of family names (validation is n/a in this case)
    if resource_type == "family":
        return "family", [ResourceSearchResult(name, "family")
                          for name in family_names]

    results = []

    # iterate over packages/variants
    for family_name in family_names:
        packages = sorted(
            iter_packages(family_name, version_range,
                          paths=self.package_paths),
            key=lambda pkg: pkg.version)
        if self.latest and packages:
            packages = packages[-1:]

        for package in packages:
            # validate and check time (accessing timestamp may cause
            # validation fail)
            try:
                timestamp = package.timestamp
                if timestamp:
                    if self.after_time and timestamp < self.after_time:
                        continue
                    if self.before_time and timestamp >= self.before_time:
                        continue
                if self.validate:
                    package.validate_data()
            except ResourceContentError as exc:
                if resource_type == "package":
                    results.append(
                        ResourceSearchResult(package, "package", str(exc)))
                continue

            if resource_type == "package":
                results.append(ResourceSearchResult(package, "package"))
                continue

            # iterate variants
            try:
                for variant in package.iter_variants():
                    if self.validate:
                        try:
                            variant.validate_data()
                        except ResourceContentError as exc:
                            results.append(ResourceSearchResult(
                                variant, "variant", str(exc)))
                            continue
                    results.append(ResourceSearchResult(variant, "variant"))
            except ResourceContentError:
                # this may happen if 'variants' in package is malformed
                continue

    return resource_type, results
def get_reverse_dependency_tree(package_name, depth=None, paths=None):
    """Find packages that depend on the given package.

    This is a reverse dependency lookup. A tree is constructed, showing what
    packages depend on the given package, with an optional depth limit. A
    resolve does not occur. Only the latest version of each package is used,
    and requirements from all variants of that package are used.

    Args:
        package_name (str): Name of the package depended on.
        depth (int): Tree depth limit, unlimited if None.
        paths (list of str): paths to search for packages, defaults to
            `config.packages_path`.

    Returns:
        A 2-tuple:
        - (list of list of str): Lists of package names, where each list is a
          single depth in the tree. The first list is always [`package_name`].
        - `pygraph.digraph` object, where nodes are package names, and
          `package_name` is always the leaf node.
    """
    pkgs_list = [[package_name]]
    g = digraph()
    g.add_node(package_name)

    # gather every known family name; bail out early if the target is absent
    family_names = set(fam.name for fam in iter_package_families(paths))
    if package_name not in family_names:
        raise PackageFamilyNotFoundError(
            "No such package family %r" % package_name)

    if depth == 0:
        return pkgs_list, g

    # build reverse lookup: requirement name -> set of packages requiring it
    bar = ProgressBar("Searching", len(family_names))
    lookup = defaultdict(set)

    for other_name in family_names:
        bar.next()
        versions = list(iter_packages(name=other_name, paths=paths))
        if not versions:
            continue

        latest = max(versions, key=lambda p: p.version)
        reqs = set(latest.requires or [])
        for variant_reqs in latest.variants or []:
            reqs.update(variant_reqs)

        for req in reqs:
            if not req.conflict:
                lookup[req.name].add(other_name)

    bar.finish()

    # breadth-first traversal from the target package outwards
    level = 0
    visited = set([package_name])
    frontier = set([package_name])

    node_color = "#F6F6F6"
    node_fontsize = 10
    node_attrs = [("fillcolor", node_color),
                  ("style", "filled"),
                  ("fontsize", node_fontsize)]

    while frontier and (depth is None or level < depth):
        next_frontier = set()
        for child in frontier:
            parents = lookup[child] - visited
            next_frontier.update(parents)
            visited.update(parents)
            for parent in parents:
                g.add_node(parent, attrs=node_attrs)
                g.add_edge((parent, child))

        if next_frontier:
            pkgs_list.append(sorted(list(next_frontier)))
        frontier = next_frontier
        level += 1

    return pkgs_list, g
def search(self, resources_request=None):
    """Search for resources.

    Args:
        resources_request (str): Resource to search, glob-style patterns
            are supported. If None, returns all matching resource types.

    Returns:
        2-tuple:
        - str: resource type (family, package, variant);
        - List of `ResourceSearchResult`: Matching resources. Will be in
          alphabetical order if families, and version ascending for
          packages or variants.
    """
    # Find matching package families
    name_pattern, version_range = self._parse_request(resources_request)

    family_names = set(
        x.name for x in iter_package_families(paths=self.package_paths)
        if fnmatch.fnmatch(x.name, name_pattern)
    )
    family_names = sorted(family_names)

    # determine what type of resource we're searching for
    if self.resource_type:
        resource_type = self.resource_type
    elif version_range or len(family_names) == 1:
        # an explicit version range, or a single exact family, implies the
        # caller is after packages rather than families
        resource_type = "package"
    else:
        resource_type = "family"

    if not family_names:
        return resource_type, []

    # return list of family names (validation is n/a in this case)
    if resource_type == "family":
        results = [ResourceSearchResult(x, "family") for x in family_names]
        return "family", results

    results = []

    # iterate over packages/variants
    for name in family_names:
        it = iter_packages(name, version_range, paths=self.package_paths)
        packages = sorted(it, key=lambda x: x.version)
        if self.latest and packages:
            # keep only the highest version (list is version-ascending)
            packages = [packages[-1]]

        for package in packages:
            # validate and check time (accessing timestamp may cause
            # validation fail)
            try:
                if package.timestamp:
                    if self.after_time and package.timestamp < self.after_time:
                        continue
                    if self.before_time and package.timestamp >= self.before_time:
                        continue
                if self.validate:
                    package.validate_data()
            except ResourceContentError as e:
                # a broken package is still reported (with the error) when
                # searching for packages; it is skipped entirely otherwise
                if resource_type == "package":
                    result = ResourceSearchResult(package, "package", str(e))
                    results.append(result)
                continue

            if resource_type == "package":
                result = ResourceSearchResult(package, "package")
                results.append(result)
                continue

            # iterate variants
            try:
                for variant in package.iter_variants():
                    if self.validate:
                        try:
                            variant.validate_data()
                        except ResourceContentError as e:
                            # report the broken variant and move to the next
                            result = ResourceSearchResult(
                                variant, "variant", str(e))
                            results.append(result)
                            continue

                    result = ResourceSearchResult(variant, "variant")
                    results.append(result)
            except ResourceContentError:
                # this may happen if 'variants' in package is malformed
                continue

    return resource_type, results
def command(opts, parser, extra_arg_groups=None):
    """Entry point for the rez-search subcommand (Python 2 syntax).

    Prints matching family / package / variant names (or their errors when
    --errors is given) and exits nonzero when nothing matched.
    """
    from rez.config import config
    from rez.exceptions import RezError
    from rez.utils.formatting import get_epoch_time_from_str, expand_abbreviations
    from rez.utils.logging_ import print_error
    from rez.packages_ import iter_package_families, iter_packages
    from rez.vendor.version.requirement import Requirement
    import os.path
    import fnmatch
    import sys

    # in debug mode catch nothing, so tracebacks surface
    error_class = None if opts.debug else RezError

    before_time = 0
    after_time = 0
    if opts.before:
        before_time = get_epoch_time_from_str(opts.before)
    if opts.after:
        after_time = get_epoch_time_from_str(opts.after)
    if after_time and before_time and (after_time >= before_time):
        parser.error("non-overlapping --before and --after")

    if opts.paths is None:
        # --no-local restricts the search to non-local repositories
        pkg_paths = config.nonlocal_packages_path if opts.no_local else None
    else:
        pkg_paths = (opts.paths or "").split(os.pathsep)
        pkg_paths = [os.path.expanduser(x) for x in pkg_paths if x]

    name_pattern = opts.PKG or '*'
    version_range = None
    if opts.PKG:
        # best-effort parse of PKG as a versioned request; on failure the
        # raw argument is treated as a glob pattern
        # NOTE(review): bare except silently swallows all errors here -
        # presumably deliberate best-effort, but worth confirming
        try:
            req = Requirement(opts.PKG)
            name_pattern = req.name
            if not req.range.is_any():
                version_range = req.range
        except:
            pass

    type_ = opts.type
    if opts.errors or (type_ == "auto" and version_range):
        type_ = "package"

    # turn some of the nastier rez-1 warnings into errors
    config.override("error_package_name_mismatch", True)
    config.override("error_version_mismatch", True)
    config.override("error_nonstring_version", True)

    if opts.no_warnings:
        config.override("warn_none", True)

    # families
    found = False
    family_names = []
    families = iter_package_families(paths=pkg_paths)
    if opts.sort:
        families = sorted(families, key=lambda x: x.name)
    for family in families:
        if family.name not in family_names and \
                fnmatch.fnmatch(family.name, name_pattern):
            family_names.append(family.name)
            if type_ == "auto":
                # an exact name match implies a package search
                type_ = "package" if family.name == name_pattern else "family"
            if type_ == "family":
                print family.name
                found = True

    def _handle(e):
        # report a resource error without aborting the search
        print_error(str(e))

    def _print_resource(r):
        # print a package/variant, honouring --validate and --format
        if opts.validate:
            try:
                r.validate_data()
            except error_class as e:
                _handle(e)
                return
        if opts.format:
            # NOTE(review): `fields` is not defined anywhere in this
            # function - presumably a module-level name; verify it exists
            txt = expand_abbreviations(opts.format, fields)
            # split on LITERAL backslash-n sequences; the format string
            # encodes newlines that way
            lines = txt.split("\\n")
            for line in lines:
                try:
                    line_ = r.format(line)
                except error_class as e:
                    _handle(e)
                    break
                if opts.no_newlines:
                    line_ = line_.replace('\n', "\\n")
                print line_
        else:
            print r.qualified_name

    # packages/variants
    if type_ in ("package", "variant"):
        for name in family_names:
            packages = iter_packages(name, version_range, paths=pkg_paths)
            if opts.sort or opts.latest:
                packages = sorted(packages, key=lambda x: x.version)
                if opts.latest and packages:
                    packages = [packages[-1]]

            for package in packages:
                # skip packages outside the --before/--after time window
                if ((before_time or after_time)
                        and package.timestamp
                        and (before_time and package.timestamp >= before_time
                             or after_time and package.timestamp <= after_time)):
                    continue

                if opts.errors:
                    try:
                        package.validate_data()
                    except error_class as e:
                        _handle(e)
                        found = True
                elif type_ == "package":
                    _print_resource(package)
                    found = True
                elif type_ == "variant":
                    try:
                        package.validate_data()
                    except error_class as e:
                        _handle(e)
                        continue
                    try:
                        for variant in package.iter_variants():
                            _print_resource(variant)
                            found = True
                    except error_class as e:
                        _handle(e)
                        continue

    if not found:
        if opts.errors:
            print "no erroneous packages found"
        else:
            print "no matches found"
        sys.exit(1)
def command(opts, parser, extra_arg_groups=None):
    """Entry point for the memcached management subcommand (Python 2 syntax).

    Dispatches on the mutually-exclusive option flags (poll / flush / warm /
    reset-stats / stats); with no flag, prints a per-server stats summary
    table.
    """
    from rez.config import config
    from rez.packages_ import iter_package_families, iter_packages
    from rez.utils.yaml import dump_yaml
    from rez.utils.memcached import Client
    from rez.utils.formatting import columnise, readable_time_duration, \
        readable_memory_size
    import sys

    memcache_client = Client(servers=config.memcached_uri,
                             debug=config.debug_memcache)
    if not memcache_client:
        print >> sys.stderr, "memcaching is not enabled."
        sys.exit(1)

    if opts.poll:
        poll(memcache_client, opts.interval)
        return

    if opts.flush:
        memcache_client.flush(hard=True)
        print "memcached servers are flushed."
        return

    if opts.warm:
        # load every package definition once, which populates the cache
        seen = set()
        paths = config.nonlocal_packages_path
        for family in iter_package_families(paths=paths):
            if family.name in seen:
                continue
            for package in iter_packages(family.name, paths=paths):
                if opts.verbose:
                    print("warming: %s" % package.qualified_name)
                # forces package definition load, which puts in memcache
                _ = package.data  # noqa
            seen.add(family.name)
        print "memcached servers are warmed."
        return

    if opts.reset_stats:
        memcache_client.reset_stats()
        print "memcached servers are stat reset."
        return

    def _fail():
        # shared exit path when no server answers
        print >> sys.stderr, "memcached servers are not responding."
        sys.exit(1)

    stats = memcache_client.get_stats()
    if opts.stats:
        if stats:
            txt = dump_yaml(stats)
            print txt
        else:
            _fail()
        return

    # print stats summary
    if not stats:
        _fail()

    rows = [["CACHE SERVER", "UPTIME", "HITS", "MISSES", "HIT RATIO",
             "MEMORY", "USED"],
            ["------------", "------", "----", "------", "---------",
             "------", "----"]]

    for server_id, stats_dict in stats:
        server_uri = server_id.split()[0]
        uptime = int(stats_dict.get("uptime", 0))
        hits = int(stats_dict.get("get_hits", 0))
        misses = int(stats_dict.get("get_misses", 0))
        memory = int(stats_dict.get("limit_maxbytes", 0))
        used = int(stats_dict.get("bytes", 0))

        # max(..., 1) guards against division by zero on fresh servers
        hit_ratio = float(hits) / max(hits + misses, 1)
        hit_percent = int(hit_ratio * 100.0)
        used_ratio = float(used) / max(memory, 1)
        used_percent = int(used_ratio * 100.0)

        row = (server_uri,
               readable_time_duration(uptime),
               str(hits),
               str(misses),
               "%d%%" % hit_percent,
               readable_memory_size(memory),
               "%s (%d%%)" % (readable_memory_size(used), used_percent))
        rows.append(row)
    print '\n'.join(columnise(rows))