def test_collect_specific(self):
    """Collect restrictions filtered by a single attr name.

    Builds one PackageRestriction per attr, wraps them in boolean
    restrictions, and checks collect_package_restrictions() returns
    exactly the restriction(s) carrying the requested attr.
    """
    prs = {}
    for x in ("category", "package", "version", "iuse"):
        prs[x] = packages.PackageRestriction(x, values.AlwaysTrue)

    r = packages.AndRestriction(
        packages.OrRestriction(*prs.values()), packages.AlwaysTrue)
    # dict.iteritems() is Python 2 only; items() is the Python 3 equivalent
    for k, v in prs.items():
        self.assertEqual(
            list(util.collect_package_restrictions(r, attrs=[k])),
            [v])

    r = packages.AndRestriction(
        packages.OrRestriction(*prs.values()), *prs.values())
    # each restriction now occurs twice in the tree (in the Or and directly),
    # so each must be collected twice
    for k, v in prs.items():
        self.assertEqual(
            list(util.collect_package_restrictions(r, attrs=[k])),
            [v] * 2)
def matches_finalize(targets, namespace):
    """Convert CLI targets into a combined restriction over all repos.

    :param targets: list of target strings (atoms or filesystem paths)
    :param namespace: parsed argparse namespace providing ``repos``
    :return: OrRestriction over the parsed targets, or ``[]`` when there is
        nothing to match
    """
    repos = multiplex.tree(*namespace.repos)
    # If current working dir is in a repo, build a path restriction; otherwise
    # match everything.
    if not targets:
        cwd = os.getcwd()
        if cwd in repos:
            targets = [cwd]
        else:
            return []
    restrictions = []
    for target in targets:
        try:
            restrictions.append(parserestrict.parse_match(target))
        except parserestrict.ParseError as e:
            # fall back to a path restriction for existing filesystem targets
            if os.path.exists(target):
                try:
                    restrictions.append(repos.path_restrict(target))
                # distinct name: rebinding ``e`` here would shadow (and, on
                # handler exit, delete) the outer exception binding
                except ValueError as path_error:
                    argparser.error(path_error)
            else:
                argparser.error(e)
    if restrictions:
        return packages.OrRestriction(*restrictions)
    return []
def downgrade_resolver(vdbs, dbs, verify_vdb=True, nodeps=False,
                       force_replace=False, resolver_cls=plan.merge_plan, **kwds):
    """
    generate and configure a resolver for downgrading all processed nodes.

    :param vdbs: list of :obj:`pkgcore.repository.prototype.tree` instances
        that represents the livefs
    :param dbs: list of :obj:`pkgcore.repository.prototype.tree` instances
        representing sources of pkgs
    :param verify_vdb: should we stop resolving once we hit the vdb,
        or do full resolution?
    :param nodeps: if True, dependency resolution is skipped entirely
    :param force_replace: wrap resolver_cls so installed pkgs are replaced
    :return: :obj:`pkgcore.resolver.plan.merge_plan` instance
    """
    # restrict candidates to versions >= something already installed, i.e.
    # the set of pkgs a downgrade would move away from
    restrict = packages.OrRestriction(*list(
        atom(f'>={x.cpvstr}') for x in chain.from_iterable(vdbs)))
    f = partial(plan.merge_plan.prefer_downgrade_version_strategy, restrict)
    # filter the source repos down to the downgrade candidate set
    dbs = list(map(partial(misc.restrict_repo, restrict), dbs))
    # hack.
    if nodeps:
        vdbs = list(map(misc.nodeps_repo, vdbs))
        dbs = list(map(misc.nodeps_repo, dbs))
    elif not verify_vdb:
        # skip dep verification for installed pkgs only
        vdbs = list(map(misc.nodeps_repo, vdbs))
        dbs = list(dbs)
    if force_replace:
        resolver_cls = generate_replace_resolver_kls(resolver_cls)
    return resolver_cls(dbs + vdbs, plan.pkg_sort_highest, f, **kwds)
def pkg_upgrade(_value, namespace):
    """Return an OrRestriction of upgrade atoms for installed packages.

    For each installed pkg, the highest matching version across all repos is
    selected; if it differs from the installed one, its versioned atom is an
    upgrade candidate.
    """
    upgrade_atoms = []
    for installed in namespace.domain.all_livefs_repos:
        candidates = sorted(namespace.domain.all_repos.match(installed.slotted_atom))
        if candidates and candidates[-1] != installed:
            upgrade_atoms.append(candidates[-1].versioned_atom)
    return packages.OrRestriction(*upgrade_atoms)
def generate_intersects_from_pkg_node(self, pkg_node, tag=None):
    """Build a restriction for vulnerable pkgs from a GLSA <package> node.

    :param pkg_node: ElementTree element for a GLSA <package> entry
    :param tag: optional tag attached to the resulting KeyedAndRestriction
    :return: restriction instance, or None when no vulnerable ranges exist
    """
    arch = pkg_node.get("arch")
    if arch is not None:
        arch = str(arch.strip()).split()
        # "*" (or an empty attr) means all arches: no keyword filtering
        if not arch or "*" in arch:
            arch = None

    vuln = list(pkg_node.findall("vulnerable"))
    if not vuln:
        return None
    elif len(vuln) > 1:
        vuln_list = [self.generate_restrict_from_range(x) for x in vuln]
        vuln = packages.OrRestriction(*vuln_list)
    else:
        vuln_list = [self.generate_restrict_from_range(vuln[0])]
        vuln = vuln_list[0]

    if arch is not None:
        # additionally require one of the listed keywords
        vuln = packages.AndRestriction(
            vuln, packages.PackageRestriction(
                "keywords", values.ContainmentMatch(all=False, *arch)))

    invuln = (pkg_node.findall("unaffected"))
    if not invuln:
        # wrap it.
        return packages.KeyedAndRestriction(vuln, tag=tag)

    invuln_list = [
        self.generate_restrict_from_range(x, negate=True)
        for x in invuln
    ]
    # drop unaffected ranges that exactly mirror a vulnerable range
    invuln = [x for x in invuln_list if x not in vuln_list]
    if not invuln:
        # collapsed an `if tag is None:` branch whose two arms returned the
        # identical expression
        return packages.KeyedAndRestriction(vuln, tag=tag)
    return packages.KeyedAndRestriction(vuln, tag=tag, *invuln)
def test_collect_all(self):
    """All duplicated restrictions nested under boolean wrappers are collected."""
    restriction = packages.PackageRestriction("category", values.AlwaysTrue)
    expected = [restriction] * 10
    tree = packages.AndRestriction(
        packages.OrRestriction(),
        packages.AndRestriction(),
        *expected)
    self.assertEqual(
        list(util.collect_package_restrictions(tree)),
        expected)
class AcctCheck(Check):
    """Various checks for acct-* packages.

    Verify that acct-* packages do not use conflicting, invalid or out-of-range
    UIDs/GIDs.
    """

    scope = base.repo_scope
    # restrict scanning to the acct-user/acct-group categories
    _restricted_source = (sources.RestrictionRepoSource, (packages.OrRestriction(*(
        restricts.CategoryDep('acct-user'), restricts.CategoryDep('acct-group'))),))
    _source = (sources.RepositoryRepoSource, (), (('source', _restricted_source),))
    known_results = frozenset([
        MissingAccountIdentifier, ConflictingAccountIdentifiers,
        OutsideRangeAccountIdentifier,
    ])

    def __init__(self, *args):
        super().__init__(*args)
        # matches e.g. ACCT_USER_ID="123" (quotes optional, must balance)
        self.id_re = re.compile(
            r'ACCT_(?P<var>USER|GROUP)_ID=(?P<quot>[\'"]?)(?P<id>[0-9]+)(?P=quot)')
        # id -> pkg key -> pkgs; used in finish() to detect id reuse
        self.seen_uids = defaultdict(partial(defaultdict, list))
        self.seen_gids = defaultdict(partial(defaultdict, list))
        # category -> (seen-id map, expected var name, ids allowed above range)
        self.category_map = {
            'acct-user': (self.seen_uids, 'USER', (65534,)),
            'acct-group': (self.seen_gids, 'GROUP', (65533, 65534)),
        }

    def feed(self, pkg):
        """Scan a single pkg's ebuild text for its ACCT_*_ID assignment."""
        try:
            seen_id_map, expected_var, extra_allowed_ids = self.category_map[pkg.category]
        except KeyError:
            # not an acct-* package; nothing to check
            return
        for l in pkg.ebuild.text_fileobj():
            m = self.id_re.match(l)
            if m is not None:
                if m.group('var') == expected_var:
                    found_id = int(m.group('id'))
                    break
        else:
            # for-else: no matching assignment line found in the ebuild
            yield MissingAccountIdentifier(f"ACCT_{expected_var}_ID", pkg=pkg)
            return
        # all UIDs/GIDs must be in <500, with special exception
        # of nobody/nogroup which use 65534/65533
        if found_id >= 500 and found_id not in extra_allowed_ids:
            yield OutsideRangeAccountIdentifier(expected_var.lower(), found_id, pkg=pkg)
            return
        seen_id_map[found_id][pkg.key].append(pkg)

    def finish(self):
        # report overlapping ID usage
        for seen, expected_var, _ids in self.category_map.values():
            for found_id, pkgs in sorted(seen.items()):
                # more than one pkg key claiming the same id is a conflict
                if len(pkgs) > 1:
                    pkgs = (x.cpvstr for x in sorted(chain.from_iterable(pkgs.values())))
                    yield ConflictingAccountIdentifiers(expected_var.lower(), found_id, pkgs)
def check_args(cls, parser, namespace):
    """Validate --commits usage and derive scan targets from the git diff."""
    if namespace.commits:
        # --commits computes its own targets; explicit ones conflict
        if namespace.targets:
            targets = ' '.join(namespace.targets)
            s = pluralism(namespace.targets)
            parser.error(
                f'--commits is mutually exclusive with target{s}: {targets}'
            )
        ref = namespace.commits
        repo = namespace.target_repo
        targets = list(repo.category_dirs)
        if os.path.isdir(pjoin(repo.location, 'eclass')):
            targets.append('eclass')
        try:
            p = subprocess.run(
                ['git', 'diff', '--cached', ref, '--name-only'] + targets,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=repo.location, encoding='utf8')
        except FileNotFoundError:
            # parser.error() exits, so p cannot be unbound below
            parser.error(
                'git not available to determine targets for --commits')
        if p.returncode != 0:
            error = p.stderr.splitlines()[0]
            parser.error(f'failed running git: {error}')
        elif not p.stdout:
            # no changes exist, exit early
            parser.exit()
        # split changed paths into eclass vs pkg groups
        pkgs, eclasses = partition(
            p.stdout.splitlines(),
            predicate=lambda x: x.startswith('eclass/'))
        pkgs = sorted(cls._pkg_atoms(pkgs))
        eclasses = filter(None, (eclass_regex.match(x) for x in eclasses))
        eclasses = sorted(x.group('eclass') for x in eclasses)
        restrictions = []
        if pkgs:
            restrict = packages.OrRestriction(*pkgs)
            restrictions.append((base.package_scope, restrict))
        if eclasses:
            func = partial(cls._committed_eclass, frozenset(eclasses))
            restrict = values.AnyMatch(values.FunctionRestriction(func))
            restrictions.append((base.eclass_scope, restrict))
        # no pkgs or eclasses to check, exit early
        if not restrictions:
            parser.exit()
        # stash uncommitted changes for the duration of the scan
        namespace.contexts.append(GitStash(parser, repo))
        namespace.restrictions = restrictions
def parse_revdep(value):
    """Value should be an atom, packages with deps intersecting that match.

    :param value: atom string
    :return: OrRestriction matching pkgs whose b/r/p/depend intersects the atom
    """
    try:
        targetatom = atom.atom(value)
    except atom.MalformedAtom as e:
        # argparser.error() exits itself; the previous `raise` on its (None)
        # return value was dead code and inconsistent with sibling callers
        argparser.error(e)
    val_restrict = values.FlatteningRestriction(
        atom.atom,
        values.AnyMatch(values.FunctionRestriction(targetatom.intersects)))
    # a match on any single dep attr suffices; * unpacks the generator
    # directly, no intermediate list needed
    return packages.OrRestriction(*(
        packages.PackageRestriction(dep, val_restrict)
        for dep in ('bdepend', 'depend', 'rdepend', 'pdepend')))
def generate_filter(masks, unmasks, *extra):
    """Combine mask/unmask filters plus extras into one AndRestriction.

    Unmasking is only considered when masking is supplied; on its own it is
    a no-op.
    """
    masking = make_mask_filter(masks, negate=True)
    unmasking = make_mask_filter(unmasks, negate=False)
    combined = ()
    if masking:
        if unmasking:
            combined = (packages.OrRestriction(
                masking, unmasking, disable_inst_caching=True),)
        else:
            combined = (masking,)
    return packages.AndRestriction(
        disable_inst_caching=True, finalize=True, *(combined + extra))
def test_commits_existing(self):
    # simulated `git diff` output: one pkg with a metadata change, one with
    # an ebuild change
    output = [
        'dev-libs/foo/metadata.xml\n',
        'media-libs/bar/bar-0.ebuild\n',
    ]
    with patch('pkgcheck.git.spawn_get_output') as spawn_get_output:
        spawn_get_output.return_value = (0, output)
        options, _func = self.tool.parse_args(self.args + ['--commits'])
    restrictions = [
        atom.atom('dev-libs/foo'),
        atom.atom('media-libs/bar')
    ]
    # both changed pkgs collapse into a single package-scope OrRestriction
    assert options.restrictions == \
        [(base.package_scope, packages.OrRestriction(*restrictions))]
def test_commits_existing(self):
    # simulated `git diff` output: one pkg with a metadata change, one with
    # an ebuild change
    output = [
        'dev-libs/foo/metadata.xml\n',
        'media-libs/bar/bar-0.ebuild\n',
    ]
    with patch('subprocess.run') as git_diff:
        # fake a successful git invocation whose stdout lists the paths
        git_diff.return_value.returncode = 0
        git_diff.return_value.stdout = ''.join(output)
        options, _func = self.tool.parse_args(self.args + ['--commits'])
    restrictions = [
        atom.atom('dev-libs/foo'),
        atom.atom('media-libs/bar')
    ]
    # both changed pkgs collapse into a single package-scope OrRestriction
    assert options.restrictions == \
        [(base.package_scope, packages.OrRestriction(*restrictions))]
def generate_filter(self, masking, unmasking, *extra):
    """Merge masking/unmasking restrictions into one AndRestriction filter.

    Unmasking is ignored unless masking was supplied — alone it has no
    effect.
    """
    parts = ()
    if masking:
        if unmasking:
            parts = (packages.OrRestriction(
                masking, unmasking, disable_inst_caching=True),)
        else:
            parts = (masking,)
    return packages.AndRestriction(
        disable_inst_caching=True, finalize=True, *(parts + extra))
def pkg_grouped_iter(self, sorter=None):
    """yield GLSA restrictions grouped by package key

    :param sorter: callable applied to the iterable of package keys to
        control yield order (e.g. ``sorted``); None means plain iteration
        order — note it is not a two-arg comparison function
    """
    if sorter is None:
        sorter = iter
    pkgs = {}
    pkgatoms = {}
    # bucket every vulnerability restriction under its package key
    for glsa, pkg, pkgatom, vuln in self.iter_vulnerabilities():
        pkgatoms[pkg] = pkgatom
        pkgs.setdefault(pkg, []).append(vuln)
    for pkgname in sorter(pkgs):
        # pkg atom AND (any of its vulnerability ranges)
        yield packages.KeyedAndRestriction(
            pkgatoms[pkgname], packages.OrRestriction(*pkgs[pkgname]),
            key=pkgname)
def _manifest_validate(parser, namespace):
    """Resolve manifest targets into a single OrRestriction on the namespace."""
    def _to_restrict(target):
        # existing filesystem paths become repo path restrictions; anything
        # else is parsed as a package match
        if os.path.exists(target):
            try:
                return namespace.repo.path_restrict(target)
            except ValueError as e:
                manifest.error(e)
        else:
            try:
                return parse_match(target)
            except ValueError:
                manifest.error(f'invalid atom: {target!r}')

    targets = namespace.target if namespace.target else [namespace.cwd]
    namespace.restriction = packages.OrRestriction(
        *[_to_restrict(t) for t in targets])
def matches_finalize(targets, namespace):
    """Convert target strings into an OrRestriction across all repos.

    :param targets: list of target strings (atoms or filesystem paths)
    :param namespace: parsed argparse namespace providing ``repos``
    :return: OrRestriction over the parsed targets, or ``[]`` when no
        targets were given
    """
    if not targets:
        return []
    repos = multiplex.tree(*namespace.repos)
    restrictions = []
    for target in targets:
        try:
            restrictions.append(parserestrict.parse_match(target))
        except parserestrict.ParseError as e:
            # fall back to a path restriction for existing filesystem targets
            if os.path.exists(target):
                try:
                    restrictions.append(repos.path_restrict(target))
                # distinct name: rebinding ``e`` here would shadow (and, on
                # handler exit, delete) the outer exception binding
                except ValueError as path_error:
                    argparser.error(path_error)
            else:
                argparser.error(e)
    if restrictions:
        return packages.OrRestriction(*restrictions)
    return []
def _commit(options, out, err):
    """Commit subcommand entry point.

    NOTE(review): this chunk is truncated — the function continues beyond
    the visible text.
    """
    repo = options.repo
    git_add_files = []
    # determine changes from staged files
    changes = determine_changes(options)

    # matches `git status --porcelain` lines for untracked (??) ebuilds
    _untracked_ebuild_re = re.compile(
        r'^\?\? (?P<category>[^/]+)/[^/]+/(?P<package>[^/]+)\.ebuild$')

    # update manifests for existing packages
    if atoms := {x.atom.unversioned_atom for x in changes.ebuild_changes}:
        if pkgs := {x.versioned_atom for x in repo.itermatch(packages.OrRestriction(*atoms))}:
            # pull all matches and drop untracked ebuilds
            p = git.run(
                'status', '--porcelain=v1', '-u', '-z', "*.ebuild",
                cwd=repo.location, stdout=subprocess.PIPE)
            for path in p.stdout.strip('\x00').split('\x00'):
                if mo := _untracked_ebuild_re.match(path):
                    # untracked ebuilds can't be manifested from git state yet
                    try:
                        untracked = atom_cls(f"={mo.group('category')}/{mo.group('package')}")
                        pkgs.discard(untracked)
                    except MalformedAtom:
                        continue
def check_args(cls, parser, namespace):
    """Validate --commits usage; derive pkg targets from `git diff origin`."""
    if namespace.commits:
        # --commits computes its own targets; explicit ones conflict
        if namespace.targets:
            targets = ' '.join(namespace.targets)
            parser.error('--commits is mutually exclusive with '
                         f'target{_pl(namespace.targets)}: {targets}')
        repo = namespace.target_repo
        ret, out = spawn_get_output(
            ['git', 'diff', 'origin', '--name-only'] + list(repo.categories),
            cwd=repo.location)
        if ret != 0:
            parser.error(
                'git not available to determine targets for --commits')
        elif not out:
            # no pkg changes exist
            parser.exit()
        # changed paths -> category/package atoms
        pkgs = sorted(
            atom_cls(os.sep.join(x.split(os.sep, 2)[:2]))
            for x in out)
        combined_restrict = packages.OrRestriction(*pkgs)
        namespace.restrictions = [(base.package_scope, combined_restrict)]
def _digest_validate(parser, namespace):
    """Resolve digest targets into a combined restriction and target repo."""
    repo = namespace.repo
    targets = namespace.target
    restrictions = []
    if repo is not None:
        # explicit repo given: default to manifesting the whole repo
        if not targets:
            restrictions.append(repo.path_restrict(repo.location))
    else:
        # if we're currently in a known ebuild repo use it, otherwise use all ebuild repos
        cwd = os.getcwd()
        repo = namespace.domain.ebuild_repos_raw.repo_match(cwd)
        if repo is None:
            repo = namespace.domain.all_ebuild_repos_raw
        if not targets:
            try:
                restrictions.append(repo.path_restrict(cwd))
            except ValueError:
                # we're not in a configured repo so manifest everything
                restrictions.extend(
                    repo.path_restrict(x.location) for x in repo.trees)
    if not repo.operations.supports("digests"):
        digest.error("no repository support for digests")
    for target in targets:
        if os.path.exists(target):
            # path target -> repo path restriction
            try:
                restrictions.append(repo.path_restrict(target))
            except ValueError as e:
                digest.error(e)
        else:
            # non-path target -> package match string
            try:
                restrictions.append(parse_match(target))
            except ValueError:
                digest.error(f"invalid atom: {target!r}")
    restriction = packages.OrRestriction(*restrictions)
    namespace.restriction = restriction
    namespace.repo = repo
def test_identify_candidates(self):
    """Exercise restriction-based matching/sorting against the fake repo."""
    with pytest.raises(TypeError):
        # matching requires a restriction instance, not a raw string
        self.repo.match("asdf")

    rc = packages.PackageRestriction(
        "category", values.StrExactMatch("dev-util"))
    assert \
        sorted(set(x.package for x in self.repo.itermatch(rc))) == \
        sorted(["diffball", "bsdiff"])
    rp = packages.PackageRestriction(
        "package", values.StrExactMatch("diffball"))
    assert list(
        x.version for x in
        self.repo.itermatch(rp, sorter=sorted)) == ["0.7", "1.0"]
    assert \
        self.repo.match(packages.OrRestriction(rc, rp), sorter=sorted) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2"))
    assert \
        sorted(self.repo.itermatch(packages.AndRestriction(rc, rp))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0"))
    assert sorted(self.repo) == self.repo.match(
        packages.AlwaysTrue, sorter=sorted)
    # mix/match cat/pkg to check that it handles that corner case
    # properly for sorting.
    assert \
        sorted(self.repo, reverse=True) == \
        self.repo.match(
            packages.OrRestriction(rc, rp, packages.AlwaysTrue),
            sorter=partial(sorted, reverse=True))
    rc2 = packages.PackageRestriction(
        "category", values.StrExactMatch("dev-lib"))
    assert sorted(self.repo.itermatch(packages.AndRestriction(rp, rc2))) == []

    # note this mixes a category level match, and a pkg level
    # match. they *must* be treated as an or.
    assert \
        sorted(self.repo.itermatch(packages.OrRestriction(rp, rc2))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))

    # this is similar to the test above, but mixes a cat/pkg
    # candidate with a pkg candidate
    rp2 = packages.PackageRestriction(
        "package", values.StrExactMatch("fake"))
    r = packages.OrRestriction(atom("dev-util/diffball"), rp2)
    assert \
        sorted(self.repo.itermatch(r)) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))

    assert \
        sorted(self.repo.itermatch(
            packages.OrRestriction(packages.AlwaysTrue, rp2))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))

    assert \
        sorted(self.repo.itermatch(packages.PackageRestriction(
            'category', values.StrExactMatch('dev-util', negate=True)))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))

    obj = malleable_obj(livefs=False)
    pkg_cls = post_curry(MutatedPkg, {'repo': obj})
    assert \
        sorted(self.repo.itermatch(boolean.AndRestriction(
            boolean.OrRestriction(
                packages.PackageRestriction(
                    "repo.livefs", values.EqualityMatch(False)),
                packages.PackageRestriction(
                    "category", values.StrExactMatch("virtual"))),
            atom("dev-lib/fake")), pkg_cls=pkg_cls)) == \
        sorted(VersionedCPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))

    # double negation matches only the negated-away category; the original
    # repeated this exact assertion twice — the verbatim duplicate was dropped
    assert \
        sorted(self.repo.itermatch(packages.PackageRestriction(
            'category', values.StrExactMatch('dev-lib', negate=True),
            negate=True))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
# NOTE(review): this chunk starts mid-function (continuation of a commit
# routine); the enclosing def and its earlier statements are outside this view.
if pkgs := {x.versioned_atom for x in repo.itermatch(packages.OrRestriction(*atoms))}:
    # pull all matches and drop untracked ebuilds
    p = git.run(
        'status', '--porcelain=v1', '-u', '-z', "*.ebuild",
        cwd=repo.location, stdout=subprocess.PIPE)
    for path in p.stdout.strip('\x00').split('\x00'):
        if mo := _untracked_ebuild_re.match(path):
            try:
                untracked = atom_cls(f"={mo.group('category')}/{mo.group('package')}")
                pkgs.discard(untracked)
            except MalformedAtom:
                continue

    # manifest all staged or committed packages
    failed = repo.operations.manifest(
        options.domain, packages.OrRestriction(*pkgs),
        observer=observer_mod.formatter_output(out))
    if any(failed):
        return 1

    # include existing Manifest files for staging
    manifests = (pjoin(repo.location, f'{x.key}/Manifest') for x in atoms)
    git_add_files.extend(filter(os.path.exists, manifests))

# mangle files
if options.mangle:
    # don't mangle FILESDIR content
    skip_regex = re.compile(rf'^{repo.location}/[^/]+/[^/]+/files/.+$')
    mangler = GentooMangler if options.gentoo_repo else Mangler
    paths = (pjoin(repo.location, x) for x in changes.paths)
    git_add_files.extend(mangler(paths, skip_regex=skip_regex))
# NOTE(review): this chunk starts mid-function (inside an `if nodeps:` branch
# of a resolver factory, judging by the parallel downgrade_resolver) and ends
# mid-docstring; definition boundaries lie outside this view.
    dbs = list(map(misc.nodeps_repo, dbs))
elif not verify_vdb:
    # skip dep verification for installed pkgs only
    vdbs = list(map(misc.nodeps_repo, vdbs))
    dbs = list(dbs)
if force_replace:
    resolver_cls = generate_replace_resolver_kls(resolver_cls)
return resolver_cls(vdbs + dbs, plan.pkg_sort_highest,
                    plan.merge_plan.prefer_reuse_strategy, **kwds)


# matches pkgs not installed to the livefs, or virtual-category pkgs that
# aren't backed by a real package
_vdb_restrict = packages.OrRestriction(
    packages.PackageRestriction("repo.livefs", values.EqualityMatch(False)),
    packages.AndRestriction(
        packages.PackageRestriction(
            "category", values.StrExactMatch("virtual")),
        packages.PackageRestriction(
            "package_is_real", values.EqualityMatch(False)),
    ),
)


class empty_tree_merge_plan(plan.merge_plan):

    _vdb_restriction = _vdb_restrict

    def __init__(self, dbs, *args, **kwds):
        """
        :param args: see :obj:`pkgcore.resolver.plan.merge_plan.__init__`
            for valid args
        :param kwds: see :obj:`pkgcore.resolver.plan.merge_plan.__init__`
def masked(self):
    """Base package mask restriction."""
    mask_restricts = tuple(self.pkg_masks)
    return packages.OrRestriction(*mask_restricts)
def parse_description(value):
    """Value is used as a regexp matching description or longdescription.

    :param value: regex string, matched case-insensitively
    :return: OrRestriction matching either description attribute
    """
    matcher = values.StrRegex(value, case_sensitive=False)
    # * unpacks the generator directly; the previous wrapping list() was
    # redundant
    return packages.OrRestriction(*(
        packages.PackageRestriction(attr, matcher)
        for attr in ('description', 'longdescription')))
def masked(self):
    """Restriction matching any of this object's masks."""
    mask_list = tuple(self.masks)
    return packages.OrRestriction(*mask_list)
def deprecated(self):
    """Base deprecated packages restriction from profiles/package.deprecated."""
    deprecated_pkgs = tuple(self.config.pkg_deprecated)
    return packages.OrRestriction(*deprecated_pkgs)