def test_filtering(self):
    """Verify filtered.tree hides exactly what the filter restriction matches."""
    repo, vrepo = self.setup_repos()
    a = atom("dev-lib/fake")
    a2 = atom("dev-util/diffball")
    # default filter masks dev-util/diffball: dev-lib/fake is untouched,
    # diffball matches nothing through the view
    self.assertEqual(
        sorted(vrepo.itermatch(a)),
        sorted(repo.itermatch(a)))
    self.assertEqual(sorted(vrepo.itermatch(a2)), sorted([]))
    # version-specific filter: only diffball-1.0 is hidden
    repo, vrepo = self.setup_repos(atom("=dev-util/diffball-1.0"))
    self.assertEqual(
        sorted(vrepo.itermatch(a)),
        sorted(repo.itermatch(a)))
    self.assertEqual(
        sorted(vrepo.itermatch(a2)),
        sorted([versioned_CPV("dev-util/diffball-0.7")]))
    # package-name filter: only bsdiff survives the view
    repo, vrepo = self.setup_repos(packages.PackageRestriction(
        "package", values.OrRestriction(
            *[values.StrExactMatch(x) for x in ("diffball", "fake")])))
    self.assertEqual(
        sorted(vrepo.itermatch(packages.AlwaysTrue)),
        sorted(repo.itermatch(atom("dev-util/bsdiff"))))
    # check sentinel value handling.
    vrepo = filtered.tree(repo, a2, sentinel_val=True)
    self.assertEqual(
        sorted(x.cpvstr for x in vrepo),
        sorted(['dev-util/diffball-0.7', 'dev-util/diffball-1.0']))
def __init__(self, masked_use=None, stable_masked_use=None, forced_use=None,
             stable_forced_use=None, provides=None, iuse_effective=(),
             masks=(), unmasks=(), arch='x86', name='none'):
    """Fake profile/domain object for tests.

    Fix: mutable default arguments ({} and []) were shared across all
    instances; replaced with None/() sentinels.  The four identical
    ChunkedDataDict builds are factored into one local helper.

    :param masked_use: {atom_str: flag_strings} for masked USE.
    :param stable_masked_use: same shape, stable-keyword variant.
    :param forced_use: same shape, forced USE.
    :param stable_forced_use: same shape, stable-keyword variant.
    :param provides: mapping fed to SimpleTree for the provides repo.
    """
    def _frozen_chunks(mapping):
        # build a frozen ChunkedDataDict from {atom_str: flags} pairs
        d = ChunkedDataDict()
        d.update_from_stream(
            chunked_data(atom(k), *split_negations(v))
            for k, v in (mapping or {}).iteritems())
        d.freeze()
        return d

    self.provides_repo = SimpleTree(provides or {})
    self.masked_use = _frozen_chunks(masked_use)
    self.stable_masked_use = _frozen_chunks(stable_masked_use)
    self.forced_use = _frozen_chunks(forced_use)
    self.stable_forced_use = _frozen_chunks(stable_forced_use)
    self.masks = tuple(map(atom, masks))
    self.unmasks = tuple(map(atom, unmasks))
    self.iuse_effective = tuple(iuse_effective)
    self.arch = arch
    self.name = name
def test_pkg_use(self):
    """package.use parsing: flag chunking, negations, and error cases."""
    path = pjoin(self.dir, self.profile)
    self.assertEqualChunks(self.klass(path).pkg_use, {})
    self.parsing_checks("package.use", "pkg_use")
    self.write_file("package.use", "dev-util/bar X")
    self.assertEqualChunks(
        self.klass(path).pkg_use,
        {"dev-util/bar": (chunked_data(atom("dev-util/bar"), (), ("X",)),)}
    )
    # a '-' prefixed atom is invalid in package.use
    self.write_file("package.use", "-dev-util/bar X")
    self.assertRaises(profiles.ProfileError, getattr, self.klass(path), "pkg_use")
    self._check_package_use_files(path, "package.use", "pkg_use")
    self.write_file("package.use", "dev-util/bar -X\ndev-util/foo X")
    self.assertEqualChunks(
        self.klass(path).pkg_use,
        {
            "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ()),),
            "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X",)),),
        },
    )
    self.simple_eapi_awareness_check(
        "package.use", "pkg_use",
        bad_data="=de/bs-1:1 x\nda/bs y", good_data="=de/bs-1 x\nda/bs y"
    )
    # an atom with no flags listed is a parse error
    self.write_file("package.use", "dev-util/diffball")
    self.assertRaises(profiles.ProfileError, getattr, self.klass(path), "pkg_use")
def test_categories_packages(self):
    """Category/package/version scanning, plus paludis-style version handling."""
    ensure_dirs(pjoin(self.dir, 'cat', 'pkg'))
    ensure_dirs(pjoin(self.dir, 'empty', 'empty'))
    # 'scripts' is a reserved dir name and must not appear as a category
    ensure_dirs(pjoin(self.dir, 'scripts', 'pkg'))
    # CVS dirs are ignored as packages, but 'notcat' still registers
    ensure_dirs(pjoin(self.dir, 'notcat', 'CVS'))
    touch(pjoin(self.dir, 'cat', 'pkg', 'pkg-3.ebuild'))
    repo = self.mk_tree(self.dir)
    self.assertEqual(
        {'cat': (), 'notcat': (), 'empty': ()}, dict(repo.categories))
    self.assertEqual(
        {'cat': ('pkg',), 'empty': ('empty',), 'notcat': ()},
        dict(repo.packages))
    self.assertEqual(
        {('cat', 'pkg'): ('3',), ('empty', 'empty'): ()},
        dict(repo.versions))
    # paludis-style scm/try suffixes are invalid CPVs unless explicitly
    # tolerated via ignore_paludis_versioning
    for x in ("1-scm", "scm", "1-try", "1_beta-scm", "1_beta-try"):
        for rev in ("", "-r1"):
            fp = pjoin(self.dir, 'cat', 'pkg',
                       'pkg-%s%s.ebuild' % (x, rev))
            open(fp, 'w').close()
            repo = self.mk_tree(self.dir)
            self.assertRaises(ebuild_errors.InvalidCPV, repo.match,
                              atom('cat/pkg'))
            repo = self.mk_tree(self.dir, ignore_paludis_versioning=True)
            self.assertEqual(
                sorted(x.cpvstr for x in repo.itermatch(atom('cat/pkg'))),
                ['cat/pkg-3'])
            os.unlink(fp)
def _process_update(sequence, filename, mods, moved):
    """Parse one profile update file into 'move'/'slotmove' commands.

    Malformed lines are logged and skipped (never raised).

    :param sequence: iterable of raw lines from the update file.
    :param filename: file name, used only for log messages.
    :param mods: mapping of package key -> (data, command-list) being built.
    :param moved: mapping of already-moved source keys -> target atom.
    """
    for lineno, raw_line in enumerate(sequence):
        line = raw_line.split()
        if line[0] == 'move':
            if len(line) != 3:
                logger.error(
                    'file %r: %r on line %s: bad move form',
                    filename, raw_line, lineno + 1)
                continue
            src, trg = atom(line[1]), atom(line[2])
            if src.fullver is not None:
                logger.error(
                    "file %r: %r on line %s: atom %s must be versionless",
                    filename, raw_line, lineno + 1, src)
                continue
            elif trg.fullver is not None:
                logger.error(
                    "file %r: %r on line %s: atom %s must be versionless",
                    filename, raw_line, lineno + 1, trg)
                continue
            if src.key in moved:
                logger.warning(
                    "file %r: %r on line %s: %s was already moved to %s,"
                    " this line is redundant",
                    filename, raw_line, lineno + 1, src, moved[src.key])
                continue
            d = deque()
            mods[src.key][1].extend([('move', src, trg), d])
            # start essentially a new checkpoint in the trg
            mods[trg.key][1].append(d)
            mods[trg.key][1] = d
            moved[src.key] = trg
        elif line[0] == 'slotmove':
            if len(line) != 4:
                logger.error(
                    'file %r: %r on line %s: bad slotmove form',
                    filename, raw_line, lineno + 1)
                continue
            src = atom(line[1])
            if src.key in moved:
                logger.warning(
                    "file %r: %r on line %s: %s was already moved to %s, "
                    "this line is redundant",
                    filename, raw_line, lineno + 1, src, moved[src.key])
                continue
            elif src.slot is not None:
                # fix: log arguments were passed as (filename, lineno + 1,
                # raw_line), which does not match the
                # "file %r: %r on line %s" format used everywhere else
                logger.error(
                    "file %r: %r on line %s: slotted atom makes no sense "
                    "for slotmoves",
                    filename, raw_line, lineno + 1)
                continue
            # atom construction doubles as validation of both slot strings
            src_slot = atom(f'{src}:{line[2]}')
            trg_slot = atom(f'{src.key}:{line[3]}')
            mods[src.key][1].append(('slotmove', src_slot, line[3]))
def test_add(self):
    """Adding atoms then flushing must persist the full, combined set.

    Fix: the original compared the file's (unsorted) contents against a
    sorted expectation, making the assertion depend on on-disk ordering;
    both sides are now sorted.  The file handle is also closed via a
    context manager instead of leaking.
    """
    s = self.gen_pkgset("dev-util/diffball\n=dev-util/bsdiff-0.4")
    s.add(atom("dev-util/foon"))
    s.add(atom("=dev-util/lib-1"))
    s.flush()
    with open(self.fn) as f:
        written = sorted(atom(x.strip()) for x in f)
    self.assertEqual(
        written,
        sorted(map(atom, ("dev-util/diffball", "=dev-util/bsdiff-0.4",
                          "dev-util/foon", "=dev-util/lib-1"))))
def test_add(self):
    """Multi-slot atoms ('pkg:2,3') expand into one line per slot on flush."""
    pkgset = self.gen_pkgset("dev-util/bsdiff")
    for entry in ("dev-util/foon", "=dev-util/lib-1", "dev-util/mylib:2,3"):
        pkgset.add(atom(entry))
    pkgset.flush()
    expected = sorted((
        "dev-util/bsdiff", "dev-util/foon", "dev-util/lib",
        "dev-util/mylib:2", "dev-util/mylib:3"))
    self.assertEqual(sorted(line.strip() for line in open(self.fn)),
                     expected)
def test_masks(self):
    """package.mask entries mask; '-' prefixed entries unmask."""
    path = pjoin(self.dir, self.profile)
    self.assertEqual(self.klass(path).masks, empty)
    self.parsing_checks("package.mask", "masks")
    self.write_file("package.mask", "dev-util/diffball")
    # masks is an (unmasks, masks) pair of atom tuples
    self.assertEqual(self.klass(path).masks,
                     ((), (atom("dev-util/diffball"),)))
    self.write_file("package.mask", "-dev-util/diffball")
    self.assertEqual(self.klass(path).masks,
                     ((atom("dev-util/diffball"),), ()))
    self.simple_eapi_awareness_check("package.mask", "masks")
def render_to_payload(self):
    """Render collapsed restrict data into a payload mapping.

    Returns a mapping keyed by atom (or packages.AlwaysTrue for the
    global settings) to built cp-atom payload tuples.

    Fixes: the original constructed a PayloadDict() and immediately threw
    it away by rebinding `d` to a plain dict (the PayloadDict is now
    actually populated — presumably the author's intent; it is a mapping
    so dict-consumers are unaffected), and it computed atom.atom(k) twice
    per entry.
    """
    d = PayloadDict()
    for k, v in self._dict.iteritems():
        a = atom.atom(k)
        d[a] = _build_cp_atom_payload(v, a, True)
    if self._global_settings:
        data = _build_cp_atom_payload(
            self._global_settings, packages.AlwaysTrue, payload_form=True)
        d[packages.AlwaysTrue] = tuple(data)
    return d
def test_concurrent_access(self):
    """Matching while a full-repo iteration is in flight must not break it."""
    iall = iter(self.repo)
    self.repo.match(atom("dev-lib/fake"))
    pkg = iall.next()
    # match against a *different* category than the one the iterator is
    # currently positioned on, to stress cache/iteration interplay
    if pkg.category == 'dev-util':
        self.repo.match(atom("dev-lib/fake"))
    else:
        self.repo.match(atom("dev-util/diffball"))
    # should not explode...
    list(iall)
def test_package_mask(self):
    """profiles/package.mask entries become default visibility limiters.

    Fix: the bare open().write() relied on refcounting to flush/close the
    file handle; use a context manager instead.
    """
    with open(pjoin(self.pdir, 'package.mask'), 'w') as f:
        f.write('''\
# lalala
it-is/broken
<just/newer-than-42
''')
    repo = self.mk_tree(self.dir)
    self.assertEqual(
        sorted([atom('it-is/broken'), atom('<just/newer-than-42')]),
        sorted(repo.default_visibility_limiters))
def _modify(self, atom_inst, func):
    """Apply func once per slot of atom_inst.

    Slot '0' (and a slotless atom) collapses to the bare cat/pkg key.
    """
    slots = atom_inst.slot or (None,)
    for slot in slots:
        if slot is None or slot == '0':
            rebuilt = atom(atom_inst.key)
        else:
            rebuilt = atom(atom_inst.key + ":" + slot)
        func(self, rebuilt)
def test_packages(self):
    """'packages' file: '*' entries form the system set, bare entries mask."""
    self.mk_profiles(
        {"packages": "*dev-util/diffball\ndev-util/foo\ndev-util/foo2\n"},
        {"packages": "*dev-util/foo\n-*dev-util/diffball\n-dev-util/foo2\n"},
    )
    p = self.get_profile("0")
    self.assertEqual(sorted(p.system), sorted([atom("dev-util/diffball")]))
    # non-system entries become version-negated mask atoms
    self.assertEqual(
        sorted(p.masks),
        sorted(atom("dev-util/foo%s" % x, negate_vers=True)
               for x in ["", "2"]))
    # profile 1 subtracts diffball from system and foo2 from the masks
    p = self.get_profile("1")
    self.assertEqual(sorted(p.system), sorted([atom("dev-util/foo")]))
    self.assertEqual(sorted(p.masks),
                     [atom("dev-util/foo", negate_vers=True)])
def test_masks(self):
    """Masks accumulate through the profile stack; '-' entries subtract."""
    # NOTE(review): the last profile is passed via **kwargs with a
    # non-identifier key ("package.mask") rather than positionally like
    # the others -- presumably mk_profiles routes kwargs specially (e.g.
    # to a base profile); confirm this is not meant to be a fourth
    # positional dict.
    self.mk_profiles(
        {"package.mask": "dev-util/foo"},
        {},
        {"package.mask": "-dev-util/confcache\ndev-util/foo"},
        **{"package.mask": "dev-util/confcache"}
    )
    self.assertEqual(
        sorted(self.get_profile("0").masks),
        sorted(atom("dev-util/" + x) for x in ["confcache", "foo"])
    )
    self.assertEqual(
        sorted(self.get_profile("1").masks),
        sorted(atom("dev-util/" + x) for x in ["confcache", "foo"])
    )
    # profile 2's "-dev-util/confcache" removes the inherited mask
    self.assertEqual(sorted(self.get_profile("2").masks),
                     [atom("dev-util/foo")])
def test_forced_use(self):
    """use.force / package.use.force stacking.

    use.force applies globally first; package.use.force then overrides
    per-package.
    """
    path = pjoin(self.dir, self.profile)
    self.assertEqualChunks(self.klass(path).forced_use, {})
    self.parsing_checks("package.use.force", "forced_use")
    self.wipe_path(pjoin(path, "package.use.force"))
    self.parsing_checks("use.force", "forced_use")
    self.write_file("use.force", "")
    self._check_package_use_files(path, "package.use.force", "forced_use")
    # global 'mmx' is merged into every package's chunk plus the
    # AlwaysTrue (atrue) fallback entry
    self.write_file("use.force", "mmx")
    self.assertEqualChunks(
        self.klass(path).forced_use,
        {
            "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X",), ("mmx",)),),
            "dev-util/foo": (chunked_data(atom("dev-util/foo"), (), ("X", "mmx")),),
            atrue: (chunked_data(atrue, (), ("mmx",)),),
        },
    )
    # a global negation ('-foon') lands in every disabled set
    self.write_file("use.force", "mmx\n-foon")
    self.assertEqualChunks(
        self.klass(path).forced_use,
        {
            "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("X", "foon"), ("mmx",)),),
            "dev-util/foo": (chunked_data(atom("dev-util/foo"), ("foon",), ("X", "mmx")),),
            atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
        },
    )
    # verify that use.force is layered first, then package.use.force
    self.write_file("package.use.force", "dev-util/bar -mmx foon")
    p = self.klass(path)
    self.assertEqualChunks(
        self.klass(path).forced_use,
        {
            atrue: (chunked_data(atrue, ("foon",), ("mmx",)),),
            "dev-util/bar": (chunked_data(atom("dev-util/bar"), ("mmx",), ("foon",)),),
        },
    )
    self.write_file("package.use.force", "")
    self.assertEqualChunks(
        self.klass(path).forced_use,
        {atrue: (chunked_data(atrue, ("foon",), ("mmx",)),)})
    self.simple_eapi_awareness_check(
        "package.use.force", "forced_use",
        bad_data="=de/bs-1:1 x\nda/bs y", good_data="=de/bs-1 x\nda/bs y"
    )
    # an atom with no flags is a parse error
    self.write_file("package.use.force", "dev-util/diffball")
    self.assertRaises(profiles.ProfileError, getattr, self.klass(path),
                      "forced_use")
def _process_update(sequence, filename, mods, moved):
    """Parse update-file lines into 'move'/'slotmove' commands
    (older, exception-raising variant).

    :raises ValueError: on malformed move/slotmove lines or versioned atoms.
    """
    for raw_line in sequence:
        line = raw_line.split()
        if line[0] == "move":
            if len(line) != 3:
                raise ValueError("move line %r isn't of proper form" % (raw_line,))
            src, trg = atom(line[1]), atom(line[2])
            if src.fullver is not None:
                raise ValueError("file %r, line %r; atom %s must be versionless"
                                 % (filename, raw_line, src))
            elif trg.fullver is not None:
                raise ValueError("file %r, line %r; atom %s must be versionless"
                                 % (filename, raw_line, trg))
            if src.key in moved:
                logger.warning(
                    "file %r, line %r: %s was already moved to %s,"
                    " this line is redundant." % (filename, raw_line, src, moved[src.key])
                )
                continue
            d = deque()
            mods[src.key][1].extend([("move", src, trg), d])
            # start essentially a new checkpoint in the trg
            mods[trg.key][1].append(d)
            mods[trg.key][1] = d
            moved[src.key] = trg
        elif line[0] == "slotmove":
            if len(line) != 4:
                raise ValueError("slotmove line %r isn't of proper form" % (raw_line,))
            src = atom(line[1])
            if src.key in moved:
                logger.warning(
                    "file %r, line %r: %s was already moved to %s,"
                    " this line is redundant.",
                    filename, raw_line, src, moved[src.key],
                )
                continue
            elif src.slot is not None:
                logger.warning(
                    "file %r, line %r: slotted atom makes no sense for slotmoves, ignoring",
                    filename, raw_line
                )
                # fix: the warning says "ignoring", but the line was still
                # processed; actually skip it (matching the newer variant
                # of this function).
                continue
            # atom construction validates both slot strings
            src_slot = atom("%s:%s" % (src, line[2]))
            trg_slot = atom("%s:%s" % (src.key, line[3]))
            mods[src.key][1].append(("slotmove", src_slot, line[3]))
def parse_moves(location):
    """Collect all 'move' commands from the update files under location.

    Files named like '1Q-2005' are processed in chronological order
    (sorted by (year, quarter)), so later moves win.
    """
    def chrono_key(fname):
        # '1Q-2005' -> ('2005', '1Q')
        return tuple(reversed(fname.split('-')))

    moves = {}
    for update_file in sorted(listdir_files(location), key=chrono_key):
        path = os.path.join(location, update_file)
        for entry in iter_read_bash(path):
            fields = entry.split()
            if fields[0] == 'move':
                moves[atom(fields[1])] = atom(fields[2])
    return moves
def setUp(self):
    # repo1 masks >=app-arch/bzip2-2.0; repo2 has no masks; livefs holds
    # the currently-installed bzip2
    pkg = FakeMutatedPkg('app-arch/bzip2-1.0.1-r1', slot='0')
    masked_atom = atom('>=app-arch/bzip2-2.0')
    self.repo1 = FakeRepo(repo_id='gentoo', location='/usr/portage',
                          masks=(masked_atom,))
    self.repo2 = FakeRepo(repo_id='fakerepo',
                          location='/var/gentoo/repos/fakerepo')
    self.livefs = FakeRepo(repo_id='vdb', pkgs=[pkg])
    BaseFormatterTest.setUp(self)
def test_forced_use(self):
    """Forced USE flags render parenthesized; toggled/new flags get * / %*.

    Fix: ('static') was a plain string, not a one-element tuple (missing
    trailing comma), so collapsed_restrict_to_data received the flag
    character-by-character instead of as a single 'static' flag.
    """
    self.formatter.forced_use = collapsed_restrict_to_data(
        [(atom('=app-arch/bzip2-1.0.3-r6'), ('static',))])

    # new pkg: static use flag forced on
    self.formatter.format(
        FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6',
                             iuse=['static'], use=['static'])))
    self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(), ' ',
                   Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
                   Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
                   ' USE="(', Color('fg', 'red'), Bold(), 'static',
                   Reset(), ')"')

    # rebuilt pkg: toggled static use flag forced on
    self.formatter.format(
        FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6',
                             iuse=['static'], use=['static']),
               FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static'])))
    self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(), ' ',
                   Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
                   Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
                   ' USE="(', Color('fg', 'green'), Bold(), 'static',
                   Reset(), '*)"')

    # rebuilt pkg: new static use flag forced on
    self.formatter.format(
        FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6',
                             iuse=['static'], use=['static']),
               FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
    self.assertOut('[', Color('fg', 'green'), 'ebuild', Reset(), ' ',
                   Color('fg', 'yellow'), Bold(), 'R', Reset(), ' ] ',
                   Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6', Reset(),
                   ' USE="(', Color('fg', 'yellow'), Bold(), 'static',
                   Reset(), '%*)"')
def feed(self, pkgset, reporter): base = os.path.dirname(pkgset[0].ebuild.path) category = os.path.basename( os.path.dirname(os.path.dirname(pkgset[0].ebuild.path))) ebuild_ext = '.ebuild' mismatched = [] invalid = [] # note we don't use os.walk, we need size info also for filename in listdir(base): # while this may seem odd, written this way such that the # filtering happens all in the genexp. if the result was being # handed to any, it's a frame switch each # char, which adds up. if any(True for x in filename if x not in allowed_filename_chars_set): reporter.add_report(Glep31Violation(pkgset[0], filename)) if filename.endswith(ebuild_ext) or filename in \ ("Manifest", "metadata.xml"): if os.stat(pjoin(base, filename)).st_mode & 0111: reporter.add_report(ExecutableFile(pkgset[0], filename)) if filename.endswith(ebuild_ext): utf8_check(pkgset[0], base, filename, reporter) pkg_name = os.path.basename(filename[:-len(ebuild_ext)]) try: pkg_atom = atom('=%s/%s' % (category, pkg_name)) if pkg_atom.package != os.path.basename(base): mismatched.append(pkg_name) except MalformedAtom: invalid.append(pkg_name) if mismatched: reporter.add_report(MismatchedPN(pkgset[0], mismatched)) if invalid: reporter.add_report(InvalidPN(pkgset[0], invalid)) if not os.path.exists(pjoin(base, 'files')): return unprocessed_dirs = deque(["files"]) while unprocessed_dirs: cwd = unprocessed_dirs.pop() for fn in listdir(pjoin(base, cwd)): afn = pjoin(base, cwd, fn) st = os.lstat(afn) if stat.S_ISDIR(st.st_mode): if fn not in self.ignore_dirs: unprocessed_dirs.append(pjoin(cwd, fn)) elif stat.S_ISREG(st.st_mode): if st.st_mode & 0111: reporter.add_report(ExecutableFile(pkgset[0], pjoin(cwd, fn))) if not fn.startswith("digest-"): if st.st_size > 20480: reporter.add_report(SizeViolation(pkgset[0], fn, st.st_size)) if any(True for x in fn if x not in allowed_filename_chars_set): reporter.add_report(Glep31Violation(pkgset[0], pjoin(cwd, fn)))
def test_worldfile_atom(self):
    # pkgs matching a world-file atom render fully bolded
    self.formatter.world_list = [atom('app-arch/bzip2')]
    self.formatter.format(
        FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6')))
    self.assertOut('[', Color('fg', 'green'), Bold(), 'ebuild', Reset(),
                   ' ', Color('fg', 'green'), Bold(), 'N', Reset(), ' ] ',
                   Color('fg', 'green'), Bold(),
                   'app-arch/bzip2-1.0.3-r6', Reset())
def _simple_redirect_test(self, attr, arg1='=dev-util/diffball-1.0', arg2=None):
    """Check repo.operations.<attr> forwards to its implementation hook,
    and that the op disappears entirely once the repo is frozen."""
    l = []
    uniq_obj = object()
    def f(*a, **kw):
        # drop the op instance (first) and observer (last), log the rest
        a = a[1:-1]
        l.extend((a, kw))
        return uniq_obj
    # if replace, override _replace since replace reflects to it
    class my_ops(operations):
        locals()['_cmd_implementation_%s' % attr] = f
    self.repo.operations_kls = my_ops
    args = [self.repo.match(atom(arg1))]
    if arg2:
        args.append(versioned_CPV(arg2))
    self.repo.frozen = False
    op = getattr(self.repo.operations, attr)
    def simple_check(op, args, **kw):
        l[:] = []
        self.assertEqual(op(*args, **kw), uniq_obj)
        self.assertEqual(len(l), 2)
        self.assertEqual(list(l[0]), args)
        self.assertTrue(l)
    self.assertTrue(self.repo.operations.supports(attr))
    simple_check(op, args)
    self.assertFalse(l[1])
    simple_check(op, args)
    self.assertNotIn('force', l[1])
    # frozen repos must not advertise or expose the mutating op
    self.repo.frozen = True
    self.assertFalse(self.repo.operations.supports(attr))
    self.assertFalse(hasattr(self.repo.operations, attr))
def test_remove(self):
    """Removing a versioned atom drops the whole matching entry on flush."""
    pkgset = self.gen_pkgset("dev-util/diffball\ndev-util/bsdiff")
    pkgset.remove(atom("=dev-util/diffball-0.4"))
    pkgset.flush()
    with open(self.fn) as f:
        remaining = sorted(ln.strip() for ln in f if ln.strip())
    self.assertEqual(remaining, ["dev-util/bsdiff"])
def test_iter(self):
    # iterating the filtered view yields exactly what survives the filter
    # (diffball and fake filtered away, so only bsdiff remains)
    repo, vrepo = self.setup_repos(
        packages.PackageRestriction(
            "package",
            values.OrRestriction(*[values.StrExactMatch(x)
                                   for x in ("diffball", "fake")])
        )
    )
    self.assertEqual(sorted(vrepo),
                     sorted(repo.itermatch(atom("dev-util/bsdiff"))))
def insert_blockers(self, stack, choices, blocks):
    """Register blockers from the current choice point.

    :return: the first (blocker, matches) conflict that could not be
        resolved, or None if every blocker was clean or resolved past.
    """
    # level blockers.
    was_livefs = choices.current_pkg.repo.livefs
    for x in blocks:
        if not was_livefs:
            self._ensure_livefs_is_loaded(x)
        rewrote_blocker = self.generate_mangled_blocker(choices, x)
        l = self.state.add_blocker(choices, rewrote_blocker, key=x.key)
        if l:
            # blocker caught something. yay.
            self._dprint("%s blocker %s hit %s for atom %s pkg %s",
                         (stack[-1].mode, x, l, stack[-1].atom,
                          choices.current_pkg))
            if x.weak_blocker:
                # note that we use the top frame of the stacks' dbs; this
                # is to allow us to upgrade as needed.
                # For this to match, it's *only* possible if the blocker
                # is resolved since the limiter is already in place.
                result = self._rec_add_atom(
                    packages.KeyedAndRestriction(
                        restriction.Negate(x), _atom.atom(x.key),
                        key=x.key),
                    stack, stack[0].dbs)
                if not result:
                    # ok, inserted a new version. did it take care of
                    # the conflict? it /may/ not have, via filling a
                    # different slot...
                    result = self.state.match_atom(x)
                    if not result:
                        # ignore the blocker, we resolved past it.
                        continue
            return x, l
    return None
def parse_atom(restriction, repo, livefs_repos, return_none=False):
    """Use :obj:`parserestrict.parse_match` to produce a single atom.

    This matches the restriction against a repo. If multiple pkgs match, then
    the restriction is applied against installed repos skipping pkgs from the
    'virtual' category. If multiple pkgs still match the restriction,
    AmbiguousQuery is raised otherwise the matched atom is returned.

    :param restriction: string to convert.
    :param repo: :obj:`pkgcore.repository.prototype.tree` instance to search in.
    :param livefs_repos: :obj:`pkgcore.config.domain.all_livefs_repos` instance to
        search in.
    :param return_none: indicates if no matches raises or returns C{None}
    :return: an atom or C{None}.
    """
    # NOTE(review): return_none is documented above but never consulted in
    # this body -- NoMatches is always raised; confirm intent.
    key_matches = set(x.key for x in repo.itermatch(restriction))
    if not key_matches:
        raise NoMatches(restriction)
    elif len(key_matches) > 1:
        # ambiguous against the main repo; try to disambiguate via the
        # installed packages, ignoring the virtual/* category
        installed_matches = set(
            x.key for x in livefs_repos.itermatch(restriction)
            if x.category != 'virtual')
        if len(installed_matches) == 1:
            restriction = atom(installed_matches.pop())
        else:
            raise AmbiguousQuery(restriction, sorted(key_matches))
    if isinstance(restriction, atom):
        # atom is guaranteed to be fine, since it's cat/pkg
        return restriction
    return packages.KeyedAndRestriction(restriction, key=key_matches.pop())
def setup_repos(self, restrictions=None):
    """Build a raw SimpleTree plus a filtered view over it.

    When no restriction is supplied, the view filters dev-util/diffball.
    """
    raw = SimpleTree({
        "dev-util": {
            "diffball": ["1.0", "0.7"],
            "bsdiff": ["0.4.1", "0.4.2"],
        },
        "dev-lib": {"fake": ["1.0", "1.0-r1"]},
    })
    if restrictions is None:
        restrictions = atom("dev-util/diffball")
    return raw, filtered.tree(raw, restrictions)
def __init__(self, masked_use=None, forced_use=None, provides=None,
             masks=(), virtuals=None, arch='x86', name='none'):
    """Fake profile/domain object for tests.

    Fix: mutable default arguments ({} and []) were shared between all
    instances; replaced with None/() sentinels.
    """
    masked_use = masked_use or {}
    forced_use = forced_use or {}
    self.provides_repo = SimpleTree(provides or {})
    self.masked_use = {atom(k): v for k, v in masked_use.iteritems()}
    self.forced_use = {atom(k): v for k, v in forced_use.iteritems()}
    self.masks = tuple(map(atom, masks))
    self.virtuals = SimpleTree(virtuals or {})
    self.arch = arch
    self.name = name
    # arch is always forced; default_arches are always maskable
    self.forced_data = collapsed_restrict_to_data(
        [(AlwaysTrue, (self.arch,))],
        self.forced_use.iteritems())
    self.masked_data = collapsed_restrict_to_data(
        [(AlwaysTrue, default_arches)],
        self.masked_use.iteritems())
def test_collision_slotted(self):
    # a slot-only target ("bar:0") must resolve unambiguously even when
    # several packages named 'bar' exist across categories and slots,
    # using the installed pkgs to disambiguate
    pkgs = [
        FakePkg('foo/bar-1.0.1', slot='0'),
        FakePkg('foo/bar-2.0.2', slot='2'),
        FakePkg('foon/bar-3.4.5', slot='0'),
    ]
    installed_pkgs = [
        FakePkg('foo/bar-1.0.0', slot='0'),
        FakePkg('foo/bar-2.0.1', slot='2'),
    ]
    installed_repos = FakeRepo(pkgs=installed_pkgs)
    repo = FakeRepo(pkgs=pkgs)
    a = pmerge.parse_target(parse_match("bar:0"), repo, installed_repos)
    assert len(a) == 1
    assert a[0].key == 'foo/bar'
    assert a[0].match(atom('foo/bar:0'))
    assert not a[0].match(atom('foo/bar:2'))
def parse_match(text): """generate appropriate restriction for text Parsing basically breaks it down into chunks split by /, with each chunk allowing for prefix/postfix globbing- note that a postfixed glob on package token is treated as package attribute matching, not as necessarily a version match. If only one chunk is found, it's treated as a package chunk. Finally, it supports a nonstandard variation of atom syntax where the category can be dropped. Examples: - `*`: match all - `dev-*/*`: category must start with 'dev-' - `dev-*`: package must start with 'dev-' - `*-apps/portage*`: category must end in '-apps', package must start with 'portage' - `>=portage-2.1`: atom syntax, package 'portage', version greater then or equal to '2.1' :param text: string to attempt to parse :type text: string :return: :obj:`pkgcore.restrictions.packages` derivative """ # Ensure the text var is a string if we're under py3k. if not is_py3k: text = text.encode('ascii') orig_text = text = text.strip() if "!" in text: raise ParseError( "!, or any form of blockers make no sense in this usage: %s" % ( text,)) tsplit = text.rsplit("/", 1) if len(tsplit) == 1: ops, text = collect_ops(text) if not ops: if "*" in text: r = convert_glob(text) if r is None: return packages.AlwaysTrue return packages.PackageRestriction("package", r) elif text.startswith("*"): raise ParseError( "cannot do prefix glob matches with version ops: %s" % ( orig_text,)) # ok... fake category. whee. try: r = list(util.collect_package_restrictions( atom.atom("%scategory/%s" % (ops, text)).restrictions, attrs=("category",), invert=True)) except errors.MalformedAtom, e: raise_from(ParseError(str(e))) if len(r) == 1: return r[0] return packages.AndRestriction(*r)
def _simple_redirect_test(self, attr, arg1='=dev-util/diffball-1.0', arg2=None):
    """Check repo.operations.<attr> forwards to its implementation hook,
    and that the op disappears entirely once the repo is frozen."""
    l = []
    uniq_obj = object()
    def f(*a, **kw):
        # drop the op instance (first) and observer (last), log the rest
        a = a[1:-1]
        l.extend((a, kw))
        return uniq_obj
    # if replace, override _replace since replace reflects to it
    class my_ops(operations):
        locals()['_cmd_implementation_%s' % attr] = f
    self.repo.operations_kls = my_ops
    args = [self.repo.match(atom(arg1))]
    if arg2:
        args.append(versioned_CPV(arg2))
    self.repo.frozen = False
    op = getattr(self.repo.operations, attr)
    def simple_check(op, args, **kw):
        l[:] = []
        self.assertEqual(op(*args, **kw), uniq_obj)
        self.assertEqual(len(l), 2)
        self.assertEqual(list(l[0]), args)
        self.assertTrue(l)
    self.assertTrue(self.repo.operations.supports(attr))
    simple_check(op, args)
    self.assertFalse(l[1])
    simple_check(op, args)  #, force=True)
    self.assertNotIn('force', l[1])
    # frozen repos must not advertise or expose the mutating op
    self.repo.frozen = True
    self.assertFalse(self.repo.operations.supports(attr))
    self.assertFalse(hasattr(self.repo.operations, attr))
def _simple_redirect_test(self, attr, arg1='=dev-util/diffball-1.0', arg2=None):
    """Check repo.operations.<attr> forwards to its implementation hook,
    and that the op disappears entirely once the repo is frozen."""
    l = []
    uniq_obj = object()
    def f(*a, **kw):
        # drop the op instance (first) and observer (last), log the rest
        a = a[1:-1]
        l.extend((a, kw))
        return uniq_obj
    # if replace, override _replace since replace reflects to it
    class my_ops(operations):
        locals()[f'_cmd_implementation_{attr}'] = f
    self.repo.operations_kls = my_ops
    args = [self.repo.match(atom(arg1))]
    if arg2:
        args.append(VersionedCPV(arg2))
    self.repo.frozen = False
    op = getattr(self.repo.operations, attr)
    def simple_check(op, args, **kw):
        l[:] = []
        assert op(*args, **kw) == uniq_obj
        assert len(l) == 2
        assert list(l[0]) == args
        assert l
    assert self.repo.operations.supports(attr)
    simple_check(op, args)
    assert not l[1]
    simple_check(op, args)
    assert 'force' not in l[1]
    # frozen repos must not advertise or expose the mutating op
    self.repo.frozen = True
    assert not self.repo.operations.supports(attr)
    assert not hasattr(self.repo.operations, attr)
def restrict_use(blocker, profile):
    """Restrict blocker atom USE flags to available profile flags.

    Fixes: dropped the redundant `blocker.use is not None` re-check (the
    function already returned above when it is None) and the intermediate
    use_map.keys() materialization.
    """
    if blocker.use is None:
        return blocker
    a = atom(str(blocker))
    use_map = {}
    # strip USE flags
    # TODO: move this to an atom attr
    for x in blocker.use:
        if x[0] == '-':
            # negated flags are dropped entirely
            continue
        u = x
        if x[-3:] in ("(+)", "(-)"):
            # strip the (+)/(-) default suffix for the profile lookup
            u = u[:-3]
        use_map[u] = x
    use = set(use_map) & profile.use
    object.__setattr__(a, 'use', frozenset(use_map[x] for x in use))
    return a
def _atoms(self):
    """Parse self.path into a set of atoms, skipping comments and @set refs.

    :raises errors.ParsingError: wrapping any InvalidDependency hit while
        parsing an atom line.
    """
    try:
        s = set()
        for x in readlines_ascii(self.path, True):
            if not x or x.startswith("#"):
                continue
            elif x.startswith("@"):
                if self.error_on_subsets:
                    raise ValueError(
                        "set %s isn't a valid atom in pkgset %r"
                        % (x, self.path))
                # tolerated, but will not survive a round-trip
                logger.warning(
                    "set item %r found in pkgset %r: it will be "
                    "wiped on update since portage/pkgcore store set items "
                    "in a separate way", x[1:], self.path)
                continue
            s.add(atom(x))
    except InvalidDependency as e:
        compatibility.raise_from(
            errors.ParsingError("parsing %r" % self.path, exception=e))
    return s
def converter(key):
    """Split a 'cat/pkg:payload' key into an (atom, payload) pair."""
    # todo: convert this to using a common exception base, with
    # conversion of ValueErrors/atom exceptions...
    # NOTE(review): a key with no ':' raises IndexError here -- presumably
    # keys are always colon-qualified; confirm against callers.
    chunks = key.split(':', 1)
    return (atom.atom(chunks[0]), chunks[1])
def test_targets(self):
    # a bare atom argument becomes a package-scoped restriction
    options, _func = self.tool.parse_args(self.args + ['dev-util/foo'])
    assert list(options.restrictions) == \
        [(base.package_scope, atom.atom('dev-util/foo'))]
def test_identify_candidates(self):
    """Exercise restriction-based matching and sorting across the repo.

    Fix: the final negate/negate assertion was duplicated verbatim; one
    copy removed.
    """
    with pytest.raises(TypeError):
        self.repo.match("asdf")
    rc = packages.PackageRestriction(
        "category", values.StrExactMatch("dev-util"))
    assert \
        sorted(set(x.package for x in self.repo.itermatch(rc))) == \
        sorted(["diffball", "bsdiff"])
    rp = packages.PackageRestriction(
        "package", values.StrExactMatch("diffball"))
    assert list(
        x.version for x in
        self.repo.itermatch(rp, sorter=sorted)) == ["0.7", "1.0"]
    assert \
        self.repo.match(packages.OrRestriction(rc, rp), sorter=sorted) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2"))
    assert \
        sorted(self.repo.itermatch(packages.AndRestriction(rc, rp))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0"))
    assert sorted(self.repo) == self.repo.match(
        packages.AlwaysTrue, sorter=sorted)
    # mix/match cat/pkg to check that it handles that corner case
    # properly for sorting.
    assert \
        sorted(self.repo, reverse=True) == \
        self.repo.match(
            packages.OrRestriction(rc, rp, packages.AlwaysTrue),
            sorter=partial(sorted, reverse=True))
    rc2 = packages.PackageRestriction(
        "category", values.StrExactMatch("dev-lib"))
    assert sorted(
        self.repo.itermatch(packages.AndRestriction(rp, rc2))) == []
    # note this mixes a category level match, and a pkg level
    # match. they *must* be treated as an or.
    assert \
        sorted(self.repo.itermatch(packages.OrRestriction(rp, rc2))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
    # this is similar to the test above, but mixes a cat/pkg
    # candidate with a pkg candidate
    rp2 = packages.PackageRestriction(
        "package", values.StrExactMatch("fake"))
    r = packages.OrRestriction(atom("dev-util/diffball"), rp2)
    assert \
        sorted(self.repo.itermatch(r)) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
    assert \
        sorted(self.repo.itermatch(
            packages.OrRestriction(packages.AlwaysTrue, rp2))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-util/diffball-0.7", "dev-util/diffball-1.0",
            "dev-util/bsdiff-0.4.1", "dev-util/bsdiff-0.4.2",
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
    assert \
        sorted(self.repo.itermatch(packages.PackageRestriction(
            'category', values.StrExactMatch('dev-util', negate=True)))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
    obj = malleable_obj(livefs=False)
    pkg_cls = post_curry(MutatedPkg, {'repo': obj})
    assert \
        sorted(self.repo.itermatch(boolean.AndRestriction(
            boolean.OrRestriction(
                packages.PackageRestriction(
                    "repo.livefs", values.EqualityMatch(False)),
                packages.PackageRestriction(
                    "category", values.StrExactMatch("virtual"))),
            atom("dev-lib/fake")), pkg_cls=pkg_cls)) == \
        sorted(VersionedCPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
    # double negation collapses back to a plain dev-lib match
    assert \
        sorted(self.repo.itermatch(packages.PackageRestriction(
            'category', values.StrExactMatch('dev-lib', negate=True),
            negate=True))) == \
        sorted(VersionedCPV(x) for x in (
            "dev-lib/fake-1.0", "dev-lib/fake-1.0-r1"))
def feed(self, pkgset, reporter): base = os.path.dirname(pkgset[0].ebuild.path) category = os.path.basename( os.path.dirname(os.path.dirname(pkgset[0].ebuild.path))) ebuild_ext = '.ebuild' mismatched = [] invalid = [] # note we don't use os.walk, we need size info also for filename in listdir(base): # while this may seem odd, written this way such that the # filtering happens all in the genexp. if the result was being # handed to any, it's a frame switch each # char, which adds up. if any(True for x in filename if x not in allowed_filename_chars_set): reporter.add_report(Glep31Violation(pkgset[0], filename)) if filename.endswith(ebuild_ext) or filename in \ ("Manifest", "metadata.xml"): if os.stat(pjoin(base, filename)).st_mode & 0111: reporter.add_report(ExecutableFile(pkgset[0], filename)) if filename.endswith(ebuild_ext): utf8_check(pkgset[0], base, filename, reporter) pkg_name = os.path.basename(filename[:-len(ebuild_ext)]) try: pkg_atom = atom('=%s/%s' % (category, pkg_name)) if pkg_atom.package != os.path.basename(base): mismatched.append(pkg_name) except MalformedAtom: invalid.append(pkg_name) if mismatched: reporter.add_report(MismatchedPN(pkgset[0], mismatched)) if invalid: reporter.add_report(InvalidPN(pkgset[0], invalid)) if not os.path.exists(pjoin(base, 'files')): return unprocessed_dirs = deque(["files"]) while unprocessed_dirs: cwd = unprocessed_dirs.pop() for fn in listdir(pjoin(base, cwd)): afn = pjoin(base, cwd, fn) st = os.lstat(afn) if stat.S_ISDIR(st.st_mode): if fn not in self.ignore_dirs: unprocessed_dirs.append(pjoin(cwd, fn)) elif stat.S_ISREG(st.st_mode): if st.st_mode & 0111: reporter.add_report( ExecutableFile(pkgset[0], pjoin(cwd, fn))) if not fn.startswith("digest-"): if st.st_size > 20480: reporter.add_report( SizeViolation(pkgset[0], fn, st.st_size)) if any(True for x in fn if x not in allowed_filename_chars_set): reporter.add_report( Glep31Violation(pkgset[0], pjoin(cwd, fn)))
def unversioned_atom(self):
    """Return an atom for this package's key, with no version constraint."""
    pkg_key = self.key
    return atom.atom(pkg_key)
def slotted_atom(self):
    """Return an atom restricted to this package's key and slot."""
    slotted = "%s:%s" % (self.key, self.slot)
    return atom(slotted)
def cp_atom(atom_str):
    """Match ``atom_str`` against the configured 'gentoo' repo.

    :param atom_str: package atom string to parse and match.
    :return: the repo's matches for the parsed atom.
    """
    config = pkgcore.config.load_config()
    gentoo_repo = config.repo['gentoo']
    restriction = atom_mod.atom(atom_str)
    return gentoo_repo.match(restriction)
def find_single_use(repo, cp):
    """Match ``cp`` with the python_single_target_python3_8 USE dep in ``repo``."""
    use_dep = '[python_single_target_python3_8]'
    restriction = atom_mod.atom(cp + use_dep)
    return repo.match(restriction)
def mk_pkg(self, cpvstr):
    """Create (if missing) and return the files/ dir for the package ``cpvstr`` names."""
    parsed = atom.atom(cpvstr)
    target = pjoin(
        self.repo.location, parsed.category, parsed.package, 'files')
    os.makedirs(target, exist_ok=True)
    return target
def find_normal_use(repo, cp):
    """Match ``cp`` with the python_targets_python3_8 USE dep in ``repo``."""
    use_dep = '[python_targets_python3_8]'
    restriction = atom_mod.atom(cp + use_dep)
    return repo.match(restriction)
def main(options, out, err):
    """Resolve options.pkg to a single package and run its build phases.

    :param options: parsed namespace (pkg, domain, phase, no_auto, debug).
    :param out: formatter for normal output.
    :param err: formatter for error/warning output.
    :return: 1 on lookup/validation failure; otherwise None after running
        the selected phases.
    """
    pkg = options.pkg
    repos = None
    if os.path.isfile(pkg) and pkg.endswith('.ebuild'):
        ebuild_path = os.path.abspath(pkg)
        # repo root is three levels up: repo/category/package/pkg.ebuild
        repo_path = os.path.abspath(
            os.path.join(pkg, os.pardir, os.pardir, os.pardir))
        # find the ebuild's repo
        # TODO: iterating through the repos feels wrong, we could use a
        # multi-keyed dict with repo IDs and paths as keys with repo
        # objects as values (same thing we need for better portage-2
        # profile support)
        for x in options.domain.repos:
            if getattr(x, 'repository_type', None) == 'source' and \
                    x.raw_repo.location == repo_path:
                repos = x
                break
        if repos is None:
            err.write('no configured repo contains: %s' % ebuild_path)
            return 1
        # derive an exact "=cat/pkg-ver" atom from the ebuild's path pieces
        ebuild_P = os.path.basename(os.path.splitext(ebuild_path)[0])
        ebuild_category = ebuild_path.split(os.sep)[-3]
        pkg = atom.atom('=%s/%s' % (ebuild_category, ebuild_P))
    else:
        # plain atom target: search every repo in the domain
        try:
            pkg = atom.atom(pkg)
            repos = options.domain.all_repos
        except MalformedAtom:
            err.write('not a valid atom or ebuild: "%s"' % pkg)
            return 1

    pkgs = repos.match(pkg)
    if not pkgs:
        err.write('got no matches for %s\n' % (pkg, ))
        return 1
    if len(pkgs) > 1:
        err.write('got multiple matches for %s:' % (pkg, ))
        # only ambiguous if matches span more than one slot/repo pair;
        # otherwise fall through and pick the best version below
        if len(set((pkg.slot, pkg.repo) for pkg in pkgs)) != 1:
            for pkg in sorted(pkgs):
                err.write("repo %r, slot %r, %s" % (
                    getattr(pkg.repo, 'repo_id', 'unknown'),
                    pkg.slot,
                    pkg.cpvstr,
                ), prefix=" ")
            err.write()
            err.write(
                "please refine your restriction to match only one slot/repo pair\n"
            )
            return 1
        # single slot/repo pair: take the highest version
        pkgs = [max(pkgs)]
        err.write("choosing %r, slot %r, %s" % (getattr(
            pkgs[0].repo, 'repo_id', 'unknown'), pkgs[0].slot,
            pkgs[0].cpvstr), prefix=' ')

    kwds = {}
    # observer verbosity is tied to --debug
    build_obs = observer.build_observer(
        observer.formatter_output(out), not options.debug)

    # 'clean' is stripped from the phase list and handled separately below
    phases = [x for x in options.phase if x != 'clean']
    clean = (len(phases) != len(options.phase))

    if options.no_auto:
        kwds["ignore_deps"] = True
        # with dep auto-handling off, setup still needs fetched distfiles
        if "setup" in phases:
            phases.insert(0, "fetch")
    # by default turn off startup cleans; we clean by ourselves if
    # told to do so via an arg
    build = options.domain.build_pkg(
        pkgs[0], build_obs, clean=False, allow_fetching=True)
    if clean:
        build.cleanup(force=True)
        build._reload_state()
    phase_funcs = (getattr(build, x) for x in phases)
    for phase, f in izip(phases, phase_funcs):
        out.write('executing phase %s' % (phase, ))
        f(**kwds)
def test_stdin_targets(self):
    """A '-' target reads the package restriction from stdin."""
    fake_stdin = StringIO('dev-util/foo')
    with patch('sys.stdin', fake_stdin):
        options, _ = self.tool.parse_args(self.args + ['-'])
        expected = [(base.package_scope, atom.atom('dev-util/foo'))]
        assert list(options.restrictions) == expected
def test_contains(self):
    """A pkgset built from an atom string contains that atom."""
    pkgset = self.gen_pkgset("x11-base/xorg-x11")
    target = atom("x11-base/xorg-x11")
    self.assertIn(target, pkgset)
def test_iter(self):
    """Iterating the filtered repo yields only packages the filter leaves."""
    name_matchers = [
        values.StrExactMatch(name) for name in ("diffball", "fake")]
    restriction = packages.PackageRestriction(
        "package", values.OrRestriction(*name_matchers))
    repo, vrepo = self.setup_repos(restriction)
    expected = sorted(repo.itermatch(atom("dev-util/bsdiff")))
    self.assertEqual(sorted(vrepo), expected)
def test_iter(self):
    """Iterating the pkgset yields one atom per newline-separated entry."""
    pkgset = self.gen_pkgset("dev-util/diffball\ndev-util/bsdiff")
    expected = [atom(s) for s in ["dev-util/diffball", "dev-util/bsdiff"]]
    self.assertEqual(sorted(pkgset), sorted(expected))
def main(options, out, err):
    """Resolve options.target to one ebuild package and run its build phases.

    :param options: parsed namespace (target, domain, phase, no_auto, debug).
    :param out: formatter for normal output.
    :param err: formatter for error/warning output.
    :return: 1 if a phase raises a known build-format error; otherwise None.
        Argument problems abort via argparser.error().
    """
    target = options.target
    domain = options.domain
    repo = domain.ebuild_repos_raw

    # a *.ebuild path is translated into a path restriction; anything else
    # must parse as a package atom
    if target.endswith('.ebuild'):
        if not os.path.isfile(target):
            argparser.error("ebuild doesn't exist: '%s'" % target)
        try:
            restriction = repo.path_restrict(target)
        except ValueError as e:
            argparser.error(e)
    else:
        try:
            restriction = atom.atom(target)
        except MalformedAtom:
            if os.path.isfile(target):
                argparser.error("file not an ebuild: '%s'" % target)
            else:
                argparser.error("invalid package atom: '%s'" % target)

    pkgs = repo.match(restriction)
    if not pkgs:
        argparser.error("no matches: '%s'" % (target,))

    # default choice: the highest version among the matches
    pkg = max(pkgs)
    if len(pkgs) > 1:
        err.write("got multiple matches for '%s':" % (target,))
        # only fatal when matches span more than one slot/repo pair
        if len(set((p.slot, p.repo) for p in pkgs)) != 1:
            for p in pkgs:
                err.write(
                    "%s:%s::%s" % (
                        p.cpvstr, p.slot,
                        getattr(p.repo, 'repo_id', 'unknown')),
                    prefix=' ')
            err.write()
            argparser.error("please refine your restriction to one match")
        err.write(
            "choosing %s:%s::%s" % (
                pkg.cpvstr, pkg.slot,
                getattr(pkg.repo, 'repo_id', 'unknown')),
            prefix=' ')

    kwds = {}
    # observer verbosity is tied to --debug
    phase_obs = observer.phase_observer(
        observer.formatter_output(out), not options.debug)

    # 'clean' is stripped from the phase list and handled separately below
    phases = [x for x in options.phase if x != 'clean']
    clean = (len(phases) != len(options.phase))

    if options.no_auto:
        kwds["ignore_deps"] = True
        # with dep auto-handling off, setup still needs fetched distfiles
        if "setup" in phases:
            phases.insert(0, "fetch")

    # by default turn off startup cleans; we clean by ourselves if
    # told to do so via an arg
    build = domain.build_pkg(pkg, phase_obs, clean=False, allow_fetching=True)
    if clean:
        build.cleanup(force=True)
    build._reload_state()

    # validate all requested phases exist before executing any of them
    phase_funcs = []
    for phase in phases:
        p = getattr(build, phase, None)
        if p is None:
            argparser.error("unknown phase: '%s'" % phase)
        phase_funcs.append(p)

    try:
        for phase, f in izip(phases, phase_funcs):
            out.write('executing phase %s' % (phase,))
            f(**kwds)
    except format.errors:
        return 1
def _collect_default_providers(virtuals):
    """Map each virtual to the frozenset of unversioned atoms of its providers."""
    providers = {}
    for virt, data in virtuals.iteritems():
        keys = set()
        for pkg_seq in data.itervalues():
            for pkg in pkg_seq:
                keys.add(atom(pkg.key))
        providers[virt] = frozenset(keys)
    return providers
def test_simple_query(self):
    """match() finds an existing cpv and returns nothing for a missing one."""
    query = atom("=dev-util/diffball-1.0")
    # exercise the repo once before asserting
    self.repo.match(query)
    self.assertTrue(self.repo.match(query))
    missing = atom("dev-util/monkeys_rule")
    self.assertFalse(self.repo.match(missing))
def _process_updates(sequence, filename, mods, moved):
    """Parse profile update lines ('move'/'slotmove') into command streams.

    :param sequence: iterable of raw lines from an updates file.
    :param filename: name of the file, used only for log messages.
    :param mods: mapping of package key -> structure whose [1] element is a
        command list; mutated in place.
    :param moved: mapping of already-moved source key -> target atom;
        mutated in place, used to detect redundant lines.
    """
    for lineno, raw_line in enumerate(sequence, 1):
        line = raw_line.strip()
        # malformed lines are logged and skipped, never fatal
        if not line:
            logger.error(f'file {filename!r}: empty line {lineno}')
            continue
        elif line != raw_line:
            logger.error(
                f'file {filename!r}: extra whitespace in {raw_line!r} on line {lineno}'
            )

        line = line.split()
        if line[0] == 'move':
            # expected form: "move <src-atom> <trg-atom>", both versionless
            if len(line) != 3:
                logger.error(
                    f'file {filename!r}: {raw_line!r} on line {lineno}: bad move form'
                )
                continue
            src, trg = atom(line[1]), atom(line[2])
            if src.fullver is not None:
                logger.error(
                    f"file {filename!r}: {raw_line!r} on line {lineno}: "
                    f"atom {src} must be versionless")
                continue
            elif trg.fullver is not None:
                logger.error(
                    f"file {filename!r}: {raw_line!r} on line {lineno}: "
                    f"atom {trg} must be versionless")
                continue

            if src.key in moved:
                logger.warning(
                    f"file {filename!r}: {raw_line!r} on line {lineno}: "
                    f"{src} was already moved to {moved[src.key]}, "
                    "this line is redundant")
                continue

            # the shared deque `d` is appended to both the src and trg
            # command streams, then becomes trg's active stream, so later
            # commands against trg are visible from src's stream too
            d = deque()
            mods[src.key][1].extend([('move', src, trg), d])
            # start essentially a new checkpoint in the trg
            mods[trg.key][1].append(d)
            mods[trg.key][1] = d
            moved[src.key] = trg
        elif line[0] == 'slotmove':
            # expected form: "slotmove <src-atom> <src-slot> <trg-slot>"
            if len(line) != 4:
                logger.error(
                    f"file {filename!r}: {raw_line!r} on line {lineno}: "
                    "bad slotmove form")
                continue
            src = atom(line[1])

            if src.key in moved:
                logger.warning(
                    f"file {filename!r}: {raw_line!r} on line {lineno}: "
                    f"{src} was already moved to {moved[src.key]}, "
                    "this line is redundant")
                continue
            elif src.slot is not None:
                logger.error(
                    f"file {filename!r}: {raw_line!r} on line {lineno}: "
                    "slotted atom makes no sense for slotmoves")
                continue

            src_slot = atom(f'{src}:{line[2]}')
            # NOTE(review): trg_slot is never used after construction;
            # presumably kept so atom() validates the target slot syntax
            # (raising MalformedAtom on bad input) — confirm before removing.
            trg_slot = atom(f'{src.key}:{line[3]}')

            mods[src.key][1].append(('slotmove', src_slot, line[3]))
        else:
            logger.error(
                f'file {filename!r}: {raw_line!r} on line {lineno}: unknown command'
            )
def make_package_list(self) -> int:
    """Iteratively grow a keywording package list until deps are satisfied.

    Starting from the user-supplied packages, repeatedly runs dependency
    checks, pulls in unmatched dependencies as new packages, applies
    keywords to them, and re-verifies, until the check passes or an
    iteration fails to converge.

    :return: 0 on success (list printed to stdout), 1 if a dependency
        cannot be matched in the repo.
    """
    repo, git_repo = self.get_git_repository()

    with git_repo:
        start_time = datetime.datetime.utcnow()
        packages = self.args.package
        # '*' means "all arches" in the initial package-list line
        if self.args.arch is None:
            initial_arches = '*'
        else:
            initial_arches = ' '.join(self.args.arch)
        b = BugInfo(BugCategory.KEYWORDREQ,
                    f'{packages[0]} {initial_arches}\n')
        plist = dict(match_package_list(repo, b, only_new=True))
        assert len(plist) == 1
        # derive CC addresses from the arches of the first package;
        # '-' filters out e.g. prefix-style arch variants
        cc_arches = sorted([
            f'{x}@gentoo.org' for x in
            set(itertools.chain.from_iterable(plist.values()))
            if '-' not in x
        ])

        it = 1
        # prepare the initial set
        b = BugInfo(BugCategory.KEYWORDREQ, '\n'.join(packages),
                    cc=cc_arches)
        new_plist = dict(match_package_list(repo, b, only_new=True))
        add_keywords(plist.items(), b.category == BugCategory.STABLEREQ)

        while True:
            log.info(f'Iteration {it}: running pkgcheck ...')
            plist = new_plist
            check_res, issues = check_dependencies(repo, plist.items())

            # all good? we're done!
            if check_res:
                break

            # collect the keys of packages satisfying the unmet deps
            new_packages = set()
            for i in issues:
                eapi = repo[(i.category, i.package, i.version)].eapi
                for d in i.deps:
                    # TODO: handle USE-deps meaningfully
                    # TODO: handle <-deps
                    r = atom(d, eapi=eapi).no_usedeps
                    for m in repo.itermatch(r):
                        new_packages.add(m.key)
                        break
                    else:
                        log.error(f'No match for dependency: {d}')
                        return 1

            assert new_packages
            log.info(f'New packages: {" ".join(sorted(new_packages))}')

            # apply on *new* packages
            b = BugInfo(BugCategory.KEYWORDREQ, '\n'.join(new_packages),
                        cc=cc_arches)
            new_plist = dict(match_package_list(repo, b, only_new=True))
            for p in list(new_packages):
                if not any(x.key == p for x in new_plist):
                    log.info(f'Package {p} seems to be a red herring '
                             f'(already keyworded everywhere)')
                    new_packages.remove(p)
            add_keywords(new_plist.items(),
                         b.category == BugCategory.STABLEREQ)

            # but test on *old*
            log.info(f'Iteration {it}: verifying ...')
            check_res, issues = check_dependencies(repo, plist.items())
            if not check_res:
                log.error('Attempt to satisfy dependencies failed:')
                log.error('\n'.join(format_results(issues)))
                log.error('Please correct the package list and retry.')
                break

            for x in sorted(new_packages):
                # TODO: handle it gracefully
                assert x not in packages
                packages.append(x)
            it += 1

        end_time = datetime.datetime.utcnow()
        log.info(f'Time elapsed: {end_time - start_time}')
        log.info(f'Target CC: {" ".join(cc_arches)}')
        log.info('Package list follows:')
        # first package keeps the explicit arch spec; the rest inherit via '^'
        print(f'{packages[0]} {initial_arches}')
        print('\n'.join(f'{x} ^' for x in packages[1:]))

    return 0
def test_split_version_ranges(pkgs, expected):
    """split_version_ranges over the parsed atoms yields the expected ranges."""
    parsed = (atom(p) for p in pkgs)
    result = list(split_version_ranges(parsed))
    assert result == expected
def feed(self, pkgset, reporter):
    """Scan a package's directory for GLEP 31, PN, executability, size,
    empty-file and duplicate-file issues.

    :param pkgset: packages sharing one package dir; only ``pkgset[0]`` is
        used, to locate the dir and to attribute reports.
    :param reporter: sink receiving the generated report objects.
    """
    pkg = pkgset[0]
    base = os.path.dirname(pkg.path)
    category = os.path.basename(
        os.path.dirname(os.path.dirname(pkg.path)))
    ebuild_ext = '.ebuild'
    mismatched = []
    invalid = []
    # note we don't use os.walk, we need size info also
    for filename in listdir(base):
        # while this may seem odd, written this way such that the
        # filtering happens all in the genexp. if the result was being
        # handed to any, it's a frame switch each
        # char, which adds up.
        if any(True for x in filename if x not in allowed_filename_chars_set):
            reporter.add_report(Glep31Violation(pkg, filename))

        if (filename.endswith(ebuild_ext) or
                filename in ("Manifest", "metadata.xml")):
            # ebuilds/Manifest/metadata.xml must not carry exec bits
            if os.stat(pjoin(base, filename)).st_mode & 0o111:
                reporter.add_report(ExecutableFile(pkg, filename))

        if filename.endswith(ebuild_ext):
            utf8_check(pkg, base, filename, reporter)

            # the ebuild's name (minus extension) must parse as an atom
            # whose package matches the directory name
            pkg_name = os.path.basename(filename[:-len(ebuild_ext)])
            try:
                pkg_atom = atom(f'={category}/{pkg_name}')
                if pkg_atom.package != os.path.basename(base):
                    mismatched.append(pkg_name)
            except MalformedAtom:
                invalid.append(pkg_name)

    if mismatched:
        reporter.add_report(MismatchedPN(pkg, mismatched))
    if invalid:
        reporter.add_report(InvalidPN(pkg, invalid))

    if not os.path.exists(pjoin(base, 'files')):
        return
    # manual walk of files/ via a deque since we need lstat size info too;
    # files grouped by size first so only same-size files get checksummed
    unprocessed_dirs = deque(["files"])
    files_by_size = defaultdict(list)
    while unprocessed_dirs:
        cwd = unprocessed_dirs.pop()
        for fn in listdir(pjoin(base, cwd)):
            afn = pjoin(base, cwd, fn)
            st = os.lstat(afn)

            if stat.S_ISDIR(st.st_mode):
                if fn not in self.ignore_dirs:
                    unprocessed_dirs.append(pjoin(cwd, fn))
            elif stat.S_ISREG(st.st_mode):
                if st.st_mode & 0o111:
                    reporter.add_report(ExecutableFile(
                        pkg, pjoin(cwd, fn)))
                if not fn.startswith("digest-"):
                    if st.st_size == 0:
                        reporter.add_report(
                            EmptyFile(pkg, pjoin(cwd, fn)))
                    else:
                        files_by_size[st.st_size].append(pjoin(cwd, fn))
                        if st.st_size > 20480:
                            reporter.add_report(
                                SizeViolation(
                                    pkg, pjoin(cwd, fn), st.st_size))
                    if any(True for x in fn
                           if x not in allowed_filename_chars_set):
                        reporter.add_report(
                            Glep31Violation(pkg, pjoin(cwd, fn)))

    # checksum only size-colliding files; equal digests => duplicates
    files_by_digest = defaultdict(list)
    for size, files in files_by_size.items():
        if len(files) > 1:
            for f in files:
                digest = get_chksums(pjoin(base, f), self.digest_algo)[0]
                files_by_digest[digest].append(f)

    for digest, files in files_by_digest.items():
        if len(files) > 1:
            reporter.add_report(DuplicateFiles(pkg, files))
def test_simple_query(self):
    """match() returns results for a present cpv and none for an absent one."""
    query = atom("=dev-util/diffball-1.0")
    # exercise the repo once before asserting
    self.repo.match(query)
    assert self.repo.match(query)
    missing = atom("dev-util/monkeys_rule")
    assert not self.repo.match(missing)
def slotted_atom(self):
    """Return an atom restricted to this package's key and slot."""
    key_slot = f'{self.key}:{self.slot}'
    return atom(key_slot)
def parse_match(text):
    """generate appropriate restriction for text

    Parsing basically breaks it down into chunks split by /, with each
    chunk allowing for prefix/postfix globbing- note that a postfixed glob
    on package token is treated as package attribute matching, not as
    necessarily a version match.

    If only one chunk is found, it's treated as a package chunk.

    Finally, it supports a nonstandard variation of atom syntax where the
    category can be dropped.

    Examples:

    - `*`: match all
    - `dev-*/*`: category must start with 'dev-'
    - `dev-*`: package must start with 'dev-'
    - `*-apps/portage*`: category must end in '-apps', package must start
      with 'portage'
    - `>=portage-2.1`: atom syntax, package 'portage', version greater then
      or equal to '2.1'
    - dev-qt/*:5: all Qt 5 libs
    - boost:0/1.60: all packages named boost with a slot/subslot of 0/1.60.0

    :param text: string to attempt to parse
    :type text: string
    :return: :obj:`pkgcore.restrictions.packages` derivative
    """
    # Ensure the text var is a string if we're under py3k.
    if not is_py3k:
        text = text.encode('ascii')
    orig_text = text = text.strip()
    if "!" in text:
        raise ParseError(
            "'!' or any form of blockers make no sense in this usage: "
            "'%s'" % (text, ))

    restrictions = []
    # peel off ::repo and :slot[/subslot] suffixes (right to left) before
    # looking at the category/package portion
    if '::' in text:
        text, repo_id = text.rsplit('::', 1)
        restrictions.append(restricts.RepositoryDep(repo_id))
    if ':' in text:
        text, slot = text.rsplit(':', 1)
        slot, _sep, subslot = slot.partition('/')
        if slot:
            restrictions.append(restricts.SlotDep(slot))
        if subslot:
            restrictions.append(restricts.SubSlotDep(subslot))

    tsplit = text.rsplit("/", 1)
    if len(tsplit) == 1:
        # no category given: bare package token, possibly with version ops
        ops, text = collect_ops(text)
        if not ops:
            if "*" in text:
                r = convert_glob(text)
                if r is None:
                    # glob matched everything; no package restriction needed
                    restrictions.append(packages.AlwaysTrue)
                else:
                    restrictions.append(
                        packages.PackageRestriction("package", r))
                if len(restrictions) == 1:
                    return restrictions[0]
                return packages.AndRestriction(*restrictions)
        elif text.startswith("*"):
            raise ParseError(
                "cannot do prefix glob matches with version ops: %s" % (
                    orig_text, ))
        # ok... fake category. whee.
        # parse as "<ops>category/<pkg>" then strip the category
        # restriction back out
        try:
            r = list(collect_package_restrictions(
                atom.atom("%scategory/%s" % (ops, text)).restrictions,
                attrs=("category", ), invert=True))
        except errors.MalformedAtom as e:
            e.atom = orig_text
            raise_from(ParseError(str(e)))
        if not restrictions and len(r) == 1:
            return r[0]
        restrictions.extend(r)
        return packages.AndRestriction(*restrictions)
    elif text[0] in "=<>~" or "*" not in text:
        # normal cat/pkg atom (versioned or not) with no globbing
        try:
            return atom.atom(orig_text)
        except errors.MalformedAtom as e:
            raise_from(ParseError(str(e)))

    # cat/pkg with glob(s): convert each side independently
    # (py2: map returns a list, so r is indexable)
    r = map(convert_glob, tsplit)
    if not r[0] and not r[1]:
        restrictions.append(packages.AlwaysTrue)
    elif not r[0]:
        restrictions.append(packages.PackageRestriction("package", r[1]))
    elif not r[1]:
        restrictions.append(packages.PackageRestriction("category", r[0]))
    else:
        restrictions.extend((
            packages.PackageRestriction("category", r[0]),
            packages.PackageRestriction("package", r[1]),
        ))
    if len(restrictions) == 1:
        return restrictions[0]
    return packages.AndRestriction(*restrictions)
def test_atom(self):
    """--print-revdep records the given atom in the parsed config."""
    parsed = self.parse(
        '--print-revdep', 'a/spork', '--all', domain=domain_config)
    expected = [atom.atom('a/spork')]
    self.assertEqual(expected, parsed.print_revdep)