def test_existing_masks(self):
    """Masking a new atom must preserve entries already in package.mask."""
    # seed the masks file with one pre-existing masked atom plus its header
    self.masks_path.write_text(textwrap.dedent("""\
        # Random Dev <*****@*****.**> (2021-03-24)
        # masked
        cat/masked
    """))
    # EDITOR is faked with sed so the spawned editor appends a mask comment
    # non-interactively; the script is expected to exit via SystemExit
    with os_environ(EDITOR="sed -i '1s/$/mask comment/'"), \
            patch('sys.argv', self.args + ['=cat/pkg-0']), \
            pytest.raises(SystemExit), \
            chdir(pjoin(self.repo.path)):
        self.script()
    # both the old and the newly added mask entries should be present
    assert self.profile.masks == frozenset(
        [atom_cls('cat/masked'), atom_cls('=cat/pkg-0')])
def _mask_validate(parser, namespace):
    """Resolve mask targets into a sorted list of atoms on the namespace.

    Ebuild file paths resolve to exact versioned atoms; plain targets are
    parsed as atoms and must match the repo. With no targets, the package
    owning the current working directory is used.
    """
    atoms = []

    if not namespace.targets:
        # no explicit targets: mask the package owning the CWD
        restrict = namespace.repo.path_restrict(os.getcwd())
        # repo, category, and package level restricts
        if len(restrict) != 3:
            mask.error('not in a package directory')
        pkg = next(namespace.repo.itermatch(restrict))
        atoms.append(pkg.unversioned_atom)
    else:
        for target in namespace.targets:
            if os.path.exists(target) and target.endswith('.ebuild'):
                # ebuild file path: pin to that exact version
                restrict = namespace.repo.path_restrict(target)
                pkg = next(namespace.repo.itermatch(restrict))
                atom = pkg.versioned_atom
            else:
                try:
                    atom = atom_cls(target)
                except MalformedAtom:
                    mask.error(f'invalid atom: {target!r}')
                if not namespace.repo.match(atom):
                    mask.error(f'no repo matches: {target!r}')
            atoms.append(atom)

    namespace.atoms = sorted(atoms)
def test_mask_ebuild_path(self):
    """An explicit ebuild file path argument resolves to its versioned atom."""
    # fake the editor with sed so the mask comment is added non-interactively
    with os_environ(EDITOR="sed -i '1s/$/mask comment/'"), \
            patch('sys.argv', self.args + ['cat/pkg/pkg-0.ebuild']), \
            pytest.raises(SystemExit), \
            chdir(pjoin(self.repo.path)):
        self.script()
    # the ebuild path maps to the exact =cat/pkg-0 mask entry
    assert self.profile.masks == frozenset([atom_cls('=cat/pkg-0')])
def _pkg_atoms(paths):
    """Filter package atoms from commit paths.

    Yields an unversioned atom for each path whose leading
    ``category/package`` components form a valid atom; other paths
    are silently skipped.
    """
    for path in paths:
        # keep only the first two path components: category/package
        cat_pn = os.sep.join(path.split(os.sep, 2)[:2])
        try:
            yield atom_cls(cat_pn)
        except MalformedAtom:
            # not a package path -- ignore it
            pass
def determine_changes(options): """Determine changes staged in git.""" # stage changes as requested if options.git_add_arg: git.run('add', options.git_add_arg, options.cwd) # determine staged changes p = git.run( 'diff', '--name-status', '--cached', '-z', stdout=subprocess.PIPE) # ebuild path regex, validation is handled on instantiation _ebuild_re = re.compile(r'^(?P<category>[^/]+)/[^/]+/(?P<package>[^/]+)\.ebuild$') _eclass_re = re.compile(r'^eclass/(?P<name>[^/]+\.eclass)$') # if no changes exist, exit early if not p.stdout: commit.error('no staged changes exist') data = deque(p.stdout.strip('\x00').split('\x00')) changes = defaultdict(OrderedSet) while data: status = data.popleft() old_path = None if status.startswith('R'): status = 'R' old_path = data.popleft() path = data.popleft() path_components = path.split(os.sep) if path_components[0] in options.repo.categories and len(path_components) > 2: if mo := _ebuild_re.match(path): # ebuild changes try: atom = atom_cls(f"={mo.group('category')}/{mo.group('package')}") old = None if status == 'R' and (om := _ebuild_re.match(old_path)): old = atom_cls(f"={om.group('category')}/{om.group('package')}") changes[PkgChange].add(PkgChange( status, path, atom=atom, ebuild=True, old=old)) except MalformedAtom: continue
def _parse_file_line(line):
    """Pull atoms and status from file change lines.

    Returns an ``(atom, status)`` tuple for added/deleted/modified or
    renamed ebuild lines, or ``None`` when the line doesn't describe an
    ebuild (or the extracted atom is malformed).
    """
    # both line shapes extract the same named groups; try each in turn
    for regex in (ebuild_ADM_regex, ebuild_R_regex):
        match = regex.match(line)
        if match is None:
            continue
        category = match.group('category')
        pkg = match.group('P')
        try:
            return atom_cls(f'={category}/{pkg}'), match.group('status')
        except MalformedAtom:
            return None
    return None
def check_args(cls, parser, namespace):
    """Expand --commits into package/eclass scan restrictions via git diff."""
    if namespace.commits:
        # --commits conflicts with explicit scan targets
        if namespace.targets:
            targets = ' '.join(namespace.targets)
            s = pluralism(namespace.targets)
            parser.error(f'--commits is mutually exclusive with target{s}: {targets}')
        ref = namespace.commits
        repo = namespace.target_repo
        # restrict the diff to category dirs plus the eclass dir
        targets = list(repo.category_dirs) + ['eclass']
        try:
            p = subprocess.run(
                ['git', 'diff', '--cached', ref, '--name-only'] + targets,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=repo.location, encoding='utf8')
        except FileNotFoundError:
            parser.error('git not available to determine targets for --commits')
        if p.returncode != 0:
            # surface the first line of git's stderr in the error message
            error = p.stderr.splitlines()[0]
            parser.error(f'failed running git: {error}')
        elif not p.stdout:
            # no changes exist, exit early
            parser.exit()
        # split changed paths into package paths vs eclass paths
        pkgs, eclasses = partition(
            p.stdout.splitlines(), predicate=lambda x: x.startswith('eclass/'))
        # map category/package path prefixes to unversioned atoms
        pkgs = sorted(atom_cls(os.sep.join(x.split(os.sep, 2)[:2])) for x in pkgs)
        eclasses = filter(None, (eclass_regex.match(x) for x in eclasses))
        eclasses = sorted(x.group('eclass') for x in eclasses)
        restrictions = []
        if pkgs:
            restrict = packages.OrRestriction(*pkgs)
            restrictions.append((base.package_scope, restrict))
        if eclasses:
            # match pkgs that inherit any of the committed eclasses
            func = partial(cls._committed_eclass, frozenset(eclasses))
            restrict = values.AnyMatch(values.FunctionRestriction(func))
            restrictions.append((base.eclass_scope, restrict))
        # no pkgs or eclasses to check, exit early
        if not restrictions:
            parser.exit()
        namespace.restrictions = restrictions
def _commit(options, out, err):
    """Commit staged changes, updating manifests for changed packages."""
    repo = options.repo
    git_add_files = []
    # determine changes from staged files
    changes = determine_changes(options)
    # matches `git status --porcelain` untracked (??) ebuild entries
    _untracked_ebuild_re = re.compile(r'^\?\? (?P<category>[^/]+)/[^/]+/(?P<package>[^/]+)\.ebuild$')
    # update manifests for existing packages
    if atoms := {x.atom.unversioned_atom for x in changes.ebuild_changes}:
        if pkgs := {x.versioned_atom for x in repo.itermatch(packages.OrRestriction(*atoms))}:
            # pull all matches and drop untracked ebuilds
            p = git.run(
                'status', '--porcelain=v1', '-u', '-z', "*.ebuild",
                cwd=repo.location, stdout=subprocess.PIPE)
            for path in p.stdout.strip('\x00').split('\x00'):
                if mo := _untracked_ebuild_re.match(path):
                    try:
                        untracked = atom_cls(f"={mo.group('category')}/{mo.group('package')}")
                        # untracked ebuilds can't be manifested yet -- skip them
                        pkgs.discard(untracked)
                    except MalformedAtom:
                        continue
            # NOTE(review): chunk truncated here; manifest regeneration and
            # the actual `git commit` presumably follow -- not visible here.
def check_args(cls, parser, namespace):
    """Expand --commits into package restrictions from git diff output."""
    if not namespace.commits:
        return
    # --commits conflicts with explicit scan targets
    if namespace.targets:
        targets = ' '.join(namespace.targets)
        parser.error('--commits is mutually exclusive with '
                     f'target{_pl(namespace.targets)}: {targets}')
    repo = namespace.target_repo
    # diff against origin, limited to the repo's category dirs
    ret, out = spawn_get_output(
        ['git', 'diff', 'origin', '--name-only'] + list(repo.categories),
        cwd=repo.location)
    if ret != 0:
        parser.error(
            'git not available to determine targets for --commits')
    if not out:
        # no pkg changes exist
        parser.exit()
    # map changed paths' category/package prefixes to unversioned atoms
    changed = sorted(
        atom_cls(os.sep.join(path.split(os.sep, 2)[:2])) for path in out)
    namespace.restrictions = [
        (base.package_scope, packages.OrRestriction(*changed))]
def test_cwd_target(self, repo, make_git_repo, capsys, tool): git_repo = make_git_repo(repo.location) # empty repo with pytest.raises(SystemExit), \ chdir(repo.location): tool.parse_args(['mask']) out, err = capsys.readouterr() assert err.strip() == 'pkgdev mask: error: not in a package directory' # not in package dir repo.create_ebuild('cat/pkg-0') git_repo.add_all('cat/pkg-0') with pytest.raises(SystemExit), \ chdir(repo.location): tool.parse_args(['mask']) out, err = capsys.readouterr() assert err.strip() == 'pkgdev mask: error: not in a package directory' # masking CWD package with chdir(pjoin(repo.location, 'cat/pkg')): options, _ = tool.parse_args(['mask']) assert options.atoms == [atom_cls('cat/pkg')]
def test_targets(self, repo, make_git_repo, capsys, tool): git_repo = make_git_repo(repo.location) # invalid atom with pytest.raises(SystemExit), \ chdir(repo.location): tool.parse_args(['mask', 'pkg']) out, err = capsys.readouterr() assert err.strip() == "pkgdev mask: error: invalid atom: 'pkg'" # nonexistent pkg with pytest.raises(SystemExit), \ chdir(repo.location): tool.parse_args(['mask', 'cat/nonexistent']) out, err = capsys.readouterr() assert err.strip() == "pkgdev mask: error: no repo matches: 'cat/nonexistent'" # masked pkg repo.create_ebuild('cat/pkg-0') git_repo.add_all('cat/pkg-0') with chdir(repo.location): options, _ = tool.parse_args(['mask', 'cat/pkg']) assert options.atoms == [atom_cls('cat/pkg')]
def feed(self, pkgset):
    """Check a package directory's file layout for problems.

    Yields results for executable files, banned filename characters,
    invalid UTF-8 in ebuilds, mismatched/invalid ebuild names, unknown
    directory entries, empty/oversized files under files/, and duplicate
    files detected by size-then-digest comparison.
    """
    pkg = pkgset[0]
    pkg_path = pjoin(self.options.target_repo.location, pkg.category, pkg.package)
    ebuild_ext = '.ebuild'
    mismatched = []
    invalid = []
    unknown = []
    # note we don't use os.walk, we need size info also
    for filename in listdir(pkg_path):
        path = pjoin(pkg_path, filename)
        if self.gitignored(path):
            continue
        # any executable bit set on a regular file is flagged
        if os.path.isfile(path) and os.stat(path).st_mode & 0o111:
            yield ExecutableFile(filename, pkg=pkg)
        # While this may seem odd, written this way such that the filtering
        # happens all in the genexp. If the result was being handed to any,
        # it's a frame switch each char, which adds up.
        banned_chars = set(filename) - allowed_filename_chars_set
        if banned_chars:
            yield BannedCharacter(filename, sorted(banned_chars), pkg=pkg)
        if filename.endswith(ebuild_ext):
            # only the first 8 KiB is decoded to detect invalid UTF-8
            try:
                with open(path, mode='rb') as f:
                    f.read(8192).decode()
            except UnicodeDecodeError as e:
                yield InvalidUTF8(filename, str(e), pkg=pkg)
            pkg_name = os.path.basename(filename[:-len(ebuild_ext)])
            try:
                # the ebuild's package name must match its directory name
                pkg_atom = atom_cls(f'={pkg.category}/{pkg_name}')
                if pkg_atom.package != os.path.basename(pkg_path):
                    mismatched.append(pkg_name)
            except MalformedAtom:
                invalid.append(pkg_name)
        elif (self.options.gentoo_repo and
                filename not in ('Manifest', 'metadata.xml', 'files')):
            unknown.append(filename)
    if mismatched:
        yield MismatchedPN(sorted(mismatched), pkg=pkg)
    if invalid:
        yield InvalidPN(sorted(invalid), pkg=pkg)
    if unknown:
        yield UnknownPkgDirEntry(sorted(unknown), pkg=pkg)
    # group files/ entries by size so only same-sized files get digested
    files_by_size = defaultdict(list)
    pkg_path_len = len(pkg_path) + 1
    for root, dirs, files in os.walk(pjoin(pkg_path, 'files')):
        # don't visit any ignored directories
        for d in self.ignore_dirs.intersection(dirs):
            dirs.remove(d)
        base_dir = root[pkg_path_len:]
        for filename in files:
            path = pjoin(root, filename)
            if self.gitignored(path):
                continue
            file_stat = os.lstat(path)
            if stat.S_ISREG(file_stat.st_mode):
                if file_stat.st_mode & 0o111:
                    yield ExecutableFile(pjoin(base_dir, filename), pkg=pkg)
                if file_stat.st_size == 0:
                    yield EmptyFile(pjoin(base_dir, filename), pkg=pkg)
                else:
                    files_by_size[file_stat.st_size].append(pjoin(base_dir, filename))
                    # flag files/ entries larger than 20 KiB
                    if file_stat.st_size > 20480:
                        yield SizeViolation(
                            pjoin(base_dir, filename), file_stat.st_size, pkg=pkg)
                banned_chars = set(filename) - allowed_filename_chars_set
                if banned_chars:
                    yield BannedCharacter(
                        pjoin(base_dir, filename), sorted(banned_chars), pkg=pkg)
    # digest only size-colliding files; identical digests mean duplicates
    files_by_digest = defaultdict(list)
    for size, files in files_by_size.items():
        if len(files) > 1:
            for f in files:
                digest = get_chksums(pjoin(pkg_path, f), self.digest_algo)[0]
                files_by_digest[digest].append(f)
    for digest, files in files_by_digest.items():
        if len(files) > 1:
            yield DuplicateFiles(sorted(files), pkg=pkg)
def feed(self, pkg):
    """Scan a package's dependency attributes for bad dependencies.

    Yields results for unparsable metadata, unstated IUSE, misused slot
    operators, missing use-dep defaults, missing revisions on = deps,
    self-blockers, outdated/nonexistent blockers, and deprecated deps.
    """
    # iterate dep attrs (DEPEND/RDEPEND/...) as lowercase attr names
    for attr in sorted(x.lower() for x in pkg.eapi.dep_keys):
        try:
            deps = getattr(pkg, attr)
        except MetadataException as e:
            # metadata failed to parse; report via e.g. InvalidDepend
            cls = globals()[f'Invalid{attr.capitalize()}']
            yield cls(attr, e.msg(), pkg=pkg)
            continue
        nodes, unstated = self.iuse_filter(
            (atom_cls, boolean.OrRestriction), pkg, deps, attr=attr)
        yield from unstated
        # accumulated per-attr, reported sorted after the node walk
        outdated_blockers = set()
        nonexistent_blockers = set()
        deprecated = set()
        for node in nodes:
            if isinstance(node, boolean.OrRestriction):
                in_or_restriction = True
            else:
                in_or_restriction = False
            for atom in iflatten_instance(node, (atom_cls,)):
                if self.deprecated(atom) and not self.masked(atom):
                    deprecated.add(atom)
                if in_or_restriction and atom.slot_operator == '=':
                    yield BadDependency(
                        attr, atom, '= slot operator used inside || block', pkg=pkg)
                if pkg.eapi.options.has_use_dep_defaults and atom.use is not None:
                    missing_use_deps = self._check_use_deps(attr, atom)
                    for use, atoms in missing_use_deps.items():
                        pkgs = (x.cpvstr for x in sorted(atoms))
                        yield MissingUseDepDefault(attr, str(atom), use, pkgs, pkg=pkg)
                # exact-version deps should pin a revision as well
                if atom.op == '=' and not atom.revision:
                    yield MissingPackageRevision(attr, str(atom), pkg=pkg)
                if atom.blocks:
                    if atom.match(pkg):
                        yield BadDependency(attr, atom, "package blocks itself", pkg=pkg)
                    elif atom.slot_operator == '=':
                        yield BadDependency(
                            attr, atom, '= slot operator used in blocker', pkg=pkg)
                    elif self.existence_repo is not None:
                        # check for outdated blockers (2+ years old)
                        if atom.op == '=*':
                            s = f"={atom.cpvstr}*"
                        else:
                            s = atom.op + atom.cpvstr
                        # the same dep without the block prefix
                        unblocked = atom_cls(s)
                        if not self.options.search_repo.match(unblocked):
                            matches = self.existence_repo.match(unblocked)
                            if matches:
                                # removal date of the most recent match
                                removal = max(x.date for x in matches)
                                removal = datetime.strptime(removal, '%Y-%m-%d')
                                years = round((self.today - removal).days / 365, 2)
                                if years > 2:
                                    outdated_blockers.add((atom, years))
                            else:
                                nonexistent_blockers.add(atom)
        for atom, years in sorted(outdated_blockers):
            yield OutdatedBlocker(attr.upper(), str(atom), years, pkg=pkg)
        for atom in sorted(nonexistent_blockers):
            yield NonexistentBlocker(attr.upper(), str(atom), pkg=pkg)
        for atom in sorted(deprecated):
            yield DeprecatedPkg(attr.upper(), str(atom), pkg=pkg)
        # NOTE(review): this chunk begins mid-function -- it is the tail of
        # determine_changes()'s `while data:` loop; the def line is above
        # this visible range.
        path_components = path.split(os.sep)
        if path_components[0] in options.repo.categories and len(path_components) > 2:
            if mo := _ebuild_re.match(path):
                # ebuild changes
                try:
                    atom = atom_cls(f"={mo.group('category')}/{mo.group('package')}")
                    old = None
                    # for renames, also resolve the atom of the old ebuild path
                    if status == 'R' and (om := _ebuild_re.match(old_path)):
                        old = atom_cls(f"={om.group('category')}/{om.group('package')}")
                    changes[PkgChange].add(PkgChange(
                        status, path, atom=atom, ebuild=True, old=old))
                except MalformedAtom:
                    continue
            else:
                # non-ebuild package level changes
                atom = atom_cls(os.sep.join(path_components[:2]))
                changes[PkgChange].add(PkgChange(status, path, atom=atom, ebuild=False))
        elif mo := _eclass_re.match(path):
            changes[EclassChange].add(EclassChange(status, path, name=mo.group('name')))
        else:
            # anything else is grouped by its top-level directory name
            changes[path_components[0]].add(Change(status, path))
    return GitChanges(options, changes)


def determine_msg_args(options, changes):
    """Determine message-related arguments used with `git commit`."""
    args = []
    if options.file:
        # commit message taken verbatim from the given file
        args.extend(['-F', options.file])
    elif options.template:
        # NOTE(review): chunk truncated here; the template branch body is
        # outside the visible range.