def make_changes(
    self,
    local_tree,
    subpath,
    update_changelog,
    reporter,
    committer,
    base_proposal=None,
):
    """Set ``Rules-Requires-Root: no`` in debian/control and commit.

    Args:
      local_tree: working tree of the packaging branch
      subpath: path of the package root within the tree
      update_changelog: whether to add a changelog entry
        (None means "decide automatically"; treated like True here)
      reporter: unused here, kept for the changer interface
      committer: committer identity for the commit
      base_proposal: unused here, kept for the changer interface
    Returns:
      ChangerResult describing the single commit that was made.
    """
    old_revid = local_tree.last_revision()
    with ControlEditor.from_tree(local_tree, subpath) as editor:
        editor.source["Rules-Requires-Root"] = "no"
        outcome = RulesRequiresRootResult(editor.source["Source"])
    if update_changelog in (True, None):
        add_changelog_entry(
            local_tree,
            osutils.pathjoin(subpath, "debian/changelog"),
            ["Set Rules-Requires-Root: no."],
        )
    # allow_pointless=False: fail rather than record an empty commit.
    new_revid = local_tree.commit(
        "Set Rules-Requires-Root.",
        committer=committer,
        allow_pointless=False,
    )
    return ChangerResult(
        description="Set Rules-Requires-Root",
        mutator=outcome,
        branches=[("main", None, old_revid, new_revid)],
        tags=[],
        sufficient_for_proposal=True,
        proposed_commit_message="Set Rules-Requires-Root.",
    )
def add_build_dependency(context, requirement: AptRequirement):
    """Add *requirement* to Build-Depends in debian/control.

    Args:
      context: fixer context providing abspath() and commit()
      requirement: the AptRequirement to add
    Returns:
      the result of ``context.commit(...)`` on success; False when the
      dependency was already present or the file could not be edited
      while preserving formatting.
    Raises:
      TypeError: if *requirement* is not an AptRequirement
      CircularDependency: if the requirement touches one of this
        package's own binary packages
    """
    if not isinstance(requirement, AptRequirement):
        raise TypeError(requirement)

    path = context.abspath("debian/control")
    try:
        with ControlEditor(path=path) as editor:
            # A source package must not build-depend on its own binaries.
            for binary in editor.binaries:
                if requirement.touches_package(binary["Package"]):
                    raise CircularDependency(binary["Package"])
            for relation in requirement.relations:
                editor.source["Build-Depends"] = ensure_relation(
                    editor.source.get("Build-Depends", ""),
                    PkgRelation.str([relation]))
    except FormattingUnpreservable as e:
        logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
        return False

    desc = requirement.pkg_relation_str()
    # The editor records whether writing back actually changed anything.
    if not editor.changed:
        logging.info("Giving up; dependency %s was already present.", desc)
        return False

    logging.info("Adding build dependency: %s", desc)
    return context.commit("Add missing build dependency on %s." % desc)
def _scrub_obsolete(wt: WorkingTree, debian_path: str, compat_release: str,
                    upgrade_release: str,
                    allow_reformatting: bool = True) -> ScrubObsoleteResult:
    """Drop relations and maintscript entries obsoleted by *upgrade_release*.

    Args:
      wt: working tree containing the package
      debian_path: path of the debian/ directory within the tree
      compat_release: oldest release that must stay supported
      upgrade_release: release being upgraded to
      allow_reformatting: whether the control file may be reformatted
    Returns:
      ScrubObsoleteResult listing touched files and removed entries.
    Raises:
      NotDebianPackage: if neither debian/control nor debian/debcargo.toml
        exists under *debian_path*.
    """
    specific_files = []
    control_path = os.path.join(debian_path, "control")
    # Initialize up front: in the debcargo fallback below the original
    # code left `package` unbound, so the update_maintscripts() call
    # further down raised NameError.
    package = None
    try:
        with ControlEditor(wt.abspath(control_path),
                           allow_reformatting=allow_reformatting) as editor:
            specific_files.append(control_path)
            package = editor.source["Source"]
            control_removed = drop_old_relations(editor, compat_release, upgrade_release)
    except FileNotFoundError:
        if wt.has_filename(os.path.join(debian_path, "debcargo.toml")):
            # debcargo packages have no hand-written control file.
            control_removed = []
        else:
            raise NotDebianPackage(wt, debian_path)
    maintscript_removed = []
    # NOTE(review): for debcargo packages `package` is None here — confirm
    # that update_maintscripts() tolerates a None package name.
    for path, removed in update_maintscripts(
            wt, debian_path, PackageChecker(upgrade_release, build=False),
            package, allow_reformatting):
        if removed:
            maintscript_removed.append((path, removed, upgrade_release))
            specific_files.append(path)
    return ScrubObsoleteResult(
        specific_files=specific_files,
        control_removed=control_removed,
        maintscript_removed=maintscript_removed,
    )
def apply_transition(wt, debian_path, ben, update_changelog, allow_reformatting):
    """Apply a ben transition description to debian/control.

    Args:
      wt: working tree containing the package
      debian_path: path of the debian/ directory within the tree
      ben: parsed ben transition description
      update_changelog: unused here, kept for the caller's interface
      allow_reformatting: whether the control file may be reformatted
    Returns:
      whatever _apply_transition() returns for the opened editor.
    Raises:
      NotDebianPackage: if debian/control does not exist.
    """
    path = os.path.join(debian_path, "control")
    try:
        editor = ControlEditor(
            wt.abspath(path), allow_reformatting=allow_reformatting)
        with editor:
            return _apply_transition(editor, ben)
    except FileNotFoundError:
        # No control file: this tree is not a Debian source package.
        raise NotDebianPackage(wt, debian_path)
def tree_drop_mia_uploaders(local_tree, subpath, update_changelog=None, committer=None):
    """Remove MIA (missing-in-action) uploaders from debian/control.

    Looks up open MIA team-maintenance bugs that affect this package,
    drops the people CC'd on those bugs from the Uploaders field, adds
    changelog entries, and commits the result.

    Args:
      local_tree: working tree of the packaging branch
      subpath: path of the package root within the tree
      update_changelog: whether to add changelog entries
        (None is treated like True)
      committer: committer identity for the commit
    Returns:
      MIAResult with the source name, removed uploaders and bug list.
    Raises:
      ChangerError("nothing-to-do") if no MIA bugs affect the package.
    """
    control_path = local_tree.abspath(
        osutils.pathjoin(subpath, "debian/control"))
    changelog_entries = []
    with ControlEditor(path=control_path) as editor:
        source = editor.source["Source"]
        # Only bugs that are both MIA team-maintenance bugs and filed
        # against this source package are relevant.
        bugs = all_mia_teammaint_bugs().intersection(get_package_bugs(source))
        if not bugs:
            raise ChangerError("nothing-to-do", "No MIA people")
        removed_uploaders = []
        fixed_bugs = []
        for bug in bugs:
            mia_people = get_mia_maintainers(bug)
            if mia_people is None:
                logging.warning('No MIA people (X-Debbugs-CC) found in bug %d', bug)
                continue
            removed_mia = drop_uploaders(editor, mia_people)
            if len(removed_mia) == 0:
                continue
            if len(removed_mia) == 1:
                description = "Remove MIA uploader %s." % removed_mia[0]
            else:
                description = "Remove MIA uploaders %s." % (
                    ", ".join(removed_mia))
            # Only claim to close the bug when every listed MIA person
            # was actually removed.
            if removed_mia == mia_people:
                description += " Closes: #%d" % bug
            changelog_entries.append(description)
            removed_uploaders.extend(removed_mia)
    # NOTE(review): fixed_bugs is never appended to, so the result always
    # reports bugs=[] — confirm whether bug numbers should be collected.
    if not changelog_entries:
        return MIAResult(source, removed_uploaders=[], bugs=fixed_bugs)
    if update_changelog in (True, None):
        add_changelog_entry(
            local_tree,
            osutils.pathjoin(subpath, "debian/changelog"),
            changelog_entries,
        )
    local_tree.commit("Remove MIA uploaders.", committer=committer, allow_pointless=False)
    return MIAResult(source, removed_uploaders=removed_uploaders, bugs=fixed_bugs)
def apply_multiarch_hints(hints, minimum_certainty="certain"):
    """Apply multi-arch hints to the binary stanzas of debian/control.

    Args:
      hints: mapping from binary package name to a list of hint dicts
      minimum_certainty: only apply hints whose applier is at least this
        certain
    Returns:
      list of (binary, hint, description, certainty) tuples, one per
      change actually applied.
    """
    by_kind = {}
    for applier in APPLIERS:
        by_kind[applier.kind] = applier
    applied = []
    with ControlEditor() as editor:
        for binary in editor.binaries:
            for hint in hints.get(binary["Package"], []):
                # The hint kind is encoded as the fragment of the link URL.
                kind = hint["link"].rsplit("#", 1)[1]
                applier = by_kind[kind]
                if certainty_sufficient(applier.certainty, minimum_certainty):
                    description = applier.fn(binary, hint)
                    if description:
                        applied.append(
                            (binary, hint, description, applier.certainty))
    return applied
def warn(msg):
    """Write *msg* to standard error, followed by a newline."""
    sys.stderr.write("%s\n" % msg)


def diligence():
    """Return the diligence level from $DILIGENCE (default 0)."""
    return int(os.environ.get("DILIGENCE", "0"))


def source_package_name():
    """Return the source package name from $DEB_SOURCE, if set."""
    return os.environ.get("DEB_SOURCE")


def is_debcargo_package():
    """Report whether the package in the current directory uses debcargo."""
    return os.path.exists('debian/debcargo.toml')


# Pick the control-file editor appropriate for this package: a debcargo
# shim for debcargo packages, a plain control editor otherwise.
if not is_debcargo_package():
    control = ControlEditor()
else:
    from debmutate.debcargo import DebcargoControlShimEditor, DebcargoEditor
    try:
        control = DebcargoControlShimEditor.from_debian_dir('debian')
    except AttributeError:
        # presumably an older debmutate without from_debian_dir() —
        # TODO confirm which versions lack it.
        control = DebcargoControlShimEditor(DebcargoEditor())


def vendor() -> str:
    """Return the distribution vendor name."""
    return get_vendor_name()
def make_changes(  # noqa: C901
    self,
    local_tree,
    subpath,
    update_changelog,
    reporter,
    committer,
    base_proposal=None,
):
    """Orphan the package: hand maintenance to the Debian QA Group.

    Sets the Maintainer to the QA Group, drops Uploaders, optionally
    rewrites the Vcs-* headers to point at the Debian group on salsa
    (and optionally pushes the branch there), adds changelog entries
    and commits.

    Args:
      local_tree: working tree of the packaging branch
      subpath: path of the package root within the tree
      update_changelog: whether to add a changelog entry
        (None is treated like True)
      reporter: used to report result metadata
      committer: committer identity for the commit
      base_proposal: unused here, kept for the changer interface
    Returns:
      ChangerResult describing the change.
    Raises:
      ChangerError: on nothing-to-do, unpreservable formatting, or
        generated-file conflicts.
    """
    base_revid = local_tree.last_revision()
    control_path = local_tree.abspath(
        osutils.pathjoin(subpath, "debian/control"))
    changelog_entries = []
    try:
        with ControlEditor(path=control_path) as editor:
            if self.check_wnpp:
                try:
                    # An orphaning is expected to have an O: wnpp bug.
                    wnpp_bug = find_wnpp_bug(editor.source["Source"])
                except KeyError:
                    raise ChangerError(
                        "nothing-to-do",
                        "Package is purported to be orphaned, "
                        "but no open wnpp bug exists.",
                    )
            else:
                wnpp_bug = None
            editor.source[
                "Maintainer"] = "Debian QA Group <*****@*****.**>"
            try:
                del editor.source["Uploaders"]
            except KeyError:
                # No Uploaders field to remove.
                pass
            if editor.changed:
                if wnpp_bug is not None:
                    changelog_entries.append("Orphan package - see bug %d." % wnpp_bug)
                else:
                    changelog_entries.append("Orphan package.")
        result = OrphanResult(wnpp_bug=wnpp_bug)
        if self.update_vcs:
            # Second editing session: point Vcs-* at the Debian group.
            with ControlEditor(path=control_path) as editor:
                result.package_name = editor.source["Source"]
                result.old_vcs_url = editor.source.get("Vcs-Git")
                editor.source[
                    "Vcs-Git"] = "https://salsa.debian.org/%s/%s.git" % (
                        self.salsa_user,
                        result.package_name,
                    )
                result.new_vcs_url = editor.source["Vcs-Git"]
                editor.source[
                    "Vcs-Browser"] = "https://salsa.debian.org/%s/%s" % (
                        self.salsa_user,
                        result.package_name,
                    )
                result.salsa_user = self.salsa_user
                # Normalize: if nothing actually moved, report no URLs.
                if result.old_vcs_url == result.new_vcs_url:
                    result.old_vcs_url = result.new_vcs_url = None
            if editor.changed:
                changelog_entries.append(
                    "Update VCS URLs to point to Debian group.")
        if not changelog_entries:
            raise ChangerError("nothing-to-do", "Already orphaned")
        if update_changelog in (True, None):
            add_changelog_entry(
                local_tree,
                osutils.pathjoin(subpath, "debian/changelog"),
                ["QA Upload."] + changelog_entries,
            )
        local_tree.commit("Move package to QA team.", committer=committer, allow_pointless=False)
    except FormattingUnpreservable as e:
        raise ChangerError(
            "formatting-unpreservable",
            "unable to preserve formatting while editing %s" % e.path,
        )
    except (ChangeConflict, GeneratedFile) as e:
        raise ChangerError("generated-file", "unable to edit generated file: %r" % e)
    result.pushed = False
    if self.update_vcs and self.salsa_push and result.new_vcs_url:
        # Prefer the parent branch location as the push source when known.
        parent_branch_url = local_tree.branch.get_parent()
        if parent_branch_url is not None:
            parent_branch = Branch.open(parent_branch_url)
        else:
            parent_branch = local_tree.branch
        push_result = push_to_salsa(
            local_tree,
            parent_branch,
            self.salsa_user,
            result.package_name,
            dry_run=self.dry_run,
        )
        if push_result:
            result.pushed = True
    reporter.report_metadata("old_vcs_url", result.old_vcs_url)
    reporter.report_metadata("new_vcs_url", result.new_vcs_url)
    reporter.report_metadata("pushed", result.pushed)
    reporter.report_metadata("wnpp_bug", result.wnpp_bug)
    branches = [("main", None, base_revid, local_tree.last_revision())]
    tags = []
    return ChangerResult(
        description="Move package to QA team.",
        mutator=result,
        branches=branches,
        tags=tags,
        sufficient_for_proposal=True,
        proposed_commit_message=(
            "Set the package maintainer to the QA team."),
    )
def main(argv):  # noqa: C901
    """Command-line entry point: upload pending VCS commits for packages.

    Selects packages (explicit arguments, by maintainer via apt, or via
    vcswatch), opens each packaging branch, prepares a source upload for
    any pending changes, pushes the release tag and runs dput.

    Args:
      argv: command-line arguments (without the program name)
    Returns:
      process exit code: 0 on success, 1 if any package failed.
    """
    import argparse
    parser = argparse.ArgumentParser(prog="upload-pending-commits")
    parser.add_argument("packages", nargs="*")
    parser.add_argument("--dry-run", action="store_true", help="Dry run changes.")
    parser.add_argument(
        "--acceptable-keys",
        help="List of acceptable GPG keys",
        action="append",
        default=[],
        type=str,
    )
    parser.add_argument(
        "--gpg-verification",
        help="Verify GPG signatures on commits",
        action="store_true",
    )
    parser.add_argument(
        "--min-commit-age",
        help="Minimum age of the last commit, in days",
        type=int,
        default=0,
    )
    parser.add_argument("--diff", action="store_true", help="Show diff.")
    parser.add_argument(
        "--builder",
        type=str,
        help="Build command",
        default=(DEFAULT_BUILDER + " --source --source-only-changes "
                 "--debbuildopt=-v${LAST_VERSION}"),
    )
    parser.add_argument(
        "--maintainer",
        type=str,
        action="append",
        help="Select all packages maintainer by specified maintainer.",
    )
    parser.add_argument(
        "--vcswatch",
        action="store_true",
        default=False,
        help="Use vcswatch to determine what packages need uploading.",
    )
    parser.add_argument("--exclude", type=str, action="append", default=[], help="Ignore source package")
    parser.add_argument(
        "--autopkgtest-only",
        action="store_true",
        help="Only process packages with autopkgtests.",
    )
    parser.add_argument(
        "--allowed-committer",
        type=str,
        action="append",
        help="Require that all new commits are from specified committers",
    )
    args = parser.parse_args(argv)
    ret = 0
    if not args.packages and not args.maintainer:
        # No selection given: fall back to the local maintainer identity.
        (name, email) = get_maintainer()
        if email:
            logging.info("Processing packages maintained by %s", email)
            args.maintainer = [email]
        else:
            parser.print_usage()
            sys.exit(1)
    if args.vcswatch:
        packages = select_vcswatch_packages(args.packages, args.maintainer, args.autopkgtest_only)
    else:
        logging.info("Use --vcswatch to only process packages for which "
                     "vcswatch found pending commits.")
        if args.maintainer:
            packages = select_apt_packages(args.packages, args.maintainer)
        else:
            packages = args.packages
    if not packages:
        logging.info("No packages found.")
        parser.print_usage()
        sys.exit(1)
    # TODO(jelmer): Sort packages by last commit date; least recently changed
    # commits are more likely to be successful.
    if len(packages) > 1:
        logging.info("Uploading packages: %s", ", ".join(packages))
    for package in packages:
        logging.info("Processing %s", package)
        # Can't use open_packaging_branch here, since we want to use pkg_source
        # later on.
        if "/" not in package:
            # Bare package name: resolve VCS location through apt.
            try:
                pkg_source = apt_get_source_package(package)
            except NoSuchPackage:
                logging.info("%s: package not found in apt", package)
                ret = 1
                continue
            try:
                vcs_type, vcs_url = source_package_vcs(pkg_source)
            except KeyError:
                logging.info("%s: no declared vcs location, skipping", pkg_source["Package"])
                ret = 1
                continue
            source_name = pkg_source["Package"]
            if source_name in args.exclude:
                continue
            source_version = pkg_source["Version"]
            has_testsuite = "Testsuite" in pkg_source
        else:
            # Argument with a slash is treated as a VCS URL directly;
            # package metadata is discovered from the branch contents.
            vcs_url = package
            vcs_type = None
            source_name = None
            source_version = None
            has_testsuite = None
        (location, branch_name, subpath) = split_vcs_url(vcs_url)
        if subpath is None:
            subpath = ""
        probers = select_probers(vcs_type)
        try:
            main_branch = open_branch(location, probers=probers, name=branch_name)
        except (BranchUnavailable, BranchMissing, BranchUnsupported) as e:
            logging.exception("%s: %s", vcs_url, e)
            ret = 1
            continue
        with Workspace(main_branch) as ws:
            if source_name is None:
                # Discover name/version from the checked-out tree.
                with ControlEditor(
                        ws.local_tree.abspath(
                            os.path.join(subpath, "debian/control"))) as ce:
                    source_name = ce.source["Source"]
                with ChangelogEditor(
                        ws.local_tree.abspath(
                            os.path.join(subpath, "debian/changelog"))) as cle:
                    source_version = cle[0].version
                # NOTE(review): ce.source is read after the ControlEditor
                # context has exited — confirm the editor keeps its parsed
                # data available after close.
                has_testsuite = "Testsuite" in ce.source
                if source_name in args.exclude:
                    continue
            if (args.autopkgtest_only and not has_testsuite and
                    not ws.local_tree.has_filename(
                        os.path.join(subpath, "debian/tests/control"))):
                logging.info("%s: Skipping, package has no autopkgtest.", source_name)
                continue
            branch_config = ws.local_tree.branch.get_config_stack()
            if args.gpg_verification:
                gpg_strategy = gpg.GPGStrategy(branch_config)
                if args.acceptable_keys:
                    acceptable_keys = args.acceptable_keys
                else:
                    # Default to the keys of the maintainer(s).
                    acceptable_keys = list(
                        get_maintainer_keys(gpg_strategy.context))
                gpg_strategy.set_acceptable_keys(",".join(acceptable_keys))
            else:
                gpg_strategy = None
            try:
                target_changes, tag_name = prepare_upload_package(
                    ws.local_tree,
                    subpath,
                    source_name,
                    source_version,
                    builder=args.builder,
                    gpg_strategy=gpg_strategy,
                    min_commit_age=args.min_commit_age,
                    allowed_committers=args.allowed_committer,
                )
            except CommitterNotAllowed as e:
                logging.warn(
                    "%s: committer %s not in allowed list: %r",
                    source_name,
                    e.committer,
                    e.allowed_committers,
                )
                continue
            except BuildFailedError as e:
                logging.warn("%s: package failed to build: %s", source_name, e)
                ret = 1
                continue
            except LastReleaseRevisionNotFound as e:
                logging.warn(
                    "%s: Unable to find revision matching last release "
                    "%s, skipping.",
                    source_name,
                    e.version,
                )
                ret = 1
                continue
            except LastUploadMoreRecent as e:
                logging.warn(
                    "%s: Last upload (%s) was more recent than VCS (%s)",
                    source_name,
                    e.archive_version,
                    e.vcs_version,
                )
                ret = 1
                continue
            except MissingChangelogError:
                logging.info("%s: No changelog found, skipping.", source_name)
                ret = 1
                continue
            except RecentCommits as e:
                logging.info("%s: Recent commits (%d days), skipping.", source_name, e.commit_age)
                continue
            except NoUnuploadedChanges:
                logging.info("%s: No unuploaded changes, skipping.", source_name)
                continue
            except NoUnreleasedChanges:
                logging.info("%s: No unreleased changes, skipping.", source_name)
                continue
            tags = []
            if tag_name is not None:
                logging.info("Pushing tag %s", tag_name)
                tags.append(tag_name)
            try:
                ws.push(dry_run=args.dry_run, tags=tags)
            except PermissionDenied:
                logging.info(
                    "%s: Permission denied pushing to branch, skipping.",
                    source_name)
                ret = 1
                continue
            if not args.dry_run:
                dput_changes(target_changes)
            if args.diff:
                # Flush the text layer before writing raw bytes to it.
                sys.stdout.flush()
                ws.show_diff(sys.stdout.buffer)
                sys.stdout.buffer.flush()
    return ret
def prepare_upload_package(
    local_tree,
    subpath,
    pkg,
    last_uploaded_version,
    builder,
    gpg_strategy=None,
    min_commit_age=None,
    allowed_committers=None,
):
    """Prepare a source upload for the pending changes in *local_tree*.

    Validates that there are unuploaded, unreleased changes since
    *last_uploaded_version*, optionally verifies GPG signatures and
    committer/age policy on the new revisions, marks QA uploads in the
    changelog, releases, and builds the source package.

    Args:
      local_tree: working tree of the packaging branch
      subpath: path of the package root within the tree
      pkg: source package name (used in error reporting)
      last_uploaded_version: version currently in the archive
      builder: build command; "${LAST_VERSION}" is substituted
      gpg_strategy: optional GPG verification strategy
      min_commit_age: minimum age (days) for the newest commit
      allowed_committers: optional allow-list of committer identities
    Returns:
      (target_changes, tag_name) on success.
      NOTE(review): when there are no pending revisions this returns
      bare None, but callers unpack a 2-tuple — confirm intended.
    Raises:
      NoUnuploadedChanges, LastUploadMoreRecent,
      LastReleaseRevisionNotFound, NoUnreleasedChanges, and whatever
      check_revision()/GPG verification raise.
    """
    if local_tree.has_filename(os.path.join(subpath, "debian/gbp.conf")):
        # gbp-managed package: let gbp dch refresh the changelog first.
        subprocess.check_call(["gbp", "dch", "--ignore-branch"], cwd=local_tree.abspath("."))
    cl, top_level = find_changelog(local_tree, merge=False, max_blocks=None)
    if cl.version == last_uploaded_version:
        raise NoUnuploadedChanges(cl.version)
    previous_version_in_branch = changelog_find_previous_upload(cl)
    if last_uploaded_version > previous_version_in_branch:
        # The archive has seen an upload this branch doesn't know about.
        raise LastUploadMoreRecent(last_uploaded_version, previous_version_in_branch)
    logging.info("Checking revisions since %s" % last_uploaded_version)
    with local_tree.lock_read():
        try:
            last_release_revid = find_last_release_revid(
                local_tree.branch, last_uploaded_version)
        except NoSuchTag:
            raise LastReleaseRevisionNotFound(pkg, last_uploaded_version)
        graph = local_tree.branch.repository.get_graph()
        # Mainline revisions added since the last release tag.
        revids = list(
            graph.iter_lefthand_ancestry(local_tree.branch.last_revision(),
                                         [last_release_revid]))
        if not revids:
            logging.info("No pending changes")
            return
        if gpg_strategy:
            logging.info("Verifying GPG signatures...")
            count, result, all_verifiables = gpg.bulk_verify_signatures(
                local_tree.branch.repository, revids, gpg_strategy)
            for revid, code, key in result:
                if code != gpg.SIGNATURE_VALID:
                    raise Exception("No valid GPG signature on %r: %d" % (revid, code))
        for revid, rev in local_tree.branch.repository.iter_revisions(revids):
            if rev is not None:
                # Enforce committer allow-list and minimum commit age.
                check_revision(rev, min_commit_age, allowed_committers)
        if cl.distributions != "UNRELEASED":
            raise NoUnreleasedChanges(cl.version)
    qa_upload = False
    team_upload = False
    control_path = local_tree.abspath(os.path.join(subpath, "debian/control"))
    with ControlEditor(control_path) as e:
        maintainer = parseaddr(e.source["Maintainer"])
        if maintainer[1] == "*****@*****.**":
            qa_upload = True
        # TODO(jelmer): Check whether this is a team upload
        # TODO(jelmer): determine whether this is a NMU upload
    if qa_upload or team_upload:
        changelog_path = local_tree.abspath(
            os.path.join(subpath, "debian/changelog"))
        with ChangelogEditor(changelog_path) as e:
            if qa_upload:
                changeblock_ensure_first_line(e[0], "QA upload.")
            elif team_upload:
                changeblock_ensure_first_line(e[0], "Team upload.")
        local_tree.commit(
            specific_files=[os.path.join(subpath, "debian/changelog")],
            message="Mention QA Upload.",
            allow_pointless=False,
            reporter=NullCommitReporter(),
        )
    tag_name = release(local_tree, subpath)
    target_dir = tempfile.mkdtemp()
    # NOTE(review): str.replace requires last_uploaded_version to be a
    # string here; confirm callers never pass a Version object.
    builder = builder.replace("${LAST_VERSION}", last_uploaded_version)
    target_changes = _build_helper(local_tree, subpath, local_tree.branch, target_dir, builder=builder)
    debsign(target_changes)
    return target_changes, tag_name
def update_offical_vcs(wt, subpath, repo_url=None, committer=None, force=False, create=False):
    # NOTE(review): function name is misspelled ("offical") but renaming
    # would break callers; keep as-is.
    """Set the official Vcs-* headers in debian/control and commit.

    Args:
      wt: working tree of the packaging branch
      subpath: path of the package root within the tree
      repo_url: repository URL to record; guessed from the maintainer
        when None
      committer: committer identity; derived from the tree when None
      force: overwrite existing Vcs-* headers and tolerate a pointless
        commit
      create: also attempt to create the repository on the forge
    Returns:
      the repository URL that was recorded.
    Raises:
      FileNotFoundError: if neither debian/control nor debcargo.toml exists
      VcsAlreadySpecified: if Vcs-* is already set and force is False
      NoVcsLocation: if no URL was given and none could be guessed
    """
    # TODO(jelmer): Allow creation of the repository as well
    check_clean_tree(wt, wt.basis_tree(), subpath)
    debcargo_path = os.path.join(subpath, 'debian/debcargo.toml')
    control_path = os.path.join(subpath, 'debian/control')
    if wt.has_filename(debcargo_path):
        # debcargo packages are edited through the debcargo shim.
        from debmutate.debcargo import DebcargoControlShimEditor
        editor = DebcargoControlShimEditor.from_debian_dir(
            wt.abspath(os.path.join(subpath, 'debian')))
    elif wt.has_filename(control_path):
        control_path = wt.abspath(control_path)
        editor = ControlEditor(control_path)
    else:
        raise FileNotFoundError(control_path)
    with editor:
        try:
            vcs_type, url = source_package_vcs(editor.source)
        except KeyError:
            # No Vcs-* header yet; nothing to protect.
            pass
        else:
            if not force:
                raise VcsAlreadySpecified(vcs_type, url)
        maintainer_email = parseaddr(editor.source['Maintainer'])[1]
        source = editor.source['Source']
        if repo_url is None:
            repo_url = guess_repository_url(source, maintainer_email)
        if repo_url is None:
            raise NoVcsLocation()
        logging.info('Using repository URL: %s', repo_url)
        # TODO(jelmer): Detect vcs type in a better way
        if hasattr(wt.branch.repository, '_git'):
            vcs_type = 'Git'
        else:
            vcs_type = 'Bzr'
        update_control_for_vcs_url(editor.source, vcs_type, repo_url)
    if committer is None:
        committer = get_committer(wt)
    try:
        wt.commit(
            message='Set Vcs headers.',
            allow_pointless=False,
            reporter=NullCommitReporter(),
            committer=committer,
        )
    except PointlessCommit:
        if not force:
            # This can't happen
            raise
    if create:
        from breezy.forge import create_project
        try:
            forge, project = create_project(repo_url)
        except AlreadyControlDirError:
            logging.info('%s already exists', repo_url)
        except UnsupportedForge:
            logging.error('Unable to find a way to create %s', repo_url)
        else:
            logging.info('Created %s', repo_url)
    return repo_url