def commit(self, summary: str, update_changelog: Optional[bool] = None) -> bool:
    """Commit pending tree changes, optionally recording them in the changelog.

    Args:
      summary: Commit message; also used as the changelog entry text.
      update_changelog: Whether to add a debian/changelog entry first.
        Defaults to ``self.update_changelog`` when None.

    Returns:
      True if a commit was created, False if there was nothing to commit.
    """
    if update_changelog is None:
        update_changelog = self.update_changelog
    with self.tree.lock_write():
        try:
            if update_changelog:
                changelog_path = self.abspath("debian/changelog")
                with ChangelogEditor(changelog_path) as cl:
                    cl.add_entry([summary])
                # debcommit derives the commit message from the changelog.
                debcommit(
                    self.tree,
                    committer=self.committer,
                    subpath=self.subpath,
                    reporter=self.commit_reporter,
                )
            else:
                self.tree.commit(
                    message=summary,
                    committer=self.committer,
                    specific_files=[self.subpath],
                    reporter=self.commit_reporter,
                )
        except PointlessCommit:
            return False
        return True
def changelog_add_new_version(tree, subpath, upstream_version, distribution_name, changelog, package):
    """Add an entry to the changelog for a new version.

    :param tree: WorkingTree in which the package lives
    :param subpath: Path to the package within the tree
    :param upstream_version: Upstream version to add
    :param distribution_name: Distribution name (debian, ubuntu, ...)
    :param changelog: Existing Changelog object, or None if there is none yet
    :param package: Package name
    """
    epoch = changelog.epoch if changelog is not None else None
    debian_dir = osutils.pathjoin(subpath, 'debian')
    if not tree.has_filename(debian_dir):
        tree.mkdir(debian_dir)
    cl_path = osutils.pathjoin(subpath, "debian/changelog")
    # Open the existing changelog, or create one from scratch.
    if tree.has_filename(cl_path):
        editor = ChangelogEditor(tree.abspath(cl_path))
    else:
        editor = ChangelogEditor.create(tree.abspath(cl_path))
    with editor:
        editor.auto_version(
            version=new_package_version(
                upstream_version, distribution_name, epoch),
            package=package)
        editor.add_entry([upstream_merge_changelog_line(upstream_version)])
    if not tree.is_versioned(cl_path):
        tree.add([cl_path])
def add_dummy_changelog_entry(
    tree: MutableTree,
    subpath: str,
    suffix: str,
    suite: str,
    message: str,
    timestamp=None,
    maintainer=None,
    allow_reformatting: bool = True,
):
    """Add a dummy changelog entry to a package.

    Bumps the topmost version by appending ``suffix`` plus a counter (or,
    if the version already carries the suffix, incrementing that counter)
    and adds a new changelog entry for the resulting version.

    Args:
      tree: Tree in which the package lives
      subpath: Path to the package within the tree
      suffix: Suffix for the version
      suite: Debian suite, used as the distribution of the new entry
      message: Changelog message
      timestamp: Timestamp for the new entry; defaults to now
      maintainer: Maintainer identity; defaults to ``get_maintainer()``
      allow_reformatting: Whether the changelog may be reformatted
    """

    def add_suffix(v, suffix):
        # "1.0~bpo1" -> "1.0~bpo2"; otherwise append suffix + "1".
        m = re.fullmatch(
            "(.*)(" + re.escape(suffix) + ")([0-9]+)",
            v,
        )
        if m:
            return m.group(1) + m.group(2) + "%d" % (int(m.group(3)) + 1)
        else:
            return v + suffix + "1"

    if control_files_in_root(tree, subpath):
        path = os.path.join(subpath, "changelog")
    else:
        path = os.path.join(subpath, "debian", "changelog")
    if maintainer is None:
        maintainer = get_maintainer()
    if timestamp is None:
        timestamp = datetime.now()
    # Note: the redundant single-argument os.path.join(path) was dropped.
    with ChangelogEditor(
            tree.abspath(path),  # type: ignore
            allow_reformatting=allow_reformatting) as editor:
        version = editor[0].version
        # Prefer bumping the Debian revision; fall back to the upstream
        # version for native packages (no debian_revision).
        if version.debian_revision:
            version.debian_revision = add_suffix(version.debian_revision, suffix)
        else:
            version.upstream_version = add_suffix(version.upstream_version, suffix)
        editor.auto_version(version, timestamp=timestamp)
        editor.add_entry(
            summary=[message], maintainer=maintainer, timestamp=timestamp,
            urgency='low')
        editor[0].distributions = suite
def release(local_tree, subpath):
    """Release a tree.

    Marks the topmost changelog block as released and commits, but only
    when its distribution is still UNRELEASED.
    """
    changelog, top_level = find_changelog(
        local_tree, subpath, merge=False, max_blocks=2)
    # TODO(jelmer): If this changelog is automatically updated,
    # insert missing entries now.
    if not distribution_is_unreleased(changelog.distributions):
        return None
    changelog_path = 'changelog' if top_level else 'debian/changelog'
    changelog_abspath = local_tree.abspath(
        os.path.join(subpath, changelog_path))
    with ChangelogEditor(changelog_abspath) as e:
        mark_for_release(e.changelog)
    return debcommit_release(local_tree, subpath=subpath)
def backport_package(local_tree, subpath, target_release, author=None):
    """Add a backport changelog block targeting *target_release*.

    Returns the version that was current before the backport block was
    added.
    """
    changes = []
    # TODO(jelmer): Check that package has a high enough popcon count,
    # and warn otherwise?
    # TODO(jelmer): Iterate Build-Depends and verify that depends are
    # satisfied by target_distribution
    # TODO(jelmer): Update Vcs-Git/Vcs-Browser header?
    target_distribution = backport_distribution(target_release)
    version_suffix = backport_suffix(target_release)
    logging.info(
        "Using target distribution %s, version suffix %s",
        target_distribution,
        version_suffix,
    )
    changelog_path = local_tree.abspath(
        os.path.join(subpath, "debian/changelog"))
    if author is None:
        author = "%s <%s>" % get_maintainer()
    with ChangelogEditor(changelog_path) as editor:
        # TODO(jelmer): If there was an existing backport, use that version
        since_version = editor[0].version
        editor.new_block(
            package=editor[0].package,
            distributions=target_distribution,
            urgency="low",
            author=author,
            date=format_date(),
            version=create_bpo_version(since_version, version_suffix),
        )
        changeblock_add_line(
            editor[0],
            ["Backport to %s." % target_release]
            + [" +" + line for line in changes],
        )
    return since_version
def wrap_block(changelog, i):
    """Re-wrap the change lines of changelog block *i*.

    Returns True (and records the block's version in ``updated``) when the
    block was modified, False when it was already wrapped.
    """
    block = changelog[i]
    rewrapped = wrap_block_lines(block.changes())
    if rewrapped == block.changes():
        return False
    if i == 0:
        # Lintian only warns about the first block.
        for lineno, change in enumerate(block.changes(), 2):
            if len(change) > WIDTH:
                fixed_lintian_tag(
                    'source', 'debian-changelog-line-too-long',
                    info='line %d' % lineno)
    block._changes = rewrapped
    updated.append(block.version)
    return True


with ChangelogEditor() as updater:
    if 'CHANGELOG_THOROUGH' in os.environ:
        indices = range(len(updater.changelog))
    else:
        # Default: only fix the topmost block.
        indices = [0]
    for idx in indices:
        wrap_block(updater.changelog, idx)

report_result(
    'Wrap long lines in changelog entries: %s.' % (
        ', '.join(str(v) for v in updated)))
def main(argv):  # noqa: C901
    """Upload pending commits for the selected packages.

    Packages are taken from the command line, from vcswatch, or from apt
    (filtered by maintainer). For each package the packaging branch is
    checked out, pending changes are verified, built, pushed and uploaded.

    Returns a process exit code: 0 on success, 1 if any package failed.
    """
    import argparse

    parser = argparse.ArgumentParser(prog="upload-pending-commits")
    parser.add_argument("packages", nargs="*")
    parser.add_argument("--dry-run", action="store_true", help="Dry run changes.")
    parser.add_argument(
        "--acceptable-keys",
        help="List of acceptable GPG keys",
        action="append",
        default=[],
        type=str,
    )
    parser.add_argument(
        "--gpg-verification",
        help="Verify GPG signatures on commits",
        action="store_true",
    )
    parser.add_argument(
        "--min-commit-age",
        help="Minimum age of the last commit, in days",
        type=int,
        default=0,
    )
    parser.add_argument("--diff", action="store_true", help="Show diff.")
    parser.add_argument(
        "--builder",
        type=str,
        help="Build command",
        default=(DEFAULT_BUILDER + " --source --source-only-changes "
                 "--debbuildopt=-v${LAST_VERSION}"),
    )
    parser.add_argument(
        "--maintainer",
        type=str,
        action="append",
        help="Select all packages maintained by specified maintainer.",
    )
    parser.add_argument(
        "--vcswatch",
        action="store_true",
        default=False,
        help="Use vcswatch to determine what packages need uploading.",
    )
    parser.add_argument("--exclude", type=str, action="append", default=[],
                        help="Ignore source package")
    parser.add_argument(
        "--autopkgtest-only",
        action="store_true",
        help="Only process packages with autopkgtests.",
    )
    parser.add_argument(
        "--allowed-committer",
        type=str,
        action="append",
        help="Require that all new commits are from specified committers",
    )
    args = parser.parse_args(argv)
    ret = 0

    # With no explicit selection, default to packages maintained by the
    # local maintainer identity.
    if not args.packages and not args.maintainer:
        (name, email) = get_maintainer()
        if email:
            logging.info("Processing packages maintained by %s", email)
            args.maintainer = [email]
        else:
            parser.print_usage()
            sys.exit(1)

    if args.vcswatch:
        packages = select_vcswatch_packages(
            args.packages, args.maintainer, args.autopkgtest_only)
    else:
        logging.info(
            "Use --vcswatch to only process packages for which "
            "vcswatch found pending commits.")
        if args.maintainer:
            packages = select_apt_packages(args.packages, args.maintainer)
        else:
            packages = args.packages

    if not packages:
        logging.info("No packages found.")
        parser.print_usage()
        sys.exit(1)

    # TODO(jelmer): Sort packages by last commit date; least recently changed
    # commits are more likely to be successful.
    if len(packages) > 1:
        logging.info("Uploading packages: %s", ", ".join(packages))

    for package in packages:
        logging.info("Processing %s", package)
        # Can't use open_packaging_branch here, since we want to use
        # pkg_source later on.
        if "/" not in package:
            try:
                pkg_source = apt_get_source_package(package)
            except NoSuchPackage:
                logging.info("%s: package not found in apt", package)
                ret = 1
                continue
            try:
                vcs_type, vcs_url = source_package_vcs(pkg_source)
            except KeyError:
                logging.info(
                    "%s: no declared vcs location, skipping",
                    pkg_source["Package"])
                ret = 1
                continue
            source_name = pkg_source["Package"]
            if source_name in args.exclude:
                continue
            source_version = pkg_source["Version"]
            has_testsuite = "Testsuite" in pkg_source
        else:
            # Argument looks like a VCS URL rather than a package name;
            # package metadata is read from the tree later on.
            vcs_url = package
            vcs_type = None
            source_name = None
            source_version = None
            has_testsuite = None
        (location, branch_name, subpath) = split_vcs_url(vcs_url)
        if subpath is None:
            subpath = ""
        probers = select_probers(vcs_type)
        try:
            main_branch = open_branch(
                location, probers=probers, name=branch_name)
        except (BranchUnavailable, BranchMissing, BranchUnsupported) as e:
            logging.exception("%s: %s", vcs_url, e)
            ret = 1
            continue
        with Workspace(main_branch) as ws:
            if source_name is None:
                # Metadata was not available from apt; read it from the
                # checked-out packaging files instead.
                with ControlEditor(
                        ws.local_tree.abspath(
                            os.path.join(subpath, "debian/control"))) as ce:
                    source_name = ce.source["Source"]
                with ChangelogEditor(
                        ws.local_tree.abspath(
                            os.path.join(subpath, "debian/changelog"))) as cle:
                    source_version = cle[0].version
                has_testsuite = "Testsuite" in ce.source
                if source_name in args.exclude:
                    continue
            if (args.autopkgtest_only and not has_testsuite
                    and not ws.local_tree.has_filename(
                        os.path.join(subpath, "debian/tests/control"))):
                logging.info(
                    "%s: Skipping, package has no autopkgtest.", source_name)
                continue
            branch_config = ws.local_tree.branch.get_config_stack()
            if args.gpg_verification:
                gpg_strategy = gpg.GPGStrategy(branch_config)
                if args.acceptable_keys:
                    acceptable_keys = args.acceptable_keys
                else:
                    acceptable_keys = list(
                        get_maintainer_keys(gpg_strategy.context))
                gpg_strategy.set_acceptable_keys(",".join(acceptable_keys))
            else:
                gpg_strategy = None
            try:
                target_changes, tag_name = prepare_upload_package(
                    ws.local_tree,
                    subpath,
                    source_name,
                    source_version,
                    builder=args.builder,
                    gpg_strategy=gpg_strategy,
                    min_commit_age=args.min_commit_age,
                    allowed_committers=args.allowed_committer,
                )
            except CommitterNotAllowed as e:
                logging.warning(
                    "%s: committer %s not in allowed list: %r",
                    source_name,
                    e.committer,
                    e.allowed_committers,
                )
                continue
            except BuildFailedError as e:
                logging.warning(
                    "%s: package failed to build: %s", source_name, e)
                ret = 1
                continue
            except LastReleaseRevisionNotFound as e:
                logging.warning(
                    "%s: Unable to find revision matching last release "
                    "%s, skipping.",
                    source_name,
                    e.version,
                )
                ret = 1
                continue
            except LastUploadMoreRecent as e:
                logging.warning(
                    "%s: Last upload (%s) was more recent than VCS (%s)",
                    source_name,
                    e.archive_version,
                    e.vcs_version,
                )
                ret = 1
                continue
            except MissingChangelogError:
                logging.info("%s: No changelog found, skipping.", source_name)
                ret = 1
                continue
            except RecentCommits as e:
                logging.info(
                    "%s: Recent commits (%d days), skipping.",
                    source_name, e.commit_age)
                continue
            except NoUnuploadedChanges:
                logging.info(
                    "%s: No unuploaded changes, skipping.", source_name)
                continue
            except NoUnreleasedChanges:
                logging.info(
                    "%s: No unreleased changes, skipping.", source_name)
                continue
            tags = []
            if tag_name is not None:
                logging.info("Pushing tag %s", tag_name)
                tags.append(tag_name)
            try:
                ws.push(dry_run=args.dry_run, tags=tags)
            except PermissionDenied:
                logging.info(
                    "%s: Permission denied pushing to branch, skipping.",
                    source_name)
                ret = 1
                continue
            if not args.dry_run:
                dput_changes(target_changes)
            if args.diff:
                sys.stdout.flush()
                ws.show_diff(sys.stdout.buffer)
                sys.stdout.buffer.flush()
    return ret
def prepare_upload_package(
    local_tree,
    subpath,
    pkg,
    last_uploaded_version,
    builder,
    gpg_strategy=None,
    min_commit_age=None,
    allowed_committers=None,
):
    """Prepare and build a source upload from a packaging branch.

    Runs ``gbp dch`` when the package uses git-buildpackage, checks the
    changelog against the last uploaded version, verifies the pending
    revisions (GPG signatures, committer allow-list, commit age), marks
    QA uploads in the changelog, releases the changelog block, builds the
    package and signs the resulting changes file.

    :param local_tree: working tree containing the packaging
    :param subpath: path of the package within the tree
    :param pkg: source package name
    :param last_uploaded_version: version of the last archive upload
    :param builder: build command; "${LAST_VERSION}" is substituted
    :param gpg_strategy: optional GPG strategy for signature verification
    :param min_commit_age: minimum age of the last commit, in days
    :param allowed_committers: optional allow-list of committer identities
    :return: tuple of (changes file, release tag name)
    :raises NoUnuploadedChanges: if the tree matches the last upload
    :raises LastUploadMoreRecent: if the archive is ahead of the branch
    :raises LastReleaseRevisionNotFound: if no tag matches the last upload
    :raises NoUnreleasedChanges: if the changelog is not UNRELEASED
    """
    # Let git-buildpackage regenerate the changelog before inspecting it.
    if local_tree.has_filename(os.path.join(subpath, "debian/gbp.conf")):
        subprocess.check_call(["gbp", "dch", "--ignore-branch"],
                              cwd=local_tree.abspath("."))
    # NOTE(review): find_changelog is not passed subpath here — confirm it
    # handles packages that do not live at the tree root.
    cl, top_level = find_changelog(local_tree, merge=False, max_blocks=None)
    if cl.version == last_uploaded_version:
        raise NoUnuploadedChanges(cl.version)
    previous_version_in_branch = changelog_find_previous_upload(cl)
    if last_uploaded_version > previous_version_in_branch:
        raise LastUploadMoreRecent(last_uploaded_version,
                                   previous_version_in_branch)
    logging.info("Checking revisions since %s" % last_uploaded_version)
    with local_tree.lock_read():
        try:
            last_release_revid = find_last_release_revid(
                local_tree.branch, last_uploaded_version)
        except NoSuchTag:
            raise LastReleaseRevisionNotFound(pkg, last_uploaded_version)
        graph = local_tree.branch.repository.get_graph()
        # All mainline revisions since the last release tag.
        revids = list(
            graph.iter_lefthand_ancestry(local_tree.branch.last_revision(),
                                         [last_release_revid]))
        if not revids:
            logging.info("No pending changes")
            # NOTE(review): returns None here, while the normal path returns
            # a 2-tuple — callers that unpack the result will raise a
            # TypeError on this path; confirm intended behaviour.
            return
        if gpg_strategy:
            logging.info("Verifying GPG signatures...")
            count, result, all_verifiables = gpg.bulk_verify_signatures(
                local_tree.branch.repository, revids, gpg_strategy)
            for revid, code, key in result:
                if code != gpg.SIGNATURE_VALID:
                    raise Exception(
                        "No valid GPG signature on %r: %d" % (revid, code))
        # Enforce minimum commit age and committer allow-list on every
        # pending revision.
        for revid, rev in local_tree.branch.repository.iter_revisions(revids):
            if rev is not None:
                check_revision(rev, min_commit_age, allowed_committers)
        if cl.distributions != "UNRELEASED":
            raise NoUnreleasedChanges(cl.version)
    qa_upload = False
    # NOTE(review): team_upload is never set to True in this function (see
    # TODO below), so the "Team upload." branch is currently unreachable.
    team_upload = False
    control_path = local_tree.abspath(os.path.join(subpath, "debian/control"))
    with ControlEditor(control_path) as e:
        maintainer = parseaddr(e.source["Maintainer"])
        if maintainer[1] == "*****@*****.**":
            qa_upload = True
        # TODO(jelmer): Check whether this is a team upload
        # TODO(jelmer): determine whether this is a NMU upload
    if qa_upload or team_upload:
        changelog_path = local_tree.abspath(
            os.path.join(subpath, "debian/changelog"))
        with ChangelogEditor(changelog_path) as e:
            if qa_upload:
                changeblock_ensure_first_line(e[0], "QA upload.")
            elif team_upload:
                changeblock_ensure_first_line(e[0], "Team upload.")
        local_tree.commit(
            specific_files=[os.path.join(subpath, "debian/changelog")],
            message="Mention QA Upload.",
            allow_pointless=False,
            reporter=NullCommitReporter(),
        )
    tag_name = release(local_tree, subpath)
    target_dir = tempfile.mkdtemp()
    builder = builder.replace("${LAST_VERSION}", last_uploaded_version)
    target_changes = _build_helper(
        local_tree, subpath, local_tree.branch, target_dir, builder=builder)
    debsign(target_changes)
    return target_changes, tag_name
#!/usr/bin/python3 import asyncio from debmutate.changelog import ChangelogEditor import sys from lintian_brush.debbugs import find_archived_wnpp_bugs, find_wnpp_bugs from lintian_brush.fixer import net_access_allowed, report_result if not net_access_allowed(): sys.exit(0) with ChangelogEditor() as editor: if editor.changelog[-1].bugs_closed: sys.exit(0) loop = asyncio.get_event_loop() wnpp_bugs = loop.run_until_complete( find_wnpp_bugs(editor.changelog[-1].package)) if wnpp_bugs: certainty = 'certain' else: wnpp_bugs = loop.run_until_complete( find_archived_wnpp_bugs(editor.changelog[-1].package)) certainty = 'confident' if not wnpp_bugs: sys.exit(0) for i, line in enumerate(editor.changelog[-1]._changes): if not line: continue
r for r in DebianDistroInfo().get_all('object') if r.codename.lower() == upgrade_release() ] except ValueError: date_threshold = None else: date_threshold = release.release if date_threshold is None: # Release has not yet or will never be released # Default to 5 years date_threshold = (datetime.now() - timedelta(days=DEFAULT_AGE_THRESHOLD_DAYS)).date() cl_dates = [] with ChangelogEditor() as cl: for block in cl: try: dt = email.utils.parsedate_to_datetime(block.date) except (TypeError, ValueError): warn('Invalid date %r for %s' % (block.date, block.version)) # parsedate_to_datetime is buggy and raises a TypeError # when the date is invalid. # We can't reliably check anymore :( sys.exit(2) cl_dates.append((block.version, dt)) def is_well_past(version): for (cl_version, cl_dt) in cl_dates: if cl_version <= version and cl_dt.date() > date_threshold: