def update_ctx(dpath, ctx):
    ctx.setdefault('exports', {})
    ctx.setdefault('build_depends', set())
    maintainer, email = get_maintainer()
    ctx['creator'] = '{} <{}>'.format(maintainer, email)
    if 'maintainer' not in ctx:
        ctx['maintainer'] = '{} <{}>'.format(maintainer, email)
    if 'debian_revision' not in ctx:
        ctx['debian_revision'] = '0~pypi2deb'
    ctx['binary_arch'] = 'all'
    ctx.setdefault('clean_files', set())
    for root, dirs, file_names in walk(dpath):
        if any(fname.endswith(('.c', '.cpp', '.pyx')) for fname in file_names):
            ctx['binary_arch'] = 'any'
        for fname in file_names:
            if fname.endswith('.pyx'):
                if 'python' in ctx['interpreters']:
                    ctx['build_depends'].add('cython')
                if 'python3' in ctx['interpreters']:
                    ctx['build_depends'].add('cython3')
                for ext in ('c', 'cpp'):
                    fname_c = fname[:-3] + ext
                    if fname_c in file_names:
                        ctx['clean_files'].add(
                            join(root.replace(dpath, '.'), fname_c))
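# Illustrative only: a minimal sketch of driving update_ctx(), assuming that
# `walk` and `join` are os.walk/os.path.join and that a get_maintainer()
# helper returning a (name, email) tuple (e.g. debian.changelog.get_maintainer)
# is in scope. The ctx seeded here is an assumption for the example; real
# callers in pypi2deb build a much richer context.
from os import walk
from os.path import join
from debian.changelog import get_maintainer  # one possible provider

ctx = {'interpreters': {'python3'}}   # hypothetical: only Python 3 requested
update_ctx('./example-1.0', ctx)      # hypothetical unpacked sdist directory
print(ctx['binary_arch'], sorted(ctx['build_depends']))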
def backport_package(local_tree, subpath, target_release, author=None):
    changes = []
    # TODO(jelmer): Check that package has a high enough popcon count,
    # and warn otherwise?
    # TODO(jelmer): Iterate Build-Depends and verify that depends are
    # satisfied by target_distribution
    # TODO(jelmer): Update Vcs-Git/Vcs-Browser header?
    target_distribution = backport_distribution(target_release)
    version_suffix = backport_suffix(target_release)
    logging.info(
        "Using target distribution %s, version suffix %s",
        target_distribution,
        version_suffix,
    )
    clp = local_tree.abspath(os.path.join(subpath, "debian/changelog"))

    if author is None:
        author = "%s <%s>" % get_maintainer()

    with ChangelogEditor(clp) as cl:
        # TODO(jelmer): If there was an existing backport, use that version
        since_version = cl[0].version
        cl.new_block(
            package=cl[0].package,
            distributions=target_distribution,
            urgency="low",
            author=author,
            date=format_date(),
            version=create_bpo_version(since_version, version_suffix),
        )
        block = cl[0]
        changeblock_add_line(
            block,
            ["Backport to %s." % target_release]
            + [" +" + line for line in changes],
        )
    return since_version
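# Illustrative only: how backport_package() might be invoked, assuming a
# breezy working tree opened on a packaging branch. "bullseye" is just an
# example release name; the helpers relied on above (backport_distribution,
# backport_suffix, create_bpo_version, changeblock_add_line, ChangelogEditor)
# are defined elsewhere in the tool and are not reproduced here.
from breezy.workingtree import WorkingTree

wt, subpath = WorkingTree.open_containing(".")
since = backport_package(wt, subpath, "bullseye",
                         author="Jane Doe <jane@example.com>")
print("Added backport changelog entry on top of version", since)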
#!/usr/bin/python3

from debian.changelog import get_maintainer
from lintian_brush.fixer import (
    control,
    report_result,
    meets_minimum_certainty,
    fixed_lintian_tag,
)
import sys

# TODO(jelmer): Bump this up if there's a way that we can verify that e.g. the
# ITP was filed by get_maintainer() ?
CERTAINTY = 'possible'

if not meets_minimum_certainty(CERTAINTY):
    sys.exit(0)

with control as updater:
    if updater.source.get('Maintainer'):
        sys.exit(0)
    maintainer = get_maintainer()
    updater.source['Maintainer'] = "%s <%s>" % maintainer
    fixed_lintian_tag(updater.source, 'required-field', 'Maintainer')

report_result(
    'Set the maintainer field to: %s <%s>.' % maintainer,
    certainty=CERTAINTY)
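# Illustrative only: get_maintainer() draws on the packaging environment. To
# my understanding it consults variables such as DEBFULLNAME/DEBEMAIL (with
# NAME/EMAIL and system defaults as fallbacks), so the identity the fixer
# above writes can be previewed roughly like this; the exact lookup order is
# python-debian's, not something this sketch defines.
import os
from debian.changelog import get_maintainer

os.environ.setdefault('DEBFULLNAME', 'Jane Doe')          # placeholder name
os.environ.setdefault('DEBEMAIL', 'jane@example.com')     # placeholder email
print("Maintainer would be set to: %s <%s>" % get_maintainer())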
def main(argv=None):  # noqa: C901
    parser = argparse.ArgumentParser(prog="lintian-brush")
    fixer_group = parser.add_argument_group("fixer selection")
    fixer_group.add_argument(
        "fixers", metavar="FIXER", nargs="*", help="specific fixer to run")
    fixer_group.add_argument(
        "--fixers-dir",
        type=str,
        help="path to fixer scripts. [%(default)s]",
        default=find_fixers_dir(),
    )
    fixer_group.add_argument(
        "--exclude",
        metavar="EXCLUDE",
        type=str,
        action="append",
        help="Exclude a fixer.",
    )
    fixer_group.add_argument(
        "--modern",
        help=("Use features/compatibility levels that are not available in "
              "stable. (makes backporting harder)"),
        action="store_true",
        default=False,
    )
    fixer_group.add_argument(
        "--compat-release", type=str, help=argparse.SUPPRESS)
    # Hide the minimum-certainty option for the moment.
    fixer_group.add_argument(
        "--minimum-certainty",
        type=str,
        choices=SUPPORTED_CERTAINTIES,
        default=None,
        help=argparse.SUPPRESS,
    )
    fixer_group.add_argument(
        "--opinionated", action="store_true", help=argparse.SUPPRESS)
    fixer_group.add_argument(
        "--diligent",
        action="count",
        default=0,
        dest="diligence",
        help=argparse.SUPPRESS,
    )
    fixer_group.add_argument(
        "--uncertain", action="store_true",
        help="Include changes with lower certainty.")
    fixer_group.add_argument(
        "--yolo", action="store_true", help=argparse.SUPPRESS)
    fixer_group.add_argument(
        "--force-subprocess", action="store_true", default=False,
        help=argparse.SUPPRESS)
    package_group = parser.add_argument_group("package preferences")
    package_group.add_argument(
        "--allow-reformatting",
        default=None,
        action="store_true",
        help="Allow file reformatting and stripping of comments.")
    package_group.add_argument(
        "--no-update-changelog",
        action="store_false",
        default=None,
        dest="update_changelog",
        help="do not update the changelog",
    )
    package_group.add_argument(
        "--update-changelog",
        action="store_true",
        dest="update_changelog",
        help="force updating of the changelog",
        default=None,
    )
    package_group.add_argument(
        "--trust", action="store_true", help=argparse.SUPPRESS)
    output_group = parser.add_argument_group("output")
    output_group.add_argument(
        "--verbose", help="be verbose", action="store_true",
        default=('SVP_API' in os.environ))
    output_group.add_argument(
        "--diff", help="Print resulting diff afterwards.",
        action="store_true")
    output_group.add_argument(
        "--version", action="version",
        version="%(prog)s " + version_string)
    output_group.add_argument(
        "--list-fixers", action="store_true", help="list available fixers")
    output_group.add_argument(
        "--list-tags",
        action="store_true",
        help="list lintian tags for which fixers are available",
    )
    output_group.add_argument(
        "--dry-run",
        help=("Do not make any changes to the current repository. "
              "Note: currently creates a temporary clone of the repository."),
        action="store_true",
    )
    output_group.add_argument(
        "--identity",
        help="Print user identity that would be used when committing",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "-d",
        "--directory",
        metavar="DIRECTORY",
        help="directory to run in",
        type=str,
        default=".",
    )
    parser.add_argument(
        "--disable-net-access",
        help="Do not probe external services.",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--disable-inotify", action="store_true", default=False,
        help=argparse.SUPPRESS)
    args = parser.parse_args(argv)

    logging.basicConfig(level=logging.INFO, format='%(message)s')

    if args.list_fixers and args.list_tags:
        parser.print_usage()
        return 1

    fixers = available_lintian_fixers(
        args.fixers_dir, force_subprocess=args.force_subprocess)
    if args.list_fixers:
        for script in sorted([fixer.name for fixer in fixers]):
            print(script)
    elif args.list_tags:
        tags = set()
        for fixer in fixers:
            tags.update(fixer.lintian_tags)
        for tag in sorted(tags):
            print(tag)
    else:
        try:
            if args.dry_run:
                branch, subpath = Branch.open_containing(args.directory)
                td = tempfile.mkdtemp()
                atexit.register(shutil.rmtree, td)
                # TODO(jelmer): Make a slimmer copy
                to_dir = branch.controldir.sprout(
                    td,
                    None,
                    create_tree_if_local=True,
                    source_branch=branch,
                    stacked=branch._format.supports_stacking(),
                )
                wt = to_dir.open_workingtree()
            else:
                wt, subpath = WorkingTree.open_containing(args.directory)
        except NotBranchError:
            logging.error(
                "No version control directory found (e.g. a .git directory).")
            return 1
        except DependencyNotPresent as e:
            logging.error(
                "Unable to open tree at %s: missing package %s",
                args.directory,
                e.library,
            )
            return 1
        if args.identity:
            print("Committer identity: %s" % get_committer(wt))
            print("Changelog identity: %s <%s>" % get_maintainer())
            return 0
        since_revid = wt.last_revision()
        if args.fixers:
            try:
                fixers = select_fixers(fixers, args.fixers, args.exclude)
            except KeyError as e:
                logging.error("Unknown fixer specified: %s", e.args[0])
                return 1
        debian_info = distro_info.DebianDistroInfo()
        if args.modern:
            if args.compat_release:
                logging.error(
                    "--compat-release and --modern are incompatible.")
                return 1
            compat_release = debian_info.devel()
        else:
            compat_release = args.compat_release
        minimum_certainty = args.minimum_certainty
        allow_reformatting = args.allow_reformatting
        update_changelog = args.update_changelog
        try:
            cfg = Config.from_workingtree(wt, subpath)
        except FileNotFoundError:
            pass
        else:
            if minimum_certainty is None:
                minimum_certainty = cfg.minimum_certainty()
            if compat_release is None:
                compat_release = cfg.compat_release()
            if allow_reformatting is None:
                allow_reformatting = cfg.allow_reformatting()
            if update_changelog is None:
                update_changelog = cfg.update_changelog()
        if minimum_certainty is None:
            if args.uncertain or args.yolo:
                minimum_certainty = "possible"
            else:
                minimum_certainty = DEFAULT_MINIMUM_CERTAINTY
        if compat_release is None:
            compat_release = debian_info.stable()
        if allow_reformatting is None:
            allow_reformatting = False
        with wt.lock_write():
            if control_files_in_root(wt, subpath):
                report_fatal(
                    "control-files-in-root",
                    "control files live in root rather than debian/ "
                    "(LarstIQ mode)",
                )
            try:
                overall_result = run_lintian_fixers(
                    wt,
                    fixers,
                    update_changelog=update_changelog,
                    compat_release=compat_release,
                    verbose=args.verbose,
                    minimum_certainty=minimum_certainty,
                    trust_package=args.trust,
                    allow_reformatting=allow_reformatting,
                    use_inotify=(False if args.disable_inotify else None),
                    subpath=subpath,
                    net_access=not args.disable_net_access,
                    opinionated=args.opinionated,
                    diligence=args.diligence,
                )
            except NotDebianPackage:
                report_fatal("not-debian-package", "Not a Debian package")
                return 1
            except WorkspaceDirty:
                logging.error(
                    "%s: Please commit pending changes first.", wt.basedir)
                if args.verbose:
                    from breezy.status import show_tree_status
                    show_tree_status(wt)
                return 1
            except ChangelogCreateError as e:
                report_fatal(
                    "changelog-create-error",
                    "Error creating changelog entry: %s" % e)
                return 1
            except DescriptionMissing as e:
                report_fatal(
                    "fixer-description-missing",
                    "Fixer %s made changes but did not provide description."
                    % e.fixer)
                return 1

        if overall_result.overridden_lintian_issues:
            if len(overall_result.overridden_lintian_issues) == 1:
                logging.info(
                    "%d change skipped because of lintian overrides.",
                    len(overall_result.overridden_lintian_issues))
            else:
                logging.info(
                    "%d changes skipped because of lintian overrides.",
                    len(overall_result.overridden_lintian_issues))
        if overall_result.success:
            all_tags = set()
            for result, summary in overall_result.success:
                all_tags.update(result.fixed_lintian_tags)
            if all_tags:
                logging.info("Lintian tags fixed: %r", all_tags)
            else:
                logging.info(
                    "Some changes were made, "
                    "but there are no affected lintian tags.")
            min_certainty = overall_result.minimum_success_certainty()
            if min_certainty != "certain":
                logging.info(
                    "Some changes were made with lower certainty (%s); "
                    "please double check the changes.",
                    min_certainty,
                )
        else:
            report_fatal("nothing-to-do", "No changes made.")
            return 0
        if overall_result.failed_fixers and not args.verbose:
            logging.info(
                "Some fixer scripts failed to run: %r. "
                "Run with --verbose for details.",
                set(overall_result.failed_fixers.keys()),
            )
        if overall_result.formatting_unpreservable and not args.verbose:
            logging.info(
                "Some fixer scripts were unable to preserve formatting: %r. "
                "Run with --allow-reformatting to reformat %r.",
                set(overall_result.formatting_unpreservable),
                set(overall_result.formatting_unpreservable.values()),
            )
        if args.diff:
            from breezy.diff import show_diff_trees
            show_diff_trees(
                wt.branch.repository.revision_tree(since_revid),
                wt, sys.stdout.buffer)
        if os.environ.get('SVP_API') == '1':
            applied = []
            if 'SVP_RESUME' in os.environ:
                with open(os.environ['SVP_RESUME'], 'r') as f:
                    base = json.load(f)
                    applied.extend(base['applied'])
            all_fixed_lintian_tags = set()
            for result, summary in overall_result.success:
                applied.append({
                    "summary": summary,
                    "description": result.description,
                    "fixed_lintian_tags": result.fixed_lintian_tags,
                    "revision_id": result.revision_id.decode("utf-8"),
                    "certainty": result.certainty,
                })
                all_fixed_lintian_tags.update(result.fixed_lintian_tags)
            failed = {
                name: str(e)
                for (name, e) in overall_result.failed_fixers.items()
            }
            debian_context = {}
            if overall_result.changelog_behaviour:
                debian_context['changelog'] = (
                    overall_result.changelog_behaviour.json())
            with open(os.environ['SVP_RESULT'], 'w') as f:
                json.dump({
                    'value': calculate_value(all_fixed_lintian_tags),
                    'debian': debian_context,
                    'context': {
                        'applied': applied,
                        'failed': failed,
                        "versions": {
                            "lintian-brush": lintian_brush_version_string,
                            "breezy": breezy.version_string,
                        }
                    }
                }, f)
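# Illustrative only: since main() accepts an argv list, the CLI defined above
# can be driven programmatically as well as from the command line. The flags
# used here (--list-fixers, -d, --modern, --diff) are taken from the argument
# definitions above; the directory path is a placeholder.
if __name__ == "__main__":
    import sys
    # e.g. list the available fixers: main(["--list-fixers"])
    # or run against a packaging checkout and show the resulting diff:
    sys.exit(main(["-d", "/path/to/package", "--modern", "--diff"]) or 0)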
def main(argv):  # noqa: C901
    import argparse

    parser = argparse.ArgumentParser(prog="upload-pending-commits")
    parser.add_argument("packages", nargs="*")
    parser.add_argument(
        "--dry-run", action="store_true", help="Dry run changes.")
    parser.add_argument(
        "--acceptable-keys",
        help="List of acceptable GPG keys",
        action="append",
        default=[],
        type=str,
    )
    parser.add_argument(
        "--gpg-verification",
        help="Verify GPG signatures on commits",
        action="store_true",
    )
    parser.add_argument(
        "--min-commit-age",
        help="Minimum age of the last commit, in days",
        type=int,
        default=0,
    )
    parser.add_argument("--diff", action="store_true", help="Show diff.")
    parser.add_argument(
        "--builder",
        type=str,
        help="Build command",
        default=(DEFAULT_BUILDER + " --source --source-only-changes "
                 "--debbuildopt=-v${LAST_VERSION}"),
    )
    parser.add_argument(
        "--maintainer",
        type=str,
        action="append",
        help="Select all packages maintained by the specified maintainer.",
    )
    parser.add_argument(
        "--vcswatch",
        action="store_true",
        default=False,
        help="Use vcswatch to determine what packages need uploading.",
    )
    parser.add_argument(
        "--exclude", type=str, action="append", default=[],
        help="Ignore source package")
    parser.add_argument(
        "--autopkgtest-only",
        action="store_true",
        help="Only process packages with autopkgtests.",
    )
    parser.add_argument(
        "--allowed-committer",
        type=str,
        action="append",
        help="Require that all new commits are from specified committers",
    )
    args = parser.parse_args(argv)

    ret = 0

    if not args.packages and not args.maintainer:
        (name, email) = get_maintainer()
        if email:
            logging.info("Processing packages maintained by %s", email)
            args.maintainer = [email]
        else:
            parser.print_usage()
            sys.exit(1)

    if args.vcswatch:
        packages = select_vcswatch_packages(
            args.packages, args.maintainer, args.autopkgtest_only)
    else:
        logging.info(
            "Use --vcswatch to only process packages for which "
            "vcswatch found pending commits.")
        if args.maintainer:
            packages = select_apt_packages(args.packages, args.maintainer)
        else:
            packages = args.packages

    if not packages:
        logging.info("No packages found.")
        parser.print_usage()
        sys.exit(1)

    # TODO(jelmer): Sort packages by last commit date; least recently changed
    # commits are more likely to be successful.

    if len(packages) > 1:
        logging.info("Uploading packages: %s", ", ".join(packages))

    for package in packages:
        logging.info("Processing %s", package)
        # Can't use open_packaging_branch here, since we want to use
        # pkg_source later on.
        if "/" not in package:
            try:
                pkg_source = apt_get_source_package(package)
            except NoSuchPackage:
                logging.info("%s: package not found in apt", package)
                ret = 1
                continue
            try:
                vcs_type, vcs_url = source_package_vcs(pkg_source)
            except KeyError:
                logging.info(
                    "%s: no declared vcs location, skipping",
                    pkg_source["Package"])
                ret = 1
                continue
            source_name = pkg_source["Package"]
            if source_name in args.exclude:
                continue
            source_version = pkg_source["Version"]
            has_testsuite = "Testsuite" in pkg_source
        else:
            vcs_url = package
            vcs_type = None
            source_name = None
            source_version = None
            has_testsuite = None
        (location, branch_name, subpath) = split_vcs_url(vcs_url)
        if subpath is None:
            subpath = ""
        probers = select_probers(vcs_type)
        try:
            main_branch = open_branch(
                location, probers=probers, name=branch_name)
        except (BranchUnavailable, BranchMissing, BranchUnsupported) as e:
            logging.exception("%s: %s", vcs_url, e)
            ret = 1
            continue
        with Workspace(main_branch) as ws:
            if source_name is None:
                with ControlEditor(
                        ws.local_tree.abspath(
                            os.path.join(
                                subpath, "debian/control"))) as ce:
                    source_name = ce.source["Source"]
                    with ChangelogEditor(
                            ws.local_tree.abspath(
                                os.path.join(
                                    subpath, "debian/changelog"))) as cle:
                        source_version = cle[0].version
                    has_testsuite = "Testsuite" in ce.source
                if source_name in args.exclude:
                    continue
            if (args.autopkgtest_only and not has_testsuite
                    and not ws.local_tree.has_filename(
                        os.path.join(subpath, "debian/tests/control"))):
                logging.info(
                    "%s: Skipping, package has no autopkgtest.", source_name)
                continue
            branch_config = ws.local_tree.branch.get_config_stack()
            if args.gpg_verification:
                gpg_strategy = gpg.GPGStrategy(branch_config)
                if args.acceptable_keys:
                    acceptable_keys = args.acceptable_keys
                else:
                    acceptable_keys = list(
                        get_maintainer_keys(gpg_strategy.context))
                gpg_strategy.set_acceptable_keys(",".join(acceptable_keys))
            else:
                gpg_strategy = None
            try:
                target_changes, tag_name = prepare_upload_package(
                    ws.local_tree,
                    subpath,
                    source_name,
                    source_version,
                    builder=args.builder,
                    gpg_strategy=gpg_strategy,
                    min_commit_age=args.min_commit_age,
                    allowed_committers=args.allowed_committer,
                )
            except CommitterNotAllowed as e:
                logging.warn(
                    "%s: committer %s not in allowed list: %r",
                    source_name,
                    e.committer,
                    e.allowed_committers,
                )
                continue
            except BuildFailedError as e:
                logging.warn(
                    "%s: package failed to build: %s", source_name, e)
                ret = 1
                continue
            except LastReleaseRevisionNotFound as e:
                logging.warn(
                    "%s: Unable to find revision matching last release "
                    "%s, skipping.",
                    source_name,
                    e.version,
                )
                ret = 1
                continue
            except LastUploadMoreRecent as e:
                logging.warn(
                    "%s: Last upload (%s) was more recent than VCS (%s)",
                    source_name,
                    e.archive_version,
                    e.vcs_version,
                )
                ret = 1
                continue
            except MissingChangelogError:
                logging.info("%s: No changelog found, skipping.", source_name)
                ret = 1
                continue
            except RecentCommits as e:
                logging.info(
                    "%s: Recent commits (%d days), skipping.",
                    source_name, e.commit_age)
                continue
            except NoUnuploadedChanges:
                logging.info(
                    "%s: No unuploaded changes, skipping.", source_name)
                continue
            except NoUnreleasedChanges:
                logging.info(
                    "%s: No unreleased changes, skipping.", source_name)
                continue
            tags = []
            if tag_name is not None:
                logging.info("Pushing tag %s", tag_name)
                tags.append(tag_name)
            try:
                ws.push(dry_run=args.dry_run, tags=tags)
            except PermissionDenied:
                logging.info(
                    "%s: Permission denied pushing to branch, skipping.",
                    source_name)
                ret = 1
                continue
            if not args.dry_run:
                dput_changes(target_changes)
            if args.diff:
                sys.stdout.flush()
                ws.show_diff(sys.stdout.buffer)
                sys.stdout.buffer.flush()

    return ret
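# Illustrative only: a sketch of driving the uploader's main() directly. The
# flags come from the parser defined above; the maintainer address is a
# placeholder. If neither packages nor --maintainer are given, the code above
# falls back to the email reported by get_maintainer().
if __name__ == "__main__":
    import sys
    sys.exit(main(["--vcswatch", "--dry-run",
                   "--maintainer", "jane@example.com"]))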
def add_autobuild_changelog_entry(base_branch, basedir, package,
                                  distribution=None, author_name=None,
                                  author_email=None, append_version=None):
    """Add a new changelog entry for an autobuild.

    :param base_branch: Recipe base branch
    :param basedir: Base working directory
    :param package: package name
    :param distribution: Optional distribution (defaults to last entry
        distribution)
    :param author_name: Name of the build requester
    :param author_email: Email of the build requester
    :param append_version: Optional version suffix to add
    """
    debian_dir = os.path.join(basedir, "debian")
    if not os.path.exists(debian_dir):
        os.makedirs(debian_dir)
    cl_path = os.path.join(debian_dir, "changelog")
    file_found = False
    if os.path.exists(cl_path):
        file_found = True
        cl_f = open(cl_path)
        try:
            contents = cl_f.read()
        finally:
            cl_f.close()
        cl = changelog.Changelog(file=contents)
    else:
        cl = changelog.Changelog()
    if len(cl._blocks) > 0:
        if distribution is None:
            distribution = cl._blocks[0].distributions.split()[0]
    else:
        if file_found:
            if len(contents.strip()) > 0:
                reason = ("debian/changelog didn't contain any "
                          "parseable stanzas")
            else:
                reason = "debian/changelog was empty"
        else:
            reason = "debian/changelog was not present"
        if distribution is None:
            distribution = DEFAULT_UBUNTU_DISTRIBUTION
    if base_branch.format in (0.1, 0.2, 0.3):
        try:
            substitute_changelog_vars(base_branch, None, cl)
        except SubstitutionUnavailable as e:
            raise errors.BzrCommandError(
                "No previous changelog to "
                "take the upstream version from as %s was "
                "used: %s: %s." % (e.name, e.reason, reason))
    # Use debian packaging environment variables
    # or default values if they don't exist
    if author_name is None or author_email is None:
        author_name, author_email = changelog.get_maintainer()
        # The python-debian package breaks compatibility at version 0.1.20 by
        # switching to expecting (but not checking for) unicode rather than
        # bytestring inputs. Detect this and decode environment if needed.
        if getattr(changelog.Changelog, "__unicode__", None) is not None:
            enc = osutils.get_user_encoding()
            author_name = author_name.decode(enc)
            author_email = author_email.decode(enc)
    author = "%s <%s>" % (author_name, author_email)

    date = utils.formatdate(localtime=True)

    version = base_branch.deb_version
    if append_version is not None:
        version += append_version

    try:
        changelog.Version(version)
    except (changelog.VersionError, ValueError) as e:
        raise errors.BzrCommandError(
            "Invalid deb-version: %s: %s" % (version, e))

    cl.new_block(
        package=package,
        version=version,
        distributions=distribution,
        urgency="low",
        changes=['', '  * Auto build.', ''],
        author=author,
        date=date)

    with open(cl_path, 'w') as cl_f:
        cl.write_to_open_file(cl_f)
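# Illustrative only: a sketch of requesting an autobuild changelog entry for a
# recipe build. `recipe_branch` stands in for a parsed recipe's base branch
# (it must provide .format and .deb_version, as used above); the directory,
# package name, distribution and version suffix are all placeholders.
add_autobuild_changelog_entry(
    recipe_branch,                 # hypothetical recipe base branch
    "/tmp/build/example",          # working directory that holds debian/
    "example",                     # source package name
    distribution="focal",          # optional; else taken from the last entry
    append_version="~ppa1",        # optional suffix appended to deb_version
)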