def checkcommitmessage(ui, repo, **kwargs):
    """
    Checks a single commit message for adherence to commit message rules.

    Returns False (hook success) when the message is clean or when the
    ``checkmessage.allownonprintable`` config knob is set; returns True
    (hook failure) after printing a report of the offending lines.
    """
    message = encoding.fromlocal(repo["tip"].description())
    if ui.configbool("checkmessage", "allownonprintable"):
        return False

    allowed = set(string.printable)

    def _hasnonprintable(text):
        # Only ASCII characters are checked; anything >= 128 is presumed to
        # be part of a multi-byte encoding and is left alone.
        return any(ord(ch) < 128 and ch not in allowed for ch in text)

    offenders = [
        (idx + 1, text)
        for idx, text in enumerate(message.splitlines())
        if _hasnonprintable(text)
    ]

    if offenders:
        ui.warn(
            _("+-------------------------------------------------------------\n"))
        ui.warn(
            _("| Non-printable characters in commit message are not allowed.\n"))
        ui.warn(_("| Edit your commit message to fix this issue.\n"))
        ui.warn(_("| The problematic commit message can be found at:\n"))
        for num, l in offenders:
            ui.warn(_("| Line {}: {!r}\n".format(num, l)))
        ui.warn(
            _("+-------------------------------------------------------------\n"))

    # False means success
    return bool(offenders)
def _escapebookmark(bookmark):
    """
    If ``bookmark`` contains "bookmarks" as a substring then replace it with
    "bookmarksbookmarks".

    Also, encode * since it is used for prefix pattern matching

    This is intended to make parsing bookmark names unambiguous, however it
    still has bugs.
    """
    return (
        encoding.fromlocal(bookmark)
        .replace("*", "*%")
        .replace("bookmarks", "bookmarksbookmarks")
    )
def listkeyspatterns(self, namespace, patterns):
    # Batched wire-protocol call: list server pushkey entries in ``namespace``
    # matching ``patterns``.
    #
    # NOTE(review): this generator follows Mercurial's @batchable convention
    # (presumably decorated at the definition site — confirm): the first yield
    # hands (encoded args, future) to the batch plumbing, and the final yield
    # produces the decoded result once the future is resolved.
    if not self.capable("pushkey"):
        # Yielding ({}, None) tells the batchable driver there is no remote
        # call to make; the driver returns {} and never resumes the generator.
        yield {}, None
    f = peer.future()
    self.ui.debug(
        'preparing listkeys for "%s" with pattern "%s"\n' % (namespace, patterns)
    )
    # First real yield: wire-encoded arguments plus the future that will hold
    # the server's raw response.
    yield {
        "namespace": encoding.fromlocal(namespace),
        "patterns": wireproto.encodelist(patterns),
    }, f
    # Resumed after the batch executes; f.value is the raw response bytes.
    d = f.value
    self.ui.debug('received listkey for "%s": %i bytes\n' % (namespace, len(d)))
    # Final yield: the decoded {key: value} mapping.
    yield pushkey.decodekeys(d)
def testasciifastpath(self):
    """Pure-ASCII bytes must take the fast path: tolocal/fromlocal return
    the very same object rather than a converted copy."""
    s = b"\0" * 100
    # assertIs checks identity like ``is`` but, unlike assertTrue(x is y),
    # reports both operands on failure.
    self.assertIs(s, encoding.tolocal(s))
    self.assertIs(s, encoding.fromlocal(s))
def crdump(ui, repo, *revs, **opts):
    """
    Dump the info about the revisions in format that's friendly for sending the
    patches for code review.

    The output is a JSON list with dictionary for each specified revision: ::

        {
          "output_directory": an output directory for all temporary files
          "commits": [
          {
            "node": commit hash,
            "date": date in format [unixtime, timezone offset],
            "desc": commit message,
            "patch_file": path to file containing patch in unified diff format
                          relative to output_directory,
            "commit_cloud": true if the commit is in commit cloud,
            "files": list of files touched by commit,
            "binary_files": [
              {
                "filename": path to file relative to repo root,
                "old_file": path to file (relative to output_directory) with
                            a dump of the old version of the file,
                "new_file": path to file (relative to output_directory) with
                            a dump of the new version of the file,
              },
              ...
            ],
            "user": commit author,
            "p1": {
              "node": hash,
              "differential_revision": xxxx
            },
            "public_base": {
              "node": public base commit hash,
              "svnrev": svn revision of public base (if hgsvn repo),
            },
            "obsolete": {
              "date": [ time, timezone ],
              "flag": marker's flags,
              "metadata": {
                "operation": changes made,
                "user": user name
              },
              "prednode": predecessor commit in hash,
              "succnodes": [ successors in hash ]
            }
          },
          ...
          ]
        }
    """
    # Combine positional revisions with --rev options; at least one required.
    revs = list(revs)
    revs.extend(opts["rev"])
    if not revs:
        raise error.Abort(_("revisions must be specified"))
    revs = scmutil.revrange(repo, revs)

    # NOTE(review): ``contextlines`` is only bound when "unified" is present
    # in opts, yet it is used unconditionally in the dumppatch() call below —
    # presumably the command table always supplies a "unified" option, but if
    # not this raises UnboundLocalError. Confirm against the command
    # registration.
    if "unified" in opts:
        contextlines = opts["unified"]

    cdata = []
    # All patch/binary dumps land under this temp dir; it is reported to the
    # caller in the JSON and only removed on failure (see except below).
    outdir = tempfile.mkdtemp(suffix="hg.crdump")
    try:
        lfs = None
        if opts["lfs"]:
            try:
                lfs = extensions.find("lfs")
            except KeyError:
                pass  # lfs extension is not enabled

        # Default: treat every requested rev as NOT backed up unless commit
        # cloud tells us otherwise below.
        notbackedup = set(repo[rev].node() for rev in revs)
        if ui.configbool("crdump", "commitcloud", False):
            try:
                # NOTE(review): if reading repo.ui.quiet itself raised,
                # ``oldquiet`` would be unbound in the finally clause; in
                # practice a plain attribute read should not raise.
                oldquiet = repo.ui.quiet
                # Silence any output from commitcloud
                repo.ui.quiet = True
                notbackedup = commitcloud.backup.backup(repo, revs)[1]
            except Exception:
                # Don't let commit cloud exceptions block crdump
                pass
            finally:
                repo.ui.quiet = oldquiet

        for rev in revs:
            ctx = repo[rev]
            # Per-commit record; keys match the schema in the docstring.
            rdata = {
                "node": hex(ctx.node()),
                "date": list(map(int, ctx.date())),
                "desc": encoding.fromlocal(ctx.description()),
                "files": ctx.files(),
                "p1": {
                    "node": ctx.parents()[0].hex()
                },
                "user": encoding.fromlocal(ctx.user()),
                "bookmarks": list(map(encoding.fromlocal, ctx.bookmarks())),
                "commit_cloud": False if ctx.node() in notbackedup else True,
                "manifest_node": hex(ctx.manifestnode()),
            }
            if ctx.parents()[0].phase() != phases.public:
                # we need this only if parent is in the same draft stack
                rdata["p1"]["differential_revision"] = phabricatorrevision(
                    ctx.parents()[0])

            if opts["obsolete"]:
                markers = obsutil.getmarkers(repo, [ctx.node()])
                obsolete = dumpmarkers(markers)
                if obsolete:
                    rdata["obsolete"] = obsolete

            rdata["branch"] = ""
            pbctx = publicbase(repo, ctx)
            if pbctx:
                rdata["public_base"] = {"node": hex(pbctx.node())}
                try:
                    # svnrev is only attached when the globalrevs extension is
                    # loaded; KeyError from extensions.find means it is not.
                    globalrevs = extensions.find("globalrevs")
                    globalrev = globalrevs.getglobalrev(ui, pbctx)
                    rdata["public_base"]["svnrev"] = globalrev
                except KeyError:
                    pass

                if extensions.isenabled(ui, "remotenames"):
                    # Collect all hoisted remote bookmark names reachable
                    # downstream of the public base.
                    downstreams = repo.revs(
                        "%n:: & remotebookmark()", pbctx.node())
                    downstreambookmarks = set()
                    for r in downstreams:
                        downstreambookmarks.update(
                            repo.names["hoistednames"].names(
                                repo, repo[r].node()))

                    # If there's a single downstream remotebookmark, or master is a
                    # downstream remotebookmark, report it as the current branch.
                    if downstreambookmarks:
                        if "master" in downstreambookmarks:
                            rdata["branch"] = "master"
                        elif len(downstreambookmarks) == 1:
                            rdata["branch"] = list(downstreambookmarks)[0]

            rdata["patch_file"] = dumppatch(ui, repo, ctx, outdir, contextlines)
            if not opts["nobinary"]:
                rdata["binary_files"] = dumpbinaryfiles(
                    ui, repo, ctx, outdir, lfs)
            cdata.append(rdata)

        # Emit the final JSON document on the ui; outdir is intentionally left
        # in place on success so callers can read the dumped files.
        ui.write(
            json.dumps(
                {
                    "output_directory": outdir,
                    "commits": cdata
                },
                sort_keys=True,
                indent=4,
                separators=(",", ": "),
            ))
        ui.write("\n")
    except Exception:
        # Clean up the temp dir only on failure, then re-raise.
        shutil.rmtree(outdir)
        raise