def smartlogrevset(repo, subset, x):
    """``smartlog([heads], [master])``
    Changesets relevant to you.

    'heads' overrides what feature branches to include.
    (default: 'interestingbookmarks() + heads(draft()) + .')

    'master' is the head of the public branch.
    (default: 'interestingmaster()')
    """
    args = revset.getargsdict(x, "smartlogrevset", "heads master")
    if "master" in args:
        masterset = revset.getset(repo, subset, args["master"])
    else:
        masterset = repo.revs("interestingmaster()")

    if "heads" in args:
        heads = set(revset.getset(repo, subset, args["heads"]))
    else:
        heads = set(repo.revs("interestingbookmarks() + heads(draft()) + ."))

    # Remove "null" commit. "::x" does not support it.
    masterset -= smartset.baseset([nodemod.nullrev])
    if nodemod.nullrev in heads:
        heads.remove(nodemod.nullrev)

    # Explicitly disable revnum deprecation warnings: the queries below pass
    # raw revision numbers (%ld) on purpose.
    with repo.ui.configoverride({("devel", "legacy.revnum:real"): ""}):
        # Select ancestors that are draft.
        drafts = repo.revs("draft() & ::%ld", heads)
        # Include parents of drafts, and public heads.
        revs = repo.revs(
            "parents(%ld) + %ld + %ld + %ld", drafts, drafts, heads, masterset
        )

        # Include the ancestor of above commits to make the graph connected.
        #
        # When calculating ancestors, filter commits using 'public()' to reduce the
        # number of commits to calculate. This is sound because the above logic
        # includes p1 of draft commits, and assume master is public. Practically,
        # this optimization can make a 3x difference.
        revs = smartset.baseset(repo.revs("ancestor(%ld & public()) + %ld", revs, revs))

        # Collapse long obsoleted stack - only keep their heads and roots.
        # This is incompatible with automation (namely, nuclide-core) yet.
        if repo.ui.configbool("smartlog", "collapse-obsolete") and not repo.ui.plain():
            obsrevs = smartset.baseset(repo.revs("%ld & obsolete()", revs))
            hiderevs = smartset.baseset(
                repo.revs("%ld - (heads(%ld) + roots(%ld))", obsrevs, obsrevs, obsrevs)
            )
            revs = repo.revs("%ld - %ld", revs, hiderevs)

    return subset & revs
def draftbranchrevset(repo, subset, x):
    """``draftbranch(set)``
    The draft branches containing the given changesets.
    """
    # Exactly one argument is required; getargs enforces the arity.
    (arg,) = revset.getargs(x, 1, 1, _("draftbranch expects one argument"))
    targets = revset.getset(repo, subset, arg)
    # Draft ancestors of the targets, plus everything descending from them.
    return subset & repo.revs("(draft() & ::%ld)::", targets)
def lastsubmitted(repo, subset, x):
    """Changesets for the last version submitted to Phabricator for the
    given changesets' differential revisions.

    Aborts if any input changeset has no differential revision recorded in
    its commit message, or if the last submitted hash cannot be determined.
    """
    revs = revset.getset(repo, revset.fullreposet(repo), x)

    # Collect the Differential revision associated with every input commit.
    phabrevs = set()
    for rev in revs:
        phabrev = diffprops.parserevfromcommitmsg(repo[rev].description())
        if phabrev is None:
            raise error.Abort(
                _("local changeset is not associated with a differential revision")
            )
        phabrevs.add(phabrev)

    # Resolve each Differential revision to the commit hash of its last
    # submitted version, pulling that commit locally when needed.
    resultrevs = set()
    for phabrev in phabrevs:
        diffrev = _differentialhash(repo.ui, repo, phabrev)
        # A None or otherwise malformed response fails the isinstance check.
        if not isinstance(diffrev, dict) or "hash" not in diffrev:
            raise error.Abort(_("unable to determine previous changeset hash"))
        lasthash = str(diffrev["hash"])
        _maybepull(repo, lasthash)
        resultrevs.add(repo[lasthash].rev())

    return subset & smartset.baseset(sorted(resultrevs), repo=repo)
def mutrelatedrevset(repo, subset, x):
    """``mutrelated([set])``
    Changesets that are related via mutations.

    Returns the descendants of the mutation predecessors and the mutation
    successors of the given changesets.
    """
    # NOTE(review): a second definition of `mutrelatedrevset` appears later in
    # this module and shadows this one at import time (the later version also
    # excludes public commits). Confirm which version is intended and remove
    # the other.
    args = revset.getargs(x, 1, 1, _("mutrelated expects one argument"))
    revs = revset.getset(repo, subset, args[0])
    return subset & repo.revs("predecessors(%ld):: + successors(%ld)::", revs, revs)
def _destrestack(repo, subset, x):
    """Restack destination for a given single source revision.

    Walks first-parent ancestors of the source to find the nearest obsoleted
    "base", then resolves that base's visible successors:

    - no visible successor (prune): the first non-obsoleted ancestor (or the
      virtual nullrev if the root itself was pruned);
    - one visible successor: that successor;
    - several visible successors (divergence/split): the one with the
      greatest revision number, matching old restack behavior.

    Returns an empty baseset when no destination applies (empty/obsoleted
    source, or a public/null ancestor is hit before an obsoleted base).
    """
    # Work on the unfiltered repo so obsoleted (hidden) commits are reachable.
    unfi = repo.unfiltered()
    obsoleted = unfi.revs("obsolete()")
    getparents = unfi.changelog.parentrevs
    getphase = unfi._phasecache.phase
    nodemap = unfi.changelog.nodemap

    src = revset.getset(repo, subset, x).first()

    # Empty src or already obsoleted - Do not return a destination
    if not src or src in obsoleted:
        return smartset.baseset()

    # Find the obsoleted "base" by checking source's parent recursively
    base = src
    while base not in obsoleted:
        base = getparents(base)[0]
        # When encountering a public revision which cannot be obsoleted, stop
        # the search early and return no destination. Do the same for nullrev.
        if getphase(repo, base) == phases.public or base == nullrev:
            return smartset.baseset()

    # Find successors for given base
    # NOTE: Ideally we can use obsutil.successorssets to detect divergence
    # case. However it does not support cycles (unamend) well. So we use
    # allsuccessors and pick non-obsoleted successors manually as a workaround.
    basenode = repo[base].node()
    if mutation.enabled(repo):
        succnodes = mutation.allsuccessors(repo, [basenode])
    else:
        succnodes = obsutil.allsuccessors(repo.obsstore, [basenode])
    # Keep only successors that are known locally and still visible.
    succnodes = [
        n
        for n in succnodes
        if (n != basenode and n in nodemap and nodemap[n] not in obsoleted)
    ]

    # In case of a split, only keep its heads
    succrevs = list(unfi.revs("heads(%ln)", succnodes))

    if len(succrevs) == 0:
        # Prune - Find the first non-obsoleted ancestor
        while base in obsoleted:
            base = getparents(base)[0]
            if base == nullrev:
                # Root node is pruned. The new base (destination) is the
                # virtual nullrev.
                return smartset.baseset([nullrev])
        return smartset.baseset([base])
    elif len(succrevs) == 1:
        # Unique visible successor case - A valid destination
        return smartset.baseset([succrevs[0]])
    else:
        # Multiple visible successors - Choose the one with a greater revision
        # number. This is to be compatible with restack old behavior. We might
        # want to revisit it when we introduce the divergence concept to users.
        return smartset.baseset([max(succrevs)])
def focusedbranchrevset(repo, subset, x):
    """``focusedbranch([set])``
    The focused branches of the given changesets, being the draft stack
    and any draft changesets that are related via mutations.
    """
    # Exactly one argument is required; getargs enforces the arity.
    (arg,) = revset.getargs(x, 1, 1, _("focusedbranch expects one argument"))
    members = revset.getset(repo, subset, arg)
    return subset & repo.revs(
        "draft() & mutrelated(draftbranch(%ld)) + %ld", members, members
    )
def mutrelatedrevset(repo, subset, x):
    """``mutrelated([set])``
    Draft changesets that are related via mutations.
    """
    # NOTE: this redefines the earlier `mutrelatedrevset` in this module and
    # is the definition in effect after import.
    (arg,) = revset.getargs(x, 1, 1, _("mutrelated expects one argument"))
    related = revset.getset(repo, subset, arg)
    # Non-public mutation predecessors/successors, plus their descendants.
    query = "descendants((predecessors(%ld) + successors(%ld)) & not public())"
    return subset & repo.revs(query, related, related)
def fastlogfollow(orig, repo, subset, x, name, followfirst=False):
    """Wrapper for a ``follow``-style revset that offloads directory-history
    traversal to the remote "fastlog" (scmquery) service when possible.

    Falls back to ``orig`` (the wrapped revset implementation) whenever the
    query shape is unsupported: ``followfirst`` is set, no file pattern was
    given, multiple start revisions, a non-"path:" pattern kind, walking the
    whole repo, or a file/symlink path (unless ``fastlog.files`` is enabled).
    Also falls back when ``fbscmquery.reponame`` is unset or ``fastlog.enabled``
    is off.
    """
    if followfirst:
        # fastlog does not support followfirst=True
        repo.ui.debug("fastlog: not used because 'followfirst' is set\n")
        return orig(repo, subset, x, name, followfirst)

    args = revset.getargsdict(x, name, "file startrev")
    if "file" not in args:
        # Not interesting for fastlog case.
        repo.ui.debug("fastlog: not used because 'file' is not provided\n")
        return orig(repo, subset, x, name, followfirst)

    # Resolve the single start revision; default to the working parent.
    if "startrev" in args:
        revs = revset.getset(repo, smartset.fullreposet(repo), args["startrev"])
        it = iter(revs)
        try:
            startrev = next(it)
        except StopIteration:
            startrev = repo["."].rev()
        try:
            next(it)
            # fastlog does not support multiple startrevs
            repo.ui.debug("fastlog: not used because multiple revs are provided\n")
            return orig(repo, subset, x, name, followfirst)
        except StopIteration:
            # supported by fastlog: startrev contains a single rev
            pass
    else:
        startrev = repo["."].rev()

    reponame = repo.ui.config("fbscmquery", "reponame")
    if not reponame or not repo.ui.configbool("fastlog", "enabled"):
        repo.ui.debug("fastlog: not used because fastlog is disabled\n")
        return orig(repo, subset, x, name, followfirst)

    path = revset.getstring(args["file"], _("%s expected a pattern") % name)
    if path.startswith("path:"):
        # strip "path:" prefix
        path = path[5:]

    if any(path.startswith("%s:" % prefix) for prefix in matchmod.allpatternkinds):
        # Patterns other than "path:" are not supported
        repo.ui.debug(
            "fastlog: not used because '%s:' patterns are not supported\n"
            % path.split(":", 1)[0]
        )
        return orig(repo, subset, x, name, followfirst)

    files = [path]
    if not files or "." in files:
        # Walking the whole repo - bail on fastlog
        repo.ui.debug("fastlog: not used because walking through the entire repo\n")
        return orig(repo, subset, x, name, followfirst)

    dirs = set()
    wvfs = repo.wvfs
    for path in files:
        if wvfs.isdir(path) and not wvfs.islink(path):
            # Directories are queried with a trailing "/".
            dirs.update([path + "/"])
        else:
            if repo.ui.configbool("fastlog", "files"):
                dirs.update([path])
            else:
                # bail on symlinks, and also bail on files for now
                # with follow behavior, for files, we are supposed
                # to track copies / renames, but it isn't convenient
                # to do this through scmquery
                repo.ui.debug("fastlog: not used because %s is not a directory\n" % path)
                return orig(repo, subset, x, name, followfirst)

    rev = startrev
    parents = repo.changelog.parentrevs
    public = set()

    # Our criterion for invoking fastlog is finding a single
    # common public ancestor from the current head. First we
    # have to walk back through drafts to find all interesting
    # public parents. Typically this will just be one, but if
    # there are merged drafts, we may have multiple parents.
    if repo[rev].phase() == phases.public:
        public.add(rev)
    else:
        # BFS through mutable ancestors, collecting the public frontier.
        queue = deque()
        queue.append(rev)
        seen = set()
        while queue:
            cur = queue.popleft()
            if cur not in seen:
                seen.add(cur)
                if repo[cur].mutable():
                    for p in parents(cur):
                        if p != nullrev:
                            queue.append(p)
                else:
                    public.add(cur)

    def fastlog(repo, startrev, dirs, localmatch):
        # Yield draft ancestors that touch `dirs`, then continue from the
        # common public parent via the local/remote combinator.
        filefunc = repo.changelog.readfiles
        for parent in lazyparents(startrev, public, parents):
            files = filefunc(parent)
            if dirmatches(files, dirs):
                yield parent
        # `parent` is the last rev produced above — presumably the common
        # public ancestor; confirm against lazyparents' contract.
        repo.ui.debug("found common parent at %s\n" % repo[parent].hex())
        for rev in combinator(repo, parent, dirs, localmatch):
            yield rev

    def combinator(repo, rev, dirs, localmatch):
        """combinator(repo, rev, dirs, localmatch)
        Make parallel local and remote queries along ancestors of
        rev along path and combine results, eliminating duplicates,
        restricting results to those which match dirs
        """
        LOCAL = "L"
        REMOTE = "R"
        queue = util.queue(FASTLOG_QUEUE_SIZE + 100)
        hash = repo[rev].hex()

        local = LocalIteratorThread(queue, LOCAL, rev, dirs, localmatch, repo)
        remote = FastLogThread(queue, REMOTE, reponame, "hg", hash, dirs, repo)

        # Allow debugging either remote or local path
        debug = repo.ui.config("fastlog", "debug")
        if debug != "local":
            repo.ui.debug("starting fastlog at %s\n" % hash)
            remote.start()
        if debug != "remote":
            local.start()
        seen = set([rev])

        try:
            while True:
                try:
                    producer, success, msg = queue.get(True, 3600)
                except util.empty:
                    raise error.Abort("Timeout reading log data")
                if not success:
                    if producer == LOCAL:
                        # Local failures are fatal; remote failures are only
                        # logged so the local path can still produce results.
                        raise error.Abort(msg)
                    elif msg:
                        repo.ui.log("hgfastlog", msg)
                        continue

                if msg is None:
                    # Empty message means no more results
                    return

                rev = msg
                if debug:
                    if producer == LOCAL:
                        repo.ui.debug("LOCAL:: %s\n" % msg)
                    elif producer == REMOTE:
                        repo.ui.debug("REMOTE:: %s\n" % msg)

                if rev not in seen:
                    seen.add(rev)
                    yield rev
        finally:
            # Always stop worker threads, even on abort or generator close.
            local.stop()
            remote.stop()

    revgen = fastlog(repo, rev, dirs, dirmatches)
    fastlogset = smartset.generatorset(revgen, iterasc=False)
    # Optimization: typically for "reverse(:.) & follow(path)" used by
    # "hg log". The left side is more expensive, although it has smaller
    # "weight". Make sure fastlogset is on the left side to avoid slow
    # walking through ":.".
    if subset.isdescending():
        fastlogset.reverse()
        return fastlogset & subset
    return subset & fastlogset