def gitnode(repo, subset, x):
    """``gitnode(id)``
    Return the hg revision corresponding to a given git rev."""
    args = revset.getargs(x, 1, 1, _("id requires one argument"))
    githash = revset.getstring(args[0], _("id requires a string"))

    reponame = repo.ui.config("fbscmquery", "reponame")
    if not reponame:
        # We don't know who we are, so we can't ask for a translation
        return subset.filter(lambda r: False)

    # Try the configured backing repos in order; fall back to this repo's name.
    candidates = repo.ui.configlist("fbscmquery", "backingrepos", default=[reponame])

    translated = None
    failure = None
    for candidate in candidates:
        try:
            translated = graphql.Client(repo=repo).getmirroredrev(
                candidate, "git", reponame, "hg", githash
            )
        except Exception as ex:
            # Best-effort: remember the error and keep trying other repos.
            failure = ex
        else:
            if translated != "":
                break

    if not translated:
        if failure:
            repo.ui.warn(
                ("Could not translate revision {0}: {1}\n".format(githash, failure))
            )
        else:
            repo.ui.warn(("Could not translate revision {0}\n".format(githash)))
        return subset.filter(lambda r: False)

    # Intersect the translated revision with the incoming subset.
    hgrev = repo[node.bin(translated)].rev()
    return subset & smartset.baseset([hgrev])
def gitnode(repo, subset, x):
    """``gitnode(id)``
    Return the hg revision corresponding to a given git rev."""
    arglist = revset.getargs(x, 1, 1, _("id requires one argument"))
    gitrev = revset.getstring(arglist[0], _("id requires a string"))

    # Translate the git hash into an hg node hex via the lookup helper.
    hghex = _lookup_node(repo, gitrev, from_scm_type="git")
    if not hghex:
        raise error.RepoLookupError(_("unknown revision '%s'") % gitrev)

    wanted = repo[hghex].rev()
    return subset.filter(lambda candidate: candidate == wanted)
def upstream(repo, subset, x):
    """Select changesets in an upstream repository according to remotenames."""
    names = repo.ui.configlist("remotenames", "upstream")

    # Names passed on the command line override the hgrc defaults.
    if x:
        names = []
        for symbol in revset.getlist(x):
            names.append(revset.getstring(symbol, "remote path must be a string"))

    # With no explicit names, fall back to the "default" path if one exists.
    default_path = dict(repo.ui.configitems("paths")).get("default")
    if not names and default_path:
        names = [activepath(repo.ui, default_path)]

    def keep(name):
        # An empty name list means "accept every remote".
        return name in names if names else True

    return upstream_revs(keep, repo, subset, x)
def revset_gitnode(repo, subset, x):
    """``gitnode(hash)``
    Select the changeset that originates in the given Git revision. The
    hash may be abbreviated: `gitnode(a5b)` selects the revision whose
    Git hash starts with `a5b`. Aborts if multiple changesets match the
    abbreviation.
    """
    args = revset.getargs(x, 1, 1, "gitnode takes one argument")
    rev = revset.getstring(args[0], "the argument to gitnode() must be a hash")
    git = repo.githandler
    node = repo.changelog.node

    def matches(r):
        # Look up the git hash mapped to this hg rev; None means unmapped.
        gitnode = git.map_git_get(hex(node(r)))
        if gitnode is None:
            return False
        return gitnode.startswith(rev)

    result = baseset(r for r in subset if matches(r))
    # len() is never negative, so only the upper bound is meaningful:
    # zero or one match is unambiguous and returned as-is.
    if len(result) < 2:
        return result
    raise LookupError(rev, git.map_file, _("ambiguous identifier"))
def filelogrevset(orig, repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
    """

    # Only take over for shallow repos; otherwise defer to the original.
    # (PEP 8 idiom: "x not in y" rather than "not x in y".)
    if shallowrepo.requirement not in repo.requirements:
        return orig(repo, subset, x)

    # i18n: "filelog" is a keyword
    pat = revset.getstring(x, _("filelog requires a pattern"))
    m = match.match(repo.root, repo.getcwd(), [pat], default="relpath", ctx=repo[None])
    s = set()

    if not match.patkind(pat):
        # slow path: a plain path — scan each changeset's file list.
        for r in subset:
            ctx = repo[r]
            cfiles = ctx.files()
            for f in m.files():
                if f in cfiles:
                    s.add(ctx.rev())
                    break
    else:
        # partial path: expand the pattern against the working copy and
        # collect linkrevs along each matched file's ancestry.
        files = (f for f in repo[None] if m(f))
        for f in files:
            fctx = repo[None].filectx(f)
            s.add(fctx.linkrev())
            for actx in fctx.ancestors():
                s.add(actx.linkrev())

    return smartset.baseset([r for r in subset if r in s])
def fastlogfollow(orig, repo, subset, x, name, followfirst=False):
    """Answer a follow-style revset via fastlog (a remote scmquery-backed
    history query combined with a local walk) when preconditions allow.

    Falls back to ``orig`` (the stock implementation) whenever fastlog
    cannot be used; every bail-out path emits a ui.debug line saying why.
    """
    if followfirst:
        # fastlog does not support followfirst=True
        repo.ui.debug("fastlog: not used because 'followfirst' is set\n")
        return orig(repo, subset, x, name, followfirst)

    args = revset.getargsdict(x, name, "file startrev")
    if "file" not in args:
        # Not interesting for fastlog case.
        repo.ui.debug("fastlog: not used because 'file' is not provided\n")
        return orig(repo, subset, x, name, followfirst)

    if "startrev" in args:
        revs = revset.getset(repo, smartset.fullreposet(repo), args["startrev"])
        it = iter(revs)
        try:
            startrev = next(it)
        except StopIteration:
            # Empty startrev set: start from the working parent instead.
            startrev = repo["."].rev()
        try:
            next(it)
            # fastlog does not support multiple startrevs
            repo.ui.debug(
                "fastlog: not used because multiple revs are provided\n")
            return orig(repo, subset, x, name, followfirst)
        except StopIteration:
            # supported by fastlog: startrev contains a single rev
            pass
    else:
        startrev = repo["."].rev()

    reponame = repo.ui.config("fbscmquery", "reponame")
    if not reponame or not repo.ui.configbool("fastlog", "enabled"):
        repo.ui.debug("fastlog: not used because fastlog is disabled\n")
        return orig(repo, subset, x, name, followfirst)

    path = revset.getstring(args["file"], _("%s expected a pattern") % name)
    if path.startswith("path:"):
        # strip "path:" prefix
        path = path[5:]

    if any(
            path.startswith("%s:" % prefix)
            for prefix in matchmod.allpatternkinds):
        # Patterns other than "path:" are not supported
        repo.ui.debug(
            "fastlog: not used because '%s:' patterns are not supported\n"
            % path.split(":", 1)[0])
        return orig(repo, subset, x, name, followfirst)

    files = [path]
    if not files or "." in files:
        # Walking the whole repo - bail on fastlog
        repo.ui.debug(
            "fastlog: not used because walking through the entire repo\n")
        return orig(repo, subset, x, name, followfirst)

    dirs = set()
    wvfs = repo.wvfs
    for path in files:
        if wvfs.isdir(path) and not wvfs.islink(path):
            # Directories are tracked with a trailing "/" suffix.
            dirs.update([path + "/"])
        else:
            if repo.ui.configbool("fastlog", "files"):
                dirs.update([path])
            else:
                # bail on symlinks, and also bail on files for now
                # with follow behavior, for files, we are supposed
                # to track copies / renames, but it isn't convenient
                # to do this through scmquery
                repo.ui.debug(
                    "fastlog: not used because %s is not a directory\n" % path)
                return orig(repo, subset, x, name, followfirst)

    rev = startrev
    parents = repo.changelog.parentrevs
    public = set()

    # Our criterion for invoking fastlog is finding a single
    # common public ancestor from the current head. First we
    # have to walk back through drafts to find all interesting
    # public parents. Typically this will just be one, but if
    # there are merged drafts, we may have multiple parents.
    if repo[rev].phase() == phases.public:
        public.add(rev)
    else:
        # BFS through mutable (draft) ancestors, collecting the public
        # revisions sitting at the draft/public boundary.
        queue = deque()
        queue.append(rev)
        seen = set()
        while queue:
            cur = queue.popleft()
            if cur not in seen:
                seen.add(cur)
                if repo[cur].mutable():
                    for p in parents(cur):
                        if p != nullrev:
                            queue.append(p)
                else:
                    public.add(cur)

    def fastlog(repo, startrev, dirs, localmatch):
        """Generator: walk ancestors locally down to the common public
        parent, yielding revs whose file lists match ``dirs``; then hand
        off to combinator() to continue from that parent."""
        filefunc = repo.changelog.readfiles
        for parent in lazyparents(startrev, public, parents):
            files = filefunc(parent)
            if dirmatches(files, dirs):
                yield parent
        # ``parent`` here is the last rev produced by lazyparents(); the
        # remainder of history is fetched starting from it.
        repo.ui.debug("found common parent at %s\n" % repo[parent].hex())
        for rev in combinator(repo, parent, dirs, localmatch):
            yield rev

    def combinator(repo, rev, dirs, localmatch):
        """combinator(repo, rev, dirs, localmatch)
        Make parallel local and remote queries along ancestors of rev
        along path and combine results, eliminating duplicates,
        restricting results to those which match dirs
        """
        LOCAL = "L"
        REMOTE = "R"
        # Bounded queue shared by both producer threads.
        queue = util.queue(FASTLOG_QUEUE_SIZE + 100)
        hash = repo[rev].hex()

        local = LocalIteratorThread(queue, LOCAL, rev, dirs, localmatch, repo)
        remote = FastLogThread(queue, REMOTE, reponame, "hg", hash, dirs, repo)

        # Allow debugging either remote or local path
        debug = repo.ui.config("fastlog", "debug")
        if debug != "local":
            repo.ui.debug("starting fastlog at %s\n" % hash)
            remote.start()
        if debug != "remote":
            local.start()
        seen = set([rev])

        try:
            while True:
                try:
                    producer, success, msg = queue.get(True, 3600)
                except util.empty:
                    raise error.Abort("Timeout reading log data")
                if not success:
                    if producer == LOCAL:
                        # Local failures are fatal ...
                        raise error.Abort(msg)
                    elif msg:
                        # ... remote failures are logged and skipped.
                        repo.ui.log("hgfastlog", msg)
                        continue

                if msg is None:
                    # Empty message means no more results
                    return

                rev = msg
                if debug:
                    if producer == LOCAL:
                        repo.ui.debug("LOCAL:: %s\n" % msg)
                    elif producer == REMOTE:
                        repo.ui.debug("REMOTE:: %s\n" % msg)
                if rev not in seen:
                    # De-duplicate revs reported by both producers.
                    seen.add(rev)
                    yield rev
        finally:
            # Always shut the producer threads down, even on abort.
            local.stop()
            remote.stop()

    revgen = fastlog(repo, rev, dirs, dirmatches)
    fastlogset = smartset.generatorset(revgen, iterasc=False)
    # Optimization: typically for "reverse(:.) & follow(path)" used by
    # "hg log". The left side is more expensive, although it has smaller
    # "weight". Make sure fastlogset is on the left side to avoid slow
    # walking through ":.".
    if subset.isdescending():
        fastlogset.reverse()
        return fastlogset & subset
    return subset & fastlogset