def _revsetutil(repo, subset, x, rtypes):
    """utility function to return a set of revs based on the rtypes"""
    args = revsetlang.getargs(x, 0, 1, _(b'only one argument accepted'))
    if args:
        kind, pattern, matcher = stringutil.stringmatcher(
            revsetlang.getstring(args[0], _(b'argument must be a string')))
    else:
        kind = pattern = None
        matcher = util.always

    nodes = set()
    cl = repo.changelog
    for rtype in rtypes:
        if rtype in repo.names:
            ns = repo.names[rtype]
            for name in ns.listnames(repo):
                if not matcher(name):
                    continue
                nodes.update(ns.nodes(repo, name))
    if kind == b'literal' and not nodes:
        raise error.RepoLookupError(
            _(b"remote name '%s' does not exist") % pattern)

    revs = (cl.rev(n) for n in nodes if cl.hasnode(n))
    return subset & smartset.baseset(revs)
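A utility like this is typically exposed through named revset predicates rather than called directly. A minimal registration sketch, assuming remotenames-style name types and that _revsetutil is in scope:

from mercurial import registrar

revsetpredicate = registrar.revsetpredicate()

@revsetpredicate(b'remotebranches([name])')
def remotebranchesrevset(repo, subset, x):
    # hypothetical wiring: delegate to the utility with a single rtype
    return _revsetutil(repo, subset, x, (b'remotebranches',))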
Example #2
def revsetstringset(orig, repo, subset, revstr, *args, **kwargs):
    """Wrapper that recognizes revisions starting with 'D'"""

    if revstr.startswith('D') and revstr[1:].isdigit():
        return smartset.baseset(revsetdiff(repo, subset, revstr[1:]))

    return orig(repo, subset, revstr, *args, **kwargs)
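The wrapper only takes effect once it is installed over the core predicate. A hedged wiring sketch, assuming this code lives in an extension and that revsetdiff is defined alongside it:

from mercurial import extensions, revset

def extsetup(ui):
    # wrap the core stringset so 'hg log -r D1234' resolves a
    # Differential revision instead of failing the symbol lookup
    extensions.wrapfunction(revset, 'stringset', revsetstringset)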
Example #3
def revsettransplanted(repo, subset, x):
    """Transplanted changesets in set, or all transplanted changesets."""
    if x:
        s = revset.getset(repo, subset, x)
    else:
        s = subset
    return smartset.baseset(
        [r for r in s if repo[r].extra().get(b'transplant_source')])
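Predicates like this are registered under a name before they can appear in revset expressions. A sketch of the registration, following the usual registrar conventions (the shim below is hypothetical; the decorator would normally sit on the function above):

from mercurial import registrar

revsetpredicate = registrar.revsetpredicate()

@revsetpredicate(b'transplanted([set])')
def transplantedrevset(repo, subset, x):
    # delegate to the implementation above
    return revsettransplanted(repo, subset, x)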
Example #4
def _oldworkingcopyparent(repo, subset, x):
    """``oldworkingcopyparent([index])``
    previous working copy parent

    'index' is how many undoable commands you want to look back.  See 'hg undo'.
    """
    args = revset.getargsdict(x, 'oldworkingcopyrevset', 'reverseindex')
    reverseindex = revsetlang.getinteger(args.get('reverseindex'),
                    _('index must be a positive integer'), 1)
    revs = _getoldworkingcopyparent(repo, reverseindex)
    return subset & smartset.baseset(revs)
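Once registered (presumably as 'oldworkingcopyparent'), the predicate is queried like any other revset. A hypothetical call site:

# the working copy parent as it was two undoable commands ago
revs = repo.revs('oldworkingcopyparent(%d)', 2)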
Example #5
def walk(tree):
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            return smartset.baseset([drev])
        elif tree[1] in _knownstatusnames:
            drevs = [r for r in validids
                     if _getstatusname(prefetched[r]) == tree[1]]
            return smartset.baseset(drevs)
        else:
            raise error.Abort(_(b'unknown symbol: %s') % tree[1])
    elif op in {b'and_', b'add', b'sub'}:
        assert len(tree) == 3
        return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
    elif op == b'group':
        return walk(tree[1])
    elif op == b'ancestors':
        return getstack(walk(tree[1]))
    else:
        raise error.ProgrammingError(b'illegal tree: %r' % tree)
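The trees walked here come from the extension's own small parser rather than revsetlang, so a hand-built tuple shows the expected shape (the op names follow the branches above; the drev ids are illustrative):

# union of two Differential revisions; a bare symbol is either a
# drev id ('D123') or a known status name such as 'abandoned'
tree = (b'add', (b'symbol', b'D123'), (b'symbol', b'D456'))
drevs = walk(tree)  # smartset.baseset([123, 456]) once both parse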
Example #6
def _cachedgetoldworkingcopyparent(repo, wkpnode):
    if not util.safehasattr(repo, '_undooldworkingparentcache'):
        repo._undooldworkingparentcache = {}
    cache = repo._undooldworkingparentcache
    key = wkpnode
    if key not in cache:
        oldworkingparent = _readnode(repo, "workingparent.i", wkpnode)
        oldworkingparent = filter(None, oldworkingparent.split("\n"))
        oldwkprevstring = revsetlang.formatspec('%ls', oldworkingparent)
        urepo = repo.unfiltered()
        cache[key] = smartset.baseset(urepo.revs(oldwkprevstring))
    return cache[key]
Example #7
def _revsetutil(repo, subset, x, rtypes):
    """utility function to return a set of revs based on the rtypes"""

    nodes = set()
    cl = repo.changelog
    for rtype in rtypes:
        if rtype in repo.names:
            ns = repo.names[rtype]
            for name in ns.listnames(repo):
                nodes.update(ns.nodes(repo, name))

    revs = (cl.rev(n) for n in nodes if cl.hasnode(n))
    return subset & smartset.baseset(sorted(revs))
Example #8
def reachableroots(repo, roots, heads, includepath=False):
    # type: (Repo, smartset.abstractsmartset, smartset.abstractsmartset, bool) -> smartset.abstractsmartset
    """return (heads(::<roots> and ::<heads>))

    If includepath is True, return (<roots>::<heads>)."""
    if not roots:
        return smartset.baseset()

    roots = list(roots)
    heads = list(heads)

    assert len(roots) == 1

    return gitfullreposet(repo, root=roots[0], heads=heads)
Example #9
def printrevset(orig, repo, pats, opts):
    revs, filematcher = orig(repo, pats, opts)
    if opts.get(b'print_revset'):
        expr = logrevset(repo, pats, opts)
        if expr:
            tree = revsetlang.parse(expr)
            tree = revsetlang.analyze(tree)
        else:
            tree = []
        ui = repo.ui
        ui.write(b'%s\n' % stringutil.pprint(opts.get(b'rev', [])))
        ui.write(revsetlang.prettyformat(tree) + b'\n')
        ui.write(stringutil.prettyrepr(revs) + b'\n')
        revs = smartset.baseset()  # display no revisions
    return revs, filematcher
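This only runs if installed over log's revision computation. A hedged wiring sketch, assuming an hg version where logcmdutil.getrevs(repo, pats, opts) returns (revs, filematcher):

from mercurial import extensions, logcmdutil

def uisetup(ui):
    # hypothetical: let --print-revset dump the parsed tree and the
    # smartset repr instead of printing any revisions
    extensions.wrapfunction(logcmdutil, 'getrevs', printrevset)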
Example #10
def _cachedgetolddrafts(repo, draftnode, obsnode):
    if not util.safehasattr(repo, '_undoolddraftcache'):
        repo._undoolddraftcache = {}
    cache = repo._undoolddraftcache
    key = draftnode + obsnode
    if key not in cache:
        olddraftheads = _readnode(repo, "draftheads.i", draftnode)
        oldheadslist = olddraftheads.split("\n")
        oldobs = _readnode(repo, "draftobsolete.i", obsnode)
        oldobslist = filter(None, oldobs.split("\n"))
        oldlogrevstring = revsetlang.formatspec(
            '(draft() & ancestors(%ls)) - %ls', oldheadslist, oldobslist)
        urepo = repo.unfiltered()
        cache[key] = smartset.baseset(urepo.revs(oldlogrevstring))
    return cache[key]
Example #11
def _olddraft(repo, subset, x):
    """``olddraft([index])``
    previous draft commits

    'index' is how many undoable commands you want to look back.
    An undoable command is one that changed draft heads, bookmarks,
    and/or the working copy parent.  Note that olddraft uses an absolute
    index, so olddraft(1) represents the state after an hg undo -a, not
    an hg undo.
    Note: this revset may include hidden commits
    """
    args = revset.getargsdict(x, 'olddraftrevset', 'reverseindex')
    reverseindex = revsetlang.getinteger(args.get('reverseindex'),
                _('index must be a positive integer'), 1)
    revs = _getolddrafts(repo, reverseindex)
    return subset & smartset.baseset(revs)
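Hypothetical usage once the predicate is registered as 'olddraft':

# the draft commits as they were after 'hg undo -a'; the result may
# include hidden commits, so an unfiltered repo view can be useful
revs = repo.revs('olddraft(1)')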
Example #12
    def __and__(self, other):
        """As self contains the whole repo, all of the other set should also be
        in self. Therefore `self & other = other`.

        This boldly assumes the other contains valid revs only.
        """
        # other is not a smartset; make it so
        if not util.safehasattr(other, 'isascending'):
            # filter out hidden revisions
            # (this boldly assumes all smartsets are pure)
            #
            # `other` was used with "&", let's assume this is a set like
            # object.
            other = smartset.baseset(other)

        other.sort(reverse=self.isdescending())
        return other
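The effect is easiest to see with plain smartsets: intersecting with a whole-repo set simply coerces and re-sorts the other operand. A small illustration, assuming 'repo' is an open repository and this __and__ behaves like the stock fullreposet:

from mercurial import smartset

full = smartset.fullreposet(repo)
print(list(full & [3, 1, 2]))  # coerced to a baseset and sorted: [1, 2, 3]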
Example #13
def getstack(topdrevids):
    """given a top, get a stack from the bottom, [id] -> [id]"""
    visited = set()
    result = []
    queue = [{r'ids': [i]} for i in topdrevids]
    while queue:
        params = queue.pop()
        drev = fetch(params)
        if drev[r'id'] in visited:
            continue
        visited.add(drev[r'id'])
        result.append(int(drev[r'id']))
        auxiliary = drev.get(r'auxiliary', {})
        depends = auxiliary.get(r'phabricator:depends-on', [])
        for phid in depends:
            queue.append({b'phids': [phid]})
    result.reverse()
    return smartset.baseset(result)
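A hypothetical traversal: if D456 declares a 'phabricator:depends-on' edge to D123, querying from the top of the stack yields bottom-to-top order:

stack = getstack([456])        # fetches D456, then its dependency D123
assert list(stack) == [123, 456]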
Example #14
def _calculateset(repo, subset, x, f):
    """f is a function that converts input nodes to output nodes

    repo, subset, x are typical revsetpredicate parameters.

    This function takes care of converting between revs/nodes, and filtering.
    """
    revs = revset.getset(repo, revset.fullreposet(repo), x)
    cl = repo.unfiltered().changelog
    torev = cl.rev
    tonode = cl.node
    nodemap = cl.nodemap
    resultrevs = set(torev(n)
                     for n in f(tonode(r) for r in revs)
                     if n in nodemap)
    s = smartset.baseset(resultrevs - set(revs) - repo.changelog.filteredrevs)
    s.sort()
    return subset & s
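A typical f maps nodes through obsolescence markers. A hedged sketch of one predicate that could be built on this helper:

from mercurial import obsutil

def _allsuccessors(repo, subset, x):
    # hypothetical: successors of the input set at any depth
    f = lambda nodes: obsutil.allsuccessors(repo.obsstore, nodes)
    return _calculateset(repo, subset, x, f)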
Example #15
def _destrestack(repo, subset, x):
    """restack destination for given single source revision"""
    unfi = repo.unfiltered()
    obsoleted = unfi.revs('obsolete()')
    getparents = unfi.changelog.parentrevs
    getphase = unfi._phasecache.phase
    nodemap = unfi.changelog.nodemap

    src = revset.getset(repo, subset, x).first()

    # Empty src or already obsoleted - Do not return a destination
    if not src or src in obsoleted:
        return smartset.baseset()

    # Find the obsoleted "base" by checking source's parent recursively
    base = src
    while base not in obsoleted:
        base = getparents(base)[0]
        # When encountering a public revision which cannot be obsoleted, stop
        # the search early and return no destination. Do the same for nullrev.
        if getphase(repo, base) == phases.public or base == nullrev:
            return smartset.baseset()

    # Find successors for given base
    # NOTE: Ideally we can use obsutil.successorssets to detect divergence
    # case. However it does not support cycles (unamend) well. So we use
    # allsuccessors and pick non-obsoleted successors manually as a workaround.
    basenode = repo[base].node()
    succnodes = [n for n in obsutil.allsuccessors(repo.obsstore, [basenode])
                 if (n != basenode and n in nodemap
                     and nodemap[n] not in obsoleted)]

    # In case of a split, only keep its heads
    succrevs = list(unfi.revs('heads(%ln)', succnodes))

    if len(succrevs) == 0:
        # Prune - Find the first non-obsoleted ancestor
        while base in obsoleted:
            base = getparents(base)[0]
            if base == nullrev:
                # Root node is pruned. The new base (destination) is the
                # virtual nullrev.
                return smartset.baseset([nullrev])
        return smartset.baseset([base])
    elif len(succrevs) == 1:
        # Unique visible successor case - A valid destination
        return smartset.baseset([succrevs[0]])
    else:
        # Multiple visible successors - Choose the one with a greater revision
        # number. This is to be compatible with restack old behavior. We might
        # want to revisit it when we introduce the divergence concept to users.
        return smartset.baseset([max(succrevs)])
Example #16
def _localbranch(repo, subset, x):
    """``_localbranch(changectx)``
    localbranch changesets

    Returns all commits within the same localbranch as the changeset(s). A local
    branch is all draft changesets that are connected, uninterrupted by public
    changesets.  Any draft commit within a branch, or a public commit at the
    base of the branch, can be used to identify localbranches.
    """
    # executed on a filtered repo
    args = revset.getargsdict(x, 'branchrevset', 'changectx')
    revstring = revsetlang.getstring(args.get('changectx'),
                               _('localbranch argument must be a changectx'))
    revs = repo.revs(revstring)
    # we assume that there is only a single rev
    if repo[revs.first()].phase() == phases.public:
        querystring = revsetlang.formatspec('(children(%d) & draft())::',
                                            revs.first())
    else:
        querystring = revsetlang.formatspec('((::%ld) & draft())::', revs)
    return subset & smartset.baseset(repo.revs(querystring))
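Hypothetical usage, anchoring the local branch at the working copy parent:

# every draft commit connected to '.' without crossing public ones
revs = repo.revs('_localbranch(%s)', repo['.'].hex())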
Example #17
def filelogrevset(orig, repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
    """

    if not isenabled(repo):
        return orig(repo, subset, x)

    # i18n: "filelog" is a keyword
    pat = revset.getstring(x, _(b"filelog requires a pattern"))
    m = matchmod.match(repo.root,
                       repo.getcwd(), [pat],
                       default=b'relpath',
                       ctx=repo[None])
    s = set()

    if not matchmod.patkind(pat):
        # slow
        for r in subset:
            ctx = repo[r]
            cfiles = ctx.files()
            for f in m.files():
                if f in cfiles:
                    s.add(ctx.rev())
                    break
    else:
        # partial
        files = (f for f in repo[None] if m(f))
        for f in files:
            fctx = repo[None].filectx(f)
            s.add(fctx.linkrev())
            for actx in fctx.ancestors():
                s.add(actx.linkrev())

    return smartset.baseset([r for r in subset if r in s])
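Hypothetical usage; with the extension enabled this fast path answers, otherwise the wrapped original runs (the file path is illustrative):

revs = repo.revs("filelog('path:mercurial/changelog.py')")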
Example #18
def filelogrevset(orig, repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
    """

    if shallowrepo.requirement not in repo.requirements:
        return orig(repo, subset, x)

    # i18n: "filelog" is a keyword
    pat = revset.getstring(x, _("filelog requires a pattern"))
    m = match.match(repo.root, repo.getcwd(), [pat], default='relpath',
                       ctx=repo[None])
    s = set()

    if not match.patkind(pat):
        # slow
        for r in subset:
            ctx = repo[r]
            cfiles = ctx.files()
            for f in m.files():
                if f in cfiles:
                    s.add(ctx.rev())
                    break
    else:
        # partial
        files = (f for f in repo[None] if m(f))
        for f in files:
            fctx = repo[None].filectx(f)
            s.add(fctx.linkrev())
            for actx in fctx.ancestors():
                s.add(actx.linkrev())

    return smartset.baseset([r for r in subset if r in s])
Example #19
def _phase(repo, subset, *targets):
    """helper to select all rev in <targets> phases"""
    return smartset.baseset()
Example #20
def getdag(ui, repo, revs, master):

    # Fake ctx that we stick in the dag so we can special case it later
    class fakectx(object):
        def __init__(self, rev):
            self._rev = rev
        def node(self):
            return "..."
        def obsolete(self):
            return False
        def phase(self):
            return None
        def rev(self):
            return self._rev
        def files(self):
            return []
        def closesbranch(self):
            return False

    knownrevs = set(revs)
    gpcache = {}
    results = []

    # we store parents together with the parent type information
    # but sometimes we need just a list of parents
    # [(a,b), (c,d), (e,f)] => [b, d, f]
    def unzip(parents):
        if parents:
            return list(zip(*parents)[1])
        else:
            return list()

    # For each rev we need to show, compute its parents in the dag.
    # If we have to reach for a grandparent, insert a fake node so we
    # can show '...' in the graph.
    # Use 'reversed' to start at the lowest commit so fake nodes are
    # placed at their lowest possible positions.
    for rev in reversed(revs):
        ctx = repo[rev]
        # Parents in the dag
        parents = sorted(set([(graphmod.PARENT, p.rev()) for p in ctx.parents()
                              if p.rev() in knownrevs]))
        # Parents not in the dag
        mpars = [p.rev() for p in ctx.parents() if
                 p.rev() != nodemod.nullrev and p.rev() not in unzip(parents)]

        for mpar in mpars:
            gp = gpcache.get(mpar)
            if gp is None:
                gp = gpcache[mpar] = dagop.reachableroots(
                    repo, smartset.baseset(revs), [mpar])
            if not gp:
                parents.append((graphmod.MISSINGPARENT, mpar))
            else:
                gp = [g for g in gp if g not in unzip(parents)]
                for g in gp:
                    parents.append((graphmod.GRANDPARENT, g))

        results.append((ctx.rev(), 'C', ctx, parents))

    # Compute parent rev->parents mapping
    lookup = {}
    for r in results:
        lookup[r[0]] = unzip(r[3])

    def parentfunc(node):
        return lookup.get(node, [])

    # Compute the revs on the master line. We use this for sorting later.
    masters = set()
    queue = [master]
    while queue:
        m = queue.pop()
        if m not in masters:
            masters.add(m)
            queue.extend(lookup.get(m, []))

    # Topologically sort the noderev numbers. Note: unlike the vanilla
    # topological sorting, we move master to the top.
    order = sortnodes([r[0] for r in results], parentfunc, masters)
    order = dict((e[1], e[0]) for e in enumerate(order))

    # Sort the actual results based on their position in the 'order'
    try:
        results.sort(key=lambda x: order[x[0]], reverse=True)
    except ValueError:  # happens when 'order' is empty
        msg = _('note: smartlog encountered an error\n')
        hint = _('(so the sorting might be wrong)\n\n')
        ui.warn(msg)
        ui.warn(hint)
        results.reverse()

    # indent the top non-public stack
    if ui.configbool('smartlog', 'indentnonpublic', False):
        rev, ch, ctx, parents = results[0]
        if ctx.phase() != phases.public:
            # find a public parent and add a fake node, so the non-public nodes
            # will be shown in the non-first column
            prev = None
            for i in xrange(1, len(results)):
                pctx = results[i][2]
                if pctx.phase() == phases.public:
                    prev = results[i][0]
                    break
            # append the fake node to occupy the first column
            if prev:
                fakerev = rev + 1
                results.insert(0, (fakerev, 'F', fakectx(fakerev),
                                   [('P', prev)]))

    return results
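getdag leans on a sortnodes helper that is not shown. A minimal sketch of what it might look like (the real smartlog implementation differs): a Kahn-style topological sort, parents before children, emitting the master line last among ready nodes so it lands on top after the reverse sort above:

def sortnodes(nodes, parentfunc, masters):
    # count in-set parents and record children for each node
    children = dict((n, []) for n in nodes)
    pending = dict((n, 0) for n in nodes)
    for n in nodes:
        for p in parentfunc(n):
            if p in children:
                children[p].append(n)
                pending[n] += 1
    ready = sorted((n for n in nodes if pending[n] == 0),
                   key=lambda n: (n in masters, n))
    result = []
    while ready:
        n = ready.pop(0)
        result.append(n)
        for c in children[n]:
            pending[c] -= 1
            if pending[c] == 0:
                ready.append(c)
        ready.sort(key=lambda n: (n in masters, n))
    return result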
Example #21
def getfastlogrevs(orig, repo, pats, opts):
    blacklist = ['all', 'branch', 'rev', 'sparse']
    if any(opts.get(opt) for opt in blacklist) or not opts.get('follow'):
        return orig(repo, pats, opts)

    reponame = repo.ui.config('fbconduit', 'reponame')
    if reponame and repo.ui.configbool('fastlog', 'enabled'):
        wctx = repo[None]
        match, pats = scmutil.matchandpats(wctx, pats, opts)
        files = match.files()
        if not files or '.' in files:
            # Walking the whole repo - bail on fastlog
            return orig(repo, pats, opts)

        dirs = set()
        wvfs = repo.wvfs
        for path in files:
            if wvfs.isdir(path) and not wvfs.islink(path):
                dirs.update([path + '/'])
            else:
                # bail on symlinks, and also bail on files for now
                # with follow behavior, for files, we are supposed
                # to track copies / renames, but it isn't convenient
                # to do this through scmquery
                return orig(repo, pats, opts)

        rev = repo['.'].rev()

        parents = repo.changelog.parentrevs
        public = set()

        # Our criterion for invoking fastlog is finding a single
        # common public ancestor from the current head.  First we
        # have to walk back through drafts to find all interesting
        # public parents.  Typically this will just be one, but if
        # there are merged drafts, we may have multiple parents.
        if repo[rev].phase() == phases.public:
            public.add(rev)
        else:
            queue = deque()
            queue.append(rev)
            seen = set()
            while queue:
                cur = queue.popleft()
                if cur not in seen:
                    seen.add(cur)
                    if repo[cur].mutable():
                        for p in parents(cur):
                            if p != nullrev:
                                queue.append(p)
                    else:
                        public.add(cur)

        def fastlog(repo, startrev, dirs, localmatch):
            filefunc = repo.changelog.readfiles
            for parent in lazyparents(startrev, public, parents):
                files = filefunc(parent)
                if dirmatches(files, dirs):
                    yield parent
            repo.ui.debug('found common parent at %s\n' % repo[parent].hex())
            for rev in combinator(repo, parent, dirs, localmatch):
                yield rev

        def combinator(repo, rev, dirs, localmatch):
            """combinator(repo, rev, dirs, localmatch)
            Make parallel local and remote queries along ancestors of
            rev along path and combine results, eliminating duplicates,
            restricting results to those which match dirs
            """
            LOCAL = 'L'
            REMOTE = 'R'
            queue = util.queue(FASTLOG_QUEUE_SIZE + 100)
            hash = repo[rev].hex()

            local = LocalIteratorThread(queue, LOCAL, rev,
                                        dirs, localmatch, repo)
            remote = FastLogThread(queue, REMOTE, reponame, 'hg', hash, dirs,
                                   repo)

            # Allow debugging either remote or local path
            debug = repo.ui.config('fastlog', 'debug')
            if debug != 'local':
                repo.ui.debug('starting fastlog at %s\n' % hash)
                remote.start()
            if debug != 'remote':
                local.start()
            seen = set([rev])

            try:
                while True:
                    try:
                        producer, success, msg = queue.get(True, 3600)
                    except util.empty:
                        raise error.Abort("Timeout reading log data")
                    if not success:
                        if producer == LOCAL:
                            raise error.Abort(msg)
                        elif msg:
                            repo.ui.log("hgfastlog", msg)
                            continue

                    if msg is None:
                        # Empty message means no more results
                        return

                    rev = msg
                    if debug:
                        if producer == LOCAL:
                            repo.ui.debug('LOCAL:: %s\n' % msg)
                        elif producer == REMOTE:
                            repo.ui.debug('REMOTE:: %s\n' % msg)

                    if rev not in seen:
                        seen.add(rev)
                        yield rev
            finally:
                local.stop()
                remote.stop()

        # Complex match - use a revset.
        complex = ['date', 'exclude', 'include', 'keyword', 'no_merges',
                   'only_merges', 'prune', 'user']
        if match.anypats() or any(opts.get(opt) for opt in complex):
            f = fastlog(repo, rev, dirs, None)
            revs = smartset.generatorset(f, iterasc=False)
            revs.reverse()
            if not revs:
                return smartset.baseset([]), None, None
            expr, filematcher = cmdutil._makelogrevset(repo, pats, opts, revs)
            matcher = revset.match(repo.ui, expr)
            matched = matcher(repo, revs)
            return matched, expr, filematcher
        else:
            # Simple match without revset shaves ~0.5 seconds off
            # hg log -l 100 -T ' ' on common directories.
            expr = 'fastlog(%s)' % ','.join(dirs)
            return fastlog(repo, rev, dirs, dirmatches), expr, None

    return orig(repo, pats, opts)
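getfastlogrevs relies on a few helpers that are not shown; dirmatches is the simplest. A minimal sketch, assuming dirs are normalized with a trailing '/' as done above:

def dirmatches(files, dirs):
    # true when any changed file lives under one of the queried dirs
    for f in files:
        for d in dirs:
            if f.startswith(d):
                return True
    return False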