Example #1
    def walk(self, match):
        fset = set(match.files())
        # for dirstate.walk, files=['.'] means "walk the whole tree".
        # follow that here, too
        fset.discard('.')

        # avoid the entire walk if we're only looking for specific files
        if fset and not match.anypats():
            if util.all([fn in self for fn in fset]):
                for fn in sorted(fset):
                    if match(fn):
                        yield fn
                raise StopIteration

        for fn in self:
            if fn in fset:
                # specified pattern is the exact name
                fset.remove(fn)
            if match(fn):
                yield fn
        for fn in sorted(fset):
            if fn in self._dirs:
                # specified pattern is a directory
                continue
            if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
                yield fn
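Every example in this listing calls util.all(...). A minimal sketch, assuming util.all is simply a compatibility alias for the built-in all() (the shim below is illustrative and not Mercurial's actual util.py):

# Hypothetical stand-in for util.all: use the built-in all() where it exists
# and fall back to a hand-rolled loop only on interpreters that lack it.
try:
    all
except NameError:
    def all(iterable):
        for element in iterable:
            if not element:
                return False
        return True

With such a shim, util.all([fn in self for fn in fset]) in the walk() above reads as "every requested file exists in this manifest", which is exactly the condition that gates the fast path.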
Example #2
    def walk(self, match):
        fset = set(match.files())
        # for dirstate.walk, files=['.'] means "walk the whole tree".
        # follow that here, too
        fset.discard('.')

        # avoid the entire walk if we're only looking for specific files
        if fset and not match.anypats():
            if util.all([fn in self for fn in fset]):
                for fn in sorted(fset):
                    if match(fn):
                        yield fn
                raise StopIteration

        for fn in self:
            if fn in fset:
                # specified pattern is the exact name
                fset.remove(fn)
            if match(fn):
                yield fn
        for fn in sorted(fset):
            if fn in self._dirs:
                # specified pattern is a directory
                continue
            match.bad(fn, _('no such file in rev %s') % self)
Example #3
def format(data):
    """Takes a list of MetricResults."""
    # Distinct values in data for forest plot variables:
    forestcombinations = util.combinations([util.unique(x, data) for x in config.fgraphs])
    forestcombinations = [dict(zip(config.fgraphs, x)) for x in forestcombinations]
    forestcombinations = filter(lambda x: x["benchmark"] in config.forests, forestcombinations)
    # Distinct values in data for bar chart variables:
    barcombinations = util.combinations([util.unique(x, data) for x in config.graphs])
    barcombinations = [dict(zip(config.graphs, x)) for x in barcombinations]
    barcombinations = filter(lambda x: not x["benchmark"] in config.forests, barcombinations)
    # Filter out omitted configurations.
    allcombinations = forestcombinations + barcombinations
    for omit in config.omit:
        sieve = lambda x: not util.all([x[y] == omit[y] for y in omit.keys() if x.has_key(y)])
        allcombinations = filter(sieve, allcombinations)
    # Create the graph objects.
    figures = []
    for configuration in allcombinations:
        if configuration in forestcombinations:
            graphtype = ForestPlot
        else:
            graphtype = BarChart
        sorteddata = graphtype.sortdata(data, configuration)
        if sorteddata:
            graph = graphtype(sorteddata)
        else:
            continue
        graph.create()
        figures.append(graph)
    return figures
Example #4
def format(data):
    """Takes a list of MetricResults."""
    # Distinct values in data for forest plot variables:
    forestcombinations = util.combinations(
        [util.unique(x, data) for x in config.fgraphs])
    forestcombinations = [
        dict(zip(config.fgraphs, x)) for x in forestcombinations
    ]
    forestcombinations = filter(lambda x: x["benchmark"] in config.forests,
                                forestcombinations)
    # Distinct values in data for bar chart variables:
    barcombinations = util.combinations(
        [util.unique(x, data) for x in config.graphs])
    barcombinations = [dict(zip(config.graphs, x)) for x in barcombinations]
    barcombinations = filter(lambda x: not x["benchmark"] in config.forests,
                             barcombinations)
    # Filter out omitted configurations.
    allcombinations = forestcombinations + barcombinations
    for omit in config.omit:
        sieve = lambda x: not util.all(
            [x[y] == omit[y] for y in omit.keys() if x.has_key(y)])
        allcombinations = filter(sieve, allcombinations)
    # Create the graph objects.
    figures = []
    for configuration in allcombinations:
        if configuration in forestcombinations:
            graphtype = ForestPlot
        else:
            graphtype = BarChart
        sorteddata = graphtype.sortdata(data, configuration)
        if sorteddata: graph = graphtype(sorteddata)
        else: continue
        graph.create()
        figures.append(graph)
    return figures
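Examples #3 and #4 use util.all inside the sieve lambda to drop any configuration that agrees with an omit dict on every key they share. A standalone sketch of that sieve with the built-in all() and hypothetical plain-dict inputs:

# Illustrative rewrite of the omit sieve from Examples #3/#4: a configuration
# is dropped when its values match some omit dict on every shared key (note
# that sharing no keys also drops it, since all() over nothing is True).
def sieve_omitted(combinations, omits):
    kept = []
    for combo in combinations:
        dropped = any(all(combo[k] == omit[k] for k in omit if k in combo)
                      for omit in omits)
        if not dropped:
            kept.append(combo)
    return kept

print(sieve_omitted([{"benchmark": "a", "vm": "x"}, {"benchmark": "b"}],
                    [{"benchmark": "a"}]))
# -> [{'benchmark': 'b'}]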
Example #5
    def matches(self, match):
        '''generate a new manifest filtered by the match argument'''
        if match.always():
            return self.copy()

        files = match.files()
        if (match.matchfn == match.exact or
            (not match.anypats() and util.all(fn in self for fn in files))):
            return self.intersectfiles(files)

        mf = self.copy()
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf
Example #6
    def matches(self, match):
        '''generate a new manifest filtered by the match argument'''
        if match.always():
            return self.copy()

        files = match.files()
        if (len(files) < 100
                and (match.matchfn == match.exact or
                     (not match.anypats() and util.all(fn in self
                                                       for fn in files)))):
            return self.intersectfiles(files)

        lm = manifestdict('')
        lm._lm = self._lm.filtercopy(match)
        return lm
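Examples #5 and #6 decide whether the manifest can be filtered by intersecting with the matcher's file list instead of scanning every manifest entry; #6 (like Example #14 further down) additionally caps the fast path at fewer than 100 files. A minimal sketch of that decision, with hypothetical argument names standing in for the matcher and manifest state:

# Sketch of the fast-path test from Examples #5/#6: take the cheap
# intersect-files path only for small, pattern-free file lists whose
# entries all exist in the manifest.
def use_fast_path(files, manifest, is_exact, has_patterns):
    return (len(files) < 100 and
            (is_exact or
             (not has_patterns and all(f in manifest for f in files))))

manifest = {"a.txt": "node1", "dir/b.txt": "node2"}
print(use_fast_path(["a.txt"], manifest, False, False))             # True
print(use_fast_path(["a.txt", "missing"], manifest, False, False))  # False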
Example #7
    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.matchfn == match.exact:
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if not match.anypats() and util.all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]
Example #8
    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.matchfn == match.exact:
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if not match.anypats() and util.all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]
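Examples #7 and #8 choose the direction of filtering: with an exact matcher it is cheaper to probe the (usually short) file list against the (usually large) dirstate map, and util.all guards a second fast path for the case where every named file is tracked. An illustrative sketch with a plain dict standing in for the dirstate map:

# Sketch of the two fast paths in Examples #7/#8 (dmap is a plain dict here).
def dirstate_matches(files, dmap, is_exact, has_patterns, matchfn):
    if is_exact:
        # probe the short file list against the big map
        return [f for f in files if f in dmap]
    if not has_patterns and all(f in dmap for f in files):
        # every named file is tracked, so the file list itself is the answer
        return list(files)
    # slow path: test every tracked file against the matcher
    return [f for f in dmap if matchfn(f)]

dmap = {"a.txt": 1, "b.txt": 2, "c.txt": 3}
print(dirstate_matches(["b.txt", "missing"], dmap, True, False, None))
# -> ['b.txt']
print(dirstate_matches(["a.txt", "b.txt"], dmap, False, False, None))
# -> ['a.txt', 'b.txt']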
Example #9
def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
                     acceptremote, followcopies):
    "Calculate the actions needed to merge mctx into wctx using ancestors"

    if len(ancestors) == 1:  # default
        actions = manifestmerge(repo, wctx, mctx, ancestors[0], branchmerge,
                                force, partial, acceptremote, followcopies)

    else:  # only when merge.preferancestor=* - the default
        repo.ui.note(
            _("note: merging %s and %s using bids from ancestors %s\n") %
            (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))

        # Call for bids
        fbids = {
        }  # mapping filename to bids (action method to list of actions)
        for ancestor in ancestors:
            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
            actions = manifestmerge(repo, wctx, mctx, ancestor, branchmerge,
                                    force, partial, acceptremote, followcopies)
            for m, l in sorted(actions.items()):
                for a in l:
                    f, args, msg = a
                    repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
                    if f in fbids:
                        d = fbids[f]
                        if m in d:
                            d[m].append(a)
                        else:
                            d[m] = [a]
                    else:
                        fbids[f] = {m: [a]}

        # Pick the best bid for each file
        repo.ui.note(_('\nauction for merging merge bids\n'))
        actions = dict((m, []) for m in actions.keys())
        for f, bids in sorted(fbids.items()):
            # bids is a mapping from action method to list of actions
            # Consensus?
            if len(bids) == 1:  # all bids are the same kind of method
                m, l = bids.items()[0]
                if util.all(a == l[0] for a in l[1:]):  # len(bids) is > 1
                    repo.ui.note(" %s: consensus for %s\n" % (f, m))
                    actions[m].append(l[0])
                    continue
            # If keep is an option, just do it.
            if "k" in bids:
                repo.ui.note(" %s: picking 'keep' action\n" % f)
                actions['k'].append(bids["k"][0])
                continue
            # If there are gets and they all agree [how could they not?], do it.
            if "g" in bids:
                ga0 = bids["g"][0]
                if util.all(a == ga0 for a in bids["g"][1:]):
                    repo.ui.note(" %s: picking 'get' action\n" % f)
                    actions['g'].append(ga0)
                    continue
            # TODO: Consider other simple actions such as mode changes
            # Handle inefficient democrazy.
            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
            for m, l in sorted(bids.items()):
                for _f, args, msg in l:
                    repo.ui.note('  %s -> %s\n' % (msg, m))
            # Pick random action. TODO: Instead, prompt user when resolving
            m, l = bids.items()[0]
            repo.ui.warn(
                _(' %s: ambiguous merge - picked %s action\n') % (f, m))
            actions[m].append(l[0])
            continue
        repo.ui.note(_('end of auction\n\n'))

    # Prompt and create actions. TODO: Move this towards resolve phase.
    for f, args, msg in actions['cd']:
        if f in ancestors[0] and not wctx[f].cmp(ancestors[0][f]):
            # local did change but ended up with same content
            actions['r'].append((f, None, "prompt same"))
        elif repo.ui.promptchoice(
                _("local changed %s which remote deleted\n"
                  "use (c)hanged version or (d)elete?"
                  "$$ &Changed $$ &Delete") % f, 0):
            actions['r'].append((f, None, "prompt delete"))
        else:
            actions['a'].append((f, None, "prompt keep"))
    del actions['cd'][:]

    for f, args, msg in actions['dc']:
        flags, = args
        if f in ancestors[0] and not mctx[f].cmp(ancestors[0][f]):
            # remote did change but ended up with same content
            pass  # don't get = keep local deleted
        elif repo.ui.promptchoice(
                _("remote changed %s which local deleted\n"
                  "use (c)hanged version or leave (d)eleted?"
                  "$$ &Changed $$ &Deleted") % f, 0) == 0:
            actions['g'].append((f, (flags, ), "prompt recreating"))
    del actions['dc'][:]

    if wctx.rev() is None:
        ractions, factions = _forgetremoved(wctx, mctx, branchmerge)
        actions['r'].extend(ractions)
        actions['f'].extend(factions)

    return actions
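In the calculateupdates variants (Examples #9, #11, #12 and #17), util.all implements the consensus checks of the bid auction: a group of bids is only taken without prompting when every bid in the group is identical. A standalone sketch of that check (the bid tuples here are purely illustrative):

# Sketch of the consensus test from the bid auction: a non-empty list of bids
# is a consensus when every later bid equals the first one (all() over an
# empty tail is trivially True, so a single bid is always a consensus).
def consensus(bids):
    return all(b == bids[0] for b in bids[1:])

print(consensus([("f", None, "get"), ("f", None, "get")]))   # True
print(consensus([("f", None, "get"), ("f", None, "keep")]))  # False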
Example #10
def findcommonheads(ui,
                    local,
                    remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.
    '''
    roundtrips = 0
    cl = local.changelog
    dag = dagutil.revlogdag(cl)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = ownheads
    if remote.local():
        # stopgap until we have a proper localpeer that supports batch()
        srvheadhashes = remote.heads()
        yesno = remote.known(dag.externalizeall(sample))
    elif remote.capable('batch'):
        batch = remote.batch()
        srvheadhashesref = batch.heads()
        yesnoref = batch.known(dag.externalizeall(sample))
        batch.submit()
        srvheadhashes = srvheadhashesref.value
        yesno = yesnoref.value
    else:
        # compatibility with pre-batch, but post-known remotes during 1.9
        # development
        srvheadhashes = remote.heads()
        sample = []

    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return (
            srvheadhashes,
            False,
            srvheadhashes,
        )

    if sample and util.all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = dag.externalizeall(ownheads)
        return (
            ownheadhashes,
            True,
            srvheadhashes,
        )

    # full blown discovery

    # own nodes where I don't know if remote knows them
    undecided = dag.nodeset()
    # own nodes I know we both know
    common = set()
    # own nodes I know remote lacks
    missing = set()

    # treat remote heads (and maybe own heads) as a first implicit sample
    # response
    common.update(dag.ancestorset(srvheads))
    undecided.difference_update(common)

    full = False
    while undecided:

        if sample:
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.update(dag.ancestorset(commoninsample, common))

            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

            undecided.difference_update(missing)
            undecided.difference_update(common)

        if not undecided:
            break

        if full:
            ui.note(_("sampling from both directions\n"))
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        elif common:
            # use cheapish initial sample
            ui.debug("taking initial sample\n")
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            sample = _takequicksample(dag,
                                      undecided,
                                      size=initialsamplesize,
                                      initial=True)

        roundtrips += 1
        ui.progress(_('searching'), roundtrips, unit=_('queries'))
        ui.debug("query %i; still undecided: %i, sample size is: %i\n" %
                 (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)
        yesno = remote.known(dag.externalizeall(sample))
        full = True

    result = dag.headsetofconnecteds(common)
    ui.progress(_('searching'), None)
    ui.debug("%d total queries\n" % roundtrips)

    if not result and srvheadhashes != [nullid]:
        if abortwhenunrelated:
            raise util.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return (
            set([nullid]),
            True,
            srvheadhashes,
        )

    anyincoming = (srvheadhashes != [nullid])
    return dag.externalizeall(result), anyincoming, srvheadhashes
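In the findcommonheads variants (Examples #10, #15 and #16), util.all(yesno) is the early-exit test of set discovery: yesno holds the remote's per-head answers for the sampled local heads, and when every answer is True all local heads are already known remotely, so no further query rounds are needed. A minimal sketch of that check (names are illustrative):

# Sketch of the early exit in Examples #10/#15/#16: stop discovery as soon as
# the sample is non-empty and the remote said "yes" for every sampled head.
def all_heads_known_remotely(sample, yesno):
    return bool(sample) and all(yesno)

print(all_heads_known_remotely(["head1", "head2"], [True, True]))   # True
print(all_heads_known_remotely(["head1", "head2"], [True, False]))  # False
print(all_heads_known_remotely([], []))                             # False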
Example #11
def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
                     acceptremote, followcopies):
    "Calculate the actions needed to merge mctx into wctx using ancestors"

    if len(ancestors) == 1: # default
        actions = manifestmerge(repo, wctx, mctx, ancestors[0],
                                branchmerge, force,
                                partial, acceptremote, followcopies)

    else: # only when merge.preferancestor=* - experimentalish code
        repo.ui.status(
            _("note: merging %s and %s using bids from ancestors %s\n") %
            (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))

        # Call for bids
        fbids = {} # mapping filename to list of action bids
        for ancestor in ancestors:
            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
            actions = manifestmerge(repo, wctx, mctx, ancestor,
                                    branchmerge, force,
                                    partial, acceptremote, followcopies)
            for a in sorted(actions):
                repo.ui.debug(' %s: %s\n' % (a[0], a[1]))
                f = a[0]
                if f in fbids:
                    fbids[f].append(a)
                else:
                    fbids[f] = [a]

        # Pick the best bid for each file
        repo.ui.note(_('\nauction for merging merge bids\n'))
        actions = []
        for f, bidsl in sorted(fbids.items()):
            # Consensus?
            a0 = bidsl[0]
            if util.all(a == a0 for a in bidsl[1:]): # len(bidsl) is > 1
                repo.ui.note(" %s: consensus for %s\n" % (f, a0[1]))
                actions.append(a0)
                continue
            # Group bids by kind of action
            bids = {}
            for a in bidsl:
                m = a[1]
                if m in bids:
                    bids[m].append(a)
                else:
                    bids[m] = [a]
            # If keep is an option, just do it.
            if "k" in bids:
                repo.ui.note(" %s: picking 'keep' action\n" % f)
                actions.append(bids["k"][0])
                continue
            # If all gets agree [how could they not?], just do it.
            if "g" in bids:
                ga0 = bids["g"][0]
                if util.all(a == ga0 for a in bids["g"][1:]):
                    repo.ui.note(" %s: picking 'get' action\n" % f)
                    actions.append(ga0)
                    continue
            # TODO: Consider other simple actions such as mode changes
            # Handle inefficient democrazy.
            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
            for _f, m, args, msg in bidsl:
                repo.ui.note('  %s -> %s\n' % (msg, m))
            # Pick random action. TODO: Instead, prompt user when resolving
            a0 = bidsl[0]
            repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
                         (f, a0[1]))
            actions.append(a0)
            continue
        repo.ui.note(_('end of auction\n\n'))

    # Filter out prompts.
    newactions, prompts = [], []
    for a in actions:
        if a[1] in ("cd", "dc"):
            prompts.append(a)
        else:
            newactions.append(a)
    # Prompt and create actions. TODO: Move this towards resolve phase.
    for f, m, args, msg in sorted(prompts):
        if m == "cd":
            if repo.ui.promptchoice(
                _("local changed %s which remote deleted\n"
                  "use (c)hanged version or (d)elete?"
                  "$$ &Changed $$ &Delete") % f, 0):
                newactions.append((f, "r", None, "prompt delete"))
            else:
                newactions.append((f, "a", None, "prompt keep"))
        elif m == "dc":
            flags, = args
            if repo.ui.promptchoice(
                _("remote changed %s which local deleted\n"
                  "use (c)hanged version or leave (d)eleted?"
                  "$$ &Changed $$ &Deleted") % f, 0) == 0:
                newactions.append((f, "g", (flags,), "prompt recreating"))
        else: assert False, m

    if wctx.rev() is None:
        newactions += _forgetremoved(wctx, mctx, branchmerge)

    return newactions
Example #12
def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
                     acceptremote, followcopies):
    "Calculate the actions needed to merge mctx into wctx using ancestors"

    if len(ancestors) == 1: # default
        actions = manifestmerge(repo, wctx, mctx, ancestors[0],
                                branchmerge, force,
                                partial, acceptremote, followcopies)

    else: # only when merge.preferancestor=* - the default
        repo.ui.note(
            _("note: merging %s and %s using bids from ancestors %s\n") %
            (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))

        # Call for bids
        fbids = {} # mapping filename to bids (action method to list of actions)
        for ancestor in ancestors:
            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
            actions = manifestmerge(repo, wctx, mctx, ancestor,
                                    branchmerge, force,
                                    partial, acceptremote, followcopies)
            for m, l in sorted(actions.items()):
                for a in l:
                    f, args, msg = a
                    repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
                    if f in fbids:
                        d = fbids[f]
                        if m in d:
                            d[m].append(a)
                        else:
                            d[m] = [a]
                    else:
                        fbids[f] = {m: [a]}

        # Pick the best bid for each file
        repo.ui.note(_('\nauction for merging merge bids\n'))
        actions = dict((m, []) for m in actions.keys())
        for f, bids in sorted(fbids.items()):
            # bids is a mapping from action method to list of actions
            # Consensus?
            if len(bids) == 1: # all bids are the same kind of method
                m, l = bids.items()[0]
                if util.all(a == l[0] for a in l[1:]): # len(bids) is > 1
                    repo.ui.note(" %s: consensus for %s\n" % (f, m))
                    actions[m].append(l[0])
                    continue
            # If keep is an option, just do it.
            if "k" in bids:
                repo.ui.note(" %s: picking 'keep' action\n" % f)
                actions['k'].append(bids["k"][0])
                continue
            # If there are gets and they all agree [how could they not?], do it.
            if "g" in bids:
                ga0 = bids["g"][0]
                if util.all(a == ga0 for a in bids["g"][1:]):
                    repo.ui.note(" %s: picking 'get' action\n" % f)
                    actions['g'].append(ga0)
                    continue
            # TODO: Consider other simple actions such as mode changes
            # Handle inefficient democrazy.
            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
            for m, l in sorted(bids.items()):
                for _f, args, msg in l:
                    repo.ui.note('  %s -> %s\n' % (msg, m))
            # Pick random action. TODO: Instead, prompt user when resolving
            m, l = bids.items()[0]
            repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
                         (f, m))
            actions[m].append(l[0])
            continue
        repo.ui.note(_('end of auction\n\n'))

    # Prompt and create actions. TODO: Move this towards resolve phase.
    for f, args, msg in actions['cd']:
        if f in ancestors[0] and not wctx[f].cmp(ancestors[0][f]):
            # local did change but ended up with same content
            actions['r'].append((f, None, "prompt same"))
        elif repo.ui.promptchoice(
            _("local changed %s which remote deleted\n"
              "use (c)hanged version or (d)elete?"
              "$$ &Changed $$ &Delete") % f, 0):
            actions['r'].append((f, None, "prompt delete"))
        else:
            actions['a'].append((f, None, "prompt keep"))
    del actions['cd'][:]

    for f, args, msg in actions['dc']:
        flags, = args
        if f in ancestors[0] and not mctx[f].cmp(ancestors[0][f]):
            # remote did change but ended up with same content
            pass # don't get = keep local deleted
        elif repo.ui.promptchoice(
            _("remote changed %s which local deleted\n"
              "use (c)hanged version or leave (d)eleted?"
              "$$ &Changed $$ &Deleted") % f, 0) == 0:
            actions['g'].append((f, (flags,), "prompt recreating"))
    del actions['dc'][:]

    if wctx.rev() is None:
        ractions, factions = _forgetremoved(wctx, mctx, branchmerge)
        actions['r'].extend(ractions)
        actions['f'].extend(factions)

    return actions
Example #13
    def _isempty(self):
        return (not self._files and (not self._dirs or
                util.all(m._isempty() for m in self._dirs.values())))
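Example #13 applies util.all recursively: a directory node is empty exactly when it holds no files and every subdirectory is itself empty. A self-contained sketch with a hypothetical node class:

# Standalone sketch of the recursive emptiness test from Example #13.
class TreeNode(object):
    def __init__(self, files=None, dirs=None):
        self.files = files or []   # file names stored directly in this node
        self.dirs = dirs or {}     # subdirectory name -> child TreeNode

    def isempty(self):
        return (not self.files and
                all(child.isempty() for child in self.dirs.values()))

print(TreeNode().isempty())                                      # True
print(TreeNode(dirs={"sub": TreeNode(files=["f"])}).isempty())   # False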
Example #14
    def _filesfastpath(self, match):
        '''Checks whether we can correctly and quickly iterate over matcher
        files instead of over manifest files.'''
        files = match.files()
        return (len(files) < 100 and (match.isexact() or
            (not match.anypats() and util.all(fn in self for fn in files))))
Example #15
def findcommonheads(ui, local, remote, initialsamplesize=100, fullsamplesize=200, abortwhenunrelated=True):
    """Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.

    shortcutlocal determines whether we try to use direct access to localrepo if
    remote is actually local.
    """
    roundtrips = 0
    cl = local.changelog
    dag = dagutil.revlogdag(cl)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = ownheads
    if remote.local():
        # stopgap until we have a proper localpeer that supports batch()
        srvheadhashes = phases.visibleheads(remote)
        yesno = remote.known(dag.externalizeall(sample))
    elif remote.capable("batch"):
        batch = remote.batch()
        srvheadhashesref = batch.heads()
        yesnoref = batch.known(dag.externalizeall(sample))
        batch.submit()
        srvheadhashes = srvheadhashesref.value
        yesno = yesnoref.value
    else:
        # compatibility with pre-batch, but post-known remotes during 1.9 devel
        srvheadhashes = remote.heads()
        sample = []

    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return (srvheadhashes, False, srvheadhashes)

    if sample and util.all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = dag.externalizeall(ownheads)
        return (ownheadhashes, True, srvheadhashes)

    # full blown discovery
    undecided = dag.nodeset()  # own nodes where I don't know if remote knows them
    common = set()  # own nodes I know we both know
    missing = set()  # own nodes I know remote lacks

    # treat remote heads (and maybe own heads) as a first implicit sample response
    common.update(dag.ancestorset(srvheads))
    undecided.difference_update(common)

    full = False
    while undecided:

        if sample:
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.update(dag.ancestorset(commoninsample, common))

            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

            undecided.difference_update(missing)
            undecided.difference_update(common)

        if not undecided:
            break

        if full:
            ui.note(_("sampling from both directions\n"))
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        elif common:
            # use cheapish initial sample
            ui.debug("taking initial sample\n")
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            sample = _takequicksample(dag, undecided, size=initialsamplesize, initial=True)

        roundtrips += 1
        ui.progress(_("searching"), roundtrips, unit=_("queries"))
        ui.debug("query %i; still undecided: %i, sample size is: %i\n" % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)
        yesno = remote.known(dag.externalizeall(sample))
        full = True

    result = dag.headsetofconnecteds(common)
    ui.progress(_("searching"), None)
    ui.debug("%d total queries\n" % roundtrips)

    if not result and srvheadhashes != [nullid]:
        if abortwhenunrelated:
            raise util.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return (set([nullid]), True, srvheadhashes)

    anyincoming = srvheadhashes != [nullid]
    return dag.externalizeall(result), anyincoming, srvheadhashes
Example #16
def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.
    '''
    roundtrips = 0
    cl = local.changelog
    dag = dagutil.revlogdag(cl)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = _limitsample(ownheads, initialsamplesize)
    # indices between sample and externalized version must match
    sample = list(sample)
    if remote.local():
        # stopgap until we have a proper localpeer that supports batch()
        srvheadhashes = remote.heads()
        yesno = remote.known(dag.externalizeall(sample))
    elif remote.capable('batch'):
        batch = remote.batch()
        srvheadhashesref = batch.heads()
        yesnoref = batch.known(dag.externalizeall(sample))
        batch.submit()
        srvheadhashes = srvheadhashesref.value
        yesno = yesnoref.value
    else:
        # compatibility with pre-batch, but post-known remotes during 1.9
        # development
        srvheadhashes = remote.heads()
        sample = []

    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return (srvheadhashes, False, srvheadhashes,)

    if sample and len(ownheads) <= initialsamplesize and util.all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = dag.externalizeall(ownheads)
        return (ownheadhashes, True, srvheadhashes,)

    # full blown discovery

    # own nodes I know we both know
    # treat remote heads (and maybe own heads) as a first implicit sample
    # response
    common = cl.incrementalmissingrevs(srvheads)
    commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
    common.addbases(commoninsample)
    # own nodes where I don't know if remote knows them
    undecided = set(common.missingancestors(ownheads))
    # own nodes I know remote lacks
    missing = set()

    full = False
    while undecided:

        if sample:
            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

            undecided.difference_update(missing)

        if not undecided:
            break

        if full or common.hasbases():
            if full:
                ui.note(_("sampling from both directions\n"))
            else:
                ui.debug("taking initial sample\n")
            samplefunc = _takefullsample
            targetsize = fullsamplesize
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            samplefunc = _takequicksample
            targetsize = initialsamplesize
        if len(undecided) < targetsize:
            sample = list(undecided)
        else:
            sample = samplefunc(dag, undecided, targetsize)
            sample = _limitsample(sample, targetsize)

        roundtrips += 1
        ui.progress(_('searching'), roundtrips, unit=_('queries'))
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)
        yesno = remote.known(dag.externalizeall(sample))
        full = True

        if sample:
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.addbases(commoninsample)
            common.removeancestorsfrom(undecided)

    # heads(common) == heads(common.bases) since common represents common.bases
    # and all its ancestors
    result = dag.headsetofconnecteds(common.bases)
    # common.bases can include nullrev, but our contract requires us to not
    # return any heads in that case, so discard that
    result.discard(nullrev)
    ui.progress(_('searching'), None)
    ui.debug("%d total queries\n" % roundtrips)

    if not result and srvheadhashes != [nullid]:
        if abortwhenunrelated:
            raise util.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return (set([nullid]), True, srvheadhashes,)

    anyincoming = (srvheadhashes != [nullid])
    return dag.externalizeall(result), anyincoming, srvheadhashes
Example #17
def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
                     acceptremote, followcopies):
    "Calculate the actions needed to merge mctx into wctx using ancestors"

    if len(ancestors) == 1: # default
        actions, diverge, renamedelete = manifestmerge(
            repo, wctx, mctx, ancestors[0], branchmerge, force, partial,
            acceptremote, followcopies)
        _checkunknownfiles(repo, wctx, mctx, force, actions)

    else: # only when merge.preferancestor=* - the default
        repo.ui.note(
            _("note: merging %s and %s using bids from ancestors %s\n") %
            (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))

        # Call for bids
        fbids = {} # mapping filename to bids (action method to list of actions)
        diverge, renamedelete = None, None
        for ancestor in ancestors:
            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
            actions, diverge1, renamedelete1 = manifestmerge(
                repo, wctx, mctx, ancestor, branchmerge, force, partial,
                acceptremote, followcopies)
            _checkunknownfiles(repo, wctx, mctx, force, actions)
            if diverge is None: # and renamedelete is None.
                # Arbitrarily pick warnings from first iteration
                diverge = diverge1
                renamedelete = renamedelete1
            for f, a in sorted(actions.iteritems()):
                m, args, msg = a
                repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
                if f in fbids:
                    d = fbids[f]
                    if m in d:
                        d[m].append(a)
                    else:
                        d[m] = [a]
                else:
                    fbids[f] = {m: [a]}

        # Pick the best bid for each file
        repo.ui.note(_('\nauction for merging merge bids\n'))
        actions = {}
        for f, bids in sorted(fbids.items()):
            # bids is a mapping from action method to list of actions
            # Consensus?
            if len(bids) == 1: # all bids are the same kind of method
                m, l = bids.items()[0]
                if util.all(a == l[0] for a in l[1:]): # len(bids) is > 1
                    repo.ui.note(" %s: consensus for %s\n" % (f, m))
                    actions[f] = l[0]
                    continue
            # If keep is an option, just do it.
            if 'k' in bids:
                repo.ui.note(" %s: picking 'keep' action\n" % f)
                actions[f] = bids['k'][0]
                continue
            # If there are gets and they all agree [how could they not?], do it.
            if 'g' in bids:
                ga0 = bids['g'][0]
                if util.all(a == ga0 for a in bids['g'][1:]):
                    repo.ui.note(" %s: picking 'get' action\n" % f)
                    actions[f] = ga0
                    continue
            # TODO: Consider other simple actions such as mode changes
            # Handle inefficient democrazy.
            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
            for m, l in sorted(bids.items()):
                for _f, args, msg in l:
                    repo.ui.note('  %s -> %s\n' % (msg, m))
            # Pick random action. TODO: Instead, prompt user when resolving
            m, l = bids.items()[0]
            repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
                         (f, m))
            actions[f] = l[0]
            continue
        repo.ui.note(_('end of auction\n\n'))

    _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)

    if wctx.rev() is None:
        fractions = _forgetremoved(wctx, mctx, branchmerge)
        actions.update(fractions)

    return actions, diverge, renamedelete