Example 1
def overlaycontext(memworkingcopy,
                   ctx,
                   parents=None,
                   extra=None,
                   loginfo=None,
                   mutinfo=None):
    """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
    memworkingcopy overrides file contents.
    """
    # parents must contain 2 items: (node1, node2)
    if parents is None:
        parents = ctx.repo().changelog.parents(ctx.node())
    if extra is None:
        extra = ctx.extra()
    date = ctx.date()
    desc = ctx.description()
    user = ctx.user()
    files = set(ctx.files()).union(pycompat.iterkeys(memworkingcopy))
    store = overlaystore(ctx, memworkingcopy)
    return context.memctx(
        repo=ctx.repo(),
        parents=parents,
        text=desc,
        files=files,
        filectxfn=store,
        user=user,
        date=date,
        branch=None,
        extra=extra,
        loginfo=loginfo,
        mutinfo=mutinfo,
    )
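The key idea is overlay precedence: content from memworkingcopy wins over the underlying ctx. Below is a minimal, dependency-free sketch of that lookup order, with plain dicts standing in for Mercurial's file store (all names in it are illustrative, not part of the API above):

def overlay_read(memworkingcopy, basefiles, path):
    # The in-memory working copy takes precedence; fall back to the base
    # commit's content otherwise.
    if path in memworkingcopy:
        return memworkingcopy[path]
    return basefiles[path]

basefiles = {"a.txt": b"old\n", "b.txt": b"keep\n"}
memworkingcopy = {"a.txt": b"new\n"}
assert overlay_read(memworkingcopy, basefiles, "a.txt") == b"new\n"
assert overlay_read(memworkingcopy, basefiles, "b.txt") == b"keep\n"
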
Example 2
    def _willbecomenoop(memworkingcopy, ctx, pctx=None):
        """({path: content}, ctx, ctx) -> bool. test if a commit will be noop

        if it will become an empty commit (does not change anything, after the
        memworkingcopy overrides), return True. otherwise return False.
        """
        if not pctx:
            parents = ctx.parents()
            if len(parents) != 1:
                return False
            pctx = parents[0]
        # ctx changes more files (not a subset of memworkingcopy)
        if not set(ctx.files()).issubset(set(
                pycompat.iterkeys(memworkingcopy))):
            return False
        for path, content in pycompat.iteritems(memworkingcopy):
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
            pfctx = pctx[path]
            if pfctx.flags() != fctx.flags():
                return False
            if pfctx.data() != content:
                return False
        return True
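The same no-op test can be exercised without a repository: a rewrite is a no-op when every overridden path already has identical content and flags in the single parent. A hedged, dependency-free sketch, where path -> (content, flags) dicts stand in for changectx objects:

def will_become_noop(memworkingcopy, ctxfiles, pctxfiles):
    # The commit must not touch files outside the overlay.
    if not set(ctxfiles).issubset(memworkingcopy):
        return False
    for path, content in memworkingcopy.items():
        if path not in pctxfiles or path not in ctxfiles:
            return False
        pcontent, pflags = pctxfiles[path]
        _, flags = ctxfiles[path]
        if pflags != flags or pcontent != content:
            return False
    return True

parent = {"a": (b"x\n", "")}
child = {"a": (b"y\n", "")}
# Overriding "a" back to the parent's content makes the commit empty.
assert will_become_noop({"a": b"x\n"}, child, parent)
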
Example 3
    def setrevmap(self, revmap):
        lastrevs = {}
        for revid in pycompat.iterkeys(revmap):
            uuid, module, revnum = revsplit(revid)
            lastrevnum = lastrevs.setdefault(module, revnum)
            if revnum > lastrevnum:
                lastrevs[module] = revnum
        self.lastrevs = lastrevs
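The pattern being iterated here is a per-key maximum built with dict.setdefault. A stand-alone sketch, with (module, revnum) pairs standing in for split Subversion revision ids:

def lastrevs_by_module(revids):
    lastrevs = {}
    for module, revnum in revids:
        # setdefault seeds the first revnum seen for a module, then later
        # entries only overwrite it when they are larger.
        lastrevnum = lastrevs.setdefault(module, revnum)
        if revnum > lastrevnum:
            lastrevs[module] = revnum
    return lastrevs

assert lastrevs_by_module(
    [("trunk", 3), ("trunk", 7), ("branches/x", 5)]
) == {"trunk": 7, "branches/x": 5}
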
Example 4
    def _buildrevgraph(self, a, b):
        """Builds a numeric revision graph for the given two nodes.
        Returns a node->rev map and a rev->[revs] parent function.
        """
        amap = self.ancestormap(a)
        bmap = self.ancestormap(b)

        # Union the two maps
        parentsmap = collections.defaultdict(list)
        allparents = set()
        for mapping in (amap, bmap):
            for node, pdata in pycompat.iteritems(mapping):
                parents = parentsmap[node]
                p1, p2, linknode, copyfrom = pdata
                # Don't follow renames (copyfrom).
                # remotefilectx.ancestor does that.
                if p1 != nullid and not copyfrom:
                    parents.append(p1)
                    allparents.add(p1)
                if p2 != nullid:
                    parents.append(p2)
                    allparents.add(p2)

        # Breadth first traversal to build linkrev graph
        parentrevs = collections.defaultdict(list)
        revmap = {}
        queue = collections.deque(
            ((None, n) for n in pycompat.iterkeys(parentsmap) if n not in allparents)
        )
        while queue:
            prevrev, current = queue.pop()
            if current in revmap:
                if prevrev:
                    parentrevs[prevrev].append(revmap[current])
                continue

            # Assign linkrevs in reverse order, so start at
            # len(parentsmap) and work backwards.
            currentrev = len(parentsmap) - len(revmap) - 1
            revmap[current] = currentrev

            if prevrev:
                parentrevs[prevrev].append(currentrev)

            for parent in parentsmap.get(current):
                queue.appendleft((currentrev, parent))

        return revmap, parentrevs.__getitem__
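The numbering scheme is easier to see on a toy graph: roots (nodes that are nobody's parent) are seeded into a deque, and each newly visited node gets the next number counting down from len(parentsmap) - 1, so heads end up with the highest numbers. A simplified, self-contained sketch of the traversal above:

import collections

def number_nodes(parentsmap):
    # parentsmap: node -> list of parent nodes (a plain dict here).
    allparents = {p for ps in parentsmap.values() for p in ps}
    revmap = {}
    queue = collections.deque(
        (None, n) for n in parentsmap if n not in allparents
    )
    while queue:
        prevrev, current = queue.pop()
        if current in revmap:
            continue
        # Assign numbers in reverse order, as in _buildrevgraph above.
        revmap[current] = len(parentsmap) - len(revmap) - 1
        for parent in parentsmap.get(current, ()):
            queue.appendleft((revmap[current], parent))
    return revmap

# Linear history a <- b <- c: the head ends up with the highest number.
assert number_nodes({"a": [], "b": ["a"], "c": ["b"]}) == {"c": 2, "b": 1, "a": 0}
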
Example 5
def _moveto(repo, bookmark, ctx, clean=False):
    """Moves the given bookmark and the working copy to the given revision.
    By default it does not overwrite the working copy contents unless clean is
    True.

    Assumes the wlock is already taken.
    """
    # Move working copy over
    if clean:
        merge.update(
            repo,
            ctx.node(),
            False,  # not a branchmerge
            True,  # force overwriting files
            None,  # not a partial update
        )
    else:
        # Mark any files that are different between the two as normal-lookup
        # so they show up correctly in hg status afterwards.
        wctx = repo[None]
        m1 = wctx.manifest()
        m2 = ctx.manifest()
        diff = m1.diff(m2)

        changedfiles = []
        changedfiles.extend(pycompat.iterkeys(diff))

        dirstate = repo.dirstate
        dirchanges = [f for f in dirstate if dirstate[f] != "n"]
        changedfiles.extend(dirchanges)

        if changedfiles or ctx.node() != repo["."].node():
            with dirstate.parentchange():
                dirstate.rebuild(ctx.node(), m2, changedfiles)

    # Move bookmark over
    if bookmark:
        lock = tr = None
        try:
            lock = repo.lock()
            tr = repo.transaction("reset")
            changes = [(bookmark, ctx.node())]
            repo._bookmarks.applychanges(repo, tr, changes)
            tr.close()
        finally:
            lockmod.release(lock, tr)
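The changed-file list above is the union of two sources: paths whose manifest entries differ between the working copy and the target, and paths the dirstate already considers dirty (status other than "n"). A plain-dict sketch of that union (names are illustrative):

def collect_changed_files(m1, m2, dirstate):
    # Paths whose node differs between the two manifests...
    changed = [p for p in set(m1) | set(m2) if m1.get(p) != m2.get(p)]
    # ...plus anything the dirstate does not track as clean ("n").
    changed.extend(f for f, status in dirstate.items() if status != "n")
    return changed

m1 = {"a": "node1", "b": "node1"}   # working-copy manifest
m2 = {"a": "node2", "b": "node1"}   # target manifest
dirstate = {"a": "n", "c": "a"}     # "c" was added locally
assert collect_changed_files(m1, m2, dirstate) == ["a", "c"]
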
Example 6
def ascii(orig, ui, state, type, char, text, coldata):
    if type == "F":
        # The fake node is used to move draft changesets to the 2nd column.
        # There can be at most one fake node, and it should be at the top of
        # the graph. We should not draw the fake node or its edges, so change
        # its edge style to a space and return directly.
        # This is hacky, but it works well and there seems to be no other
        # easy choice for now.
        edgemap = state["edges"]
        for k in pycompat.iterkeys(edgemap):
            edgemap[k] = " "
        # We also need to hack _drawendinglines to draw the missing '|'s:
        #    (before)      (after)
        #     o draft       o draft
        #    /             /
        #                 |
        #   o             o
        extensions.wrapfunction(graphmod, "_drawendinglines", _drawendinglines)
        return
    orig(ui, state, type, char, text, coldata)
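The interesting move here is extensions.wrapfunction, which swaps a module attribute for a wrapper that receives the original callable as its first argument. A minimal stand-in for that pattern, with a SimpleNamespace playing the role of graphmod (everything below is illustrative, not Mercurial's implementation):

import types

def wrapfunction(container, name, wrapper):
    # Replace container.name with a closure that forwards to wrapper,
    # passing the original callable first.
    orig = getattr(container, name)
    def wrapped(*args, **kwargs):
        return wrapper(orig, *args, **kwargs)
    setattr(container, name, wrapped)
    return orig

graph = types.SimpleNamespace(draw=lambda text: "| " + text)

def quiet_draw(orig, text):
    # Blank the edge column, mirroring how ascii() blanks edgemap entries.
    return orig(text).replace("|", " ")

wrapfunction(graph, "draw", quiet_draw)
assert graph.draw("draft") == "  draft"
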
Example 7
def unamend(ui, repo, **opts):
    """undo the last amend operation on the current commit

    Reverse the effects of an :hg:`amend` operation. Hides the current commit
    and checks out the previous version of the commit. :hg:`unamend` does not
    revert the state of the working copy, so changes that were added to the
    commit in the last amend operation become pending changes in the working
    copy.

    :hg:`unamend` cannot be run on amended commits that have children. In
    other words, you cannot unamend an amended commit in the middle of a
    stack.

    .. note::

        Running :hg:`unamend` is similar to running :hg:`undo --keep`
        immediately after :hg:`amend`. However, unlike :hg:`undo`, which can
        only undo an amend if it was the last operation you performed,
        :hg:`unamend` can unamend any draft amended commit in the graph that
        does not have children.

    .. container:: verbose

      Although :hg:`unamend` is typically used to reverse the effects of
      :hg:`amend`, it actually rolls back the current commit to its previous
      version, regardless of whether the changes resulted from an :hg:`amend`
      operation or from another operation, such as :hg:`rebase`.
    """
    unfi = repo

    # identify the commit from which to unamend
    curctx = repo["."]

    # identify the commit to which to unamend
    if mutation.enabled(repo):
        prednodes = curctx.mutationpredecessors()
        if not prednodes:
            prednodes = []
    else:
        prednodes = [marker.prednode() for marker in predecessormarkers(curctx)]

    if len(prednodes) != 1:
        e = _("changeset must have one predecessor, found %i predecessors")
        raise error.Abort(e % len(prednodes))
    prednode = prednodes[0]

    if prednode not in unfi:
        # Trigger autopull.
        autopull.trypull(unfi, [nodemod.hex(prednode)])

    predctx = unfi[prednode]

    if curctx.children():
        raise error.Abort(_("cannot unamend in the middle of a stack"))

    with repo.wlock(), repo.lock():
        ctxbookmarks = curctx.bookmarks()
        changedfiles = []
        wctx = repo[None]
        wm = wctx.manifest()
        cm = predctx.manifest()
        dirstate = repo.dirstate
        diff = cm.diff(wm)
        changedfiles.extend(pycompat.iterkeys(diff))

        tr = repo.transaction("unamend")
        with dirstate.parentchange():
            dirstate.rebuild(prednode, cm, changedfiles)
            # we want added and removed files to be shown
            # properly, not with ? and ! prefixes
            for filename, data in pycompat.iteritems(diff):
                if data[0][0] is None:
                    dirstate.add(filename)
                if data[1][0] is None:
                    dirstate.remove(filename)
        changes = []
        for book in ctxbookmarks:
            changes.append((book, prednode))
        repo._bookmarks.applychanges(repo, tr, changes)
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            obsolete.createmarkers(repo, [(curctx, (predctx,))])
        visibility.remove(repo, [curctx.node()])
        visibility.add(repo, [predctx.node()])
        tr.close()
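The dirstate fix-up relies on the shape of manifest.diff() output: each entry maps a path to ((oldnode, oldflags), (newnode, newflags)), with a None node on the side where the file is absent. A small sketch of the added/removed classification above, using plain tuples:

def classify(diff):
    added, removed = [], []
    for path, (old, new) in diff.items():
        if old[0] is None:
            added.append(path)    # missing on the old side -> added
        if new[0] is None:
            removed.append(path)  # missing on the new side -> removed
    return added, removed

diff = {
    "new.txt": ((None, ""), ("a" * 40, "")),
    "gone.txt": (("b" * 40, ""), (None, "")),
}
assert classify(diff) == (["new.txt"], ["gone.txt"])
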
Example 8
def synthesize(ui, repo, descpath, **opts):
    """synthesize commits based on a model of an existing repository

    The model must have been generated by :hg:`analyze`. Commits will
    be generated randomly according to the probabilities described in
    the model. If --initfiles is set, the repository will be seeded with
    the given number of files following the modeled repository's directory
    structure.

    When synthesizing new content, commit descriptions, and user
    names, words will be chosen randomly from a dictionary that is
    presumed to contain one word per line. Use --dict to specify the
    path to an alternate dictionary to use.
    """
    try:
        fp = hg.openpath(ui, descpath)
    except Exception as err:
        raise error.Abort("%s: %s" % (descpath, err[0].strerror))
    desc = json.load(fp)
    fp.close()

    def cdf(l):
        if not l:
            return [], []
        vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
        t = float(sum(probs, 0))
        s, cdfs = 0, []
        for v in probs:
            s += v
            cdfs.append(s / t)
        return vals, cdfs

    lineschanged = cdf(desc["lineschanged"])
    fileschanged = cdf(desc["fileschanged"])
    filesadded = cdf(desc["filesadded"])
    dirsadded = cdf(desc["dirsadded"])
    filesremoved = cdf(desc["filesremoved"])
    linelengths = cdf(desc["linelengths"])
    parents = cdf(desc["parents"])
    p1distance = cdf(desc["p1distance"])
    p2distance = cdf(desc["p2distance"])
    interarrival = cdf(desc["interarrival"])
    linesinfilesadded = cdf(desc["linesinfilesadded"])
    tzoffset = cdf(desc["tzoffset"])

    dictfile = opts.get("dict") or "/usr/share/dict/words"
    try:
        fp = open(dictfile, "rU")
    except IOError as err:
        raise error.Abort("%s: %s" % (dictfile, err.strerror))
    words = fp.read().splitlines()
    fp.close()

    initdirs = {}
    if desc["initdirs"]:
        for k, v in desc["initdirs"]:
            initdirs[k.encode("utf-8").replace(".hg", "_hg")] = v
        initdirs = renamedirs(initdirs, words)
    initdirscdf = cdf(initdirs)

    def pick(cdf):
        return cdf[0][bisect.bisect_left(cdf[1], random.random())]

    def pickpath():
        return os.path.join(pick(initdirscdf), random.choice(words))

    def makeline(minimum=0):
        total = max(minimum, pick(linelengths))
        c, l = 0, []
        while c < total:
            w = random.choice(words)
            c += len(w) + 1
            l.append(w)
        return " ".join(l)

    wlock = repo.wlock()
    lock = repo.lock()

    nevertouch = {".hgsub", ".hgignore", ".hgtags"}

    progress = ui.progress
    _synthesizing = _("synthesizing")
    _files = _("initial files")
    _changesets = _("changesets")

    # Synthesize a single initial revision adding files to the repo according
    # to the modeled directory structure.
    initcount = int(opts["initfiles"])
    if initcount and initdirs:
        pctx = repo[None].parents()[0]
        dirs = set(pctx.dirs())
        files = {}

        def validpath(path):
            # Don't pick filenames which are already directory names.
            if path in dirs:
                return False
            # Don't pick directories which were used as file names.
            while path:
                if path in files:
                    return False
                path = os.path.dirname(path)
            return True

        for i in xrange(0, initcount):
            ui.progress(_synthesizing, i, unit=_files, total=initcount)

            path = pickpath()
            while not validpath(path):
                path = pickpath()
            data = "%s contents\n" % path
            files[path] = data
            dir = os.path.dirname(path)
            while dir and dir not in dirs:
                dirs.add(dir)
                dir = os.path.dirname(dir)

        def filectxfn(repo, memctx, path):
            return context.memfilectx(repo, memctx, path, files[path])

        ui.progress(_synthesizing, None)
        message = "synthesized wide repo with %d files" % (len(files), )
        mc = context.memctx(
            repo,
            [pctx.node(), nullid],
            message,
            pycompat.iterkeys(files),
            filectxfn,
            ui.username(),
            "%d %d" % util.makedate(),
        )
        initnode = mc.commit()
        if ui.debugflag:
            hexfn = hex
        else:
            hexfn = short
        ui.status(
            _("added commit %s with %d files\n") %
            (hexfn(initnode), len(files)))

    # Synthesize incremental revisions to the repository, adding repo depth.
    count = int(opts["count"])
    heads = set(map(repo.changelog.rev, repo.heads()))
    for i in xrange(count):
        progress(_synthesizing, i, unit=_changesets, total=count)

        node = repo.changelog.node
        revs = len(repo)

        def pickhead(heads, distance):
            if heads:
                lheads = sorted(heads)
                rev = revs - min(pick(distance), revs)
                if rev < lheads[-1]:
                    rev = lheads[bisect.bisect_left(lheads, rev)]
                else:
                    rev = lheads[-1]
                return rev, node(rev)
            return nullrev, nullid

        r1 = revs - min(pick(p1distance), revs)
        p1 = node(r1)

        # the number of heads will grow without bound if we use a pure
        # model, so artificially constrain their proliferation
        toomanyheads = len(heads) > random.randint(1, 20)
        if p2distance[0] and (pick(parents) == 2 or toomanyheads):
            r2, p2 = pickhead(heads.difference([r1]), p2distance)
        else:
            r2, p2 = nullrev, nullid

        pl = [p1, p2]
        pctx = repo[r1]
        mf = pctx.manifest()
        mfk = mf.keys()
        changes = {}
        if mfk:
            for __ in xrange(pick(fileschanged)):
                for __ in xrange(10):
                    fctx = pctx.filectx(random.choice(mfk))
                    path = fctx.path()
                    if not (path in nevertouch or fctx.isbinary()
                            or "l" in fctx.flags()):
                        break
                lines = fctx.data().splitlines()
                add, remove = pick(lineschanged)
                for __ in xrange(remove):
                    if not lines:
                        break
                    del lines[random.randrange(0, len(lines))]
                for __ in xrange(add):
                    lines.insert(random.randint(0, len(lines)), makeline())
                path = fctx.path()
                changes[path] = "\n".join(lines) + "\n"
            for __ in xrange(pick(filesremoved)):
                path = random.choice(mfk)
                for __ in xrange(10):
                    path = random.choice(mfk)
                    if path not in changes:
                        break
        if filesadded:
            dirs = list(pctx.dirs())
            dirs.insert(0, "")
        for __ in xrange(pick(filesadded)):
            pathstr = ""
            while pathstr in dirs:
                path = [random.choice(dirs)]
                if pick(dirsadded):
                    path.append(random.choice(words))
                path.append(random.choice(words))
                pathstr = "/".join(filter(None, path))
            data = ("\n".join(makeline()
                              for __ in xrange(pick(linesinfilesadded))) +
                    "\n")
            changes[pathstr] = data

        def filectxfn(repo, memctx, path):
            if path not in changes:
                return None
            return context.memfilectx(repo, memctx, path, changes[path])

        if not changes:
            continue
        if revs:
            date = repo["tip"].date()[0] + pick(interarrival)
        else:
            date = time.time() - (86400 * count)
        # dates in mercurial must be positive, fit in 32-bit signed integers.
        date = min(0x7FFFFFFF, max(0, date))
        user = random.choice(words) + "@" + random.choice(words)
        mc = context.memctx(
            repo,
            pl,
            makeline(minimum=2),
            sorted(changes),
            filectxfn,
            user,
            "%d %d" % (date, pick(tzoffset)),
        )
        newnode = mc.commit()
        heads.add(repo.changelog.rev(newnode))
        heads.discard(r1)
        heads.discard(r2)

    lock.release()
    wlock.release()
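The cdf()/pick() pair is the statistical core of the command: cdf() turns (value, weight) pairs into a cumulative distribution and pick() samples it with a bisection. The same two helpers, extracted into a self-contained sketch with made-up weights:

import bisect
import random

def cdf(pairs):
    if not pairs:
        return [], []
    vals, probs = zip(*sorted(pairs, key=lambda x: x[1], reverse=True))
    total = float(sum(probs))
    running, cdfs = 0, []
    for p in probs:
        running += p
        cdfs.append(running / total)
    return vals, cdfs

def pick(dist):
    # Sample a value with probability proportional to its weight.
    vals, cdfs = dist
    return vals[bisect.bisect_left(cdfs, random.random())]

# Weights: 60% of commits touch 1 file, 30% touch 3, 10% touch 10.
fileschanged = cdf([(1, 60), (3, 30), (10, 10)])
print(pick(fileschanged))  # usually 1, occasionally 3 or 10
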
Example 9
def _undoto(ui, repo, reverseindex, keep=False, branch=None):
    # Undo to a specific reverseindex. branch is a changectx hash
    # (potentially in short form) which identifies its branch via the
    # _localbranch revset.

    if branch and repo.ui.configbool("experimental", "narrow-heads"):
        raise error.Abort(
            _("'undo --branch' is no longer supported in the current setup"))

    if repo != repo.unfiltered():
        raise error.ProgrammingError(_("_undoto expects unfiltered repo"))
    try:
        nodedict = _readindex(repo, reverseindex)
    except IndexError:
        raise error.Abort(_("index out of bounds"))

    # bookmarks
    bookstring = _readnode(repo, "bookmarks.i", nodedict["bookmarks"])
    booklist = bookstring.split("\n")
    if branch:
        spec = revsetlang.formatspec("_localbranch(%s)", branch)
        branchcommits = tohexnode(repo, spec)
    else:
        branchcommits = False

    # copy implementation for bookmarks
    itercopy = []
    for mark in pycompat.iteritems(repo._bookmarks):
        itercopy.append(mark)
    bmremove = []
    for mark in itercopy:
        if not branchcommits or hex(mark[1]) in branchcommits:
            bmremove.append((mark[0], None))
    repo._bookmarks.applychanges(repo, repo.currenttransaction(), bmremove)
    bmchanges = []
    for mark in booklist:
        if mark:
            kv = mark.rsplit(" ", 1)
            if not branchcommits or kv[1] in branchcommits or (
                    kv[0], None) in bmremove:
                bmchanges.append((kv[0], bin(kv[1])))
    repo._bookmarks.applychanges(repo, repo.currenttransaction(), bmchanges)

    # working copy parent
    workingcopyparent = _readnode(repo, "workingparent.i",
                                  nodedict["workingparent"])
    if not keep:
        if not branchcommits or workingcopyparent in branchcommits:
            # bailifchanged is run, so this should be safe
            hg.clean(repo, workingcopyparent, show_stats=False)
    elif not branchcommits or workingcopyparent in branchcommits:
        # keeps working copy files
        prednode = bin(workingcopyparent)
        predctx = repo[prednode]

        changedfiles = []
        wctx = repo[None]
        wctxmanifest = wctx.manifest()
        predctxmanifest = predctx.manifest()
        dirstate = repo.dirstate
        diff = predctxmanifest.diff(wctxmanifest)
        changedfiles.extend(pycompat.iterkeys(diff))

        with dirstate.parentchange():
            dirstate.rebuild(prednode, predctxmanifest, changedfiles)
            # we want added and removed files to be shown
            # properly, not with ? and ! prefixes
            for filename, data in pycompat.iteritems(diff):
                if data[0][0] is None:
                    dirstate.add(filename)
                if data[1][0] is None:
                    dirstate.remove(filename)

    # visible changesets
    addedrevs = revsetlang.formatspec("olddraft(0) - olddraft(%d)",
                                      reverseindex)
    removedrevs = revsetlang.formatspec("olddraft(%d) - olddraft(0)",
                                        reverseindex)
    if not branch:
        if repo.ui.configbool("experimental", "narrow-heads"):
            # Assuming mutation and visibility are used. Restore visibility heads
            # directly.
            _restoreheads(repo, reverseindex)
        else:
            # Legacy path.
            smarthide(repo, addedrevs, removedrevs)
            revealcommits(repo, removedrevs)
    else:
        localadds = revsetlang.formatspec(
            "(olddraft(0) - olddraft(%d)) and"
            " _localbranch(%s)", reverseindex, branch)
        localremoves = revsetlang.formatspec(
            "(olddraft(%d) - olddraft(0)) and"
            " _localbranch(%s)", reverseindex, branch)
        smarthide(repo, localadds, removedrevs)
        smarthide(repo, addedrevs, localremoves, local=True)
        revealcommits(repo, localremoves)

    # informative output
    time = _readnode(repo, "date.i", nodedict["date"])
    time = util.datestr([float(x) for x in time.split(" ")])

    nodedict = _readindex(repo, reverseindex - 1)
    commandstr = _readnode(repo, "command.i", nodedict["command"])
    commandlist = commandstr.split("\0")[1:]
    commandstr = " ".join(commandlist)
    uimessage = _("undone to %s, before %s\n") % (time, commandstr)
    if reverseindex == 1 and commandlist[0] in ("commit", "amend"):
        command = commandlist[0]
        if command == "commit" and "--amend" in commandlist:
            command = "amend"
        oldcommithash = _readnode(repo, "workingparent.i",
                                  nodedict["workingparent"])
        shorthash = short(bin(oldcommithash))
        hintutil.trigger("undo-uncommit-unamend", command, shorthash)
    repo.ui.status(uimessage)
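The bookmark restore step assumes one bookmark per line in the form "<name> <hexnode>", split from the right so bookmark names containing spaces survive. A quick stand-alone sketch of that parsing step (hex strings stand in for binary nodes, and the branchcommits filtering is omitted):

def parse_bookmarks(bookstring):
    changes = []
    for line in bookstring.split("\n"):
        if line:
            # rsplit keeps spaces inside the bookmark name intact.
            name, hexnode = line.rsplit(" ", 1)
            changes.append((name, hexnode))
    return changes

assert parse_bookmarks("main abc123\nfeature x def456\n") == [
    ("main", "abc123"),
    ("feature x", "def456"),
]
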