Example #1
def prefetch(ui, repo, *pats, **opts):
    """prefetch file revisions from the server

    Prefetches file revisions for the specified revs and stores them in the
    local remotefilelog cache. If no rev is specified, the default rev is
    used, which is the union of dot, draft, and pullprefetch.
    File names or patterns can be used to limit which files are downloaded.

    Return 0 on success.
    """
    if shallowrepo.requirement not in repo.requirements:
        raise error.Abort(_("repo is not shallow"))
    fullrepo = not (pats or opts.get("include") or opts.get("exclude"))
    if "eden" in repo.requirements and fullrepo:
        raise error.Abort(
            _("`hg prefetch` must be called with paths in an EdenFS repository!"),
            hint="Specify exact paths you want to fetch i.e. run `hg prefetch DIR/**`",
        )

    opts = resolveprefetchopts(ui, opts)
    revs = scmutil.revrange(repo, opts.get("rev"))
    repo.prefetch(revs, opts.get("base"), pats, opts)

    # Run repack in background
    if opts.get("repack"):
        repackmod.domaintenancerepack(repo)
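
The docstring above says that, without --rev, the default is the union of dot, draft, and pullprefetch; in this code that default comes out of resolveprefetchopts(). As a hedged illustration only, here is a minimal sketch of how such a default revset could be assembled and handed to scmutil.revrange, reusing the remotefilelog.pullprefetch config key that appears in the pull wrapper further below (the helper name defaultprefetchrevs is hypothetical):

def defaultprefetchrevs(ui, repo):
    # Hypothetical sketch; the real default is computed by resolveprefetchopts()
    # and may differ in detail.
    parts = [".", "draft()"]
    prefetchrevset = ui.config("remotefilelog", "pullprefetch", None)
    if prefetchrevset:
        parts.append(prefetchrevset)
    # Revsets joined with "+" form a union; scmutil.revrange resolves it.
    return scmutil.revrange(repo, [" + ".join(parts)])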
Example #2
def cloudcheck(ui, repo, dest=None, **opts):
    """check if commits have been backed up

    If no revisions are specified, the working copy parent is checked.
    """

    revs = opts.get("rev")
    remote = opts.get("remote")
    if not revs:
        revs = ["."]

    remotepath = ccutil.getremotepath(repo, dest)
    unfi = repo.unfiltered()
    revs = scmutil.revrange(repo, revs)
    nodestocheck = [repo[r].hex() for r in revs]

    if remote:
        getconnection = lambda: repo.connectionpool.get(remotepath, opts)
        isbackedup = {
            nodestocheck[i]: res
            for i, res in enumerate(
                dependencies.infinitepush.isbackedupnodes(getconnection, nodestocheck)
            )
        }
    else:
        state = backupstate.BackupState(repo, remotepath)
        backeduprevs = unfi.revs("not public() and ::%ln", state.heads)
        isbackedup = {node: unfi[node].rev() in backeduprevs for node in nodestocheck}

    for n in nodestocheck:
        ui.write(n + " ")
        ui.write(_("backed up") if isbackedup[n] else _("not backed up"))
        ui.write(_("\n"))
Example #3
def _revive(repo, rev):
    """Brings the given rev back into the repository, finding it in backup
    bundles if necessary.
    """
    unfi = repo
    try:
        ctx = unfi[rev]
    except error.RepoLookupError:
        # It could either be a revset or a stripped commit.
        pass
    else:
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            if ctx.obsolete():
                torevive = unfi.set("::%d & obsolete()", ctx.rev())
                obsolete.revive(torevive, operation="reset")
        visibility.add(repo, [ctx.node()])

    try:
        revs = scmutil.revrange(repo, [rev])
        if len(revs) > 1:
            raise error.Abort(_("exactly one revision must be specified"))
        if len(revs) == 1:
            return repo[revs.first()]
    except error.RepoLookupError:
        revs = []

    return _pullbundle(repo, rev)
Example #4
def unhide(ui, repo, *revs, **opts):
    """unhide commits and their ancestors

    Mark the specified commits as visible. Any ancestors of the specified
    commits will also become visible.
    """
    revs = list(revs) + opts.pop("rev", [])
    with repo.wlock(), repo.lock():
        revs = set(scmutil.revrange(repo, revs))
        _dounhide(repo, revs)
Example #5
def cloudbackup(ui, repo, *revs, **opts):
    """back up commits to commit cloud

    Commits that have already been backed up will be skipped.

    If no revision is specified, backs up all visible commits.
    """
    inbackground = opts.get("background")
    revs = revs + tuple(opts.get("rev", ()))
    if revs:
        if inbackground:
            raise error.Abort(
                "'--background' cannot be used with specific revisions")
        revs = scmutil.revrange(repo, revs)
    else:
        revs = None

    dest = opts.get("dest")

    if inbackground:
        background.backgroundbackup(repo, dest=dest)
        return 0

    backupsnapshots = False
    try:
        extensions.find("snapshot")
        backupsnapshots = True
    except KeyError:
        pass

    backedup, failed = backup.backup(repo,
                                     revs,
                                     dest=dest,
                                     connect_opts=opts,
                                     backupsnapshots=backupsnapshots)

    if backedup:
        repo.ui.status(
            _n("backed up %d commit\n", "backed up %d commits\n",
               len(backedup)) % len(backedup),
            component="commitcloud",
        )
    if failed:
        repo.ui.warn(
            _n(
                "failed to back up %d commit\n",
                "failed to back up %d commits\n",
                len(failed),
            ) % len(failed),
            component="commitcloud",
        )
    if not backedup and not failed:
        repo.ui.status(_("nothing to back up\n"))
    return 0 if not failed else 2
Example #6
def __init__(self, ui, repotype, path, revs=None):
    common.converter_source.__init__(self, ui, repotype, path, revs)
    self.ignoreerrors = ui.configbool("convert", "hg.ignoreerrors")
    self.ignored = set()
    self.saverev = ui.configbool("convert", "hg.saverev")
    try:
        self.repo = hg.repository(self.ui, path)
        # try to provoke an exception if this isn't really a hg
        # repo, but some other bogus compatible-looking url
        if not self.repo.local():
            raise error.RepoError
    except error.RepoError:
        ui.traceback()
        raise NoRepo(_("%s is not a local Mercurial repository") % path)
    self.lastrev = None
    self.lastctx = None
    self._changescache = None, None
    self.convertfp = None
    # Restrict converted revisions to startrev descendants
    startnode = ui.config("convert", "hg.startrev")
    hgrevs = ui.config("convert", "hg.revs")
    if hgrevs is None:
        if startnode is not None:
            try:
                startnode = self.repo.lookup(startnode)
            except error.RepoError:
                raise error.Abort(
                    _("%s is not a valid start revision") % startnode)
            startrev = self.repo.changelog.rev(startnode)
            children = {startnode: 1}
            for r in self.repo.changelog.descendants([startrev]):
                children[self.repo.changelog.node(r)] = 1
            self.keep = children.__contains__
        else:
            self.keep = util.always
        if revs:
            self._heads = [self.repo[r].node() for r in revs]
        else:
            self._heads = self.repo.heads()
    else:
        if revs or startnode is not None:
            raise error.Abort(
                _("hg.revs cannot be combined with "
                  "hg.startrev or --rev"))
        nodes = set()
        parents = set()
        for r in scmutil.revrange(self.repo, [hgrevs]):
            ctx = self.repo[r]
            nodes.add(ctx.node())
            parents.update(p.node() for p in ctx.parents())
        self.keep = nodes.__contains__
        self._heads = nodes - parents
Example #7
def _resolvetargetnode(repo, rev):
    index = repo.bundlestore.index
    targetnode = index.getnodebyprefix(rev)
    if not targetnode:
        revs = scmutil.revrange(repo, [rev])
        if len(revs) != 1:
            raise error.Abort(
                _("must specify exactly one target commit for scratch bookmark")
            )

        targetnode = repo[revs.last()].hex()

    return targetnode
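
For illustration, a hedged usage sketch of the two resolution paths above: a hex prefix known to the bundle store is answered by index.getnodebyprefix, while anything else must resolve to exactly one commit via scmutil.revrange (the repo object and both arguments here are assumed):

# Hypothetical calls against an infinitepush-enabled repo.
node = _resolvetargetnode(repo, "1a2b3c")         # prefix found in the bundlestore index
node = _resolvetargetnode(repo, "bookmark(foo)")  # falls back to scmutil.revrange
# Either way the result is a full hex node, or the command aborts if the
# revset does not select exactly one target commit.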
Example #8
def amendtocommit(ui, repo, commitspec):
    """amend to a specific commit
    """
    with repo.wlock(), repo.lock():
        originalcommits = list(repo.set("::. - public()"))
        try:
            revs = scmutil.revrange(repo, [commitspec])
        except error.RepoLookupError:
            raise error.Abort(_("revision '%s' cannot be found") % commitspec)
        if len(revs) > 1:
            raise error.Abort(
                _("'%s' refers to multiple changesets") % commitspec)
        targetcommit = repo[revs.first()]
        if targetcommit not in originalcommits:
            raise error.Abort(
                _("revision '%s' is not a parent of the working copy") %
                commitspec)

        tempcommit = repo.commit(text="tempCommit")

        if not tempcommit:
            raise error.Abort(_("no pending changes to amend"))

        tempcommithex = hex(tempcommit)

        fp = tempfile.NamedTemporaryFile()
        try:
            found = False
            for curcommit in originalcommits:
                fp.write(b"pick %s\n" % bytes(curcommit))
                if curcommit == targetcommit:
                    fp.write(b"roll %s\n" %
                             pycompat.encodeutf8(tempcommithex[:12]))
                    found = True
            if not found:
                raise error.Abort(
                    _("revision '%s' cannot be found") % commitspec)
            fp.flush()
            try:
                histedit.histedit(ui,
                                  repo,
                                  rev=[originalcommits[0].hex()],
                                  commands=fp.name)
            except error.InterventionRequired:
                ui.warn(
                    _("amend --to encountered an issue - "
                      "use hg histedit to continue or abort"))
                raise
        finally:
            fp.close()
Example #9
def drop(ui, repo, *revs, **opts):
    """drop changeset from stack
    """
    if not rebasemod:
        raise error.Abort(_("required extensions not detected"))

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    revs = scmutil.revrange(repo, list(revs) + opts.get("rev"))
    if not revs:
        raise error.Abort(_("no revision to drop was provided"))

    # currently drop supports dropping only one changeset at a time
    if len(revs) > 1:
        raise error.Abort(_("only one revision can be dropped at a time"))

    revid = revs.first()
    changectx = repo[revid]
    if changectx.phase() == phases.public:
        raise error.Abort(_("public changeset which landed cannot be dropped"))

    parents = repo.revs("parents(%s)", revid)
    if len(parents) > 1:
        raise error.Abort(_("merge changeset cannot be dropped"))
    elif len(parents) == 0:
        raise error.Abort(_("root changeset cannot be dropped"))

    _showrev(ui, repo, revid)

    descendants = repo.revs("(%d::) - %d", revid, revid)
    parent = parents.first()
    with repo.wlock():
        with repo.lock():
            with repo.transaction("drop"):
                if len(descendants) > 0:
                    try:
                        rebasemod.rebase(ui,
                                         repo,
                                         dest=str(parent),
                                         rev=descendants)
                    except error.InterventionRequired:
                        ui.warn(
                            _("conflict occurred during drop: " +
                              "please fix it by running " +
                              "'hg rebase --continue', " +
                              "and then re-run 'hg drop'\n"))
                        raise
                    scmutil.cleanupnodes(repo, [changectx.node()], "drop")
Example #10
def prefetch(ui, repo, *pats, **opts):
    """prefetch file revisions from the server

    Prefetches file revisions for the specified revs and stores them in the
    local remotefilelog cache. If no rev is specified, the default rev is
    used, which is the union of dot, draft, and pullprefetch.
    File names or patterns can be used to limit which files are downloaded.

    Return 0 on success.
    """
    if shallowrepo.requirement not in repo.requirements:
        raise error.Abort(_("repo is not shallow"))

    opts = resolveprefetchopts(ui, opts)
    revs = scmutil.revrange(repo, opts.get("rev"))
    repo.prefetch(revs, opts.get("base"), pats, opts)

    # Run repack in background
    if opts.get("repack"):
        repackmod.domaintenancerepack(repo)
Example #11
def pull(orig, ui, repo, *pats, **opts):
    result = orig(ui, repo, *pats, **opts)

    if shallowrepo.requirement in repo.requirements:
        # prefetch if it's configured
        prefetchrevset = ui.config("remotefilelog", "pullprefetch", None)
        bgrepack = repo.ui.configbool("remotefilelog", "backgroundrepack",
                                      False)

        if prefetchrevset:
            ui.status(_("prefetching file contents\n"))
            revs = scmutil.revrange(repo, [prefetchrevset])
            base = repo["."].rev()
            repo.prefetch(revs, base=base)
            if bgrepack:
                repackmod.domaintenancerepack(repo)
        elif bgrepack:
            repackmod.domaintenancerepack(repo)

    return result
Example #12
def _revive(repo, rev):
    """Brings the given rev back into the repository, finding it in backup
    bundles if necessary.
    """
    unfi = repo
    try:
        ctx = unfi[rev]
    except error.RepoLookupError:
        # It could either be a revset or a stripped commit.
        pass
    else:
        visibility.add(repo, [ctx.node()])

    try:
        revs = scmutil.revrange(repo, [rev])
        if len(revs) > 1:
            raise error.Abort(_("exactly one revision must be specified"))
        if len(revs) == 1:
            return repo[revs.first()]
    except error.RepoLookupError:
        revs = []

    return _pullbundle(repo, rev)
Example #13
def prune(ui, repo, *revs, **opts):
    """hide changesets by marking them obsolete

    Pruned changesets are obsolete with no successors. If they also have no
    descendants, they are hidden (invisible to all commands).

    Non-obsolete descendants of pruned changesets become "unstable". Use
    :hg:`evolve` to handle this situation.

    When you prune the parent of your working copy, Mercurial updates the
    working copy to a non-obsolete parent.

    You can use ``--succ`` to tell Mercurial that a newer version (successor)
    of the pruned changeset exists. Mercurial records successor revisions in
    obsolescence markers.

    You can use the ``--biject`` option to specify a 1-1 mapping (bijection)
    between the revisions to be pruned (precursors) and successor changesets. This
    option may be removed in a future release (with the functionality provided
    automatically).

    If you specify multiple revisions in ``--succ``, you are recording a
    "split" and must acknowledge it by passing ``--split``. Similarly, when you
    prune multiple changesets with a single successor, you must pass the
    ``--fold`` option.
    """
    if opts.get("keep", False):
        hint = "strip-uncommit"
    else:
        hint = "strip-hide"
    hintutil.trigger(hint)

    revs = scmutil.revrange(repo, list(revs) + opts.get("rev", []))
    succs = opts.get("succ", [])
    bookmarks = set(opts.get("bookmark", ()))
    metadata = _getmetadata(**opts)
    biject = opts.get("biject")
    fold = opts.get("fold")
    split = opts.get("split")

    options = [o for o in ("biject", "fold", "split") if opts.get(o)]
    if 1 < len(options):
        raise error.Abort(_("can only specify one of %s") % ", ".join(options))

    if bookmarks:
        revs += bookmarksmod.reachablerevs(repo, bookmarks)
        if not revs:
            # No revs are reachable exclusively from these bookmarks, just
            # delete the bookmarks.
            with repo.wlock(), repo.lock(), repo.transaction(
                    "prune-bookmarks") as tr:
                bookmarksmod.delete(repo, tr, bookmarks)
            for bookmark in sorted(bookmarks):
                ui.write(_("bookmark '%s' deleted\n") % bookmark)
            return 0

    if not revs:
        raise error.Abort(_("nothing to prune"))

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("prune")
        # defines pruned changesets
        precs = []
        revs.sort()
        for p in revs:
            cp = repo[p]
            if not cp.mutable():
                # note: createmarkers() would have raised something anyway
                raise error.Abort(
                    "cannot prune immutable changeset: %s" % cp,
                    hint="see 'hg help phases' for details",
                )
            precs.append(cp)
        if not precs:
            raise error.Abort("nothing to prune")

        # defines successor changesets
        sucs = scmutil.revrange(repo, succs)
        sucs.sort()
        sucs = tuple(repo[n] for n in sucs)
        if not biject and len(sucs) > 1 and len(precs) > 1:
            msg = "Can't use multiple successors for multiple precursors"
            hint = _("use --biject to mark a series as a replacement"
                     " for another")
            raise error.Abort(msg, hint=hint)
        elif biject and len(sucs) != len(precs):
            msg = "Can't use %d successors for %d precursors" % (len(sucs),
                                                                 len(precs))
            raise error.Abort(msg)
        elif (len(precs) == 1 and len(sucs) > 1) and not split:
            msg = "please add --split if you want to do a split"
            raise error.Abort(msg)
        elif len(sucs) == 1 and len(precs) > 1 and not fold:
            msg = "please add --fold if you want to do a fold"
            raise error.Abort(msg)
        elif biject:
            relations = [(p, (s, )) for p, s in zip(precs, sucs)]
        else:
            relations = [(p, sucs) for p in precs]

        wdp = repo["."]

        if len(sucs) == 1 and len(precs) == 1 and wdp in precs:
            # '.' killed, so update to the successor
            newnode = sucs[0]
        else:
            # update to an unkilled parent
            newnode = wdp

            while newnode in precs or newnode.obsolete():
                newnode = newnode.parents()[0]

        if newnode.node() != wdp.node():
            if opts.get("keep", False):
                # This is largely the same as the implementation in
                # strip.stripcmd(). We might want to refactor this somewhere
                # common at some point.

                # only reset the dirstate for files that would actually change
                # between the working context and uctx
                descendantrevs = repo.revs("%d::." % newnode.rev())
                changedfiles = []
                for rev in descendantrevs:
                    # blindly reset the files, regardless of what actually
                    # changed
                    changedfiles.extend(repo[rev].files())

                # reset files that only changed in the dirstate too
                dirstate = repo.dirstate
                dirchanges = [f for f in dirstate if dirstate[f] != "n"]
                changedfiles.extend(dirchanges)
                repo.dirstate.rebuild(newnode.node(), newnode.manifest(),
                                      changedfiles)
                dirstate.write(tr)
            else:
                bookactive = repo._activebookmark
                # Active bookmark that we don't want to delete (with -B option)
                # we deactivate and move it before the update and reactivate it
                # after
                movebookmark = bookactive and not bookmarks
                if movebookmark:
                    bookmarksmod.deactivate(repo)
                    changes = [(bookactive, newnode.node())]
                    repo._bookmarks.applychanges(repo, tr, changes)
                commands.update(ui, repo, newnode.hex())
                ui.status(
                    _("working directory now at %s\n") %
                    ui.label(str(newnode), "evolve.node"))
                if movebookmark:
                    bookmarksmod.activate(repo, bookactive)

        # update bookmarks
        if bookmarks:
            with repo.wlock(), repo.lock(), repo.transaction(
                    "prune-bookmarks") as tr:
                bookmarksmod.delete(repo, tr, bookmarks)
            for bookmark in sorted(bookmarks):
                ui.write(_("bookmark '%s' deleted\n") % bookmark)

        # create markers
        obsolete.createmarkers(repo,
                               relations,
                               metadata=metadata,
                               operation="prune")

        # hide nodes
        visibility.remove(repo, [c.node() for c in precs])

        # inform the user that changesets have been pruned
        ui.status(_("%i changesets pruned\n") % len(precs))

        for ctx in repo.set("bookmark() and %ld", precs):
            # used to be:
            #
            #   ldest = list(repo.set('max((::%d) - obsolete())', ctx))
            #   if ldest:
            #      c = ldest[0]
            #
            # but then revset took a lazy arrow in the knee and became much
            # slower. The new form makes as much sense and is much faster.
            for dest in ctx.ancestors():
                if not dest.obsolete():
                    updatebookmarks = common.bookmarksupdater(repo, ctx.node())
                    updatebookmarks(dest.node())
                    break

        tr.close()
    finally:
        lockmod.release(tr, lock, wlock)
Example #14
def chistedit(ui, repo, *freeargs, **opts):
    """Provides an ncurses interface to histedit. Press ? in chistedit mode
    to see extensive help. Requires python-curses to be installed."""

    if curses is None:
        raise error.Abort(_("Python curses library required"))

    # disable color
    ui._colormode = None

    try:
        keep = opts.get("keep")
        revs = opts.get("rev", [])[:]
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

        if os.path.exists(os.path.join(repo.path, "histedit-state")):
            raise error.Abort(
                _("history edit already in progress, try "
                  "--continue or --abort"))
        revs.extend(freeargs)
        if not revs:
            defaultrev = destutil.desthistedit(ui, repo)
            if defaultrev is not None:
                revs.append(defaultrev)
        if len(revs) != 1:
            raise error.Abort(
                _("histedit requires exactly one ancestor revision"))

        rr = list(repo.set("roots(%ld)", scmutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.Abort(
                _("The specified revisions must have "
                  "exactly one common root"))
        root = rr[0].node()

        topmost, empty = repo.dirstate.parents()
        revs = histedit.between(repo, root, topmost, keep)
        if not revs:
            raise error.Abort(
                _("%s is not an ancestor of working directory") %
                node.short(root))

        ctxs = []
        for i, r in enumerate(revs):
            ctxs.append(histeditrule(repo[r], i))
        rc = curses.wrapper(functools.partial(main, repo, ctxs))
        curses.echo()
        curses.endwin()
        if rc is False:
            ui.write(_("chistedit aborted\n"))
            return 0
        if type(rc) is list:
            ui.status(_("running histedit\n"))
            rules = makecommands(rc)
            filename = repo.localvfs.join("chistedit")
            with open(filename, "w+") as fp:
                for r in rules:
                    fp.write(r)
            opts["commands"] = filename
            return histedit.histedit(ui, repo, *freeargs, **opts)
    except KeyboardInterrupt:
        pass
    return -1
Example #15
def hide(ui, repo, *revs, **opts):
    """hide commits and their descendants

    Mark the specified commits as hidden. Hidden commits are not included in
    the output of most Mercurial commands, including :hg:`log` and
    :hg:`smartlog`. Any descendants of the specified commits will also be
    hidden.

    Hidden commits are not deleted. They will remain in the repo indefinitely
    and are still accessible by their hashes. However, :hg:`hide` will delete
    any bookmarks pointing to hidden commits.

    Use the :hg:`unhide` command to make hidden commits visible again. See
    :hg:`help unhide` for more information.

    To view hidden commits, run :hg:`journal`.

    When you hide the current commit, the most recent visible ancestor is
    checked out.

    To hide obsolete stacks (stacks that have a newer version), run
    :hg:`hide --cleanup`. This command is equivalent to:

    :hg:`hide 'obsolete() - ancestors(draft() & not obsolete())'`

    --cleanup skips obsolete commits with non-obsolete descendants.
    """
    if opts.get("cleanup") and len(opts.get("rev") + list(revs)) != 0:
        raise error.Abort(_("--rev and --cleanup are incompatible"))
    elif opts.get("cleanup"):
        # hides all the draft, obsolete commits that
        # don't have non-obsolete descendants
        revs = ["obsolete() - (draft() & ::(draft() & not obsolete()))"]
    else:
        revs = list(revs) + opts.pop("rev", [])

    with repo.wlock(), repo.lock(), repo.transaction("hide") as tr:
        revs = repo.revs("(%ld)::", scmutil.revrange(repo, revs))

        bookmarks = set(opts.get("bookmark", ()))
        if bookmarks:
            revs += bookmarksmod.reachablerevs(repo, bookmarks)
            if not revs:
                # No revs are reachable exclusively from these bookmarks, just
                # delete the bookmarks.
                if not ui.quiet:
                    for bookmark in sorted(bookmarks):
                        ui.status(
                            _("removing bookmark '%s' (was at: %s)\n")
                            % (bookmark, short(repo._bookmarks[bookmark]))
                        )
                bookmarksmod.delete(repo, tr, bookmarks)
                ui.status(
                    _n(
                        "%i bookmark removed\n",
                        "%i bookmarks removed\n",
                        len(bookmarks),
                    )
                    % len(bookmarks)
                )
                return 0

        if not revs:
            raise error.Abort(_("nothing to hide"))

        hidectxs = [repo[r] for r in revs]

        # revs to be hidden
        for ctx in hidectxs:
            if not ctx.mutable():
                raise error.Abort(
                    _("cannot hide immutable changeset: %s") % ctx,
                    hint="see 'hg help phases' for details",
                )
            if not ui.quiet:
                ui.status(
                    _('hiding commit %s "%s"\n')
                    % (ctx, ctx.description().split("\n")[0][:50])
                )

        wdp = repo["."]
        newnode = wdp

        while newnode in hidectxs:
            newnode = newnode.parents()[0]

        if newnode.node() != wdp.node():
            cmdutil.bailifchanged(repo, merge=False)
            hg.update(repo, newnode, False)
            ui.status(
                _("working directory now at %s\n") % ui.label(str(newnode), "node")
            )

        # create markers
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            obsolete.createmarkers(repo, [(r, []) for r in hidectxs], operation="hide")
        visibility.remove(repo, [c.node() for c in hidectxs])
        ui.status(
            _n("%i changeset hidden\n", "%i changesets hidden\n", len(hidectxs))
            % len(hidectxs)
        )

        # remove bookmarks pointing to hidden changesets
        hnodes = [r.node() for r in hidectxs]
        deletebookmarks = set(bookmarks)
        for bookmark, node in sorted(bookmarksmod.listbinbookmarks(repo)):
            if node in hnodes:
                deletebookmarks.add(bookmark)
        if deletebookmarks:
            for bookmark in sorted(deletebookmarks):
                if not ui.quiet:
                    ui.status(
                        _('removing bookmark "%s (was at: %s)"\n')
                        % (bookmark, short(repo._bookmarks[bookmark]))
                    )
            bookmarksmod.delete(repo, tr, deletebookmarks)
            ui.status(
                _n(
                    "%i bookmark removed\n",
                    "%i bookmarks removed\n",
                    len(deletebookmarks),
                )
                % len(deletebookmarks)
            )
        hintutil.trigger("undo")
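
The docstring states that --cleanup is equivalent to hiding 'obsolete() - ancestors(draft() & not obsolete())', while the code evaluates a slightly different form. A hedged sketch for checking, on a concrete repo object like the ones in these examples, whether the two revsets select the same commits:

# Hypothetical equivalence check for the two --cleanup revsets.
docrevset = "obsolete() - ancestors(draft() & not obsolete())"
coderevset = "obsolete() - (draft() & ::(draft() & not obsolete()))"
assert set(scmutil.revrange(repo, [docrevset])) == set(scmutil.revrange(repo, [coderevset]))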
Example #16
def fold(ui, repo, *revs, **opts):
    """combine multiple commits into a single commit

    With --from, folds all the revisions linearly between the current revision
    and the specified revision.

    With --exact, folds only the specified revisions while ignoring the revision
    currently checked out. The given revisions must form a linear unbroken
    chain.

    .. container:: verbose

     Some examples:

     - Fold from the current revision to its parent::

         hg fold --from .^

     - Fold all draft revisions into the current revision::

         hg fold --from 'draft()'

       See :hg:`help phases` for more about draft revisions and
       :hg:`help revsets` for more about the `draft()` keyword

     - Fold revisions between 3 and 6 into the current revision::

         hg fold --from 3::6

     - Fold revisions 3 and 4::

         hg fold "3 + 4" --exact

     - Only fold revisions linearly between foo and @::

         hg fold foo::@ --exact
    """
    revs = list(revs)
    revs.extend(opts["rev"])
    if not revs:
        raise error.Abort(_("no revisions specified"))

    revs = scmutil.revrange(repo, revs)

    if opts.get("no_rebase"):
        torebase = ()
    else:
        torebase = repo.revs("descendants(%ld) - (%ld)", revs, revs)

    if opts["from"] and opts["exact"]:
        raise error.Abort(_("cannot use both --from and --exact"))
    elif opts["from"]:
        # Try to extend given revision starting from the working directory
        extrevs = repo.revs("(%ld::.) or (.::%ld)", revs, revs)
        discardedrevs = [r for r in revs if r not in extrevs]
        if discardedrevs:
            msg = _("cannot fold non-linear revisions")
            hint = _("given revisions are unrelated to parent of working"
                     " directory")
            raise error.Abort(msg, hint=hint)
        revs = extrevs
    elif opts["exact"]:
        # Nothing to do; "revs" is already set correctly
        pass
    else:
        raise error.Abort(_("must specify either --from or --exact"))

    if not revs:
        raise error.Abort(
            _("specified revisions evaluate to an empty set"),
            hint=_("use different revision arguments"),
        )
    elif len(revs) == 1:
        ui.write_err(_("single revision specified, nothing to fold\n"))
        return 1

    with repo.wlock(), repo.lock(), ui.formatter("fold", opts) as fm:
        fm.startitem()
        root, head = _foldcheck(repo, revs)

        with repo.transaction("fold") as tr:
            commitopts = opts.copy()
            allctx = [repo[r] for r in revs]
            targetphase = max(c.phase() for c in allctx)

            if (commitopts.get("message") or commitopts.get("logfile")
                    or commitopts.get("reuse_message")):
                commitopts["edit"] = False
            else:
                msgs = ["HG: This is a fold of %d changesets." % len(allctx)]
                msgs += [
                    "HG: Commit message of %s.\n\n%s\n" %
                    (node.short(c.node()), c.description()) for c in allctx
                ]
                commitopts["message"] = "\n".join(msgs)
                commitopts["edit"] = True

            newid, unusedvariable = common.rewrite(
                repo,
                root,
                allctx,
                head,
                [root.p1().node(), root.p2().node()],
                commitopts=commitopts,
                mutop="fold",
            )
            phases.retractboundary(repo, tr, targetphase, [newid])

            replacements = {ctx.node(): (newid, ) for ctx in allctx}
            nodechanges = {
                fm.hexfunc(ctx.node()): [fm.hexfunc(newid)]
                for ctx in allctx
            }
            fm.data(nodechanges=fm.formatdict(nodechanges))
            scmutil.cleanupnodes(repo, replacements, "fold")
            fm.condwrite(not ui.quiet, "count", "%i changesets folded\n",
                         len(revs))
            if repo["."].rev() in revs:
                hg.update(repo, newid)

            if torebase:
                common.restackonce(ui, repo, repo[newid].rev())
Example #17
def amendtocommit(ui, repo, commitspec, pats=None, opts=None):
    """amend to a specific commit

    This works by patching the working diff on to the specified commit
    and then performing a simplified rebase of the stack's tail on to
    the amended ancestor.

    commitspec must refer to a single commit that is a linear ancestor
    of ".".
    """
    with repo.wlock(), repo.lock(), repo.transaction("amend"):
        revs = list(scmutil.revrange(repo, [commitspec]))
        if len(revs) != 1:
            raise error.Abort(
                _("'%s' must refer to a single changeset") % commitspec)

        draftctxs = list(repo.revs("(%d)::.", revs[0]).iterctx())
        if len(draftctxs) == 0:
            raise error.Abort(
                _("revision '%s' is not an ancestor of the working copy") %
                commitspec)

        if repo.revs("%ld & merge()", draftctxs):
            raise error.Abort(_("cannot amend non-linear stack"))

        dest = draftctxs.pop(0)
        if dest.phase() == phases.public:
            raise error.Abort(_("cannot amend public changesets"))

        # Generate patch from wctx and apply to dest commit.
        mergedctx = mirrorwithmetadata(dest, "amend")
        wctx = repo[None]
        matcher = scmutil.match(wctx, pats, opts) if pats or opts else None

        store = patch.mempatchstore(mergedctx)
        backend = patch.mempatchbackend(ui, mergedctx, store)
        ret = patch.applydiff(
            ui,
            io.BytesIO(b"".join(list(wctx.diff(match=matcher, opts=opts)))),
            backend,
            store,
        )
        if ret < 0:
            raise error.Abort(
                _("amend would conflict in %s") % ", ".join(backend.rejs))

        memctxs = [mergedctx]
        mappednodes = [dest.node()]

        # Perform mini-rebase of our stack.
        for ctx in draftctxs:
            memctxs.append(inmemorymerge(ui, repo, ctx, memctxs[-1], ctx.p1()))
            mappednodes.append(ctx.node())

        parentnode = None
        mapping = {}
        # Execute our list of in-memory commits, updating descendants'
        # parent as we go.
        for i, memctx in enumerate(memctxs):
            if i > 0:
                memctx = context.memctx.mirror(memctx,
                                               parentnodes=(parentnode,
                                                            nullid))
            parentnode = memctx.commit()
            mapping[mappednodes[i]] = (parentnode, )

        scmutil.cleanupnodes(repo, {dest.node(): mapping.pop(dest.node())},
                             "amend")
        scmutil.cleanupnodes(repo, mapping, "rebase")

        with repo.dirstate.parentchange():
            # Update dirstate status of amended files.
            repo.dirstate.rebuild(parentnode,
                                  repo[parentnode].manifest(),
                                  wctx.files(),
                                  exact=True)
Example #18
def crdump(ui, repo, *revs, **opts):
    """
    Dump information about the revisions in a format that is friendly for
    sending the patches for code review.

    The output is a JSON list with dictionary for each specified revision: ::

        {
          "output_directory": an output directory for all temporary files
          "commits": [
          {
            "node": commit hash,
            "date": date in format [unixtime, timezone offset],
            "desc": commit message,
            "patch_file": path to file containing patch in unified diff format
                          relative to output_directory,
            "commit_cloud": true if the commit is in commit cloud,
            "files": list of files touched by commit,
            "binary_files": [
              {
                "filename": path to file relative to repo root,
                "old_file": path to file (relative to output_directory) with
                            a dump of the old version of the file,
                "new_file": path to file (relative to output_directory) with
                            a dump of the new version of the file,
              },
              ...
            ],
            "user": commit author,
            "p1": {
              "node": hash,
              "differential_revision": xxxx
            },
            "public_base": {
              "node": public base commit hash,
              "svnrev": svn revision of public base (if hgsvn repo),
            },
            "obsolete": {
                "date": [
                    time,
                    timezone
                ],
                "flag": marker's flags,
                "metadata": {
                    "operation": changes made,
                    "user": user name
                },
                "prednode": predecessor commit in hash,
                "succnodes": [
                    successors in hash
                ]
            }
          },
          ...
          ]
        }
    """

    revs = list(revs)
    revs.extend(opts["rev"])

    if not revs:
        raise error.Abort(_("revisions must be specified"))
    revs = scmutil.revrange(repo, revs)

    if "unified" in opts:
        contextlines = opts["unified"]

    cdata = []
    outdir = tempfile.mkdtemp(suffix="hg.crdump")
    try:
        lfs = None
        if opts["lfs"]:
            try:
                lfs = extensions.find("lfs")
            except KeyError:
                pass  # lfs extension is not enabled

        notbackedup = set(repo[rev].node() for rev in revs)
        if ui.configbool("crdump", "commitcloud", False):
            try:
                oldquiet = repo.ui.quiet
                # Silence any output from commitcloud
                repo.ui.quiet = True
                notbackedup = commitcloud.backup.backup(repo, revs)[1]
            except Exception:
                # Don't let commit cloud exceptions block crdump
                pass
            finally:
                repo.ui.quiet = oldquiet

        for rev in revs:
            ctx = repo[rev]
            rdata = {
                "node": hex(ctx.node()),
                "date": list(map(int, ctx.date())),
                "desc": encoding.fromlocal(ctx.description()),
                "files": ctx.files(),
                "p1": {
                    "node": ctx.parents()[0].hex()
                },
                "user": encoding.fromlocal(ctx.user()),
                "bookmarks": list(map(encoding.fromlocal, ctx.bookmarks())),
                "commit_cloud": False if ctx.node() in notbackedup else True,
                "manifest_node": hex(ctx.manifestnode()),
            }
            if ctx.parents()[0].phase() != phases.public:
                # we need this only if parent is in the same draft stack
                rdata["p1"]["differential_revision"] = phabricatorrevision(
                    ctx.parents()[0])

            if opts["obsolete"]:
                markers = obsutil.getmarkers(repo, [ctx.node()])
                obsolete = dumpmarkers(markers)
                if obsolete:
                    rdata["obsolete"] = obsolete

            rdata["branch"] = ""

            pbctx = publicbase(repo, ctx)
            if pbctx:
                rdata["public_base"] = {"node": hex(pbctx.node())}
                try:
                    globalrevs = extensions.find("globalrevs")
                    globalrev = globalrevs.getglobalrev(ui, pbctx)
                    rdata["public_base"]["svnrev"] = globalrev
                except KeyError:
                    pass

                if extensions.isenabled(ui, "remotenames"):
                    downstreams = repo.revs("%n:: & remotebookmark()",
                                            pbctx.node())
                    downstreambookmarks = set()
                    for r in downstreams:
                        downstreambookmarks.update(
                            repo.names["hoistednames"].names(
                                repo, repo[r].node()))

                    # If there's a single downstream remotebookmark, or master is a
                    # downstream remotebookmark, report it as the current branch.
                    if downstreambookmarks:
                        if "master" in downstreambookmarks:
                            rdata["branch"] = "master"
                        elif len(downstreambookmarks) == 1:
                            rdata["branch"] = list(downstreambookmarks)[0]

            rdata["patch_file"] = dumppatch(ui, repo, ctx, outdir,
                                            contextlines)
            if not opts["nobinary"]:
                rdata["binary_files"] = dumpbinaryfiles(
                    ui, repo, ctx, outdir, lfs)
            cdata.append(rdata)

        ui.write(
            json.dumps(
                {
                    "output_directory": outdir,
                    "commits": cdata
                },
                sort_keys=True,
                indent=4,
                separators=(",", ": "),
            ))
        ui.write("\n")
    except Exception:
        shutil.rmtree(outdir)
        raise
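
Since the docstring spells out the JSON schema in detail, here is a minimal, hedged consumer sketch for that output, assuming the dump was redirected to a file (the filename crdump-output.json is hypothetical):

import json
import os

with open("crdump-output.json") as f:
    dump = json.load(f)

outdir = dump["output_directory"]
for commit in dump["commits"]:
    # patch_file is relative to output_directory, per the schema above.
    patchpath = os.path.join(outdir, commit["patch_file"])
    print(commit["node"], patchpath)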
Example #19
def _push(orig, ui, repo, dest=None, *args, **opts):
    bookmark = opts.get("to") or ""
    create = opts.get("create") or False

    oldphasemove = None
    overrides = {
        ("experimental", "server-bundlestore-bookmark"): bookmark,
        ("experimental", "server-bundlestore-create"): create,
    }

    with ui.configoverride(
            overrides,
            "infinitepush"), repo.wlock(), repo.lock(), repo.transaction(
                "push"):
        scratchpush = opts.get("bundle_store")
        if repo._scratchbranchmatcher.match(bookmark):
            # We are pushing to a scratch bookmark.  Check that there is
            # exactly one revision that is being pushed (this will be the
            # new bookmarked node).
            revs = opts.get("rev")
            if revs:
                revs = [repo[r] for r in scmutil.revrange(repo, revs)]
            else:
                revs = [repo["."]]
            if len(revs) != 1:
                msg = _("--to requires exactly one commit to push")
                hint = _("use --rev HASH or omit --rev for current commit (.)")
                raise error.Abort(msg, hint=hint)

            # Put the bookmarked node hash in the bundle to avoid ambiguity.
            ui.setconfig("experimental", "server-bundlestore-bookmarknode",
                         revs[0].hex())

            # If the bookmark destination is a public commit, then there will
            # be nothing to push.  We still need to send a changegroup part
            # to update the bookmark, so send the null rev instead.
            if not revs[0].mutable():
                opts["rev"] = ["null"]

            # Hack to fix interaction with remotenames. Remotenames pushes the
            # '--to' bookmark to the server, but we don't want to push the
            # scratch bookmark to the server. Let's delete '--to' and '--create'
            # and also set allow_anon to True (because if --to is not set,
            # remotenames will think that we are pushing an anonymous head).
            if "to" in opts:
                del opts["to"]
            if "create" in opts:
                del opts["create"]
            opts["allow_anon"] = True
            scratchpush = True
            # bundle2 can be sent back after push (for example, bundle2
            # containing `pushkey` part to update bookmarks)
            ui.setconfig("experimental", "bundle2.pushback", True)

        ui.setconfig(
            "experimental",
            "non-forward-move",
            opts.get("non_forward_move"),
            "--non-forward-move",
        )

        otherpath = None

        if scratchpush:
            ui.setconfig("experimental", "infinitepush-scratchpush", True)

            oldphasemove = extensions.wrapfunction(exchange, "_localphasemove",
                                                   _phasemove)

            replicate, path = preparepush(ui, dest)

            # We'll replicate the push if the user intended their push to go to
            # the default infinitepush destination.
            if replicate:
                try:
                    otherpath = repo.ui.paths.getpath(
                        pathname.infinitepushother)
                except error.RepoError:
                    pass
        else:
            path = ui.paths.getpath(dest,
                                    default=(pathname.defaultpush,
                                             pathname.default))

        # Copy-paste from `push` command
        if not path:
            raise error.Abort(
                _("default repository not configured!"),
                hint=_("see 'hg help config.paths'"),
            )
        realdest = path.pushloc or path.loc
        if realdest.startswith("svn+") and scratchpush:
            raise error.Abort(
                "infinite push does not work with svn repo",
                hint="did you forget to `hg push default`?",
            )

        otherdest = otherpath and (otherpath.pushloc or otherpath.loc)

        if scratchpush:
            ui.log(
                "infinitepush_destinations",
                dest=dest,
                real_dest=realdest,
                other_dest=otherdest,
                bookmark=bookmark,
            )

        # Remote scratch bookmarks will be deleted because remotenames doesn't
        # know about them. Let's save it before push and restore after
        remotescratchbookmarks = bookmarks.readremotebookmarks(
            ui, repo, realdest)
        result = orig(ui, repo, realdest, *args, **opts)

        # If an alternate Infinitepush destination is specified, replicate the
        # push there. This ensures scratch bookmarks (and their commits) can
        # properly be replicated to Mononoke.

        if otherdest is not None and otherdest != realdest:
            m = _(
                "please wait while we replicate this push to an alternate repository\n"
            )
            ui.warn(m)
            # NOTE: We ignore the result here (which only represents whether
            # there were changes to land).
            orig(ui, repo, otherdest, *args, **opts)

        if bookmarks.remotebookmarksenabled(ui):
            if bookmark and scratchpush:
                other = hg.peer(repo, opts, realdest)
                fetchedbookmarks = other.listkeyspatterns("bookmarks",
                                                          patterns=[bookmark])
                remotescratchbookmarks.update(fetchedbookmarks)
            bookmarks.saveremotebookmarks(repo, remotescratchbookmarks,
                                          realdest)
    if oldphasemove:
        exchange._localphasemove = oldphasemove
    return result
Example #20
def debuglfsupload(ui, repo, **opts):
    """upload lfs blobs added by the working copy parent or given revisions"""
    revs = opts.get("rev", [])
    pointers = wrapper.extractpointers(repo, scmutil.revrange(repo, revs))
    wrapper.uploadblobs(repo, pointers)
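
The docstring mentions the working copy parent as the default, while the body simply passes whatever --rev values were given to scmutil.revrange. If you want that default to be explicit in a helper of your own, the idiom from the cloudcheck example above can be reused; a minimal sketch (the helper name resolverevsordot is hypothetical, and the real command may resolve its default elsewhere):

def resolverevsordot(repo, revspecs):
    # Fall back to the working copy parent (".") when no revisions are given,
    # mirroring the cloudcheck example above.
    return scmutil.revrange(repo, revspecs or ["."])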
Example #21
def _findnexttarget(
    ui,
    repo,
    n=None,
    bookmark=False,
    newest=False,
    rebase=False,
    top=False,
    towards=None,
    preferdraft=False,
):
    """Get the revision n levels up the stack from the current revision.

    If newest is True and a changeset has multiple children, the newest
    child is always chosen; otherwise an exception is raised. If the rebase
    option is specified, potentially rebase unstable children as we walk up
    the stack.
    """
    node = repo["."].node()

    # The caller must specify a stopping condition -- either a number
    # of steps to walk, a bookmark to search for, or --top.
    if not n and not bookmark and not top:
        raise error.Abort(_("no stop condition specified"))

    # Precompute child relationships to avoid expensive ctx.children() calls.
    if not rebase:
        childrenof = common.getchildrelationships(repo, [node])

    # If we're moving towards a rev, get the chain of revs up to that rev.
    line = set()
    if towards:
        towardsrevs = scmutil.revrange(repo, [towards])
        if len(towardsrevs) > 1:
            raise error.Abort(
                _("'%s' refers to multiple changesets") % towards)
        towardsrev = towardsrevs.first()
        line = set(repo.nodes(".::%d", towardsrev))
        if not line:
            raise error.Abort(
                _("the current changeset is not an ancestor of '%s'") %
                towards)

    for i in count(0):
        # Loop until we're gone the desired number of steps, or we reach a
        # node with a bookmark if the bookmark option was specified.
        # If top is specified, loop until we reach a head.
        if bookmark:
            if i > 0 and repo[node].bookmarks():
                break
        elif i >= n and not top:
            break

        # If the rebase flag is present, rebase any unstable children.
        # This means we can't rely on precomputed child relationships.
        if rebase:
            common.restackonce(ui, repo, repo[node].rev(), childrenonly=True)
            children = set(c.node() for c in repo[node].children())
        else:
            children = childrenof[node]

        # Remove children not along the specified line.
        children = (children & line) or children

        # Have we reached a head?
        if not children:
            if node == repo["."].node():
                raise error.Abort(_("current changeset has no children"))
            if not top:
                ui.status(_("reached head changeset\n"))
            break

        # Are there multiple children?
        if len(children) > 1 and not newest:
            ui.status(
                _("changeset %s has multiple children, namely:\n") %
                short(node))
            _showchangesets(ui, repo, nodes=children)
            # If there's only one non-obsolete child, we guess it's the one.
            nonobschildren = [c for c in children if not repo[c].obsolete()]
            draftchildren = [c for c in children if repo[c].mutable()]
            if len(nonobschildren) == 1:
                node = nonobschildren[0]
                ui.status(
                    _("choosing the only non-obsolete child: %s\n") %
                    short(node))
            elif preferdraft and len(draftchildren) == 1:
                node = draftchildren[0]
                ui.status(
                    _("choosing the only draft child: %s\n") % short(node))
            else:
                raise error.Abort(
                    _("ambiguous next changeset"),
                    hint=_("use the --newest or --towards flags "
                           "to specify which child to pick"),
                )
        else:
            # Get the child with the highest revision number.
            node = max(children, key=lambda childnode: repo[childnode].rev())

    return node
Example #22
def analyze(ui, repo, *revs, **opts):
    """create a simple model of a repository to use for later synthesis

    This command examines every changeset in the given range (or all
    of history if none are specified) and creates a simple statistical
    model of the history of the repository. It also measures the directory
    structure of the repository as checked out.

    The model is written out to a JSON file, and can be used by
    :hg:`synthesize` to create or augment a repository with synthetic
    commits that have a structure that is statistically similar to the
    analyzed repository.
    """
    root = repo.root
    if not root.endswith(os.path.sep):
        root += os.path.sep

    revs = list(revs)
    revs.extend(opts["rev"])
    if not revs:
        revs = [":"]

    output = opts["output"]
    if not output:
        output = os.path.basename(root) + ".json"

    if output == "-":
        fp = sys.stdout
    else:
        fp = open(output, "w")

    # Always obtain file counts of each directory in the given root directory.
    def onerror(e):
        ui.warn(_("error walking directory structure: %s\n") % e)

    dirs = {}
    rootprefixlen = len(root)
    for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
        dirpathfromroot = dirpath[rootprefixlen:]
        dirs[dirpathfromroot] = len(filenames)
        if ".hg" in dirnames:
            dirnames.remove(".hg")

    lineschanged = zerodict()
    children = zerodict()
    p1distance = zerodict()
    p2distance = zerodict()
    linesinfilesadded = zerodict()
    fileschanged = zerodict()
    filesadded = zerodict()
    filesremoved = zerodict()
    linelengths = zerodict()
    interarrival = zerodict()
    parents = zerodict()
    dirsadded = zerodict()
    tzoffset = zerodict()

    # If a mercurial repo is available, also model the commit history.
    if repo:
        revs = scmutil.revrange(repo, revs)
        revs.sort()

        progress = ui.progress
        _analyzing = _("analyzing")
        _changesets = _("changesets")
        _total = len(revs)

        for i, rev in enumerate(revs):
            progress(_analyzing, i, unit=_changesets, total=_total)
            ctx = repo[rev]
            pl = ctx.parents()
            pctx = pl[0]
            prev = pctx.rev()
            children[prev] += 1
            p1distance[rev - prev] += 1
            parents[len(pl)] += 1
            tzoffset[ctx.date()[1]] += 1
            if len(pl) > 1:
                p2distance[rev - pl[1].rev()] += 1
            if prev == rev - 1:
                lastctx = pctx
            else:
                lastctx = repo[rev - 1]
            if lastctx.rev() != nullrev:
                timedelta = ctx.date()[0] - lastctx.date()[0]
                interarrival[roundto(timedelta, 300)] += 1
            diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
            fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
            for filename, mar, lineadd, lineremove, isbin in parsegitdiff(
                    diff):
                if isbin:
                    continue
                added = sum(pycompat.itervalues(lineadd), 0)
                if mar == "m":
                    if added and lineremove:
                        lineschanged[roundto(added, 5),
                                     roundto(lineremove, 5)] += 1
                        filechanges += 1
                elif mar == "a":
                    fileadds += 1
                    if "/" in filename:
                        filedir = filename.rsplit("/", 1)[0]
                        if filedir not in pctx.dirs():
                            diradds += 1
                    linesinfilesadded[roundto(added, 5)] += 1
                elif mar == "r":
                    fileremoves += 1
                for length, count in iteritems(lineadd):
                    linelengths[length] += count
            fileschanged[filechanges] += 1
            filesadded[fileadds] += 1
            dirsadded[diradds] += 1
            filesremoved[fileremoves] += 1

    invchildren = zerodict()

    for rev, count in iteritems(children):
        invchildren[count] += 1

    if output != "-":
        ui.status(_("writing output to %s\n") % output)

    def pronk(d):
        return sorted(iteritems(d), key=lambda x: x[1], reverse=True)

    json.dump(
        {
            "revs": len(revs),
            "initdirs": pronk(dirs),
            "lineschanged": pronk(lineschanged),
            "children": pronk(invchildren),
            "fileschanged": pronk(fileschanged),
            "filesadded": pronk(filesadded),
            "linesinfilesadded": pronk(linesinfilesadded),
            "dirsadded": pronk(dirsadded),
            "filesremoved": pronk(filesremoved),
            "linelengths": pronk(linelengths),
            "parents": pronk(parents),
            "p1distance": pronk(p1distance),
            "p2distance": pronk(p2distance),
            "interarrival": pronk(interarrival),
            "tzoffset": pronk(tzoffset),
        },
        fp,
    )
    fp.close()
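The JSON written above maps each statistic to a list of [value, count] pairs sorted by descending count (the "pronk" helper). A minimal sketch of consuming that output, assuming the statistics were written to a file named repo-stats.json (the filename is hypothetical):

import json

# Load the statistics produced by the analysis above; "repo-stats.json" is a
# made-up name standing in for whatever `output` pointed at.
with open("repo-stats.json") as fp:
    stats = json.load(fp)

# Each entry is a list of [value, count] pairs, most common first.
distance, count = stats["p1distance"][0]
print("analyzed %d revisions" % stats["revs"])
print("most common p1 distance: %s (seen %d times)" % (distance, count))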
Beispiel #23
0
def metaedit(ui, repo, templ, *revs, **opts):
    """edit commit message and other metadata

    Edit the commit message for the current commit. By default, this opens your
    editor so that you can edit the commit message interactively. Use -m to
    specify the commit message on the command line.

    To edit the message for a different commit, specify -r. To edit the
    messages of multiple commits, specify --batch.

    You can edit other pieces of commit metadata, namely the user or date,
    by specifying -u or -d, respectively. The expected format for user is
    'Full Name <*****@*****.**>'.

    There is also an automation-friendly JSON input mode which allows the
    caller to provide a mapping from commit hash to new message and username
    in the following format:

        {
            "<commit_hash>": {
                "message": "<message>",
                "user": "******" // optional
            }
        }

    .. note::

        You can specify --fold to fold multiple revisions into one when the
        given revisions form a linear unbroken chain. However, :hg:`fold` is
        the preferred command for this purpose. See :hg:`help fold` for more
        information.

    .. container:: verbose

     Some examples:

     - Edit the commit message for the current commit::

         hg metaedit

     - Change the username for the current commit::

         hg metaedit --user 'New User <*****@*****.**>'

    """
    revs = list(revs)
    revs.extend(opts["rev"])
    if not revs:
        if opts["fold"]:
            raise error.Abort(_("revisions must be specified with --fold"))
        revs = ["."]

    with repo.wlock(), repo.lock():
        revs = scmutil.revrange(repo, revs)
        msgmap = {}  # {node: message}, predefined messages, currently used by --batch
        usermap = {}  # {node: author}, predefined authors, used by --jsoninputfile

        if opts["fold"]:
            root, head = fold._foldcheck(repo, revs)
        else:
            if repo.revs("%ld and public()", revs):
                raise error.Abort(
                    _("cannot edit commit information for public "
                      "revisions"))
            root = head = repo[revs.first()]

        wctx = repo[None]
        p1 = wctx.p1()
        tr = repo.transaction("metaedit")
        newp1 = None
        try:
            commitopts = opts.copy()
            allctx = [repo[r] for r in revs]
            jsoninputfile = None

            if any(
                    commitopts.get(name)
                    for name in ["message", "logfile", "reuse_message"]):
                commitopts["edit"] = False
            else:
                if opts["fold"]:
                    msgs = [
                        _("HG: This is a fold of %d changesets.") % len(allctx)
                    ]
                    msgs += [
                        _("HG: Commit message of %s.\n\n%s\n") %
                        (nodemod.short(c.node()), c.description())
                        for c in allctx
                    ]
                else:
                    if opts["batch"] and len(revs) > 1:
                        msgmap = editmessages(repo, revs)

                    msgs = [head.description()]
                    jsoninputfile = opts.get("json_input_file")
                    if jsoninputfile:
                        try:
                            if cmdutil.isstdiofilename(jsoninputfile):
                                inputjson = pycompat.decodeutf8(ui.fin.read())
                            else:
                                inputjson = pycompat.decodeutf8(
                                    util.readfile(jsoninputfile))
                            msgusermap = json.loads(inputjson)
                        except IOError as inst:
                            raise error.Abort(
                                _("can't read JSON input file '%s': %s") %
                                (jsoninputfile,
                                 encoding.strtolocal(inst.strerror)))
                        except ValueError as inst:
                            raise error.Abort(
                                _("can't decode JSON input file '%s': %s") %
                                (jsoninputfile, str(inst)))

                        if not isinstance(msgusermap, dict):
                            raise error.Abort(
                                _("JSON input is not a dictionary (see --help for input format)"
                                  ))

                        try:
                            msgmap = {
                                bin(node): msguser.get("message")
                                for (node, msguser) in msgusermap.items()
                                if "message" in msguser
                            }

                            usermap = {
                                bin(node): msguser.get("user")
                                for (node, msguser) in msgusermap.items()
                                if "user" in msguser
                            }
                        except TypeError:
                            raise error.Abort(_("invalid JSON input"))

                commitopts["message"] = "\n".join(msgs)
                commitopts["edit"] = True

            if root == head:
                # fast path: use metarewrite
                replacemap = {}
                # adding commitopts to the revisions to metaedit
                allctxopt = [
                    {"ctx": ctx, "commitopts": commitopts} for ctx in allctx
                ]
                # all descendants that can be safely rewritten
                newunstable = common.newunstable(repo, revs)
                newunstableopt = [{"ctx": repo[r]} for r in newunstable]
                # we need to rewrite the descendants together with the given
                # revisions so we do not corrupt the stacks
                if _histediting(repo):
                    ui.note(
                        _("during histedit, the descendants of "
                          "the edited commit weren't auto-rebased\n"))
                else:
                    allctxopt += newunstableopt
                # we need topological order for all
                if mutation.enabled(repo):
                    allctxopt = mutation.toposort(
                        repo,
                        allctxopt,
                        nodefn=lambda copt: copt["ctx"].node())
                else:
                    allctxopt = sorted(allctxopt,
                                       key=lambda copt: copt["ctx"].rev())

                def _rewritesingle(c, _commitopts):
                    # Predefined message overrides other message editing choices.
                    msg = msgmap.get(c.node())
                    if jsoninputfile:
                        _commitopts["edit"] = False
                    if msg is not None:
                        _commitopts["message"] = msg
                        _commitopts["edit"] = False
                    user = usermap.get(c.node())
                    if user is not None:
                        _commitopts["user"] = user
                    if _commitopts.get("edit", False):
                        msg = "HG: Commit message of changeset %s\n%s" % (
                            str(c),
                            c.description(),
                        )
                        _commitopts["message"] = msg
                    bases = [
                        replacemap.get(c.p1().node(),
                                       c.p1().node()),
                        replacemap.get(c.p2().node(),
                                       c.p2().node()),
                    ]
                    newid, created = common.metarewrite(repo,
                                                        c,
                                                        bases,
                                                        commitopts=_commitopts)
                    if created:
                        replacemap[c.node()] = newid

                for copt in allctxopt:
                    _rewritesingle(
                        copt["ctx"],
                        copt.get("commitopts",
                                 {"date": commitopts.get("date") or None}),
                    )

                if p1.node() in replacemap:
                    repo.setparents(replacemap[p1.node()])
                if len(replacemap) > 0:
                    mapping = dict(
                        map(
                            lambda oldnew: (oldnew[0], [oldnew[1]]),
                            pycompat.iteritems(replacemap),
                        ))
                    templ.setprop("nodereplacements", mapping)
                    scmutil.cleanupnodes(repo, mapping, "metaedit")
                    # TODO: set proper phase boundaries (affects secret
                    # phase only)
                else:
                    ui.status(_("nothing changed\n"))
                    return 1
            else:
                # slow path: create a new commit
                targetphase = max(c.phase() for c in allctx)

                # TODO: if the author and message are the same, don't create a
                # new hash. Right now we create a new hash because the date can
                # be different.
                newid, created = common.rewrite(
                    repo,
                    root,
                    allctx,
                    head,
                    [root.p1().node(), root.p2().node()],
                    commitopts=commitopts,
                    mutop="metaedit",
                )
                if created:
                    if p1.rev() in revs:
                        newp1 = newid
                    phases.retractboundary(repo, tr, targetphase, [newid])
                    mapping = dict([(repo[rev].node(), [newid])
                                    for rev in revs])
                    templ.setprop("nodereplacements", mapping)
                    scmutil.cleanupnodes(repo, mapping, "metaedit")
                else:
                    ui.status(_("nothing changed\n"))
                    return 1
            tr.close()
        finally:
            tr.release()

        if opts["fold"]:
            ui.status(_("%i changesets folded\n") % len(revs))
        if newp1 is not None:
            hg.update(repo, newp1)
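A minimal sketch of preparing the JSON input described in the docstring above. The file name, commit hashes, and email address are made up, and the flag name is assumed to be --jsoninputfile, following the in-code comment:

import json

# Hypothetical mapping from full commit hash to new metadata; "user" is
# optional, as the docstring notes.
edits = {
    "aaaabbbbccccddddeeeeffff0000111122223333": {
        "message": "rewritten commit message from automation",
        "user": "Automation Bot <bot@example.com>",
    },
    "4444555566667777888899990000aaaabbbbcccc": {
        "message": "another rewritten message",
    },
}

with open("metaedit-input.json", "w") as fp:
    json.dump(edits, fp)

# The file would then be passed along the lines of:
#   hg metaedit --jsoninputfile metaedit-input.json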
Beispiel #24
0
def _rebase(orig, ui, repo, *pats, **opts):
    histedit = extensions.find("histedit")

    contf = opts.get("continue")
    abortf = opts.get("abort")

    if ((contf or abortf) and not repo.localvfs.exists("rebasestate")
            and repo.localvfs.exists("histedit.state")):
        msg = _("no rebase in progress")
        hint = _(
            "If you want to continue or abort an interactive rebase please"
            ' use "histedit --continue/--abort" instead.')
        raise error.Abort(msg, hint=hint)

    if not opts.get("interactive"):
        return orig(ui, repo, *pats, **opts)

    # the argument parsing has a lot of copy-paste from rebase.py
    # Validate input and define rebasing points
    destf = opts.get("dest", None)
    srcf = opts.get("source", None)
    basef = opts.get("base", None)
    revf = opts.get("rev", [])
    keepf = opts.get("keep", False)

    src = None

    if contf or abortf:
        raise error.Abort("no interactive rebase in progress")
    if destf:
        dest = scmutil.revsingle(repo, destf)
    else:
        raise error.Abort("you must specify a destination (-d) for the rebase")

    if srcf and basef:
        raise error.Abort(_("cannot specify both a source and a base"))
    if revf:
        raise error.Abort("--rev not supported with interactive rebase")
    elif srcf:
        src = scmutil.revsingle(repo, srcf)
    else:
        base = scmutil.revrange(repo, [basef or "."])
        if not base:
            ui.status(
                _('empty "base" revision set - '
                  "can't compute rebase set\n"))
            return 1
        commonanc = repo.revs("ancestor(%ld, %d)", base, dest).first()
        if commonanc is not None:
            src = repo.revs("min((%d::(%ld) - %d)::)", commonanc, base,
                            commonanc).first()
        else:
            src = None

    if src is None:
        raise error.Abort("no revisions to rebase")
    src = repo[src].node()

    topmost, empty = repo.dirstate.parents()
    revs = histedit.between(repo, src, topmost, keepf)

    if srcf and not revs:
        raise error.Abort(
            _("source revision (-s) must be an ancestor of the "
              "working directory for interactive rebase"))

    ctxs = [repo[r] for r in revs]
    state = histedit.histeditstate(repo)
    rules = [histedit.base(state, repo[dest])
             ] + [histedit.pick(state, ctx) for ctx in ctxs]
    editcomment = """#
# Interactive rebase is just a wrapper over histedit (adding the 'base' line as
# the first rule). To continue or abort it you should use:
# "hg histedit --continue" and "--abort"
#
"""
    editcomment += histedit.geteditcomment(ui, node.short(src),
                                           node.short(topmost))
    histedit.ruleeditor(repo, ui, rules, editcomment=editcomment)

    return histedit.histedit(
        ui,
        repo,
        node.hex(src),
        keep=keepf,
        commands=repo.localvfs.join("histedit-last-edit.txt"),
    )
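The wrapper above boils interactive rebase down to a histedit rule list: a 'base' line for the destination followed by one 'pick' line per commit being replayed. A rough, self-contained sketch of that shape, with made-up short hashes (the rule lines histedit actually renders may also carry the revision number and summary):

def sample_rules(dest_hash, commit_hashes):
    # Mirror the shape of the rule list the wrapper hands to histedit's rule
    # editor: the destination first, then each commit to replay onto it.
    lines = ["base %s" % dest_hash]
    lines += ["pick %s" % h for h in commit_hashes]
    return "\n".join(lines)

print(sample_rules("a1b2c3d4e5f6", ["0f1e2d3c4b5a", "9988776655aa"]))
# base a1b2c3d4e5f6
# pick 0f1e2d3c4b5a
# pick 9988776655aa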