Example #1
def debugverifylinkrevcache(ui, repo, *pats, **opts):
    """read the linkrevs from the database and verify if they are correct"""
    # restore to the original _adjustlinkrev implementation
    c = context.basefilectx
    extensions.unwrapfunction(c, "_adjustlinkrev", _adjustlinkrev)

    repo = repo.unfiltered()
    idx = repo.changelog.index

    db = repo._linkrevcache
    paths = dict(db._getdb(db._pathdbname))  # {id: name}
    nodes = dict(db._getdb(db._nodedbname))  # {id: name}
    pathsrev = dict(
        (v, pycompat.decodeutf8(k)) for k, v in pycompat.iteritems(paths))
    nodesrev = dict((v, k) for k, v in pycompat.iteritems(nodes))
    lrevs = dict(db._getdb(db._linkrevdbname))

    readfilelog = ui.configbool("linkrevcache", "readfilelog", True)

    total = len(lrevs)
    with progress.bar(ui, _("verifying"), total=total) as prog:
        for i, (k, v) in enumerate(pycompat.iteritems(lrevs)):
            prog.value = i
            pathid, nodeid = k.split(b"\0")
            path = pathsrev[pathid]
            fnode = nodesrev[nodeid]
            linkrevs = _str2intlist(pycompat.decodeutf8(v))
            linkrevs.sort()

            for linkrev in linkrevs:
                fctx = repo[linkrev][path]
                introrev = fctx.introrev()
                if readfilelog:
                    flinkrev = fctx.linkrev()
                else:
                    flinkrev = None
                if introrev == linkrev:
                    continue
                if introrev in idx.commonancestorsheads(
                        introrev, linkrev) and (introrev in linkrevs
                                                or introrev == flinkrev):
                    adjective = _("unnecessary")
                else:
                    adjective = _("incorrect")
                ui.warn(
                    _("%s linkrev %s for %s @ %s (expected: %s)\n") %
                    (adjective, linkrev, path, node.hex(fnode), introrev))

    ui.write(_("%d entries verified\n") % total)
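
For context, here is a minimal sketch of how such a wrapper would have been installed in the first place; it is an assumption modeled on upstream Mercurial's API (`mercurial.extensions`, `mercurial.context`), not code from the original extension. The `unwrapfunction` call above removes exactly this kind of wrapper:

from mercurial import context, extensions

def _adjustlinkrev(orig, self, *args, **kwargs):
    # A wrapper receives the original function as its first argument.
    # A caching extension would consult its database here and fall
    # back to the original implementation on a cache miss.
    return orig(self, *args, **kwargs)

def uisetup(ui):
    # installed at extension setup; undone by unwrapfunction above
    extensions.wrapfunction(context.basefilectx, "_adjustlinkrev", _adjustlinkrev)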
Example #2
def pullrebaseffwd(orig, rebasefunc, ui, repo, source="default", **opts):
    # The remotenames module also wraps "pull --rebase", and if it is active,
    # it is the module that actually performs the rebase. If a rebase was
    # requested, we need to wrap the rebasemodule.rebase function it calls and
    # replace it with our rebaseorfastforward method.
    rebasing = "rebase" in opts
    if rebasing:
        rebasemodule = extensions.find("rebase")
        if rebasemodule:
            wrapfunction(rebasemodule, "rebase", rebaseorfastforward)
    ret = orig(rebasefunc, ui, repo, source, **opts)
    if rebasing and rebasemodule:
        extensions.unwrapfunction(rebasemodule, "rebase", rebaseorfastforward)
    return ret
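
One caveat: this example unwraps only on the successful path, so if `orig` raises, `rebasemodule.rebase` stays wrapped. A hedged variant of the same flow (using the helper names from the example above) that restores the wrapper unconditionally, in the try/finally style of Example #3:

def pullrebaseffwd(orig, rebasefunc, ui, repo, source="default", **opts):
    rebasemodule = None
    if "rebase" in opts:
        rebasemodule = extensions.find("rebase")
        if rebasemodule:
            wrapfunction(rebasemodule, "rebase", rebaseorfastforward)
    try:
        return orig(rebasefunc, ui, repo, source, **opts)
    finally:
        # undo the wrap even when orig raises
        if rebasemodule:
            extensions.unwrapfunction(rebasemodule, "rebase", rebaseorfastforward)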
Example #3
def pushbackupbundle(ui, repo, other, outgoing, bookmarks):
    """
    push a backup bundle to the server

    Pushes an infinitepush bundle containing the commits described in `outgoing`
    and the bookmarks described in `bookmarks` to the `other` server.
    """
    # Wrap the deltaparent function to make sure the bundle takes less space.
    # See the _deltaparent comments for details.
    extensions.wrapfunction(changegroup.cg2packer, "deltaparent", _deltaparent)
    try:
        bundler = _createbundler(ui, repo, other)
        bundler.addparam("infinitepush", "True")
        pushvarspart = bundler.newpart("pushvars")
        pushvarspart.addparam("BYPASS_READONLY", "True", mandatory=False)

        backup = False

        if outgoing and not outgoing.missing and not bookmarks:
            ui.status(_("nothing to back up\n"))
            return True

        if outgoing and outgoing.missing:
            backup = True
            parts = bundleparts.getscratchbranchparts(
                repo,
                other,
                outgoing,
                confignonforwardmove=False,
                ui=ui,
                bookmark=None,
                create=False,
                bookmarknode=None,
            )
            for part in parts:
                bundler.addpart(part)

        if bookmarks:
            backup = True
            bundler.addpart(
                bundleparts.getscratchbookmarkspart(other, bookmarks))

        if backup:
            _sendbundle(bundler, other)
        return backup
    finally:
        extensions.unwrapfunction(changegroup.cg2packer, "deltaparent",
                                  _deltaparent)
Example #4
def batchunwrap(wrappers):
    for w in wrappers:
        result = None
        try:
            result = extensions.unwrapfunction(dummy, "getstack", w)
            msg = str(dummy.getstack())
        except (ValueError, IndexError) as e:
            msg = e.__class__.__name__
        print("unwrap %s: %s: %s" % (getid(w), getid(result), msg))
Example #5
def _drawendinglines(orig, lines, extra, edgemap, seen):
    # if we are going to have only a single column, draw the missing '|'s
    # and restore everything to normal. See the comment in 'ascii' below for
    # an example of what will be changed. Note: we do not respect
    # 'graphstyle' but always draw '|' here, for simplicity.
    if len(seen) == 1 or any(l[0:2] != [" ", " "] for l in lines):
        # draw '|' from bottom to top in the 1st column to connect to
        # something, like a '/' in the 2nd column, or a '+' in the 1st column.
        for line in reversed(lines):
            if line[0:2] != [" ", " "]:
                break
            line[0] = "|"
        # undo the wrapfunction
        extensions.unwrapfunction(graphmod, "_drawendinglines", _drawendinglines)
        # restore the space to '|'
        for k, v in pycompat.iteritems(edgemap):
            if v == " ":
                edgemap[k] = "|"
    orig(lines, extra, edgemap, seen)
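
Distilled, this is a one-shot wrapper: it undoes its own `wrapfunction` from inside its body once the terminal condition is reached, so later calls go straight to the original. A minimal self-contained sketch of the pattern, using a hypothetical `mod.compute` as the target instead of `graphmod._drawendinglines`:

import types
from mercurial import extensions

mod = types.ModuleType("mod")  # stand-in for a real module like graphmod
mod.compute = lambda x: x * 2

def _onetime(orig, x):
    # undo the wrapfunction so subsequent calls bypass this wrapper
    extensions.unwrapfunction(mod, "compute", _onetime)
    return orig(x) + 1

extensions.wrapfunction(mod, "compute", _onetime)
print(mod.compute(10))  # 21: the wrapper ran once and unhooked itself
print(mod.compute(10))  # 20: original behaviour restored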
Example #6
def getbundlechunks(orig, repo, source, heads=None, bundlecaps=None, **kwargs):
    heads = heads or []
    newphases = {}
    scratchbundles = []
    # newheads are parents of roots of scratch bundles that were requested
    newheads = []
    scratchheads = []
    nodestobundle = {}
    allbundlestocleanup = []

    cgversion = _getsupportedcgversion(repo, bundlecaps or [])
    try:
        for head in heads:
            if head not in repo.changelog.nodemap:
                if head not in nodestobundle:
                    newbundlefile = downloadbundle(repo, head)
                    bundlepath = "bundle:%s+%s" % (repo.root, newbundlefile)
                    bundlerepo = hg.repository(repo.ui, bundlepath)

                    allbundlestocleanup.append((bundlerepo, newbundlefile))
                    bundlerevs = set(bundlerepo.revs("bundle()"))
                    bundlecaps = _includefilelogstobundle(
                        bundlecaps, bundlerepo, bundlerevs, repo.ui
                    )
                    cl = bundlerepo.changelog
                    bundleroots = _getbundleroots(repo, bundlerepo, bundlerevs)
                    draftcommits = set()
                    bundleheads = set([head])
                    for rev in bundlerevs:
                        node = cl.node(rev)
                        draftcommits.add(node)
                        if node in heads:
                            bundleheads.add(node)
                            nodestobundle[node] = (
                                bundlerepo,
                                bundleroots,
                                newbundlefile,
                            )

                    if draftcommits:
                        # Filter down to roots of this head, so we don't report
                        # non-roots as phase roots and we don't report commits
                        # that aren't related to the requested head.
                        for rev in bundlerepo.revs(
                            "roots((%ln) & ::%ln)", draftcommits, bundleheads
                        ):
                            newphases[bundlerepo[rev].hex()] = str(phases.draft)

                # Use the entry recorded for this head: the loop-local
                # variables set above may belong to a different bundle if
                # this head was already covered by an earlier download.
                bundlerepo, bundleroots, bundlefile = nodestobundle[head]
                scratchbundles.append(
                    _generateoutputparts(
                        head, cgversion, bundlecaps, bundlerepo, bundleroots, bundlefile
                    )
                )
                newheads.extend(bundleroots)
                scratchheads.append(head)
    finally:
        for bundlerepo, bundlefile in allbundlestocleanup:
            bundlerepo.close()
            try:
                os.unlink(bundlefile)
            except (IOError, OSError):
                # if we can't cleanup the file then just ignore the error,
                # no need to fail
                pass

    pullfrombundlestore = bool(scratchbundles)
    wrappedchangegrouppart = False
    wrappedlistkeys = False
    oldchangegrouppart = exchange.getbundle2partsmapping["changegroup"]
    try:

        def _changegrouppart(bundler, *args, **kwargs):
            # Order is important here. First add non-scratch part
            # and only then add parts with scratch bundles because
            # non-scratch part contains parents of roots of scratch bundles.
            result = oldchangegrouppart(bundler, *args, **kwargs)
            for bundle in scratchbundles:
                for part in bundle:
                    bundler.addpart(part)
            return result

        exchange.getbundle2partsmapping["changegroup"] = _changegrouppart
        wrappedchangegrouppart = True

        def _listkeys(orig, self, namespace):
            origvalues = orig(self, namespace)
            if namespace == "phases" and pullfrombundlestore:
                if origvalues.get("publishing") == "True":
                    # Make repo non-publishing to preserve draft phase
                    del origvalues["publishing"]
                origvalues.update(newphases)
            return origvalues

        extensions.wrapfunction(localrepo.localrepository, "listkeys", _listkeys)
        wrappedlistkeys = True
        heads = list((set(newheads) | set(heads)) - set(scratchheads))
        result = orig(repo, source, heads=heads, bundlecaps=bundlecaps, **kwargs)
    finally:
        if wrappedchangegrouppart:
            exchange.getbundle2partsmapping["changegroup"] = oldchangegrouppart
        if wrappedlistkeys:
            extensions.unwrapfunction(localrepo.localrepository, "listkeys", _listkeys)
    return result
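
Besides `wrapfunction`, this example temporarily swaps an entry in a plain registry dict (`exchange.getbundle2partsmapping`) and restores it in `finally`. The same save/replace/restore shape reduced to a self-contained sketch; `registry` and the handlers here are hypothetical stand-ins:

registry = {"changegroup": lambda: "non-scratch parts"}

def withreplacedhandler(registry, key, makewrapper, action):
    oldhandler = registry[key]
    registry[key] = makewrapper(oldhandler)
    try:
        return action()
    finally:
        registry[key] = oldhandler  # always restore, even on error

result = withreplacedhandler(
    registry,
    "changegroup",
    lambda old: (lambda: old() + " + scratch parts"),
    lambda: registry["changegroup"](),
)
print(result)  # non-scratch parts + scratch parts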
Example #7
def _dopull(orig, ui, repo, source="default", **opts):
    # Copy paste from `pull` command
    source, branches = hg.parseurl(ui.expandpath(source), opts.get("branch"))

    scratchbookmarks = {}
    unfi = repo.unfiltered()
    unknownnodes = []
    pullbookmarks = opts.get("bookmark") or []
    for rev in opts.get("rev", []):
        if repo._scratchbranchmatcher.match(rev):
            # rev is a scratch bookmark, treat it as a bookmark
            pullbookmarks.append(rev)
        elif rev not in unfi:
            unknownnodes.append(rev)
    if pullbookmarks:
        realbookmarks = []
        revs = opts.get("rev") or []
        for bookmark in pullbookmarks:
            if repo._scratchbranchmatcher.match(bookmark):
                # the bookmark's target rev is not known yet;
                # it will be fetched with listkeyspatterns next
                scratchbookmarks[bookmark] = "REVTOFETCH"
            else:
                realbookmarks.append(bookmark)

        if scratchbookmarks:
            other = hg.peer(repo, opts, source)
            fetchedbookmarks = other.listkeyspatterns(
                "bookmarks", patterns=scratchbookmarks)
            for bookmark in scratchbookmarks:
                if bookmark not in fetchedbookmarks:
                    raise error.Abort("remote bookmark %s not found!" %
                                      bookmark)
                scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
                revs.append(fetchedbookmarks[bookmark])
        opts["bookmark"] = realbookmarks
        opts["rev"] = [rev for rev in revs if rev not in scratchbookmarks]

    # Pulling revisions that were filtered results in an error.
    # Let's revive them.
    unfi = repo.unfiltered()
    torevive = []
    for rev in opts.get("rev", []):
        try:
            repo[rev]
        except error.FilteredRepoLookupError:
            torevive.append(rev)
        except error.RepoLookupError:
            pass
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        obsolete.revive([unfi[r] for r in torevive])
    visibility.add(repo, [unfi[r].node() for r in torevive])

    if scratchbookmarks or unknownnodes:
        # Set anyincoming to True
        extensions.wrapfunction(discovery, "findcommonincoming",
                                _findcommonincoming)
    try:
        # Remote scratch bookmarks will be deleted because remotenames doesn't
        # know about them. Let's save them before the pull and restore them after.
        remotescratchbookmarks = bookmarks.readremotebookmarks(
            ui, repo, source)
        result = orig(ui, repo, source, **opts)
        # TODO(stash): race condition is possible
        # if scratch bookmarks were updated right after orig.
        # But that's unlikely and shouldn't be harmful.
        with repo.wlock(), repo.lock(), repo.transaction("pull"):
            if bookmarks.remotebookmarksenabled(ui):
                remotescratchbookmarks.update(scratchbookmarks)
                bookmarks.saveremotebookmarks(repo, remotescratchbookmarks,
                                              source)
            else:
                bookmarks.savelocalbookmarks(repo, scratchbookmarks)
        return result
    finally:
        if scratchbookmarks or unknownnodes:
            extensions.unwrapfunction(discovery, "findcommonincoming",
                                      _findcommonincoming)
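
Mercurial's `unwrapfunction` also accepts being called without the wrapper argument, in which case it removes the most recently installed wrapper. A small sketch of the two forms, against a hypothetical `mod.fn`:

import types
from mercurial import extensions

mod = types.ModuleType("mod")
mod.fn = lambda: ["orig"]

def inner(orig):
    return ["inner"] + orig()

def outer(orig):
    return ["outer"] + orig()

extensions.wrapfunction(mod, "fn", inner)
extensions.wrapfunction(mod, "fn", outer)

extensions.unwrapfunction(mod, "fn")         # no wrapper given: removes "outer"
extensions.unwrapfunction(mod, "fn", inner)  # removes the named wrapper
print(mod.fn())  # ['orig']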
Example #8
def _runcommandwrapper(orig, lui, repo, cmd, fullargs, *args):
    # For chg, do not wrap the "serve" runcommand call. Otherwise everything
    # will be logged as side effects of a long "hg serve" command, and no
    # individual commands will be logged.
    if "CHGINTERNALMARK" in encoding.environ:
        return orig(lui, repo, cmd, fullargs, *args)

    # Unwrap _runcommandwrapper so that nested "runcommand" calls
    # (ex. "hg continue") work.
    extensions.unwrapfunction(dispatch, "runcommand", _runcommandwrapper)

    # For non-repo command, it's unnecessary to go through the undo logic
    if repo is None:
        return orig(lui, repo, cmd, fullargs, *args)

    command = [cmd] + fullargs

    # Whether something (transaction, or update) has triggered the writing of
    # the *before* state to undolog or not. Possible values:
    #  - []: not triggered, should trigger if write operation happens
    #  - [True]: already triggered by this process, should also log end state
    #  - [False]: already triggered by a parent process, should skip logging
    triggered = []

    # '_undologactive' is set by a parent hg process with before state written
    # to undolog. In this case, the current process should not write undolog.
    if "_undologactive" in encoding.environ:
        triggered.append(False)

    def log(orig, *args, **kwargs):
        # trigger a log of the initial state of a repo before a command tries
        # to modify that state.
        if not triggered:
            triggered.append(True)
            encoding.environ["_undologactive"] = "active"

            # Check whether the undolog is consistent,
            # i.e. whether the undo extension was
            # off before this command
            changes = safelog(repo, [""])
            if changes:
                _recordnewgap(repo)

        return orig(*args, **kwargs)

    # Only write undo log if we know a command is going to do some writes. This
    # saves time calculating visible heads if the command is read-only (ex.
    # status).
    #
    # To detect a write command, wrap all possible entries:
    #  - transaction.__init__
    #  - merge.update
    w = extensions.wrappedfunction
    with w(merge, "update", log), w(transaction.transaction, "__init__", log):
        try:
            result = orig(lui, repo, cmd, fullargs, *args)
        finally:
            # record changes to repo
            if triggered and triggered[0]:
                # invalidatevolatilesets should really be done in Mercurial's
                # transaction handling code. We work around it here until that
                # change lands upstream.
                repo.invalidatevolatilesets()
                safelog(repo, command)
                del encoding.environ["_undologactive"]

    return result
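
The `extensions.wrappedfunction` helper used above is the context-manager form of `wrapfunction`: the wrapper is active only inside the `with` block and is removed on exit even if an exception escapes, which is why no explicit `unwrapfunction` appears in this example. A minimal sketch against a hypothetical `mod.fn`:

import types
from mercurial import extensions

mod = types.ModuleType("mod")
mod.fn = lambda: "orig"

def log(orig, *args, **kwargs):
    print("about to call fn")  # stand-in for the undo logging above
    return orig(*args, **kwargs)

with extensions.wrappedfunction(mod, "fn", log):
    mod.fn()  # prints "about to call fn" and returns "orig"
mod.fn()      # outside the block the wrapper is gone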