Example #1
def debugbundle2part(orig, ui, part, all, **opts):
    # Pretty-print scratch mutation entries, then delegate to the original
    # debugbundle2part implementation via orig().
    if part.type == constants.scratchmutationparttype:
        entries = mutation.mutationstore.unbundle(part.read())
        ui.write(_x("    %s entries\n") % len(entries))
        for entry in entries:
            pred = ",".join([nodemod.hex(p) for p in entry.preds()])
            succ = nodemod.hex(entry.succ())
            split = entry.split()
            if split:
                succ = ",".join([nodemod.hex(s) for s in split] + [succ])
            ui.write(
                _x("      %s -> %s (%s by %s at %s)\n") %
                (pred, succ, entry.op(), entry.user(), entry.time()))

    orig(ui, part, all, **opts)
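
A wrapper like this is usually installed with extensions.wrapcommand, which is how the previous implementation arrives as orig. A minimal sketch of that wiring, assuming the command is registered as "debugbundle2part" in the main command table (the import path is an assumption as well):

from edenscm.mercurial import commands, extensions

def extsetup(ui):
    # Swap the command for the wrapper above; the original implementation
    # is handed to the wrapper as its first argument (orig).
    extensions.wrapcommand(commands.table, "debugbundle2part", debugbundle2part)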
Example #2
def mirrornode(ctx, mapping, args):
    """template: find this commit in other repositories"""

    reponame = mapping["repo"].ui.config("fbscmquery", "reponame")
    if not reponame:
        # We don't know who we are, so we can't ask for a translation
        return ""

    if mapping["ctx"].mutable():
        # Local commits don't have translations
        return ""

    node = mapping["ctx"].hex()
    args = [f(ctx, mapping, a) for f, a in args]
    if len(args) == 1:
        torepo, totype = reponame, args[0]
    else:
        torepo, totype = args

    try:
        client = graphql.Client(repo=mapping["repo"])
        return client.getmirroredrev(reponame, "hg", torepo, totype, node)
    except arcconfig.ArcConfigError:
        mapping["repo"].ui.warn(_("couldn't read .arcconfig or .arcrc\n"))
        return ""
    except graphql.ClientError as e:
        mapping["repo"].ui.warn(_x(str(e.message) + "\n"))
        return ""
Example #3
def extsetup(ui):
    # TODO: decide use of config section for this extension
    if not os.path.supports_unicode_filenames and pycompat.sysplatform != "cygwin":
        ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
        return
    # determine encoding for filename
    global _encoding
    _encoding = ui.config("win32mbcs", "encoding") or "cp932"
    # Install the fake (wrapped) functions only for encodings that need them.
    if _encoding.lower() in problematic_encodings.split():
        for f in funcs.split():
            wrapname(f, wrapper)
        if pycompat.iswindows:
            for f in winfuncs.split():
                wrapname(f, wrapper)
        wrapname("mercurial.util.listdir", wrapperforlistdir)
        wrapname("mercurial.windows.listdir", wrapperforlistdir)
        # wrap functions to be called with local byte string arguments
        for f in rfuncs.split():
            wrapname(f, reversewrapper)
        # Check sys.argv manually instead of using ui.debug() because
        # command line options are not yet applied when
        # extensions.loadall() is called.
        if "--debug" in sys.argv:
            ui.write(
                _x("[win32mbcs] activated with encoding: %s\n") % _encoding)
Example #4
def debugindexdot(orig, ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    if shallowrepo.requirement not in repo.requirements:
        return orig(ui, repo, file_)

    # Strip the ".i" suffix from the index filename to get the revlog name.
    r = buildtemprevlog(repo, os.path.basename(file_)[:-2])

    ui.write(_x("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
Example #5
def debuglfssend(ui, url=None):
    """read from stdin, send it as a single file to LFS server

    Print oid and size.
    """
    local, remote = _adhocstores(ui, url)

    data = ui.fin.read()
    oid = hashlib.sha256(data).hexdigest()
    longoid = "sha256:%s" % oid
    size = len(data)
    pointers = [pointer.gitlfspointer(oid=longoid, size=str(size))]

    local.write(oid, data)
    remote.writebatch(pointers, local)
    ui.write(_x("%s %s\n") % (oid, size))
Example #6
    def printtotals():
        if lastfilename is not None:
            ui.write("\n")
        if not totaldeltasize or not totalblobsize:
            return
        difference = totalblobsize - totaldeltasize
        deltastr = "%0.1f%% %s" % (
            (100.0 * abs(difference) / totalblobsize),
            ("smaller" if difference > 0 else "bigger"),
        )

        ui.write(
            _x("Total:%s%s  %s (%s)\n") % (
                "".ljust(2 * hashlen - len("Total:")),
                str(totaldeltasize).ljust(12),
                str(totalblobsize).ljust(9),
                deltastr,
            ))
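
A worked example of the percentage math above: 1000 bytes of blobs stored as 800 bytes of deltas gives difference = 200, reported as "20.0% smaller":

totalblobsize, totaldeltasize = 1000, 800
difference = totalblobsize - totaldeltasize  # 200
deltastr = "%0.1f%% %s" % (
    100.0 * abs(difference) / totalblobsize,  # 20.0
    "smaller" if difference > 0 else "bigger",
)
assert deltastr == "20.0% smaller"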
Example #7
def gitnode(repo, subset, x):
    """``gitnode(id)``
    Return the hg revision corresponding to a given git rev."""
    l = revset.getargs(x, 1, 1, _("id requires one argument"))
    n = revset.getstring(l[0], _("id requires a string"))

    reponame = repo.ui.config("fbscmquery", "reponame")
    if not reponame:
        # We don't know who we are, so we can't ask for a translation
        return subset.filter(lambda r: False)
    backingrepos = repo.ui.configlist("fbscmquery",
                                      "backingrepos",
                                      default=[reponame])

    lasterror = None
    hghash = None
    for backingrepo in backingrepos:
        try:
            client = graphql.Client(repo=repo)
            hghash = client.getmirroredrev(backingrepo, "git", reponame, "hg",
                                           n)
            if hghash != "":
                break
        except Exception as ex:
            lasterror = ex

    if not hghash:
        if lasterror:
            repo.ui.warn(("Could not translate revision {0}: {1}\n".format(
                n, lasterror)))
        else:
            repo.ui.warn(_x("Could not translate revision {0}\n".format(n)))
        return subset.filter(lambda r: False)

    rn = repo[node.bin(hghash)].rev()
    return subset & smartset.baseset([rn])
Example #8
def storebundle(op, params, bundlefile, iscrossbackendsync=False):
    log = _getorcreateinfinitepushlogger(op)
    parthandlerstart = time.time()
    log(constants.scratchbranchparttype, eventtype="start")
    index = op.repo.bundlestore.index
    store = op.repo.bundlestore.store
    op.records.add(constants.scratchbranchparttype + "_skippushkey", True)

    bundle = None
    try:  # guards bundle
        bundlepath = "bundle:%s+%s" % (op.repo.root, bundlefile)
        bundle = hg.repository(op.repo.ui, bundlepath)

        bookmark = params.get("bookmark")
        create = params.get("create")
        force = params.get("force")

        if bookmark:
            oldnode = index.getnode(bookmark)

            if not oldnode and not create:
                raise error.Abort(
                    "unknown bookmark %s" % bookmark,
                    hint="use --create if you want to create one",
                )
        else:
            oldnode = None
        bundleheads = bundle.revs("heads(bundle())")
        if bookmark and len(bundleheads) > 1:
            raise error.Abort(
                _("cannot push more than one head to a scratch branch"))

        revs = _getrevs(bundle, oldnode, force, bookmark)

        # Notify the user of what is being pushed
        op.repo.ui.warn(
            _n("pushing %s commit:\n", "pushing %s commits:\n", len(revs)) %
            len(revs))
        maxoutput = 10
        for i in range(0, min(len(revs), maxoutput)):
            firstline = bundle[revs[i]].description().split("\n")[0][:50]
            op.repo.ui.warn(_x("    %s  %s\n") % (revs[i], firstline))

        if len(revs) > maxoutput:
            op.repo.ui.warn(_x("    ...\n"))
            firstline = bundle[revs[-1]].description().split("\n")[0][:50]
            op.repo.ui.warn(_x("    %s  %s\n") % (revs[-1], firstline))

        nodesctx = [bundle[rev] for rev in revs]
        inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
        if bundleheads:
            newheadscount = sum(not inindex(rev) for rev in bundleheads)
        else:
            newheadscount = 0
        # If there's a bookmark specified, the bookmarked node should also be
        # provided.  Older clients may omit this, in which case there should be
        # only one head, so we choose the last node, which will be that head.
        # If a bug or malicious client allows there to be a bookmark
        # with multiple heads, we will place the bookmark on the last head.
        bookmarknode = params.get("bookmarknode",
                                  nodesctx[-1].hex() if nodesctx else None)
        key = None
        if newheadscount:
            bundlesize = os.stat(bundlefile).st_size
            with logservicecall(log, "bundlestore", bundlesize=bundlesize):
                bundlesizelimitmb = op.repo.ui.configint(
                    "infinitepush", "maxbundlesize", 100)
                if bundlesize > bundlesizelimitmb * 1024 * 1024:
                    raise error.Abort(
                        "bundle is too big: %d bytes. max allowed size is %d MB"
                        % (bundlesize, bundlesizelimitmb))

                with open(bundlefile, "rb") as f:
                    bundledata = f.read()
                    key = store.write(bundledata)

        with logservicecall(log, "index", newheadscount=newheadscount), index:
            if key:
                index.addbundle(key,
                                nodesctx,
                                iscrossbackendsync=iscrossbackendsync)
            if bookmark and bookmarknode:
                index.addbookmark(bookmark, bookmarknode, False)
        log(
            constants.scratchbranchparttype,
            eventtype="success",
            elapsedms=(time.time() - parthandlerstart) * 1000,
        )

        fillmetadatabranchpattern = op.repo.ui.config(
            "infinitepush", "fillmetadatabranchpattern", "")
        if bookmark and fillmetadatabranchpattern:
            __, __, matcher = util.stringmatcher(fillmetadatabranchpattern)
            if matcher(bookmark):
                _asyncsavemetadata(op.repo.root,
                                   [ctx.hex() for ctx in nodesctx])
    except Exception as e:
        log(
            constants.scratchbranchparttype,
            eventtype="failure",
            elapsedms=(time.time() - parthandlerstart) * 1000,
            errormsg=str(e),
        )
        raise
    finally:
        if bundle:
            bundle.close()
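
The "bundle:" path handed to hg.repository() is the stock bundle-repo overlay syntax: the repository at the first path with the bundle's commits layered on top. A tiny illustration with made-up paths:

# hg.repository() treats "bundle:<repo>+<bundlefile>" as the repo plus the
# bundle's commits; both paths below are invented for the example.
bundlepath = "bundle:%s+%s" % ("/data/repo", "/tmp/scratch.hg")
assert bundlepath == "bundle:/data/repo+/tmp/scratch.hg"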
Example #9
def debugdatastore(ui, store, verifynoduplicate=True, **opts):
    # Dump a datastore: a single node's delta chain (--node-delta), a single
    # node's content (--node), or a per-file listing of every entry.
    nodedelta = opts.get("node_delta")
    if nodedelta:
        deltachain = store.getdeltachain("", bin(nodedelta))
        dumpdeltachain(ui, deltachain, **opts)
        return
    node = opts.get("node")
    if node:
        unionstore = unioncontentstore(store)
        try:
            content = unionstore.get("", bin(node))
        except KeyError:
            ui.write(_x("(not found)\n"))
            return
        else:
            ui.writebytes(b"%s" % content)
            return

    if opts.get("long"):
        hashformatter = hex
        hashlen = 42
    else:
        hashformatter = short
        hashlen = 14

    lastfilename = None
    totaldeltasize = 0
    totalblobsize = 0

    def printtotals():
        if lastfilename is not None:
            ui.write("\n")
        if not totaldeltasize or not totalblobsize:
            return
        difference = totalblobsize - totaldeltasize
        deltastr = "%0.1f%% %s" % (
            (100.0 * abs(difference) / totalblobsize),
            ("smaller" if difference > 0 else "bigger"),
        )

        ui.write(
            _x("Total:%s%s  %s (%s)\n") % (
                "".ljust(2 * hashlen - len("Total:")),
                str(totaldeltasize).ljust(12),
                str(totalblobsize).ljust(9),
                deltastr,
            ))

    bases = {}
    nodes = set()
    failures = 0
    filename = None  # guards the final printtotals() when the store is empty
    for filename, node, deltabase, deltalen in store.iterentries():
        bases[node] = deltabase
        if verifynoduplicate and node in nodes:
            ui.write(_x("Bad entry: %s appears twice\n" % short(node)))
            failures += 1
        nodes.add(node)
        if filename != lastfilename:
            printtotals()
            name = "(empty name)" if filename == "" else filename
            ui.write("%s:\n" % name)
            ui.write("%s%s%s%s\n" % (
                "Node".ljust(hashlen),
                "Delta Base".ljust(hashlen),
                "Delta Length".ljust(14),
                "Blob Size".ljust(9),
            ))
            lastfilename = filename
            totalblobsize = 0
            totaldeltasize = 0

        # Metadata could be missing, in which case it will be an empty dict.
        meta = store.getmeta(filename, node)
        if constants.METAKEYSIZE in meta:
            blobsize = meta[constants.METAKEYSIZE]
            totaldeltasize += deltalen
            totalblobsize += blobsize
        else:
            blobsize = "(missing)"
        ui.write("%s  %s  %s%s\n" % (
            hashformatter(node),
            hashformatter(deltabase),
            str(deltalen).ljust(14),
            blobsize,
        ))

    if filename is not None:
        printtotals()

    failures += _sanitycheck(ui, set(nodes), bases)
    if failures:
        ui.warn(_x("%d failures\n" % failures))
        return 1
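
The column alignment works because hashlen already includes two characters of padding beyond the hash itself (40 + 2 for --long, 12 + 2 for short hashes), matching the two literal spaces in the row format string. A standalone check:

hashlen = 14  # short() yields 12 hex chars; +2 for padding
header = "%s%s" % ("Node".ljust(hashlen), "Delta Base".ljust(hashlen))
row = "%s  %s" % ("0" * 12, "f" * 12)
assert header.index("Delta Base") == row.index("f" * 12) == 14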