Example no. 1
0
 def __init__(self, ui):
     """Build the scratch-branch matcher from "infinitepush.branchpattern".

     When the config option is unset, the matcher rejects every name.
     """
     pattern = ui.config("infinitepush", "branchpattern")
     if not pattern:
         self._matchfn = lambda name: False
     else:
         # stringmatcher returns (kind, normalized pattern, match function).
         self._matchfn = util.stringmatcher(pattern)[2]
Example no. 2
0
    def filtered(self, namespace=None, name=None):
        """Yield journal entries restricted by namespace and/or name.

        Both filters are optional; with neither given, every entry in the
        journal is produced.  Matching supports regular expressions via the
        `re:` prefix (use `literal:` to match names or namespaces that
        start with `re:`).
        """
        # stringmatcher's last tuple element is the predicate function.
        nsmatch = util.stringmatcher(namespace)[-1] if namespace is not None else None
        namematch = util.stringmatcher(name)[-1] if name is not None else None
        for entry in self:
            if nsmatch is not None and not nsmatch(entry.namespace):
                continue
            if namematch is not None and not namematch(entry.name):
                continue
            yield entry
Example no. 3
0
 def _listbookmarks(self, pattern):
     """Yield (bookmark, node) pairs for bookmarks matching *pattern*.

     A trailing "*" is treated as a glob and rewritten into an anchored
     regular expression before matching.
     """
     if pattern.endswith("*"):
         pattern = "re:^" + pattern[:-1] + ".*"
     matcher = util.stringmatcher(pattern)[2]
     # Strip the bookmark-map root (plus its path separator) from the
     # walked paths to recover the bookmark name.
     skip = len(self._bookmarkmap) + 1
     for dirpath, _dirs, books in self._repo.localvfs.walk(self._bookmarkmap):
         for book in books:
             name = posixpath.join(dirpath, book)[skip:]
             if matcher(name):
                 yield name, self._read(os.path.join(dirpath, book))
Example no. 4
0
def localrepolistkeys(orig, self, namespace, patterns=None):
    """Wrapper of localrepo.listkeys()"""
    # Anything other than a patterned bookmark query falls straight
    # through to the wrapped implementation.
    if namespace != "bookmarks" or not patterns:
        return orig(self, namespace)

    index = self.bundlestore.index
    bookmarks = orig(self, namespace)
    # A sortdict (rather than a plain dict) keeps bookmarks in a stable
    # order so they are restored identically after a pullbackup.
    # See T24417531.
    results = util.sortdict()
    for pattern in patterns:
        results.update(index.getbookmarks(pattern))
        if pattern.endswith("*"):
            pattern = "re:^" + pattern[:-1] + ".*"
        matcher = util.stringmatcher(pattern)[2]
        for bookmark, node in pycompat.iteritems(bookmarks):
            if matcher(bookmark):
                results[bookmark] = node
    return results
Example no. 5
0
def storebundle(op, params, bundlefile):
    """Store a pushed scratch-branch bundle and register it in the index.

    Opens the bundle at *bundlefile* as an overlay repository, validates
    the optional scratch bookmark named in *params*, writes the bundle to
    the bundlestore when it introduces heads the index does not already
    know, and records bundle/bookmark entries in the index.  Start,
    success and failure events are logged around the whole operation.

    params keys used: "bookmark", "create", "force", "bookmarknode".
    Raises error.Abort for an unknown bookmark without create, for more
    than one head pushed to a bookmark, or for an oversized bundle.
    """
    log = _getorcreateinfinitepushlogger(op)
    parthandlerstart = time.time()
    log(constants.scratchbranchparttype, eventtype="start")
    index = op.repo.bundlestore.index
    store = op.repo.bundlestore.store
    # Tell later part handlers to skip normal pushkey processing for this push.
    op.records.add(constants.scratchbranchparttype + "_skippushkey", True)

    bundle = None
    try:  # guards bundle
        bundlepath = "bundle:%s+%s" % (op.repo.root, bundlefile)
        bundle = hg.repository(op.repo.ui, bundlepath)

        bookmark = params.get("bookmark")
        create = params.get("create")
        force = params.get("force")

        if bookmark:
            oldnode = index.getnode(bookmark)

            if not oldnode and not create:
                raise error.Abort(
                    "unknown bookmark %s" % bookmark,
                    hint="use --create if you want to create one",
                )
        else:
            oldnode = None
        bundleheads = bundle.revs("heads(bundle())")
        if bookmark and len(bundleheads) > 1:
            raise error.Abort(_("cannot push more than one head to a scratch branch"))

        revs = _getrevs(bundle, oldnode, force, bookmark)

        # Notify the user of what is being pushed
        op.repo.ui.warn(
            _n("pushing %s commit:\n", "pushing %s commits:\n", len(revs)) % len(revs)
        )
        # Show at most `maxoutput` commits, plus the last one when truncated.
        maxoutput = 10
        for i in range(0, min(len(revs), maxoutput)):
            firstline = bundle[revs[i]].description().split("\n")[0][:50]
            op.repo.ui.warn(("    %s  %s\n") % (revs[i], firstline))

        # NOTE(review): with exactly maxoutput + 1 revs this branch is
        # skipped, so the last commit is silently omitted — presumably
        # intentional to avoid printing "..." for a single hidden entry;
        # confirm.
        if len(revs) > maxoutput + 1:
            op.repo.ui.warn(("    ...\n"))
            firstline = bundle[revs[-1]].description().split("\n")[0][:50]
            op.repo.ui.warn(("    %s  %s\n") % (revs[-1], firstline))

        nodesctx = [bundle[rev] for rev in revs]
        inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
        if bundleheads:
            newheadscount = sum(not inindex(rev) for rev in bundleheads)
        else:
            newheadscount = 0
        # If there's a bookmark specified, the bookmarked node should also be
        # provided.  Older clients may omit this, in which case there should be
        # only one head, so we choose the last node, which will be that head.
        # If a bug or malicious client allows there to be a bookmark
        # with multiple heads, we will place the bookmark on the last head.
        bookmarknode = params.get(
            "bookmarknode", nodesctx[-1].hex() if nodesctx else None
        )
        key = None
        if newheadscount:
            # Only write the bundle to the store when it adds new heads.
            with open(bundlefile, "r") as f:
                bundledata = f.read()
                with logservicecall(log, "bundlestore", bundlesize=len(bundledata)):
                    bundlesizelimitmb = op.repo.ui.configint(
                        "infinitepush", "maxbundlesize", 100
                    )
                    if len(bundledata) > bundlesizelimitmb * 1024 * 1024:
                        # NOTE(review): the `%` below binds only to the
                        # second string literal (formatting in the MB
                        # limit); the byte-count %d is filled on the next
                        # line.  Correct, but easy to misread.
                        error_msg = (
                            "bundle is too big: %d bytes. "
                            + "max allowed size is %s MB" % bundlesizelimitmb
                        )
                        raise error.Abort(error_msg % (len(bundledata),))
                    key = store.write(bundledata)

        # Record the bundle and (optionally) the bookmark under the index
        # context manager.
        with logservicecall(log, "index", newheadscount=newheadscount), index:
            if key:
                index.addbundle(key, nodesctx)
            if bookmark and bookmarknode:
                index.addbookmark(bookmark, bookmarknode, False)
        log(
            constants.scratchbranchparttype,
            eventtype="success",
            elapsedms=(time.time() - parthandlerstart) * 1000,
        )

        fillmetadatabranchpattern = op.repo.ui.config(
            "infinitepush", "fillmetadatabranchpattern", ""
        )
        if bookmark and fillmetadatabranchpattern:
            __, __, matcher = util.stringmatcher(fillmetadatabranchpattern)
            if matcher(bookmark):
                # Kick off asynchronous metadata backfill for matching
                # scratch bookmarks.
                _asyncsavemetadata(op.repo.root, [ctx.hex() for ctx in nodesctx])
    except Exception as e:
        log(
            constants.scratchbranchparttype,
            eventtype="failure",
            elapsedms=(time.time() - parthandlerstart) * 1000,
            errormsg=str(e),
        )
        raise
    finally:
        # Always close the overlay bundle repo, even on failure.
        if bundle:
            bundle.close()
Example no. 6
0
def cloudhide(ui, repo, *revs, **opts):
    """remove commits or bookmarks from the cloud workspace

    Removes the given draft commits (with their descendants), bookmarks
    and remote bookmarks from the commit cloud workspace.  Commits may be
    given as full hashes or unambiguous prefixes; bookmark and remote
    bookmark arguments go through util.stringmatcher, so `re:` patterns
    are supported (and `literal:` forces an exact name).  With --dry-run,
    reports what would change without contacting the service.
    """
    reponame = ccutil.getreponame(repo)
    workspacename = workspace.parseworkspace(ui, opts)
    if workspacename is None:
        workspacename = workspace.currentworkspace(repo)
    if workspacename is None:
        workspacename = workspace.defaultworkspace(ui)

    with progress.spinner(ui, _("fetching commit cloud workspace")):
        serv = service.get(ui, tokenmod.TokenLocator(ui).token)
        slinfo = serv.getsmartlog(reponame, workspacename, repo, 0)
        firstpublic, revdag = serv.makedagwalker(slinfo, repo)
        cloudrefs = serv.getreferences(reponame, workspacename, 0)

    nodeinfos = slinfo.nodeinfos
    dag = slinfo.dag
    drafts = set(slinfo.draft)

    removenodes = set()

    # Resolve requested revisions (full hashes or unambiguous prefixes)
    # to draft nodes in the workspace.
    for rev in list(revs) + opts.get("rev", []):
        rev = pycompat.encodeutf8(rev)
        if rev in drafts:
            removenodes.add(rev)
        else:
            candidate = None
            for draft in drafts:
                if draft.startswith(rev):
                    if candidate is None:
                        candidate = draft
                    else:
                        raise error.Abort(
                            _("ambiguous commit hash prefix: %s") % rev)
            if candidate is None:
                raise error.Abort(_("commit not in workspace: %s") % rev)
            removenodes.add(candidate)

    # Find the bookmarks we need to remove
    removebookmarks = set()
    for bookmark in opts.get("bookmark", []):
        kind, pattern, matcher = util.stringmatcher(bookmark)
        if kind == "literal":
            if pattern not in cloudrefs.bookmarks:
                raise error.Abort(_("bookmark not in workspace: %s") % pattern)
            removebookmarks.add(pattern)
        else:
            for bookmark in cloudrefs.bookmarks:
                if matcher(bookmark):
                    removebookmarks.add(bookmark)

    # Find the remote bookmarks we need to remove
    removeremotes = set()
    for remote in opts.get("remotebookmark", []):
        kind, pattern, matcher = util.stringmatcher(remote)
        if kind == "literal":
            if pattern not in cloudrefs.remotebookmarks:
                raise error.Abort(
                    _("remote bookmark not in workspace: %s") % pattern)
            # Record the stripped name, not the raw argument (which may
            # carry a "literal:" prefix): later lookups index into
            # cloudrefs.remotebookmarks, and this mirrors the bookmark
            # handling above.
            removeremotes.add(pattern)
        else:
            for remote in cloudrefs.remotebookmarks:
                if matcher(remote):
                    removeremotes.add(remote)

    # Find the heads and bookmarks we need to remove
    allremovenodes = dag.descendants(removenodes)
    removeheads = set(allremovenodes
                      & map(pycompat.encodeutf8, cloudrefs.heads))
    for node in allremovenodes:
        removebookmarks.update(nodeinfos[node].bookmarks)

    # Find the heads we need to remove because we are removing the last bookmark
    # to it.
    remainingheads = set(map(pycompat.encodeutf8,
                             cloudrefs.heads)) - removeheads
    # NOTE(review): bookmarks collected from nodeinfos above are assumed
    # to also exist in cloudrefs.bookmarks — confirm, else the lookup
    # below raises KeyError.
    for bookmark in removebookmarks:
        nodeutf8 = cloudrefs.bookmarks[bookmark]
        node = pycompat.encodeutf8(nodeutf8)
        info = nodeinfos.get(node)
        if node in remainingheads and info:
            if removebookmarks.issuperset(set(info.bookmarks)):
                remainingheads.discard(node)
                removeheads.add(node)

    # Find the heads we need to add to keep other commits visible
    addheads = (dag.parents(removenodes) - allremovenodes -
                dag.ancestors(remainingheads)) & drafts

    if removeheads:
        ui.status(_("removing heads:\n"))
        for head in sorted(removeheads):
            headutf8 = pycompat.decodeutf8(head)
            ui.status("    %s  %s\n" %
                      (headutf8[:12],
                       templatefilters.firstline(nodeinfos[head].message)))
    if addheads:
        ui.status(_("adding heads:\n"))
        for head in sorted(addheads):
            headutf8 = pycompat.decodeutf8(head)
            ui.status("    %s  %s\n" %
                      (headutf8[:12],
                       templatefilters.firstline(nodeinfos[head].message)))
    if removebookmarks:
        ui.status(_("removing bookmarks:\n"))
        for bookmark in sorted(removebookmarks):
            ui.status("    %s: %s\n" %
                      (bookmark, cloudrefs.bookmarks[bookmark][:12]))
    if removeremotes:
        ui.status(_("removing remote bookmarks:\n"))
        for remote in sorted(removeremotes):
            ui.status("    %s: %s\n" %
                      (remote, cloudrefs.remotebookmarks[remote][:12]))

    # Normalize back to strings. (The DAG wants bytes, the cloudrefs wants str)
    removeheads = list(map(pycompat.decodeutf8, removeheads))
    addheads = list(map(pycompat.decodeutf8, addheads))

    if removeheads or addheads or removebookmarks or removeremotes:
        if opts.get("dry_run"):
            ui.status(_("not updating cloud workspace: --dry-run specified\n"))
            return 0
        with progress.spinner(ui, _("updating commit cloud workspace")):
            serv.updatereferences(
                reponame,
                workspacename,
                cloudrefs.version,
                oldheads=list(removeheads),
                newheads=list(addheads),
                oldbookmarks=list(removebookmarks),
                oldremotebookmarks=list(removeremotes),
            )
    else:
        ui.status(_("nothing to change\n"))