Code example #1
0
File: remotefilelogserver.py  Project: xmonader/eden
    def stream_out_shallow(repo, proto, other):
        """Serve a stream clone to a shallow (remotefilelog) client.

        Temporarily flips the module-global ``state`` to shallow mode and
        installs a matcher built from the client-supplied include/exclude
        patterns, then restores the previous state once the stream's file
        list has been computed.
        """
        # Pattern lists arrive as "\0"-separated strings in the wire args.
        includepattern = None
        excludepattern = None
        raw = other.get("includepattern")
        if raw:
            includepattern = raw.split("\0")
        raw = other.get("excludepattern")
        if raw:
            excludepattern = raw.split("\0")

        # Save the global state so it can be restored in the finally block.
        oldshallow = state.shallowremote
        oldmatch = state.match
        oldnoflatmf = state.noflatmf
        try:
            state.shallowremote = True
            state.match = match.always(repo.root, "")
            state.noflatmf = other.get("noflatmanifest") == "True"
            if includepattern or excludepattern:
                state.match = match.match(
                    repo.root, "", None, includepattern, excludepattern
                )
            streamres = wireproto.stream(repo, proto)

            # Force the first value to execute, so the file list is computed
            # within the try/finally scope
            first = next(streamres.gen)
            second = next(streamres.gen)

            def gen():
                # Replay the two eagerly-consumed values, then pass through
                # the remainder of the original generator.
                yield first
                yield second
                for value in streamres.gen:
                    yield value

            return wireproto.streamres(gen())
        finally:
            # Restore the pre-call global state even if stream setup failed.
            state.shallowremote = oldshallow
            state.match = oldmatch
            state.noflatmf = oldnoflatmf
Code example #2
0
    def testMatchesCwd(self):
        """Tests matches() on a relpath match with the current directory ('.')
        when not in the root directory."""
        manifest = self.parsemanifest(A_DEEPER_MANIFEST)

        # '.' relative to cwd a/b should select everything under a/b.
        relmatcher = matchmod.match(b"/", b"a/b", [b"."], default=b"relpath")
        narrowed = manifest.matches(relmatcher)

        expected = [
            b"a/b/c/bar.py",
            b"a/b/c/bar.txt",
            b"a/b/c/foo.py",
            b"a/b/c/foo.txt",
            b"a/b/d/baz.py",
            b"a/b/d/qux.py",
            b"a/b/d/ten.txt",
            b"a/b/dog.py",
            b"a/b/fish.py",
        ]
        self.assertEqual(expected, narrowed.keys())
Code example #3
0
    def testMatchesDirectory(self):
        """Tests matches() on a relpath match on a directory, which should
        match against all files within said directory."""
        manifest = self.parsemanifest(A_DEEPER_MANIFEST)

        # A bare directory pattern with relpath semantics matches recursively.
        dirmatcher = matchmod.match(b"/", b"", [b"a/b"], default=b"relpath")
        narrowed = manifest.matches(dirmatcher)

        expected = [
            b"a/b/c/bar.py",
            b"a/b/c/bar.txt",
            b"a/b/c/foo.py",
            b"a/b/c/foo.txt",
            b"a/b/d/baz.py",
            b"a/b/d/qux.py",
            b"a/b/d/ten.txt",
            b"a/b/dog.py",
            b"a/b/fish.py",
        ]
        self.assertEqual(expected, narrowed.keys())
Code example #4
0
def filelogrevset(orig, repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
    """

    # Only override behavior for shallow repos; otherwise defer to the
    # wrapped implementation.  (Fixed: `not x in y` -> `x not in y`.)
    if shallowrepo.requirement not in repo.requirements:
        return orig(repo, subset, x)

    # i18n: "filelog" is a keyword
    pat = revset.getstring(x, _("filelog requires a pattern"))
    m = match.match(repo.root,
                    repo.getcwd(), [pat],
                    default="relpath",
                    ctx=repo[None])
    s = set()

    if not match.patkind(pat):
        # slow path: plain path(s) — scan every candidate changeset's
        # file list for an exact match
        for r in subset:
            ctx = repo[r]
            cfiles = ctx.files()
            for f in m.files():
                if f in cfiles:
                    s.add(ctx.rev())
                    break
    else:
        # partial path: glob/regex pattern — collect linkrevs of matching
        # working-copy files and their ancestors (may miss changesets, per
        # the docstring)
        files = (f for f in repo[None] if m(f))
        for f in files:
            fctx = repo[None].filectx(f)
            s.add(fctx.linkrev())
            for actx in fctx.ancestors():
                s.add(actx.linkrev())

    return smartset.baseset([r for r in subset if r in s])
Code example #5
0
File: shallowbundle.py  Project: x414e54/eden
    def generatefiles(self, changedfiles, linknodes, commonrevs, source):
        """Generate the file portion of a changegroup, with shallow-repo
        filtering and prefetching applied before delegating to the parent
        changegroup packer.
        """
        if self._repo.ui.configbool("remotefilelog", "server"):
            # Server side: a shallow client advertises the remotefilelog
            # capability; strip files the client's shallow match covers.
            caps = self._bundlecaps or []
            if requirement in caps:
                # only send files that don't match the specified patterns
                includepattern = None
                excludepattern = None
                for cap in self._bundlecaps or []:
                    if cap.startswith("includepattern="):
                        includepattern = cap[len("includepattern="):].split(
                            "\0")
                    elif cap.startswith("excludepattern="):
                        excludepattern = cap[len("excludepattern="):].split(
                            "\0")

                m = match.always(self._repo.root, "")
                if includepattern or excludepattern:
                    m = match.match(self._repo.root, "", None, includepattern,
                                    excludepattern)
                changedfiles = list([f for f in changedfiles if not m(f)])

        if requirement in self._repo.requirements:
            # Client side: this repo itself is shallow.
            repo = self._repo
            if isinstance(repo, bundlerepo.bundlerepository):
                # If the bundle contains filelogs, we can't pull from it, since
                # bundlerepo is heavily tied to revlogs. Instead require that
                # the user use unbundle instead.
                # Force load the filelog data.
                bundlerepo.bundlerepository.file(repo, "foo")
                if repo._cgfilespos:
                    raise error.Abort(
                        "cannot pull from full bundles",
                        hint="use `hg unbundle` instead",
                    )
                return []
            filestosend = self.shouldaddfilegroups(source)
            if filestosend == NoFiles:
                # Drop every file the shallow match covers from the bundle.
                changedfiles = list(
                    [f for f in changedfiles if not repo.shallowmatch(f)])
            else:
                files = []

                phasecache = repo._phasecache
                cl = repo.changelog

                # Prefetch the revisions being bundled
                for i, fname in enumerate(sorted(changedfiles)):
                    filerevlog = repo.file(fname)
                    linkrevnodes = linknodes(filerevlog, fname)
                    # Normally we'd prune the linkrevnodes first,
                    # but that would perform the server fetches one by one.
                    for fnode, cnode in list(pycompat.iteritems(linkrevnodes)):
                        # Adjust linknodes so remote file revisions aren't sent
                        if filestosend == LocalFiles:
                            if phasecache.phase(
                                    repo, cl.rev(cnode)
                            ) == phases.public and repo.shallowmatch(fname):
                                del linkrevnodes[fnode]
                            else:
                                files.append((fname, hex(fnode)))
                        else:
                            files.append((fname, hex(fnode)))

                repo.fileservice.prefetch(files)

                # Prefetch the revisions that are going to be diffed against
                prevfiles = []
                for fname, fnode in files:
                    if repo.shallowmatch(fname):
                        fnode = bin(fnode)
                        filerevlog = repo.file(fname)
                        p1, p2, linknode, copyfrom = filerevlog.getnodeinfo(
                            fnode)
                        if p1 != nullid:
                            # copyfrom, when set, names the rename source file.
                            prevfiles.append((copyfrom or fname, hex(p1)))

                repo.fileservice.prefetch(prevfiles)

        return super(shallowcg1packer,
                     self).generatefiles(changedfiles, linknodes, commonrevs,
                                         source)
Code example #6
0
def wraprepo(repo):
    """Rebind ``repo``'s class to a shallow-aware subclass and attach the
    remotefilelog state (shallow matcher, file service, patterns) to it.
    """
    class shallowrepository(repo.__class__):
        @util.propertycache
        def name(self):
            """The configured remotefilelog repo name ("" if unset)."""
            return self.ui.config("remotefilelog", "reponame", "")

        @util.propertycache
        def fallbackpath(self):
            """The server path used to fetch missing file revisions.

            Raises Abort when no remotefilelog server is configured.
            """
            path = self.ui.config(
                "remotefilelog",
                "fallbackpath",
                # fallbackrepo is the old, deprecated name
                self.ui.config("remotefilelog", "fallbackrepo",
                               self.ui.config("paths", "default")),
            )
            if not path:
                raise error.Abort("no remotefilelog server "
                                  "configured - is your .hg/hgrc trusted?")

            return path

        @localrepo.unfilteredpropertycache
        def fileslog(self):
            return remotefilelog.remotefileslog(self)

        def maybesparsematch(self, *revs, **kwargs):
            """
            A wrapper that allows the remotefilelog to invoke sparsematch() if
            this is a sparse repository, or returns None if this is not a
            sparse repository.
            """
            if util.safehasattr(self, "sparsematch"):
                return self.sparsematch(*revs, **kwargs)

            return None

        def file(self, f):
            """Return a remotefilelog for shallow-matched paths, otherwise
            defer to the base class."""
            # Paths are stored without a leading slash.
            if f[0] == "/":
                f = f[1:]

            if self.shallowmatch(f):
                return remotefilelog.remotefilelog(self.svfs, f, self)
            else:
                return super(shallowrepository, self).file(f)

        def filectx(self, path, changeid=None, fileid=None):
            """Return a remotefilectx for shallow-matched paths, otherwise
            defer to the base class."""
            if self.shallowmatch(path):
                return remotefilectx.remotefilectx(self, path, changeid,
                                                   fileid)
            else:
                return super(shallowrepository,
                             self).filectx(path, changeid, fileid)

        @localrepo.unfilteredmethod
        def close(self):
            """Close the repo, the file service connection, and abort any
            pending fileslog writes."""
            result = super(shallowrepository, self).close()
            self.fileservice.close()
            if "fileslog" in self.__dict__:
                self.fileslog.abortpending()
            return result

        @localrepo.unfilteredmethod
        def commitpending(self):
            super(shallowrepository, self).commitpending()

            self.numtransactioncommits += 1
            # In some cases, we can have many transactions in the same repo, in
            # which case each one will create a packfile, let's trigger a repack at
            # this point to bring the number of packfiles down to a reasonable
            # number.
            if self.numtransactioncommits >= self.ui.configint(
                    "remotefilelog", "commitsperrepack"):
                domaintenancerepack(self)
                self.numtransactioncommits = 0

        @localrepo.unfilteredmethod
        def commitctx(self, ctx, error=False):
            """Add a new revision to current repository.
            Revision information is passed via the context argument.
            """

            # some contexts already have manifest nodes, they don't need any
            # prefetching (for example if we're just editing a commit message
            # we can reuse manifest
            if not ctx.manifestnode():
                # prefetch files that will likely be compared
                m1 = ctx.p1().manifest()
                files = []
                for f in ctx.modified() + ctx.added():
                    fparent1 = m1.get(f, nullid)
                    if fparent1 != nullid:
                        files.append((f, hex(fparent1)))
                self.fileservice.prefetch(files)
            return super(shallowrepository, self).commitctx(ctx, error=error)

        def backgroundprefetch(self,
                               revs,
                               base=None,
                               repack=False,
                               pats=None,
                               opts=None):
            """Runs prefetch in background with optional repack
            """
            # NOTE(review): pats/opts are accepted but not forwarded to the
            # spawned command — confirm whether that is intentional.
            cmd = [util.hgexecutable(), "-R", self.origroot, "prefetch"]
            if repack:
                cmd.append("--repack")
            if revs:
                cmd += ["-r", revs]
            if base:
                cmd += ["-b", base]
            cmd = " ".join(map(util.shellquote, cmd))

            runshellcommand(cmd, encoding.environ)

        def prefetch(self,
                     revs,
                     base=None,
                     pats=None,
                     opts=None,
                     matcher=None):
            """Prefetches all the necessary file revisions for the given revs
            Optionally runs repack in background
            """
            # Serialize concurrent prefetches via a repo-level lock.
            with self._lock(
                    self.svfs,
                    "prefetchlock",
                    True,
                    None,
                    None,
                    _("prefetching in %s") % self.origroot,
            ):
                self._prefetch(revs, base, pats, opts, matcher)

        def _prefetch(self,
                      revs,
                      base=None,
                      pats=None,
                      opts=None,
                      matcher=None):
            """Collect the (path, filenode) pairs reachable from ``revs`` and
            ask the file service to fetch them, splitting known-on-server
            files from ones that may only exist locally."""
            fallbackpath = self.fallbackpath
            if fallbackpath:
                # If we know a rev is on the server, we should fetch the server
                # version of those files, since our local file versions might
                # become obsolete if the local commits are stripped.
                with progress.spinner(self.ui,
                                      _("finding outgoing revisions")):
                    localrevs = self.revs("outgoing(%s)", fallbackpath)
                if base is not None and base != nullrev:
                    serverbase = list(
                        self.revs("first(reverse(::%s) - %ld)", base,
                                  localrevs))
                    if serverbase:
                        base = serverbase[0]
            else:
                localrevs = self

            mfl = self.manifestlog
            if base is not None:
                # Files already present in the base manifest can be skipped.
                mfdict = mfl[self[base].manifestnode()].read()
                skip = set(mfdict.iteritems())
            else:
                skip = set()

            # Copy the skip set to start large and avoid constant resizing,
            # and since it's likely to be very similar to the prefetch set.
            files = skip.copy()
            serverfiles = skip.copy()
            visited = set()
            visited.add(nullid)
            with progress.bar(self.ui, _("prefetching"),
                              total=len(revs)) as prog:
                for rev in sorted(revs):
                    ctx = self[rev]
                    if pats:
                        m = scmutil.match(ctx, pats, opts)
                    if matcher is None:
                        matcher = self.maybesparsematch(rev)

                    mfnode = ctx.manifestnode()
                    mfctx = mfl[mfnode]

                    # Decompressing manifests is expensive.
                    # When possible, only read the deltas.
                    p1, p2 = mfctx.parents
                    if p1 in visited and p2 in visited:
                        mfdict = mfctx.readnew()
                    else:
                        mfdict = mfctx.read()

                    diff = mfdict.iteritems()
                    if pats:
                        diff = (pf for pf in diff if m(pf[0]))
                    if matcher:
                        diff = (pf for pf in diff if matcher(pf[0]))
                    if rev not in localrevs:
                        serverfiles.update(diff)
                    else:
                        files.update(diff)

                    visited.add(mfctx.node())
                    prog.value += 1

            files.difference_update(skip)
            serverfiles.difference_update(skip)

            # Fetch files known to be on the server
            if serverfiles:
                results = [(path, hex(fnode)) for (path, fnode) in serverfiles]
                self.fileservice.prefetch(results, force=True)

            # Fetch files that may or may not be on the server
            if files:
                results = [(path, hex(fnode)) for (path, fnode) in files]
                self.fileservice.prefetch(results)

    repo.__class__ = shallowrepository

    # Default: every file is shallow until patterns say otherwise.
    repo.shallowmatch = match.always(repo.root, "")
    repo.fileservice = fileserverclient.fileserverclient(repo)

    repo.numtransactioncommits = 0

    repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
                                             None)
    repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
                                             None)

    if repo.includepattern or repo.excludepattern:
        repo.shallowmatch = match.match(repo.root, "", None,
                                        repo.includepattern,
                                        repo.excludepattern)
Code example #7
0
File: shallowrepo.py  Project: simpkins/eden
def wraprepo(repo):
    """Rebind ``repo``'s class to a shallow-aware subclass and attach the
    remotefilelog state (shallow matcher, file service, patterns) to it.
    """
    class shallowrepository(repo.__class__):
        @util.propertycache
        def name(self):
            """The configured remotefilelog repo name ("unknown" if unset)."""
            return self.ui.config("remotefilelog", "reponame", "unknown")

        @util.propertycache
        def fallbackpath(self):
            """The server path used to fetch missing file revisions.

            Raises Abort when no remotefilelog server is configured.
            """
            path = self.ui.config(
                "remotefilelog",
                "fallbackpath",
                # fallbackrepo is the old, deprecated name
                self.ui.config("remotefilelog", "fallbackrepo",
                               self.ui.config("paths", "default")),
            )
            if not path:
                raise error.Abort("no remotefilelog server "
                                  "configured - is your .hg/hgrc trusted?")

            return path

        @util.propertycache
        def fileslog(self):
            return remotefilelog.remotefileslog(self)

        def maybesparsematch(self, *revs, **kwargs):
            """
            A wrapper that allows the remotefilelog to invoke sparsematch() if
            this is a sparse repository, or returns None if this is not a
            sparse repository.
            """
            if util.safehasattr(self, "sparsematch"):
                return self.sparsematch(*revs, **kwargs)

            return None

        def file(self, f):
            """Return a remotefilelog for shallow-matched paths, otherwise
            defer to the base class."""
            # Paths are stored without a leading slash.
            if f[0] == "/":
                f = f[1:]

            if self.shallowmatch(f):
                return remotefilelog.remotefilelog(self.svfs, f, self)
            else:
                return super(shallowrepository, self).file(f)

        def filectx(self, path, changeid=None, fileid=None):
            """Return a remotefilectx for shallow-matched paths, otherwise
            defer to the base class."""
            if self.shallowmatch(path):
                return remotefilectx.remotefilectx(self, path, changeid,
                                                   fileid)
            else:
                return super(shallowrepository,
                             self).filectx(path, changeid, fileid)

        def close(self):
            """Close the repo and abort any pending fileslog writes."""
            result = super(shallowrepository, self).close()
            if "fileslog" in self.__dict__:
                self.fileslog.abortpending()
            return result

        def commitpending(self):
            super(shallowrepository, self).commitpending()

            self.numtransactioncommits += 1
            # In some cases, we can have many transactions in the same repo, in
            # which case each one will create a packfile, let's trigger a repack at
            # this point to bring the number of packfiles down to a reasonable
            # number.
            if self.numtransactioncommits >= self.ui.configint(
                    "remotefilelog", "commitsperrepack"):
                domaintenancerepack(self)
                self.numtransactioncommits = 0

        def commitctx(self, ctx, error=False):
            """Add a new revision to current repository.
            Revision information is passed via the context argument.
            """

            # some contexts already have manifest nodes, they don't need any
            # prefetching (for example if we're just editing a commit message
            # we can reuse manifest
            if not ctx.manifestnode():
                # prefetch files that will likely be compared
                m1 = ctx.p1().manifest()
                files = []
                for f in ctx.modified() + ctx.added():
                    fparent1 = m1.get(f, nullid)
                    if fparent1 != nullid:
                        files.append((f, hex(fparent1)))
                self.fileservice.prefetch(files)
            return super(shallowrepository, self).commitctx(ctx, error=error)

        def backgroundprefetch(self,
                               revs,
                               base=None,
                               repack=False,
                               pats=None,
                               opts=None):
            """Runs prefetch in background with optional repack"""
            # NOTE(review): pats/opts are accepted but not forwarded to the
            # spawned command — confirm whether that is intentional.
            cmd = [util.hgexecutable(), "-R", self.origroot, "prefetch"]
            if repack:
                cmd.append("--repack")
            if revs:
                cmd += ["-r", revs]
            if base:
                cmd += ["-b", base]

            util.spawndetached(cmd)

        def prefetch(self, revs, base=None, matcher=None):
            """Prefetches all the necessary file revisions for the given revs
            Optionally runs repack in background
            """
            # Serialize concurrent prefetches via a repo-level lock.
            with self._lock(
                    self.svfs,
                    "prefetchlock",
                    True,
                    None,
                    None,
                    _("prefetching in %s") % self.origroot,
            ):
                self._prefetch(revs, base, matcher)

        def _prefetch(self, revs, base=None, matcher=None):
            """Diff each rev's manifest against the base manifest and ask the
            file service to fetch the changed file revisions."""
            mfl = self.manifestlog

            # Copy the skip set to start large and avoid constant resizing,
            # and since it's likely to be very similar to the prefetch set.
            files = set()
            basemf = self[base or nullid].manifest()
            with progress.bar(self.ui, _("prefetching"),
                              total=len(revs)) as prog:
                for rev in sorted(revs):
                    ctx = self[rev]
                    if matcher is None:
                        matcher = self.maybesparsematch(rev)

                    mfctx = ctx.manifestctx()
                    mf = mfctx.read()

                    # new[0] is the filenode on the rev's side of the diff;
                    # a falsy node means the file was removed.
                    for path, (new, old) in mf.diff(basemf, matcher).items():
                        if new[0]:
                            files.add((path, new[0]))
                    prog.value += 1

            if files:
                results = [(path, hex(fnode)) for (path, fnode) in files]
                self.fileservice.prefetch(results)

    repo.__class__ = shallowrepository

    # Default: every file is shallow until patterns say otherwise.
    repo.shallowmatch = match.always(repo.root, "")
    repo.fileservice = fileserverclient.fileserverclient(repo)

    repo.numtransactioncommits = 0

    repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
                                             None)
    repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
                                             None)

    if repo.includepattern or repo.excludepattern:
        repo.shallowmatch = match.match(repo.root, "", None,
                                        repo.includepattern,
                                        repo.excludepattern)
Code example #8
0
    def testDiff(self):
        """Tests treemanifest.diff() across uncommitted and committed trees,
        with and without a matcher, and with clean=True."""
        # NOTE(review): assertEquals is a deprecated alias of assertEqual —
        # consider updating.
        a = cstore.treemanifest(FakeDataStore())
        zflags = hashflags()
        mflags = hashflags()
        a.set("abc/z", *zflags)
        a.set("xyz/m", *mflags)
        alinknode = hashflags()[0]

        # b starts with the same contents as a.
        b = cstore.treemanifest(FakeDataStore())
        b.set("abc/z", *zflags)
        b.set("xyz/m", *mflags)
        blinknode = hashflags()[0]

        # Diff matching trees
        # - uncommitted trees
        diff = a.diff(b)
        self.assertEquals(diff, {})

        # - committed trees
        dstore = FakeDataStore()
        hstore = FakeHistoryStore()
        for name, node, text, p1text, p1, p2 in a.finalize():
            dstore.add(name, node, nullid, text)
            hstore.add(name, node, p1, p2, alinknode, "")
        for name, node, text, p1text, p1, p2 in b.finalize():
            dstore.add(name, node, nullid, text)
            hstore.add(name, node, p1, p2, blinknode, "")
        diff = a.diff(b)
        self.assertEquals(diff, {})

        b2 = b.copy()

        # Diff with modifications
        newfileflags = hashflags()
        newzflags = hashflags()
        b2.set("newfile", *newfileflags)
        b2.set("abc/z", *newzflags)

        # - uncommitted trees
        diff = a.diff(b2)
        self.assertEquals(diff, {
            "newfile": ((None, ""), newfileflags),
            "abc/z": (zflags, newzflags)
        })

        # - uncommitted trees with matcher
        matcher = matchmod.match("/", "/", ["abc/*"])
        diff = a.diff(b2, matcher=matcher)
        self.assertEquals(diff, {"abc/z": (zflags, newzflags)})

        matcher = matchmod.match("/", "/", ["newfile"])
        diff = a.diff(b2, matcher=matcher)
        self.assertEquals(diff, {"newfile": ((None, ""), newfileflags)})

        # - committed trees
        for name, node, text, p1text, p1, p2 in b2.finalize(a):
            dstore.add(name, node, nullid, text)
            hstore.add(name, node, p1, p2, blinknode, "")

        diff = a.diff(b2)
        self.assertEquals(diff, {
            "newfile": ((None, ""), newfileflags),
            "abc/z": (zflags, newzflags)
        })

        # Diff with clean
        # clean=True also reports unchanged files, mapped to None.
        diff = a.diff(b2, clean=True)
        self.assertEquals(
            diff,
            {
                "newfile": ((None, ""), newfileflags),
                "abc/z": (zflags, newzflags),
                "xyz/m": None,
            },
        )