Example No. 1
    def __init__(self, peer_getter, storage_index, logparent=None):
        self._peer_getter = peer_getter
        self._found_shares = set()
        self._storage_index = storage_index
        # maps share number -> set of servers that hold that share
        self._sharemap = dictutil.DictOfSets()
        self._readers = set()
        self._ueb_hash = None
        self._ueb_data = None
        self._logparent = logparent
Example No. 2
    def test_dict_of_sets(self):
        ds = dictutil.DictOfSets()
        ds.add(1, "a")
        ds.add(2, "b")
        ds.add(2, "b")
        ds.add(2, "c")
        self.failUnlessEqual(ds[1], set(["a"]))
        self.failUnlessEqual(ds[2], set(["b", "c"]))
        ds.discard(3, "d")  # should not raise an exception
        ds.discard(2, "b")
        self.failUnlessEqual(ds[2], set(["c"]))
        ds.discard(2, "c")
        self.failIf(2 in ds)

        ds.add(3, "f")
        ds2 = dictutil.DictOfSets()
        ds2.add(3, "f")
        ds2.add(3, "g")
        ds2.add(4, "h")
        ds.update(ds2)
        self.failUnlessEqual(ds[1], set(["a"]))
        self.failUnlessEqual(ds[3], set(["f", "g"]))
        self.failUnlessEqual(ds[4], set(["h"]))
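
The test above exercises the whole public surface of DictOfSets: add, discard, update, and ordinary dict lookups. A minimal sketch consistent with that behavior (illustrative only; assuming a plain dict subclass, not necessarily the exact allmydata.util.dictutil implementation) could look like this:

class DictOfSets(dict):
    """A dict whose values are sets of the values added under each key."""

    def add(self, key, value):
        # create the set on first use; adding a duplicate value is a no-op
        if key in self:
            self[key].add(value)
        else:
            self[key] = set([value])

    def discard(self, key, value):
        # ignore unknown keys silently; drop the key once its set is empty
        if key not in self:
            return
        self[key].discard(value)
        if not self[key]:
            del self[key]

    def update(self, other):
        # merge another {key: set-of-values} mapping into this one
        for key, values in other.items():
            if key in self:
                self[key].update(values)
            else:
                self[key] = set(values)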
Example No. 3
    def test_already_uploaded(self):
        """
        If enough shares to satisfy the needed parameter already exist, the upload
        succeeds without pushing any shares.
        """
        params = FakeClient.DEFAULT_ENCODING_PARAMETERS
        chk_checker = partial(
            FakeCHKCheckerAndUEBFetcher,
            sharemap=dictutil.DictOfSets({
                0: {b"server0"},
                1: {b"server1"},
            }),
            ueb_data={
                "size": len(DATA),
                "segment_size": min(params["max_segment_size"], len(DATA)),
                "needed_shares": params["k"],
                "total_shares": params["n"],
            },
        )
        self.basedir = "helper/AssistedUpload/test_already_uploaded"
        self.setUpHelper(
            self.basedir,
            chk_checker=chk_checker,
        )
        u = make_uploader(self.helper_furl, self.s)

        yield wait_a_few_turns()

        assert u._helper

        results = yield upload_data(u,
                                    DATA,
                                    convergence=b"some convergence string")
        the_uri = results.get_uri()
        assert b"CHK" in the_uri

        files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
        self.failUnlessEqual(files, [])
        files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
        self.failUnlessEqual(files, [])

        self.assertEqual(
            results.get_pushed_shares(),
            0,
        )
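
Because DictOfSets behaves like an ordinary dict, the fake checker above can be seeded directly from a {share_number: set_of_server_ids} literal. With shares 0 and 1 already reported as present (enough, per the docstring, to satisfy the needed parameter), the helper-assisted upload completes without pushing anything, which is what the final get_pushed_shares() == 0 assertion verifies.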
Example No. 4
    def _make_checker_results(self, smap):
        self._monitor.raise_if_cancelled()
        healthy = True
        report = []
        summary = []
        vmap = smap.make_versionmap()
        recoverable = smap.recoverable_versions()
        unrecoverable = smap.unrecoverable_versions()

        if recoverable:
            report.append("Recoverable Versions: " +
                          "/".join(["%d*%s" % (len(vmap[v]),
                                               smap.summarize_version(v))
                                    for v in recoverable]))
        if unrecoverable:
            report.append("Unrecoverable Versions: " +
                          "/".join(["%d*%s" % (len(vmap[v]),
                                               smap.summarize_version(v))
                                    for v in unrecoverable]))
        if smap.unrecoverable_versions():
            healthy = False
            summary.append("some versions are unrecoverable")
            report.append("Unhealthy: some versions are unrecoverable")
        if len(recoverable) == 0:
            healthy = False
            summary.append("no versions are recoverable")
            report.append("Unhealthy: no versions are recoverable")
        if len(recoverable) > 1:
            healthy = False
            summary.append("multiple versions are recoverable")
            report.append("Unhealthy: there are multiple recoverable versions")

        if recoverable:
            best_version = smap.best_recoverable_version()
            report.append("Best Recoverable Version: " +
                          smap.summarize_version(best_version))
            counters = self._count_shares(smap, best_version)
            s = counters["count-shares-good"]
            k = counters["count-shares-needed"]
            N = counters["count-shares-expected"]
            if s < N:
                healthy = False
                report.append("Unhealthy: best version has only %d shares "
                              "(encoding is %d-of-%d)" % (s, k, N))
                summary.append("%d shares (enc %d-of-%d)" % (s, k, N))
        elif unrecoverable:
            healthy = False
            # find a k and N from somewhere
            first = list(unrecoverable)[0]
            # not exactly the best version, but that doesn't matter too much
            counters = self._count_shares(smap, first)
        else:
            # couldn't find anything at all
            counters = {
                "count-shares-good": 0,
                "count-shares-needed": 3, # arbitrary defaults
                "count-shares-expected": 10,
                "count-good-share-hosts": 0,
                "count-wrong-shares": 0,
                }

        corrupt_share_locators = []
        problems = []
        if self.bad_shares:
            report.append("Corrupt Shares:")
            summary.append("Corrupt Shares:")
        for (server, shnum, f) in sorted(self.bad_shares):
            serverid = server.get_serverid()
            locator = (server, self._storage_index, shnum)
            corrupt_share_locators.append(locator)
            s = "%s-sh%d" % (server.get_name(), shnum)
            if f.check(CorruptShareError):
                ft = f.value.reason
            else:
                ft = str(f)
            report.append(" %s: %s" % (s, ft))
            summary.append(s)
            p = (serverid, self._storage_index, shnum, f)
            problems.append(p)
            msg = ("CorruptShareError during mutable verify, "
                   "serverid=%(serverid)s, si=%(si)s, shnum=%(shnum)d, "
                   "where=%(where)s")
            log.msg(format=msg, serverid=server.get_name(),
                    si=base32.b2a(self._storage_index),
                    shnum=shnum,
                    where=ft,
                    level=log.WEIRD, umid="EkK8QA")

        sharemap = dictutil.DictOfSets()
        for verinfo in vmap:
            for (shnum, server, timestamp) in vmap[verinfo]:
                shareid = "%s-sh%d" % (smap.summarize_version(verinfo), shnum)
                sharemap.add(shareid, server)
        if healthy:
            summary = "Healthy"
        else:
            summary = "Unhealthy: " + " ".join(summary)

        count_happiness = servers_of_happiness(sharemap)

        cr = CheckResults(from_string(self._node.get_uri()),
                          self._storage_index,
                          healthy=healthy, recoverable=bool(recoverable),
                          count_happiness=count_happiness,
                          count_shares_needed=counters["count-shares-needed"],
                          count_shares_expected=counters["count-shares-expected"],
                          count_shares_good=counters["count-shares-good"],
                          count_good_share_hosts=counters["count-good-share-hosts"],
                          count_recoverable_versions=len(recoverable),
                          count_unrecoverable_versions=len(unrecoverable),
                          servers_responding=list(smap.get_reachable_servers()),
                          sharemap=sharemap,
                          count_wrong_shares=counters["count-wrong-shares"],
                          list_corrupt_shares=corrupt_share_locators,
                          count_corrupt_shares=len(corrupt_share_locators),
                          list_incompatible_shares=[],
                          count_incompatible_shares=0,
                          summary=summary,
                          report=report,
                          share_problems=problems,
                          servermap=smap.copy())
        return cr
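
Here the DictOfSets is built in the version-aware direction: each key is a "version-shN" share id and each value is the set of servers holding that share, so servers_of_happiness() can derive the count_happiness value passed to CheckResults.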
Example No. 5
    def _format_results(self, results):
        cr = CheckResults(self._verifycap, self._verifycap.get_storage_index())
        d = {}
        d['count-shares-needed'] = self._verifycap.needed_shares
        d['count-shares-expected'] = self._verifycap.total_shares

        verifiedshares = dictutil.DictOfSets()  # {sharenum: set(serverid)}
        servers = {}  # {serverid: set(sharenums)}
        corruptsharelocators = []  # (serverid, storageindex, sharenum)
        incompatiblesharelocators = []  # (serverid, storageindex, sharenum)

        for theseverifiedshares, thisserver, thesecorruptshares, theseincompatibleshares, thisresponded in results:
            thisserverid = thisserver.get_serverid()
            servers.setdefault(thisserverid, set()).update(theseverifiedshares)
            for sharenum in theseverifiedshares:
                verifiedshares.setdefault(sharenum, set()).add(thisserverid)
            for sharenum in thesecorruptshares:
                corruptsharelocators.append(
                    (thisserverid, self._verifycap.get_storage_index(),
                     sharenum))
            for sharenum in theseincompatibleshares:
                incompatiblesharelocators.append(
                    (thisserverid, self._verifycap.get_storage_index(),
                     sharenum))

        d['count-shares-good'] = len(verifiedshares)
        d['count-good-share-hosts'] = len(
            [s for s in servers.keys() if servers[s]])

        assert len(verifiedshares) <= self._verifycap.total_shares, (
            verifiedshares.keys(), self._verifycap.total_shares)
        if len(verifiedshares) == self._verifycap.total_shares:
            cr.set_healthy(True)
            cr.set_summary("Healthy")
        else:
            cr.set_healthy(False)
            cr.set_summary("Not Healthy: %d shares (enc %d-of-%d)" %
                           (len(verifiedshares), self._verifycap.needed_shares,
                            self._verifycap.total_shares))
        if len(verifiedshares) >= self._verifycap.needed_shares:
            cr.set_recoverable(True)
            d['count-recoverable-versions'] = 1
            d['count-unrecoverable-versions'] = 0
        else:
            cr.set_recoverable(False)
            d['count-recoverable-versions'] = 0
            d['count-unrecoverable-versions'] = 1

        d['servers-responding'] = list(servers)
        d['sharemap'] = verifiedshares
        # no such thing as wrong shares of an immutable file
        d['count-wrong-shares'] = 0
        d['list-corrupt-shares'] = corruptsharelocators
        d['count-corrupt-shares'] = len(corruptsharelocators)
        d['list-incompatible-shares'] = incompatiblesharelocators
        d['count-incompatible-shares'] = len(incompatiblesharelocators)

        # The file needs rebalancing if the set of servers that have at least
        # one share is less than the number of uniquely-numbered shares
        # available.
        cr.set_needs_rebalancing(
            d['count-good-share-hosts'] < d['count-shares-good'])

        cr.set_data(d)

        return cr
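
This older variant fills the DictOfSets with plain dict operations (setdefault(sharenum, set()).add(thisserverid)) rather than DictOfSets.add(); both work because the structure supports ordinary dict access, as the indexing and membership checks in Example No. 2 show.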
Example No. 6
    def _format_results(self, results):
        SI = self._verifycap.get_storage_index()

        verifiedshares = dictutil.DictOfSets()  # {sharenum: set(server)}
        servers = {}  # {server: set(sharenums)}
        corruptshare_locators = []  # (server, storageindex, sharenum)
        incompatibleshare_locators = []  # (server, storageindex, sharenum)
        servers_responding = set()  # server

        for verified, server, corrupt, incompatible, responded in results:
            servers.setdefault(server, set()).update(verified)
            for sharenum in verified:
                verifiedshares.setdefault(sharenum, set()).add(server)
            for sharenum in corrupt:
                corruptshare_locators.append((server, SI, sharenum))
            for sharenum in incompatible:
                incompatibleshare_locators.append((server, SI, sharenum))
            if responded:
                servers_responding.add(server)

        good_share_hosts = len([s for s in servers.keys() if servers[s]])

        assert len(verifiedshares) <= self._verifycap.total_shares, (
            verifiedshares.keys(), self._verifycap.total_shares)
        if len(verifiedshares) == self._verifycap.total_shares:
            healthy = True
            summary = "Healthy"
        else:
            healthy = False
            summary = ("Not Healthy: %d shares (enc %d-of-%d)" %
                       (len(verifiedshares), self._verifycap.needed_shares,
                        self._verifycap.total_shares))
        if len(verifiedshares) >= self._verifycap.needed_shares:
            recoverable = 1
            unrecoverable = 0
        else:
            recoverable = 0
            unrecoverable = 1

        count_happiness = servers_of_happiness(verifiedshares)

        cr = CheckResults(
            self._verifycap,
            SI,
            healthy=healthy,
            recoverable=bool(recoverable),
            count_happiness=count_happiness,
            count_shares_needed=self._verifycap.needed_shares,
            count_shares_expected=self._verifycap.total_shares,
            count_shares_good=len(verifiedshares),
            count_good_share_hosts=good_share_hosts,
            count_recoverable_versions=recoverable,
            count_unrecoverable_versions=unrecoverable,
            servers_responding=list(servers_responding),
            sharemap=verifiedshares,
            count_wrong_shares=0,  # no such thing, for immutable
            list_corrupt_shares=corruptshare_locators,
            count_corrupt_shares=len(corruptshare_locators),
            list_incompatible_shares=incompatibleshare_locators,
            count_incompatible_shares=len(incompatibleshare_locators),
            summary=summary,
            report=[],
            share_problems=[],
            servermap=None)

        return cr
Example No. 7
    def _render_results(self, ctx, cr):
        assert ICheckResults(cr)
        c = self.client
        sb = c.get_storage_broker()
        r = []

        def add(name, value):
            r.append(T.li[name + ": ", value])

        add("Report", T.pre["\n".join(self._html(cr.get_report()))])
        add(
            "Share Counts", "need %d-of-%d, have %d" %
            (cr.get_encoding_needed(), cr.get_encoding_expected(),
             cr.get_share_counter_good()))
        add("Hosts with good shares", cr.get_host_counter_good_shares())

        if cr.get_corrupt_shares():
            badsharemap = []
            for (s, si, shnum) in cr.get_corrupt_shares():
                d = T.tr[T.td["sh#%d" % shnum],
                         T.td[T.div(class_="nickname")[s.get_nickname()],
                              T.div(class_="nodeid")[T.tt[s.get_name()]]], ]
                badsharemap.append(d)
            add(
                "Corrupt shares",
                T.table()[T.tr[T.th["Share ID"],
                               T.th(class_="nickname-and-peerid")[
                                   T.div["Nickname"],
                                   T.div(class_="nodeid")["Node ID"]]],
                          badsharemap])
        else:
            add("Corrupt shares", "none")

        add("Wrong Shares", cr.get_share_counter_wrong())

        sharemap_data = []
        shares_on_server = dictutil.DictOfSets()

        # FIXME: The two tables below contain nickname-and-nodeid
        # table column markup which is duplicated with each other,
        # introducer.xhtml, and deep-check-results.xhtml. All of these
        # (and any other presentations of nickname-and-nodeid) should be combined.

        for shareid in sorted(cr.get_sharemap().keys()):
            servers = sorted(cr.get_sharemap()[shareid],
                             key=lambda s: s.get_longname())
            for i, s in enumerate(servers):
                shares_on_server.add(s, shareid)
                shareid_s = ""
                if i == 0:
                    shareid_s = shareid
                d = T.tr[T.td[shareid_s],
                         T.td[T.div(class_="nickname")[s.get_nickname()],
                              T.div(class_="nodeid")[T.tt[s.get_name()]]]]
                sharemap_data.append(d)
        add(
            "Good Shares (sorted in share order)",
            T.table()[T.tr[T.th["Share ID"],
                           T.th(class_="nickname-and-peerid")[
                               T.div["Nickname"],
                               T.div(class_="nodeid")["Node ID"]]],
                      sharemap_data])

        add("Recoverable Versions", cr.get_version_counter_recoverable())
        add("Unrecoverable Versions", cr.get_version_counter_unrecoverable())

        # this table is sorted by permuted order
        permuted_servers = [
            s for s in sb.get_servers_for_psi(cr.get_storage_index())
        ]

        num_shares_left = sum(
            [len(shareids) for shareids in shares_on_server.values()])
        servermap = []
        for s in permuted_servers:
            shareids = list(shares_on_server.get(s, []))
            shareids.reverse()
            shareids_s = [T.tt[shareid, " "] for shareid in sorted(shareids)]
            d = T.tr[T.td[T.div(class_="nickname")[s.get_nickname()],
                          T.div(class_="nodeid")[T.tt[s.get_name()]]],
                     T.td[shareids_s], ]
            servermap.append(d)
            num_shares_left -= len(shareids)
            if not num_shares_left:
                break
        add(
            "Share Balancing (servers in permuted order)",
            T.table()[T.tr[T.th(
                class_="nickname-and-peerid")[T.div["Nickname"],
                                              T.div(
                                                  class_="nodeid")["Node ID"]],
                           T.th["Share IDs"]], servermap])

        return T.ul[r]
Example No. 8
    def _render_results(self, req, cr):
        assert ICheckResults(cr)
        c = self._client
        sb = c.get_storage_broker()
        r = []

        def add(name, value):
            r.append(tags.li(name + ": ", value))

        add("Report", tags.pre("\n".join(self._html(cr.get_report()))))

        add(
            "Share Counts", "need %d-of-%d, have %d" %
            (cr.get_encoding_needed(), cr.get_encoding_expected(),
             cr.get_share_counter_good()))
        add("Happiness Level", str(cr.get_happiness()))
        add("Hosts with good shares", str(cr.get_host_counter_good_shares()))

        if cr.get_corrupt_shares():
            badsharemap = []
            for (s, si, shnum) in cr.get_corrupt_shares():
                d = tags.tr(
                    tags.td("sh#%d" % shnum),
                    tags.td(tags.div(s.get_nickname(), class_="nickname"),
                            tags.div(tags.tt(s.get_name()), class_="nodeid")),
                )
                badsharemap.append(d)
            add(
                "Corrupt shares",
                tags.table(
                    tags.tr(
                        tags.th("Share ID"),
                        tags.th((tags.div("Nickname"),
                                 tags.div("Node ID", class_="nodeid")),
                                class_="nickname-and-peerid")), badsharemap))
        else:
            add("Corrupt shares", "none")

        add("Wrong Shares", str(cr.get_share_counter_wrong()))

        sharemap_data = []
        shares_on_server = dictutil.DictOfSets()

        # FIXME: The two tables below contain nickname-and-nodeid
        # table column markup which is duplicated with each other,
        # introducer.xhtml, and deep-check-results.xhtml. All of these
        # (and any other presentations of nickname-and-nodeid) should be combined.

        for shareid in sorted(cr.get_sharemap().keys()):
            servers = sorted(cr.get_sharemap()[shareid],
                             key=lambda s: s.get_longname())
            for i, s in enumerate(servers):
                shares_on_server.add(s, shareid)
                shareid_s = ""
                if i == 0:
                    if isinstance(shareid, bytes):
                        shareid_s = str(shareid, "utf-8")
                    else:
                        shareid_s = str(shareid)
                d = tags.tr(
                    tags.td(shareid_s),
                    tags.td(tags.div(s.get_nickname(), class_="nickname"),
                            tags.div(tags.tt(s.get_name()), class_="nodeid")))
                sharemap_data.append(d)

        add(
            "Good Shares (sorted in share order)",
            tags.table(
                tags.tr(
                    tags.th("Share ID"),
                    tags.th(tags.div("Nickname"),
                            tags.div("Node ID", class_="nodeid"),
                            class_="nickname-and-peerid")), sharemap_data))

        add("Recoverable Versions", str(cr.get_version_counter_recoverable()))
        add("Unrecoverable Versions",
            str(cr.get_version_counter_unrecoverable()))

        # this table is sorted by permuted order
        permuted_servers = [
            s for s in sb.get_servers_for_psi(cr.get_storage_index())
        ]

        num_shares_left = sum(
            [len(shareids) for shareids in shares_on_server.values()])
        servermap = []
        for s in permuted_servers:
            shareids = list(shares_on_server.get(s, []))
            shareids.reverse()
            shareids_s = [
                tags.tt(str(shareid), " ") for shareid in sorted(shareids)
            ]

            d = tags.tr(
                tags.td(tags.div(s.get_nickname(), class_="nickname"),
                        tags.div(tags.tt(s.get_name()), class_="nodeid")),
                tags.td(shareids_s),
            )
            servermap.append(d)
            num_shares_left -= len(shareids)
            if not num_shares_left:
                break

        add(
            "Share Balancing (servers in permuted order)",
            tags.table(
                tags.tr(
                    tags.th(tags.div("Nickname"),
                            tags.div("Node ID", class_="nodeid"),
                            class_="nickname-and-peerid"),
                    tags.th("Share IDs")), servermap))

        return tags.ul(r)
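
In both web renderers (Examples No. 7 and 8) the mapping runs the other way: shares_on_server is keyed by server and accumulates share ids via shares_on_server.add(s, shareid), so each row of the permuted-server table can list the shares that server holds.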