def __init__(self, node, check_results, storage_broker, history, monitor):
    """Prepare a repair of *node*, driven by a prior check's results.

    Adapts *check_results* to ICheckResults up front so a non-conforming
    object fails here rather than mid-repair.
    """
    adapted = ICheckResults(check_results)
    # The results must describe the very same storage index as the node.
    assert check_results.get_storage_index() == node.get_storage_index()
    self.node = node
    self.check_results = adapted
    self._storage_broker = storage_broker
    self._history = history
    self._monitor = monitor
def __init__(self, client, results):
    """
    :param allmydata.interfaces.IStatsProducer client: stats provider.
    :param allmydata.interfaces.ICheckResults results: results of a
        check/verify operation.
    """
    super(CheckResultsRenderer, self).__init__()
    # Adapt eagerly; reject anything that is not ICheckResults-compatible.
    self._results = ICheckResults(results)
    self._client = client
def add_check(self, r, path):
    """Fold one object's check results *r* into the deep-check totals,
    indexed both by *path* (a list/tuple of path segments) and by the
    object's storage index."""
    if not r:
        # non-distributed object, i.e. LIT file
        return
    r = ICheckResults(r)
    assert isinstance(path, (list, tuple))
    self.objects_checked += 1
    if r.is_healthy():
        self.objects_healthy += 1
    else:
        self.objects_unhealthy += 1
    if not r.is_recoverable():
        self.objects_unrecoverable += 1
    path_key = tuple(path)
    self.all_results[path_key] = r
    self.all_results_by_storage_index[r.get_storage_index()] = r
    self.corrupt_shares.extend(r.get_corrupt_shares())
def repair(self, check_results, force=False, monitor=None):
    """Launch a repair based on *check_results*; return the repair Deferred."""
    # Validation only: adaptation must succeed and yield a truthy object.
    assert ICheckResults(check_results)
    repairer = Repairer(self, check_results, self._storage_broker,
                        self._history, monitor)
    return repairer.start(force)
def __init__(self, node, check_results):
    """Bind *node* and its (adapted) check results together."""
    self.node = node
    self.check_results = ICheckResults(check_results)
    # NOTE(review): sibling code reads get_storage_index(); this relies on a
    # bare .storage_index attribute — confirm the results class exposes it.
    assert node.get_storage_index() == check_results.storage_index
def _render_results(self, ctx, cr):
    """Build a nevow <ul> stan tree summarizing one check/verify result.

    :param ctx: nevow rendering context (not used directly here).
    :param cr: an ICheckResults provider to summarize.
    :returns: a T.ul stan tree listing report, share counts, corrupt
        shares, the sharemap, version counts, and share balancing.
    """
    assert ICheckResults(cr)
    c = self.client
    sb = c.get_storage_broker()
    r = []
    # helper: append one "name: value" list item to the output
    def add(name, value):
        r.append(T.li[name + ": ", value])
    add("Report", T.pre["\n".join(self._html(cr.get_report()))])
    add("Share Counts",
        "need %d-of-%d, have %d" % (cr.get_encoding_needed(),
                                    cr.get_encoding_expected(),
                                    cr.get_share_counter_good()))
    add("Hosts with good shares", cr.get_host_counter_good_shares())
    if cr.get_corrupt_shares():
        # one table row per (server, storage_index, sharenum) corruption report
        badsharemap = []
        for (s, si, shnum) in cr.get_corrupt_shares():
            d = T.tr[T.td["sh#%d" % shnum],
                     T.td[T.div(class_="nickname")[s.get_nickname()],
                          T.div(class_="nodeid")[T.tt[s.get_name()]]],
                     ]
            badsharemap.append(d)
        add("Corrupt shares",
            T.table()[T.tr[T.th["Share ID"],
                           T.th(class_="nickname-and-peerid")[
                               T.div["Nickname"],
                               T.div(class_="nodeid")["Node ID"]]],
                      badsharemap])
    else:
        add("Corrupt shares", "none")
    add("Wrong Shares", cr.get_share_counter_wrong())
    sharemap_data = []
    shares_on_server = dictutil.DictOfSets()
    # FIXME: The two tables below contain nickname-and-nodeid table column
    # markup which is duplicated with each other, introducer.xhtml, and
    # deep-check-results.xhtml. All of these (and any other presentations
    # of nickname-and-nodeid) should be combined.
    for shareid in sorted(cr.get_sharemap().keys()):
        servers = sorted(cr.get_sharemap()[shareid],
                         key=lambda s: s.get_longname())
        for i, s in enumerate(servers):
            shares_on_server.add(s, shareid)
            # only label the first row of each share-id group
            shareid_s = ""
            if i == 0:
                shareid_s = shareid
            d = T.tr[T.td[shareid_s],
                     T.td[T.div(class_="nickname")[s.get_nickname()],
                          T.div(class_="nodeid")[T.tt[s.get_name()]]]]
            sharemap_data.append(d)
    add("Good Shares (sorted in share order)",
        T.table()[T.tr[T.th["Share ID"],
                       T.th(class_="nickname-and-peerid")[
                           T.div["Nickname"],
                           T.div(class_="nodeid")["Node ID"]]],
                  sharemap_data])
    add("Recoverable Versions", cr.get_version_counter_recoverable())
    add("Unrecoverable Versions", cr.get_version_counter_unrecoverable())
    # this table is sorted by permuted order
    permuted_servers = [s for s
                        in sb.get_servers_for_psi(cr.get_storage_index())]
    num_shares_left = sum([len(shareids)
                           for shareids in shares_on_server.values()])
    servermap = []
    for s in permuted_servers:
        # copy before reversing so the DictOfSets entry is left untouched
        shareids = list(shares_on_server.get(s, []))
        shareids.reverse()
        shareids_s = [T.tt[shareid, " "] for shareid in sorted(shareids)]
        d = T.tr[T.td[T.div(class_="nickname")[s.get_nickname()],
                      T.div(class_="nodeid")[T.tt[s.get_name()]]],
                 T.td[shareids_s],
                 ]
        servermap.append(d)
        num_shares_left -= len(shareids)
        # stop once every known share has been attributed to some server
        if not num_shares_left:
            break
    add("Share Balancing (servers in permuted order)",
        T.table()[T.tr[T.th(class_="nickname-and-peerid")[
                           T.div["Nickname"],
                           T.div(class_="nodeid")["Node ID"]],
                       T.th["Share IDs"]],
                  servermap])
    return T.ul[r]
def __init__(self, client, results):
    """Page rendering one check operation's results."""
    rend.Page.__init__(self, results)
    self.client = client
    self.r = ICheckResults(results)
def repair(self, check_results, force=False):
    """Launch a repair based on *check_results*; return the repair Deferred."""
    # Validation only: adaptation must succeed and yield a truthy object.
    assert ICheckResults(check_results)
    return Repairer(self, check_results).start(force)
def _render_results(self, ctx, cr):
    """Build a nevow <ul> stan tree summarizing one check/verify result,
    reading the legacy get_data() dictionary of counts and maps.

    :param ctx: nevow rendering context (not used directly here).
    :param cr: an ICheckResults provider to summarize.
    :returns: a T.ul stan tree.
    """
    assert ICheckResults(cr)
    c = self.client
    sb = c.get_storage_broker()
    data = cr.get_data()
    r = []
    # helper: append one "name: value" list item to the output
    def add(name, value):
        r.append(T.li[name + ": ", value])
    add("Report", T.pre["\n".join(self._html(cr.get_report()))])
    add("Share Counts",
        "need %d-of-%d, have %d" % (data["count-shares-needed"],
                                    data["count-shares-expected"],
                                    data["count-shares-good"]))
    add("Hosts with good shares", data["count-good-share-hosts"])
    if data["list-corrupt-shares"]:
        # one table row per (serverid, storage_index, sharenum) report
        badsharemap = []
        for (serverid, si, shnum) in data["list-corrupt-shares"]:
            nickname = sb.get_nickname_for_serverid(serverid)
            badsharemap.append(T.tr[T.td["sh#%d" % shnum],
                                    T.td[T.div(class_="nickname")[nickname],
                                         T.div(class_="nodeid")[T.tt[base32.b2a(serverid)]]],
                                    ])
        add("Corrupt shares",
            T.table()[T.tr[T.th["Share ID"],
                           T.th(class_="nickname-and-peerid")[
                               T.div["Nickname"],
                               T.div(class_="nodeid")["Node ID"]]],
                      badsharemap])
    else:
        add("Corrupt shares", "none")
    add("Wrong Shares", data["count-wrong-shares"])
    sharemap = []
    servers = {}  # maps serverid -> list of shareids held by that server
    # FIXME: The two tables below contain nickname-and-nodeid table column
    # markup which is duplicated with each other, introducer.xhtml, and
    # deep-check-results.xhtml. All of these (and any other presentations
    # of nickname-and-nodeid) should be combined.
    for shareid in sorted(data["sharemap"].keys()):
        serverids = data["sharemap"][shareid]
        for i, serverid in enumerate(serverids):
            if serverid not in servers:
                servers[serverid] = []
            servers[serverid].append(shareid)
            # only label the first row of each share-id group
            shareid_s = ""
            if i == 0:
                shareid_s = shareid
            nickname = sb.get_nickname_for_serverid(serverid)
            sharemap.append(T.tr[T.td[shareid_s],
                                 T.td[T.div(class_="nickname")[nickname],
                                      T.div(class_="nodeid")[T.tt[base32.b2a(serverid)]]]])
    add("Good Shares (sorted in share order)",
        T.table()[T.tr[T.th["Share ID"],
                       T.th(class_="nickname-and-peerid")[
                           T.div["Nickname"],
                           T.div(class_="nodeid")["Node ID"]]],
                  sharemap])
    add("Recoverable Versions", data["count-recoverable-versions"])
    add("Unrecoverable Versions", data["count-unrecoverable-versions"])
    # this table is sorted by permuted order
    # NOTE(review): sb was already fetched above; this re-fetch is redundant
    sb = c.get_storage_broker()
    permuted_servers = [s for s
                        in sb.get_servers_for_psi(cr.get_storage_index())]
    num_shares_left = sum([len(shares) for shares in servers.values()])
    servermap = []
    for s in permuted_servers:
        nickname = s.get_nickname()
        # NOTE(review): no copy here, so reverse() mutates the list stored in
        # `servers` — harmless today (num_shares_left was summed above and
        # `servers` is not read afterwards), but fragile. Compare the sibling
        # renderer, which copies with list() first.
        shareids = servers.get(s.get_serverid(), [])
        shareids.reverse()
        shareids_s = [T.tt[shareid, " "] for shareid in sorted(shareids)]
        servermap.append(T.tr[T.td[T.div(class_="nickname")[nickname],
                                   T.div(class_="nodeid")[T.tt[s.get_name()]]],
                              T.td[shareids_s],
                              ])
        num_shares_left -= len(shareids)
        # stop once every known share has been attributed to some server
        if not num_shares_left:
            break
    add("Share Balancing (servers in permuted order)",
        T.table()[T.tr[T.th(class_="nickname-and-peerid")[
                           T.div["Nickname"],
                           T.div(class_="nodeid")["Node ID"]],
                       T.th["Share IDs"]],
                  servermap])
    return T.ul[r]
def _render_results(self, req, cr):
    """Build a twisted.web.template <ul> summarizing one check/verify result.

    :param req: the HTTP request being rendered (not used directly here).
    :param cr: an ICheckResults provider to summarize.
    :returns: a tags.ul element listing report, share counts, happiness,
        corrupt shares, the sharemap, version counts, and share balancing.
    """
    assert ICheckResults(cr)
    c = self._client
    sb = c.get_storage_broker()
    r = []
    # helper: append one "name: value" list item to the output
    def add(name, value):
        r.append(tags.li(name + ": ", value))
    add("Report", tags.pre("\n".join(self._html(cr.get_report()))))
    add("Share Counts",
        "need %d-of-%d, have %d" % (cr.get_encoding_needed(),
                                    cr.get_encoding_expected(),
                                    cr.get_share_counter_good()))
    add("Happiness Level", str(cr.get_happiness()))
    add("Hosts with good shares", str(cr.get_host_counter_good_shares()))
    if cr.get_corrupt_shares():
        # one table row per (server, storage_index, sharenum) corruption report
        badsharemap = []
        for (s, si, shnum) in cr.get_corrupt_shares():
            d = tags.tr(tags.td("sh#%d" % shnum),
                        tags.td(tags.div(s.get_nickname(), class_="nickname"),
                                tags.div(tags.tt(s.get_name()),
                                         class_="nodeid")),
                        )
            badsharemap.append(d)
        add("Corrupt shares",
            tags.table(
                tags.tr(tags.th("Share ID"),
                        tags.th((tags.div("Nickname"),
                                 tags.div("Node ID", class_="nodeid")),
                                class_="nickname-and-peerid")),
                badsharemap))
    else:
        add("Corrupt shares", "none")
    add("Wrong Shares", str(cr.get_share_counter_wrong()))
    sharemap_data = []
    shares_on_server = dictutil.DictOfSets()
    # FIXME: The two tables below contain nickname-and-nodeid
    # table column markup which is duplicated with each other,
    # introducer.xhtml, and deep-check-results.xhtml. All of these
    # (and any other presentations of nickname-and-nodeid) should be combined.
    for shareid in sorted(cr.get_sharemap().keys()):
        servers = sorted(cr.get_sharemap()[shareid],
                         key=lambda s: s.get_longname())
        for i, s in enumerate(servers):
            shares_on_server.add(s, shareid)
            # only label the first row of each share-id group; share ids may
            # arrive as bytes, so decode for display
            shareid_s = ""
            if i == 0:
                if isinstance(shareid, bytes):
                    shareid_s = str(shareid, "utf-8")
                else:
                    shareid_s = str(shareid)
            d = tags.tr(tags.td(shareid_s),
                        tags.td(tags.div(s.get_nickname(), class_="nickname"),
                                tags.div(tags.tt(s.get_name()),
                                         class_="nodeid")))
            sharemap_data.append(d)
    add("Good Shares (sorted in share order)",
        tags.table(tags.tr(tags.th("Share ID"),
                           tags.th(tags.div("Nickname"),
                                   tags.div("Node ID", class_="nodeid"),
                                   class_="nickname-and-peerid")),
                   sharemap_data))
    add("Recoverable Versions", str(cr.get_version_counter_recoverable()))
    add("Unrecoverable Versions",
        str(cr.get_version_counter_unrecoverable()))
    # this table is sorted by permuted order
    permuted_servers = [s for s
                        in sb.get_servers_for_psi(cr.get_storage_index())]
    num_shares_left = sum([len(shareids)
                           for shareids in shares_on_server.values()])
    servermap = []
    for s in permuted_servers:
        # copy before reversing so the DictOfSets entry is left untouched
        shareids = list(shares_on_server.get(s, []))
        shareids.reverse()
        shareids_s = [tags.tt(str(shareid), " ")
                      for shareid in sorted(shareids)]
        d = tags.tr(tags.td(tags.div(s.get_nickname(), class_="nickname"),
                            tags.div(tags.tt(s.get_name()),
                                     class_="nodeid")),
                    tags.td(shareids_s),
                    )
        servermap.append(d)
        num_shares_left -= len(shareids)
        # stop once every known share has been attributed to some server
        if not num_shares_left:
            break
    add("Share Balancing (servers in permuted order)",
        tags.table(tags.tr(tags.th(tags.div("Nickname"),
                                   tags.div("Node ID", class_="nodeid"),
                                   class_="nickname-and-peerid"),
                           tags.th("Share IDs")),
                   servermap))
    return tags.ul(r)