from zope.interface import implements
from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
IDeepCheckResults, IDeepCheckAndRepairResults, IURI, IDisplayableServer
-from allmydata.util import base32, dictutil
+from allmydata.util import base32
class CheckResults:
implements(ICheckResults)
return self._count_unrecoverable_versions
def get_sharemap(self):
- sharemap = dictutil.DictOfSets()
- for shnum, servers in self._sharemap.items():
- for s in servers:
- sharemap.add(shnum, s.get_serverid())
- return sharemap
- def get_new_sharemap(self):
- # this one returns IServers, even when get_sharemap returns serverids
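+ # The stored sharemap now maps each share identifier directly to a
+ # list of IServer objects; callers that still need binary serverids
+ # can call get_serverid() on each entry, as the removed code did.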
return self._sharemap
def as_dict(self):
healthy file, this will be 0."""
def get_sharemap():
- """Return a dict mapping share identifier to list of serverids
- (binary strings). This indicates which servers are holding which
- shares. For immutable files, the shareid is an integer (the share
- number, from 0 to N-1). For mutable files, it is a string of the form
- 'seq%d-%s-sh%d', containing the sequence number, the roothash, and
- the share number."""
+ """Return a dict mapping share identifier to list of IServer objects.
+ This indicates which servers are holding which shares. For immutable
+ files, the shareid is an integer (the share number, from 0 to N-1).
+ For mutable files, it is a string of the form 'seq%d-%s-sh%d',
+ containing the sequence number, the roothash, and the share number."""
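+ # Illustrative sketch only: for an immutable 3-of-10 file the mapping
+ # might look like {0: [serverA, serverB], 1: [serverB]}, and for a
+ # mutable file like {'seq12-x7k2q-sh0': [serverA]}, where serverA and
+ # serverB are placeholders for IServer instances.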
def get_summary():
"""Return a string with a brief (one-line) summary of the results."""
self.failUnlessIn("Wrong Shares: 0", s)
self.failUnlessIn("Recoverable Versions: 1", s)
self.failUnlessIn("Unrecoverable Versions: 0", s)
+ self.failUnlessIn("Good Shares (sorted in share order): Share ID Nickname Node ID shareid1 peer-0 00000000 peer-f ffffffff", s)
cr = check_results.CheckResults(u, u.get_storage_index(),
healthy=False, recoverable=True,
'count-unrecoverable-versions': 0,
'count-shares-needed': 3,
'sharemap': {"shareid1":
- ["77777777777777777777777777777777",
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]},
+ ["v0-00000000-long", "v0-ffffffff-long"]},
'count-recoverable-versions': 1,
'list-corrupt-shares':
[["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
from allmydata.web.common import getxmlfile, get_arg, get_root, WebError
from allmydata.web.operations import ReloadMixin
from allmydata.interfaces import ICheckAndRepairResults, ICheckResults
-from allmydata.util import base32, idlib
+from allmydata.util import base32, idlib, dictutil
def json_check_counts(r):
d = {"count-shares-good": r.get_share_counter_good(),
in r.get_corrupt_shares() ],
"servers-responding": [idlib.nodeid_b2a(serverid)
for serverid in r.get_servers_responding()],
- "sharemap": dict([(shareid, [idlib.nodeid_b2a(serverid)
- for serverid in serverids])
- for (shareid, serverids)
+ "sharemap": dict([(shareid,
+ sorted([s.get_longname() for s in servers]))
+ for (shareid, servers)
in r.get_sharemap().items()]),
"count-wrong-shares": r.get_share_counter_wrong(),
"count-recoverable-versions": r.get_version_counter_recoverable(),
add("Wrong Shares", cr.get_share_counter_wrong())
- sharemap = []
- servers = {}
+ sharemap_data = []
+ shares_on_server = dictutil.DictOfSets()
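+ # shares_on_server inverts the sharemap: it maps each IServer to the
+ # set of shareids that server holds, and feeds the permuted-order
+ # server table built further down.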
# FIXME: The two tables below contain nickname-and-nodeid column markup
# that is duplicated between them and with introducer.xhtml and
# deep-check-results.xhtml. All of these (and any other presentations
# of nickname-and-nodeid) should be combined.
for shareid in sorted(cr.get_sharemap().keys()):
- serverids = cr.get_sharemap()[shareid]
- for i,serverid in enumerate(serverids):
- if serverid not in servers:
- servers[serverid] = []
- servers[serverid].append(shareid)
+ servers = sorted(cr.get_sharemap()[shareid],
+ key=lambda s: s.get_longname())
+ for i,s in enumerate(servers):
+ shares_on_server.add(s, shareid)
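+ # only the first row for a given share carries the Share ID label;
+ # additional servers holding the same share get a blank cell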
shareid_s = ""
if i == 0:
shareid_s = shareid
- nickname = sb.get_nickname_for_serverid(serverid)
- sharemap.append(T.tr[T.td[shareid_s],
- T.td[T.div(class_="nickname")[nickname],
- T.div(class_="nodeid")[T.tt[base32.b2a(serverid)]]]
- ])
+ d = T.tr[T.td[shareid_s],
+ T.td[T.div(class_="nickname")[s.get_nickname()],
+ T.div(class_="nodeid")[T.tt[s.get_name()]]]
+ ]
+ sharemap_data.append(d)
add("Good Shares (sorted in share order)",
T.table()[T.tr[T.th["Share ID"], T.th(class_="nickname-and-peerid")[T.div["Nickname"], T.div(class_="nodeid")["Node ID"]]],
- sharemap])
+ sharemap_data])
add("Recoverable Versions", cr.get_version_counter_recoverable())
add("Unrecoverable Versions", cr.get_version_counter_unrecoverable())
# this table is sorted by permuted order
- sb = c.get_storage_broker()
permuted_servers = [s
for s
in sb.get_servers_for_psi(cr.get_storage_index())]
- num_shares_left = sum([len(shares) for shares in servers.values()])
+ num_shares_left = sum([len(shareids)
+ for shareids in shares_on_server.values()])
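+ # num_shares_left counts the share placements not yet shown in the
+ # table; the loop below decrements it and stops once every known
+ # share has been listed.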
servermap = []
for s in permuted_servers:
- nickname = s.get_nickname()
- shareids = servers.get(s.get_serverid(), [])
+ shareids = list(shares_on_server.get(s, []))
shareids.reverse()
shareids_s = [ T.tt[shareid, " "] for shareid in sorted(shareids) ]
- servermap.append(T.tr[T.td[T.div(class_="nickname")[nickname],
- T.div(class_="nodeid")[T.tt[s.get_name()]]],
- T.td[shareids_s],
- ])
+ d = T.tr[T.td[T.div(class_="nickname")[s.get_nickname()],
+ T.div(class_="nodeid")[T.tt[s.get_name()]]],
+ T.td[shareids_s],
+ ]
+ servermap.append(d)
num_shares_left -= len(shareids)
if not num_shares_left:
break