def get_sharemap(self):
    """Return the share-number -> servers mapping stored in the results dict."""
    sharemap = self._data["sharemap"]
    return sharemap
def as_dict(self):
    """Return the raw underlying results dictionary (not a copy)."""
    data = self._data
    return data
def get_summary(self):
needs_rebalancing = bool( num_servers < 10 )
if not incomplete:
self.failUnlessEqual(cr.needs_rebalancing(), needs_rebalancing,
- str((where, cr, cr.get_data())))
+ str((where, cr, cr.as_dict())))
self.failUnlessEqual(cr.get_share_counter_good(), 10, where)
self.failUnlessEqual(cr.get_encoding_needed(), 3, where)
self.failUnlessEqual(cr.get_encoding_expected(), 10, where)
judgement(vr)
except unittest.FailTest, e:
# FailTest just uses e.args[0] == str
- new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.get_data())
+ new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.as_dict())
e.args = (new_arg,)
raise
d.addCallback(_check)
""" Verify says the file is healthy when none of the shares have been
touched in a way that matters. It doesn't use more than seven times
as many reads as it needs."""
- self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+ self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
self.failUnlessEqual(vr.get_share_counter_good(), 10)
self.failUnlessEqual(len(vr.get_sharemap()), 10)
self.failUnlessEqual(vr.get_encoding_needed(), 3)
giving you the share data. Test that verifier handles these answers
correctly. It doesn't use more than seven times as many reads as it
needs."""
- self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+ self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
self.failUnlessEqual(vr.get_share_counter_good(), 9)
self.failUnlessEqual(len(vr.get_sharemap()), 9)
self.failUnlessEqual(vr.get_encoding_needed(), 3)
# offsets) to something larger than 2 will trigger a
# ShareVersionIncompatible exception, which should be counted in
# list-incompatible-shares, rather than list-corrupt-shares.
- self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+ self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
self.failUnlessEqual(vr.get_share_counter_good(), 9)
self.failUnlessEqual(len(vr.get_sharemap()), 9)
self.failUnlessEqual(vr.get_encoding_needed(), 3)
# corruption of fields that the server does not check (which is most
# of them), which will be detected by the client as it downloads
# those shares.
- self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+ self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
self.failUnlessEqual(vr.get_share_counter_good(), 9)
self.failUnlessEqual(vr.get_encoding_needed(), 3)
self.failUnlessEqual(vr.get_encoding_expected(), 10)
self.failIfBigger(delta_reads, MAX_DELTA_READS)
self.failIfBigger(delta_allocates, (DELTA_WRITES_PER_SHARE * 7))
self.failIf(pre.is_healthy())
- self.failUnless(post.is_healthy(), post.get_data())
+ self.failUnless(post.is_healthy(), post.as_dict())
# Make sure we really have 10 shares.
shares = self.find_uri_shares(self.uri)
m = monitor.Monitor()
d.addCallback(lambda fn: fn.check(m))
def _check(cr):
- data = cr.get_data()
- self.failUnlessEqual(data["count-shares-needed"], 7)
- self.failUnlessEqual(data["count-shares-expected"], 12)
+ self.failUnlessEqual(cr.get_encoding_needed(), 7)
+ self.failUnlessEqual(cr.get_encoding_expected(), 12)
d.addCallback(_check)
return d
from allmydata.interfaces import ICheckAndRepairResults, ICheckResults
from allmydata.util import base32, idlib
def json_check_counts(r):
    """Convert a check-results object into a JSON-serializable counts dict.

    Parameters:
        r: an object exposing the ICheckResults-style accessor methods
           (get_share_counter_good, get_encoding_needed, ...).

    Returns:
        A dict keyed by the legacy JSON field names ("count-shares-good",
        "list-corrupt-shares", ...). Server ids and storage indexes are
        rendered through idlib/base32 so every value is JSON-friendly.
    """
    d = {"count-shares-good": r.get_share_counter_good(),
         "count-shares-needed": r.get_encoding_needed(),
         "count-shares-expected": r.get_encoding_expected(),
         "count-good-share-hosts": r.get_host_counter_good_shares(),
         "count-corrupt-shares": len(r.get_corrupt_shares()),
         # render each (serverid, storage_index, shnum) tuple as strings
         "list-corrupt-shares": [ (idlib.nodeid_b2a(serverid),
                                   base32.b2a(si), shnum)
                                  for (serverid, si, shnum)
                                  in r.get_corrupt_shares() ],
         "servers-responding": [idlib.nodeid_b2a(serverid)
                                for serverid in r.get_servers_responding()],
         # generator expression into dict(): avoids materializing an
         # intermediate list of pairs (the old dict([...]) form did)
         "sharemap": dict( (shareid, [idlib.nodeid_b2a(serverid)
                                      for serverid in serverids])
                           for (shareid, serverids)
                           in r.get_sharemap().items() ),
         "count-wrong-shares": r.get_share_counter_wrong(),
         "count-recoverable-versions": r.get_version_counter_recoverable(),
         "count-unrecoverable-versions": r.get_version_counter_unrecoverable(),
         }
    return d
def json_check_results(r):
if r is None:
data = {}
data["storage-index"] = r.get_storage_index_string()
data["summary"] = r.get_summary()
- data["results"] = json_check_counts(r.get_data())
+ data["results"] = json_check_counts(r)
data["results"]["needs-rebalancing"] = r.needs_rebalancing()
data["results"]["healthy"] = r.is_healthy()
data["results"]["recoverable"] = r.is_recoverable()