From: Brian Warner
Date: Fri, 25 May 2012 07:14:08 +0000 (-0700)
Subject: CheckResults: replace get_data() with as_dict(), use getters in web status
X-Git-Url: https://git.rkrishnan.org/listings/vdrive/quickstart.html?a=commitdiff_plain;h=8daacbcf690b69031f2a09ce7f0028a1f2206179;p=tahoe-lafs%2Ftahoe-lafs.git

CheckResults: replace get_data() with as_dict(), use getters in web status
---

diff --git a/src/allmydata/check_results.py b/src/allmydata/check_results.py
index 922b6c15..cd21d0cc 100644
--- a/src/allmydata/check_results.py
+++ b/src/allmydata/check_results.py
@@ -109,7 +109,7 @@ class CheckResults:
     def get_sharemap(self):
         return self._data["sharemap"]

-    def get_data(self):
+    def as_dict(self):
         return self._data

     def get_summary(self):
diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py
index ac50fc73..86415b29 100644
--- a/src/allmydata/test/test_deepcheck.py
+++ b/src/allmydata/test/test_deepcheck.py
@@ -280,7 +280,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         needs_rebalancing = bool( num_servers < 10 )
         if not incomplete:
             self.failUnlessEqual(cr.needs_rebalancing(), needs_rebalancing,
-                                 str((where, cr, cr.get_data())))
+                                 str((where, cr, cr.as_dict())))
         self.failUnlessEqual(cr.get_share_counter_good(), 10, where)
         self.failUnlessEqual(cr.get_encoding_needed(), 3, where)
         self.failUnlessEqual(cr.get_encoding_expected(), 10, where)
diff --git a/src/allmydata/test/test_repairer.py b/src/allmydata/test/test_repairer.py
index 60b28e24..0e007423 100644
--- a/src/allmydata/test/test_repairer.py
+++ b/src/allmydata/test/test_repairer.py
@@ -117,7 +117,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
                 judgement(vr)
             except unittest.FailTest, e:
                 # FailTest just uses e.args[0] == str
-                new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.get_data())
+                new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.as_dict())
                 e.args = (new_arg,)
                 raise
         d.addCallback(_check)
@@ -127,7 +127,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         """ Verify says the file is healthy when none of the shares have been
         touched in a way that matters. It doesn't use more than seven times
         as many reads as it needs."""
-        self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 10)
         self.failUnlessEqual(len(vr.get_sharemap()), 10)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -162,7 +162,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         giving you the share data. Test that verifier handles these answers
         correctly. It doesn't use more than seven times as many reads as it
         needs."""
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(len(vr.get_sharemap()), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -182,7 +182,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         # offsets) to something larger than 2 will trigger a
         # ShareVersionIncompatible exception, which should be counted in
         # list-incompatible-shares, rather than list-corrupt-shares.
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(len(vr.get_sharemap()), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -201,7 +201,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         # corruption of fields that the server does not check (which is most
         # of them), which will be detected by the client as it downloads
         # those shares.
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
         self.failUnlessEqual(vr.get_encoding_expected(), 10)
@@ -490,7 +490,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
             self.failIfBigger(delta_reads, MAX_DELTA_READS)
             self.failIfBigger(delta_allocates, (DELTA_WRITES_PER_SHARE * 7))
             self.failIf(pre.is_healthy())
-            self.failUnless(post.is_healthy(), post.get_data())
+            self.failUnless(post.is_healthy(), post.as_dict())

             # Make sure we really have 10 shares.
             shares = self.find_uri_shares(self.uri)
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 6a83f580..49df769f 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -885,9 +885,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         m = monitor.Monitor()
         d.addCallback(lambda fn: fn.check(m))
         def _check(cr):
-            data = cr.get_data()
-            self.failUnlessEqual(data["count-shares-needed"], 7)
-            self.failUnlessEqual(data["count-shares-expected"], 12)
+            self.failUnlessEqual(cr.get_encoding_needed(), 7)
+            self.failUnlessEqual(cr.get_encoding_expected(), 12)
         d.addCallback(_check)
         return d

diff --git a/src/allmydata/web/check_results.py b/src/allmydata/web/check_results.py
index 3d381544..a0ebf3d3 100644
--- a/src/allmydata/web/check_results.py
+++ b/src/allmydata/web/check_results.py
@@ -8,30 +8,27 @@ from allmydata.web.operations import ReloadMixin
 from allmydata.interfaces import ICheckAndRepairResults, ICheckResults
 from allmydata.util import base32, idlib

-def json_check_counts(d):
-    r = {}
-    r["count-shares-good"] = d["count-shares-good"]
-    r["count-shares-needed"] = d["count-shares-needed"]
-    r["count-shares-expected"] = d["count-shares-expected"]
-    r["count-good-share-hosts"] = d["count-good-share-hosts"]
-    r["count-corrupt-shares"] = d["count-corrupt-shares"]
-    r["list-corrupt-shares"] = [ (idlib.nodeid_b2a(serverid),
-                                  base32.b2a(si), shnum)
-                                 for (serverid, si, shnum)
-                                 in d["list-corrupt-shares"] ]
-    r["servers-responding"] = [idlib.nodeid_b2a(serverid)
-                               for serverid in d["servers-responding"]]
-    sharemap = {}
-    for (shareid, serverids) in d["sharemap"].items():
-        sharemap[shareid] = [idlib.nodeid_b2a(serverid)
-                             for serverid in serverids]
-    r["sharemap"] = sharemap
-
-    r["count-wrong-shares"] = d["count-wrong-shares"]
-    r["count-recoverable-versions"] = d["count-recoverable-versions"]
-    r["count-unrecoverable-versions"] = d["count-unrecoverable-versions"]
-
-    return r
+def json_check_counts(r):
+    d = {"count-shares-good": r.get_share_counter_good(),
+         "count-shares-needed": r.get_encoding_needed(),
+         "count-shares-expected": r.get_encoding_expected(),
+         "count-good-share-hosts": r.get_host_counter_good_shares(),
+         "count-corrupt-shares": len(r.get_corrupt_shares()),
+         "list-corrupt-shares": [ (idlib.nodeid_b2a(serverid),
+                                   base32.b2a(si), shnum)
+                                  for (serverid, si, shnum)
+                                  in r.get_corrupt_shares() ],
+         "servers-responding": [idlib.nodeid_b2a(serverid)
+                                for serverid in r.get_servers_responding()],
+         "sharemap": dict([(shareid, [idlib.nodeid_b2a(serverid)
+                                      for serverid in serverids])
+                           for (shareid, serverids)
+                           in r.get_sharemap().items()]),
+         "count-wrong-shares": r.get_share_counter_wrong(),
+         "count-recoverable-versions": r.get_version_counter_recoverable(),
+         "count-unrecoverable-versions": r.get_version_counter_unrecoverable(),
+         }
+    return d

 def json_check_results(r):
     if r is None:
@@ -43,7 +40,7 @@ def json_check_results(r):
     data = {}
     data["storage-index"] = r.get_storage_index_string()
     data["summary"] = r.get_summary()
-    data["results"] = json_check_counts(r.get_data())
+    data["results"] = json_check_counts(r)
     data["results"]["needs-rebalancing"] = r.needs_rebalancing()
     data["results"]["healthy"] = r.is_healthy()
     data["results"]["recoverable"] = r.is_recoverable()
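
Note (not part of the patch): a rough sketch of the caller-side migration, assuming `cr` is a CheckResults instance as used in the tests above. Only names that appear in this diff are used; the snippet is illustrative, not code from the tree.

    # Before this change: callers reached into the raw counts dictionary.
    data = cr.get_data()
    needed = data["count-shares-needed"]
    expected = data["count-shares-expected"]
    good = data["count-shares-good"]

    # After this change: callers use the getters; as_dict() still returns
    # the underlying dictionary, which the tests keep for debug output only.
    needed = cr.get_encoding_needed()
    expected = cr.get_encoding_expected()
    good = cr.get_share_counter_good()
    debug_dump = cr.as_dict()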