From: Brian Warner Date: Fri, 25 May 2012 07:13:48 +0000 (-0700) Subject: use the new CheckResult getters almost everywhere X-Git-Url: https://git.rkrishnan.org/%5B/%5D%20/uri/frontends/module-simplejson.decoder.html?a=commitdiff_plain;h=4867dca3f04a4c7590b07b8c9f51fee422076b1c;p=tahoe-lafs%2Ftahoe-lafs.git use the new CheckResult getters almost everywhere The remaining get_data() calls are either in web.check_results.json_check_results(), or functioning as repr()s in various unit test failure cases. --- diff --git a/src/allmydata/check_results.py b/src/allmydata/check_results.py index 1517f96a..922b6c15 100644 --- a/src/allmydata/check_results.py +++ b/src/allmydata/check_results.py @@ -196,7 +196,7 @@ class DeepCheckResults(DeepResultsBase): self.objects_unrecoverable += 1 self.all_results[tuple(path)] = r self.all_results_by_storage_index[r.get_storage_index()] = r - self.corrupt_shares.extend(r.get_data()["list-corrupt-shares"]) + self.corrupt_shares.extend(r.get_corrupt_shares()) def get_counters(self): return {"count-objects-checked": self.objects_checked, @@ -234,7 +234,7 @@ class DeepCheckAndRepairResults(DeepResultsBase): self.objects_unhealthy += 1 if not pre_repair.is_recoverable(): self.objects_unrecoverable += 1 - self.corrupt_shares.extend(pre_repair.get_data()["list-corrupt-shares"]) + self.corrupt_shares.extend(pre_repair.get_corrupt_shares()) if r.get_repair_attempted(): self.repairs_attempted += 1 if r.get_repair_successful(): @@ -249,7 +249,7 @@ class DeepCheckAndRepairResults(DeepResultsBase): self.objects_unrecoverable_post_repair += 1 self.all_results[tuple(path)] = r self.all_results_by_storage_index[r.get_storage_index()] = r - self.corrupt_shares_post_repair.extend(post_repair.get_data()["list-corrupt-shares"]) + self.corrupt_shares_post_repair.extend(post_repair.get_corrupt_shares()) def get_counters(self): return {"count-objects-checked": self.objects_checked, diff --git a/src/allmydata/immutable/filenode.py b/src/allmydata/immutable/filenode.py index d8735189..0c9b719a 100644 --- a/src/allmydata/immutable/filenode.py +++ b/src/allmydata/immutable/filenode.py @@ -1,6 +1,5 @@ import binascii -import copy import time now = time.time from zope.interface import implements @@ -126,19 +125,21 @@ class CiphertextFileNode: # clone the cr (check results) to form the basis of the # prr (post-repair results) prr = CheckResults(cr.uri, cr.storage_index) - prr_data = copy.deepcopy(cr.get_data()) verifycap = self._verifycap - servers_responding = set(prr_data['servers-responding']) - sm = prr_data['sharemap'] - assert isinstance(sm, DictOfSets), sm + servers_responding = set(cr.get_servers_responding()) + sm = DictOfSets() + assert isinstance(cr.get_sharemap(), DictOfSets) + for shnum, serverids in cr.get_sharemap().items(): + for serverid in serverids: + sm.add(shnum, serverid) for shnum, servers in ur.get_sharemap().items(): for s in servers: sm.add(shnum, s.get_serverid()) servers_responding.add(s.get_serverid()) servers_responding = sorted(servers_responding) - good_hosts = len(reduce(set.union, sm.itervalues(), set())) + good_hosts = len(reduce(set.union, sm.values(), set())) is_healthy = bool(len(sm) >= verifycap.total_shares) is_recoverable = bool(len(sm) >= verifycap.needed_shares) prr.set_data( @@ -151,10 +152,10 @@ class CiphertextFileNode: servers_responding=list(servers_responding), sharemap=sm, count_wrong_shares=0, # no such thing as wrong, for immutable - list_corrupt_shares=prr_data["list-corrupt-shares"], - 
count_corrupt_shares=prr_data["count-corrupt-shares"], - list_incompatible_shares=prr_data["list-incompatible-shares"], - count_incompatible_shares=prr_data["count-incompatible-shares"], + list_corrupt_shares=cr.get_corrupt_shares(), + count_corrupt_shares=len(cr.get_corrupt_shares()), + list_incompatible_shares=cr.get_incompatible_shares(), + count_incompatible_shares=len(cr.get_incompatible_shares()), ) prr.set_healthy(is_healthy) prr.set_recoverable(is_recoverable) diff --git a/src/allmydata/test/test_checker.py b/src/allmydata/test/test_checker.py index 585a3e43..d378860e 100644 --- a/src/allmydata/test/test_checker.py +++ b/src/allmydata/test/test_checker.py @@ -376,10 +376,10 @@ class BalancingAct(GridTestMixin, unittest.TestCase): def _check_and_repair(_): return self.imm.check_and_repair(Monitor()) def _check_counts(crr, shares_good, good_share_hosts): - p_crr = crr.get_post_repair_results().get_data() + prr = crr.get_post_repair_results() #print self._pretty_shares_chart(self.uri) - self.failUnlessEqual(p_crr['count-shares-good'], shares_good) - self.failUnlessEqual(p_crr['count-good-share-hosts'], + self.failUnlessEqual(prr.get_share_counter_good(), shares_good) + self.failUnlessEqual(prr.get_host_counter_good_shares(), good_share_hosts) """ diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py index 669a6d5a..ac50fc73 100644 --- a/src/allmydata/test/test_deepcheck.py +++ b/src/allmydata/test/test_deepcheck.py @@ -281,30 +281,27 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase): if not incomplete: self.failUnlessEqual(cr.needs_rebalancing(), needs_rebalancing, str((where, cr, cr.get_data()))) - d = cr.get_data() - self.failUnlessEqual(d["count-shares-good"], 10, where) - self.failUnlessEqual(d["count-shares-needed"], 3, where) - self.failUnlessEqual(d["count-shares-expected"], 10, where) + self.failUnlessEqual(cr.get_share_counter_good(), 10, where) + self.failUnlessEqual(cr.get_encoding_needed(), 3, where) + self.failUnlessEqual(cr.get_encoding_expected(), 10, where) if not incomplete: - self.failUnlessEqual(d["count-good-share-hosts"], num_servers, - where) - self.failUnlessEqual(d["count-corrupt-shares"], 0, where) - self.failUnlessEqual(d["list-corrupt-shares"], [], where) + self.failUnlessEqual(cr.get_host_counter_good_shares(), + num_servers, where) + self.failUnlessEqual(cr.get_corrupt_shares(), [], where) if not incomplete: - self.failUnlessEqual(sorted(d["servers-responding"]), + self.failUnlessEqual(sorted(cr.get_servers_responding()), sorted(self.g.get_all_serverids()), where) - self.failUnless("sharemap" in d, str((where, d))) all_serverids = set() - for (shareid, serverids) in d["sharemap"].items(): + for (shareid, serverids) in cr.get_sharemap().items(): all_serverids.update(serverids) self.failUnlessEqual(sorted(all_serverids), sorted(self.g.get_all_serverids()), where) - self.failUnlessEqual(d["count-wrong-shares"], 0, where) - self.failUnlessEqual(d["count-recoverable-versions"], 1, where) - self.failUnlessEqual(d["count-unrecoverable-versions"], 0, where) + self.failUnlessEqual(cr.get_share_counter_wrong(), 0, where) + self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where) + self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where) def check_and_repair_is_healthy(self, cr, n, where, incomplete=False): @@ -1010,9 +1007,8 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase): self.failUnless(ICheckResults.providedBy(cr), (cr, type(cr), where)) self.failUnless(cr.is_healthy(), 
(cr.get_report(), cr.is_healthy(), cr.get_summary(), where)) self.failUnless(cr.is_recoverable(), where) - d = cr.get_data() - self.failUnlessEqual(d["count-recoverable-versions"], 1, where) - self.failUnlessEqual(d["count-unrecoverable-versions"], 0, where) + self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where) + self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where) return cr except Exception, le: le.args = tuple(le.args + (where,)) @@ -1022,31 +1018,28 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase): self.failUnless(ICheckResults.providedBy(cr), where) self.failIf(cr.is_healthy(), where) self.failUnless(cr.is_recoverable(), where) - d = cr.get_data() - self.failUnlessEqual(d["count-recoverable-versions"], 1, where) - self.failUnlessEqual(d["count-unrecoverable-versions"], 0, where) + self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where) + self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where) return cr def check_has_corrupt_shares(self, cr, where): # by "corrupt-shares" we mean the file is still recoverable self.failUnless(ICheckResults.providedBy(cr), where) - d = cr.get_data() self.failIf(cr.is_healthy(), (where, cr)) self.failUnless(cr.is_recoverable(), where) - d = cr.get_data() - self.failUnless(d["count-shares-good"] < 10, where) - self.failUnless(d["count-corrupt-shares"], where) - self.failUnless(d["list-corrupt-shares"], where) + self.failUnless(cr.get_share_counter_good() < 10, where) + self.failUnless(cr.get_corrupt_shares(), where) return cr def check_is_unrecoverable(self, cr, where): self.failUnless(ICheckResults.providedBy(cr), where) - d = cr.get_data() self.failIf(cr.is_healthy(), where) self.failIf(cr.is_recoverable(), where) - self.failUnless(d["count-shares-good"] < d["count-shares-needed"], (d["count-shares-good"], d["count-shares-needed"], where)) - self.failUnlessEqual(d["count-recoverable-versions"], 0, where) - self.failUnlessEqual(d["count-unrecoverable-versions"], 1, where) + self.failUnless(cr.get_share_counter_good() < cr.get_encoding_needed(), + (cr.get_share_counter_good(), cr.get_encoding_needed(), + where)) + self.failUnlessEqual(cr.get_version_counter_recoverable(), 0, where) + self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 1, where) return cr def do_check(self, ignored): diff --git a/src/allmydata/test/test_repairer.py b/src/allmydata/test/test_repairer.py index 531db0d1..60b28e24 100644 --- a/src/allmydata/test/test_repairer.py +++ b/src/allmydata/test/test_repairer.py @@ -128,14 +128,13 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin): touched in a way that matters. 
It doesn't use more than seven times as many reads as it needs.""" self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data())) - data = vr.get_data() - self.failUnless(data['count-shares-good'] == 10, data) - self.failUnless(len(data['sharemap']) == 10, data) - self.failUnless(data['count-shares-needed'] == 3, data) - self.failUnless(data['count-shares-expected'] == 10, data) - self.failUnless(data['count-good-share-hosts'] == 10, data) - self.failUnless(len(data['servers-responding']) == 10, data) - self.failUnless(len(data['list-corrupt-shares']) == 0, data) + self.failUnlessEqual(vr.get_share_counter_good(), 10) + self.failUnlessEqual(len(vr.get_sharemap()), 10) + self.failUnlessEqual(vr.get_encoding_needed(), 3) + self.failUnlessEqual(vr.get_encoding_expected(), 10) + self.failUnlessEqual(vr.get_host_counter_good_shares(), 10) + self.failUnlessEqual(len(vr.get_servers_responding()), 10) + self.failUnlessEqual(len(vr.get_corrupt_shares()), 0) def test_ok_no_corruption(self): self.basedir = "repairer/Verifier/ok_no_corruption" @@ -164,14 +163,13 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin): correctly. It doesn't use more than seven times as many reads as it needs.""" self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data())) - data = vr.get_data() - self.failUnless(data['count-shares-good'] == 9, data) - self.failUnless(len(data['sharemap']) == 9, data) - self.failUnless(data['count-shares-needed'] == 3, data) - self.failUnless(data['count-shares-expected'] == 10, data) - self.failUnless(data['count-good-share-hosts'] == 9, data) - self.failUnless(len(data['servers-responding']) == 9, data) - self.failUnless(len(data['list-corrupt-shares']) == 0, data) + self.failUnlessEqual(vr.get_share_counter_good(), 9) + self.failUnlessEqual(len(vr.get_sharemap()), 9) + self.failUnlessEqual(vr.get_encoding_needed(), 3) + self.failUnlessEqual(vr.get_encoding_expected(), 10) + self.failUnlessEqual(vr.get_host_counter_good_shares(), 9) + self.failUnlessEqual(len(vr.get_servers_responding()), 9) + self.failUnlessEqual(len(vr.get_corrupt_shares()), 0) def test_corrupt_file_verno(self): self.basedir = "repairer/Verifier/corrupt_file_verno" @@ -185,17 +183,14 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin): # ShareVersionIncompatible exception, which should be counted in # list-incompatible-shares, rather than list-corrupt-shares. 
self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data())) - data = vr.get_data() - self.failUnlessEqual(data['count-shares-good'], 9) - self.failUnlessEqual(len(data['sharemap']), 9) - self.failUnlessEqual(data['count-shares-needed'], 3) - self.failUnlessEqual(data['count-shares-expected'], 10) - self.failUnlessEqual(data['count-good-share-hosts'], 9) - self.failUnlessEqual(len(data['servers-responding']), 10) - self.failUnlessEqual(len(data['list-corrupt-shares']), 0) - self.failUnlessEqual(data['count-corrupt-shares'], 0) - self.failUnlessEqual(len(data['list-incompatible-shares']), 1) - self.failUnlessEqual(data['count-incompatible-shares'], 1) + self.failUnlessEqual(vr.get_share_counter_good(), 9) + self.failUnlessEqual(len(vr.get_sharemap()), 9) + self.failUnlessEqual(vr.get_encoding_needed(), 3) + self.failUnlessEqual(vr.get_encoding_expected(), 10) + self.failUnlessEqual(vr.get_host_counter_good_shares(), 9) + self.failUnlessEqual(len(vr.get_servers_responding()), 10) + self.failUnlessEqual(len(vr.get_corrupt_shares()), 0) + self.failUnlessEqual(len(vr.get_incompatible_shares()), 1) def test_corrupt_share_verno(self): self.basedir = "repairer/Verifier/corrupt_share_verno" @@ -207,17 +202,14 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin): # of them), which will be detected by the client as it downloads # those shares. self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data())) - data = vr.get_data() - self.failUnlessEqual(data['count-shares-good'], 9) - self.failUnlessEqual(data['count-shares-needed'], 3) - self.failUnlessEqual(data['count-shares-expected'], 10) - self.failUnlessEqual(data['count-good-share-hosts'], 9) - self.failUnlessEqual(data['count-corrupt-shares'], 1) - self.failUnlessEqual(len(data['list-corrupt-shares']), 1) - self.failUnlessEqual(data['count-incompatible-shares'], 0) - self.failUnlessEqual(len(data['list-incompatible-shares']), 0) - self.failUnlessEqual(len(data['servers-responding']), 10) - self.failUnlessEqual(len(data['sharemap']), 9) + self.failUnlessEqual(vr.get_share_counter_good(), 9) + self.failUnlessEqual(vr.get_encoding_needed(), 3) + self.failUnlessEqual(vr.get_encoding_expected(), 10) + self.failUnlessEqual(vr.get_host_counter_good_shares(), 9) + self.failUnlessEqual(len(vr.get_corrupt_shares()), 1) + self.failUnlessEqual(len(vr.get_incompatible_shares()), 0) + self.failUnlessEqual(len(vr.get_servers_responding()), 10) + self.failUnlessEqual(len(vr.get_sharemap()), 9) def test_corrupt_sharedata_offset(self): self.basedir = "repairer/Verifier/corrupt_sharedata_offset" @@ -724,7 +716,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin, # not respond to the pre-repair filecheck prr = rr.get_post_repair_results() expected = set(self.g.get_all_serverids()) - self.failUnlessEqual(expected, set(prr.get_data()["servers-responding"])) + self.failUnlessEqual(expected, set(prr.get_servers_responding())) d.addCallback(_check) return d diff --git a/src/allmydata/web/check_results.py b/src/allmydata/web/check_results.py index 39524a96..3d381544 100644 --- a/src/allmydata/web/check_results.py +++ b/src/allmydata/web/check_results.py @@ -81,21 +81,20 @@ class ResultsBase: assert ICheckResults(cr) c = self.client sb = c.get_storage_broker() - data = cr.get_data() r = [] def add(name, value): r.append(T.li[name + ": ", value]) add("Report", T.pre["\n".join(self._html(cr.get_report()))]) add("Share Counts", - "need %d-of-%d, have %d" % (data["count-shares-needed"], - data["count-shares-expected"], - 
data["count-shares-good"])) - add("Hosts with good shares", data["count-good-share-hosts"]) + "need %d-of-%d, have %d" % (cr.get_encoding_needed(), + cr.get_encoding_expected(), + cr.get_share_counter_good())) + add("Hosts with good shares", cr.get_host_counter_good_shares()) - if data["list-corrupt-shares"]: + if cr.get_corrupt_shares(): badsharemap = [] - for (serverid, si, shnum) in data["list-corrupt-shares"]: + for (serverid, si, shnum) in cr.get_corrupt_shares(): nickname = sb.get_nickname_for_serverid(serverid) badsharemap.append(T.tr[T.td["sh#%d" % shnum], T.td[T.div(class_="nickname")[nickname], @@ -108,15 +107,15 @@ class ResultsBase: else: add("Corrupt shares", "none") - add("Wrong Shares", data["count-wrong-shares"]) + add("Wrong Shares", cr.get_share_counter_wrong()) sharemap = [] servers = {} # FIXME: The two tables below contain nickname-and-nodeid table column markup which is duplicated with each other, introducer.xhtml, and deep-check-results.xhtml. All of these (and any other presentations of nickname-and-nodeid) should be combined. - for shareid in sorted(data["sharemap"].keys()): - serverids = data["sharemap"][shareid] + for shareid in sorted(cr.get_sharemap().keys()): + serverids = cr.get_sharemap()[shareid] for i,serverid in enumerate(serverids): if serverid not in servers: servers[serverid] = [] @@ -134,8 +133,8 @@ class ResultsBase: sharemap]) - add("Recoverable Versions", data["count-recoverable-versions"]) - add("Unrecoverable Versions", data["count-unrecoverable-versions"]) + add("Recoverable Versions", cr.get_version_counter_recoverable()) + add("Unrecoverable Versions", cr.get_version_counter_unrecoverable()) # this table is sorted by permuted order sb = c.get_storage_broker()