CheckResults: replace get_data() with as_dict(), use getters in web status
author     Brian Warner <warner@lothar.com>
           Fri, 25 May 2012 07:14:08 +0000 (00:14 -0700)
committer  Brian Warner <warner@lothar.com>
           Sat, 2 Jun 2012 18:39:10 +0000 (11:39 -0700)
src/allmydata/check_results.py
src/allmydata/test/test_deepcheck.py
src/allmydata/test/test_repairer.py
src/allmydata/test/test_upload.py
src/allmydata/web/check_results.py
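The net effect, as a minimal caller-side sketch (cr stands for any CheckResults instance; the name is chosen here for illustration): callers that reached into the raw counts dictionary now use the dedicated getters, and the dictionary itself stays available for debugging output under the clearer name as_dict().

    # Before this commit: callers pulled counts out of the raw dictionary.
    needed = cr.get_data()["count-shares-needed"]
    expected = cr.get_data()["count-shares-expected"]

    # After this commit: use the dedicated getters; the raw dictionary is
    # still reachable for failure/debug messages, but via as_dict().
    needed = cr.get_encoding_needed()
    expected = cr.get_encoding_expected()
    failure_detail = str(cr.as_dict())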

diff --git a/src/allmydata/check_results.py b/src/allmydata/check_results.py
index 922b6c15fddffb33d84dbf0feb200c2ae6390de1..cd21d0cccaedb1a4cfa458019e0c26d6c828f455 100644
--- a/src/allmydata/check_results.py
+++ b/src/allmydata/check_results.py
@@ -109,7 +109,7 @@ class CheckResults:
     def get_sharemap(self):
         return self._data["sharemap"]
 
-    def get_data(self):
+    def as_dict(self):
         return self._data
 
     def get_summary(self):
diff --git a/src/allmydata/test/test_deepcheck.py b/src/allmydata/test/test_deepcheck.py
index ac50fc735b229a3394b3721344909d13bfad99de..86415b29a08afc65af9fe753b0567626fcc9c144 100644
--- a/src/allmydata/test/test_deepcheck.py
+++ b/src/allmydata/test/test_deepcheck.py
@@ -280,7 +280,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
         needs_rebalancing = bool( num_servers < 10 )
         if not incomplete:
             self.failUnlessEqual(cr.needs_rebalancing(), needs_rebalancing,
-                                 str((where, cr, cr.get_data())))
+                                 str((where, cr, cr.as_dict())))
         self.failUnlessEqual(cr.get_share_counter_good(), 10, where)
         self.failUnlessEqual(cr.get_encoding_needed(), 3, where)
         self.failUnlessEqual(cr.get_encoding_expected(), 10, where)
diff --git a/src/allmydata/test/test_repairer.py b/src/allmydata/test/test_repairer.py
index 60b28e2482b59737b644d37dc0e3649b5fe2c34b..0e007423002561dbe26167827798fe10f6d993e5 100644
--- a/src/allmydata/test/test_repairer.py
+++ b/src/allmydata/test/test_repairer.py
@@ -117,7 +117,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
                 judgement(vr)
             except unittest.FailTest, e:
                 # FailTest just uses e.args[0] == str
-                new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.get_data())
+                new_arg = str(e.args[0]) + "\nvr.data is: " + str(vr.as_dict())
                 e.args = (new_arg,)
                 raise
         d.addCallback(_check)
@@ -127,7 +127,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         """ Verify says the file is healthy when none of the shares have been
         touched in a way that matters. It doesn't use more than seven times
         as many reads as it needs."""
-        self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failUnless(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 10)
         self.failUnlessEqual(len(vr.get_sharemap()), 10)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -162,7 +162,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         giving you the share data. Test that verifier handles these answers
         correctly. It doesn't use more than seven times as many reads as it
         needs."""
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(len(vr.get_sharemap()), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -182,7 +182,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         # offsets) to something larger than 2 will trigger a
         # ShareVersionIncompatible exception, which should be counted in
         # list-incompatible-shares, rather than list-corrupt-shares.
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(len(vr.get_sharemap()), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
@@ -201,7 +201,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
         # corruption of fields that the server does not check (which is most
         # of them), which will be detected by the client as it downloads
         # those shares.
-        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.get_data()))
+        self.failIf(vr.is_healthy(), (vr, vr.is_healthy(), vr.as_dict()))
         self.failUnlessEqual(vr.get_share_counter_good(), 9)
         self.failUnlessEqual(vr.get_encoding_needed(), 3)
         self.failUnlessEqual(vr.get_encoding_expected(), 10)
@@ -490,7 +490,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
             self.failIfBigger(delta_reads, MAX_DELTA_READS)
             self.failIfBigger(delta_allocates, (DELTA_WRITES_PER_SHARE * 7))
             self.failIf(pre.is_healthy())
-            self.failUnless(post.is_healthy(), post.get_data())
+            self.failUnless(post.is_healthy(), post.as_dict())
 
             # Make sure we really have 10 shares.
             shares = self.find_uri_shares(self.uri)
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index 6a83f580d9c614dbbc78de1a31f2da178ed2688c..49df769f7c85d84f6fd7197aaa0c479ef49854a4 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -885,9 +885,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         m = monitor.Monitor()
         d.addCallback(lambda fn: fn.check(m))
         def _check(cr):
-            data = cr.get_data()
-            self.failUnlessEqual(data["count-shares-needed"], 7)
-            self.failUnlessEqual(data["count-shares-expected"], 12)
+            self.failUnlessEqual(cr.get_encoding_needed(), 7)
+            self.failUnlessEqual(cr.get_encoding_expected(), 12)
         d.addCallback(_check)
         return d
 
diff --git a/src/allmydata/web/check_results.py b/src/allmydata/web/check_results.py
index 3d381544b640bf7b9be15cddf4f7021c464c0071..a0ebf3d30448375ddc2eeca6b8147452cce8336f 100644
--- a/src/allmydata/web/check_results.py
+++ b/src/allmydata/web/check_results.py
@@ -8,30 +8,27 @@ from allmydata.web.operations import ReloadMixin
 from allmydata.interfaces import ICheckAndRepairResults, ICheckResults
 from allmydata.util import base32, idlib
 
-def json_check_counts(d):
-    r = {}
-    r["count-shares-good"] = d["count-shares-good"]
-    r["count-shares-needed"] = d["count-shares-needed"]
-    r["count-shares-expected"] = d["count-shares-expected"]
-    r["count-good-share-hosts"] = d["count-good-share-hosts"]
-    r["count-corrupt-shares"] = d["count-corrupt-shares"]
-    r["list-corrupt-shares"] = [ (idlib.nodeid_b2a(serverid),
-                                  base32.b2a(si), shnum)
-                                 for (serverid, si, shnum)
-                                 in d["list-corrupt-shares"] ]
-    r["servers-responding"] = [idlib.nodeid_b2a(serverid)
-                               for serverid in d["servers-responding"]]
-    sharemap = {}
-    for (shareid, serverids) in d["sharemap"].items():
-        sharemap[shareid] = [idlib.nodeid_b2a(serverid)
-                             for serverid in serverids]
-    r["sharemap"] = sharemap
-
-    r["count-wrong-shares"] = d["count-wrong-shares"]
-    r["count-recoverable-versions"] = d["count-recoverable-versions"]
-    r["count-unrecoverable-versions"] = d["count-unrecoverable-versions"]
-
-    return r
+def json_check_counts(r):
+    d = {"count-shares-good": r.get_share_counter_good(),
+         "count-shares-needed": r.get_encoding_needed(),
+         "count-shares-expected": r.get_encoding_expected(),
+         "count-good-share-hosts": r.get_host_counter_good_shares(),
+         "count-corrupt-shares": len(r.get_corrupt_shares()),
+         "list-corrupt-shares": [ (idlib.nodeid_b2a(serverid),
+                                   base32.b2a(si), shnum)
+                                  for (serverid, si, shnum)
+                                  in r.get_corrupt_shares() ],
+         "servers-responding": [idlib.nodeid_b2a(serverid)
+                                for serverid in r.get_servers_responding()],
+         "sharemap": dict([(shareid, [idlib.nodeid_b2a(serverid)
+                                      for serverid in serverids])
+                           for (shareid, serverids)
+                           in r.get_sharemap().items()]),
+         "count-wrong-shares": r.get_share_counter_wrong(),
+         "count-recoverable-versions": r.get_version_counter_recoverable(),
+         "count-unrecoverable-versions": r.get_version_counter_unrecoverable(),
+         }
+    return d
 
 def json_check_results(r):
     if r is None:
@@ -43,7 +40,7 @@ def json_check_results(r):
     data = {}
     data["storage-index"] = r.get_storage_index_string()
     data["summary"] = r.get_summary()
-    data["results"] = json_check_counts(r.get_data())
+    data["results"] = json_check_counts(r)
     data["results"]["needs-rebalancing"] = r.needs_rebalancing()
     data["results"]["healthy"] = r.is_healthy()
     data["results"]["recoverable"] = r.is_recoverable()