--- /dev/null
+
+import re
+from twisted.internet import defer
+from nevow.testutil import FakeRequest
+from nevow import inevow, context
+
class WebRenderingMixin:
    """Helpers for rendering nevow Pages in tests, without an HTTP server.

    Two rendering strategies are offered:

    * render1(): drives page.renderHTTP(ctx), which exercises any
      renderHTTP override (e.g. want_json / output=json handling) as well
      as the docFactory and render_*/data_* methods. Building the context
      also lets the caller pass args= (needed for pages that look at
      query arguments such as return_to=). Always returns a Deferred.

    * render2(): drives page.renderSynchronously(ctx), which exercises
      only the docFactory and render_*/data_* methods, and returns a
      plain string. Use this for the simple cases; use render1() at
      least twice per page (once for json, once for html).
    """

    def make_context(self, req):
        """Wrap a FakeRequest in a nevow WovenContext suitable for rendering."""
        request_ctx = context.RequestContext(tag=req)
        request_ctx.remember(req, inevow.IRequest)
        request_ctx.remember(None, inevow.IData)
        return context.WovenContext(parent=request_ctx, precompile=False)

    def render1(self, page, **kwargs):
        """Render via page.renderHTTP(); returns a Deferred firing with the body.

        kwargs are passed through to FakeRequest (e.g. args={...}).
        """
        fake_req = FakeRequest(**kwargs)
        rendering = defer.maybeDeferred(page.renderHTTP,
                                        self.make_context(fake_req))
        def _collect(result):
            # renderHTTP may return the body directly (a string) in
            # addition to whatever was written to the request; combine
            # the two so the caller sees everything.
            if isinstance(result, str):
                return result + fake_req.v
            return fake_req.v
        rendering.addCallback(_collect)
        return rendering

    def render2(self, page, **kwargs):
        """Render synchronously via the docFactory; returns a string.

        If one of the render_* methods returns a Deferred, this raises.
        (page.renderString is the Deferred-returning equivalent.)
        """
        fake_req = FakeRequest(**kwargs)
        return page.renderSynchronously(self.make_context(fake_req))

    def failUnlessIn(self, substring, s):
        # On failure, include the whole searched string in the report.
        self.failUnless(substring in s, s)

    def remove_tags(self, s):
        """Strip markup tags and collapse whitespace, for text-level matching."""
        without_tags = re.sub(r'<[^>]*>', ' ', s)
        return re.sub(r'\s+', ' ', without_tags)
+
--- /dev/null
+
+import simplejson
+from twisted.trial import unittest
+from allmydata import check_results, uri
+from allmydata.web import check_results as web_check_results
+from common_web import WebRenderingMixin
+
class FakeClient:
    """Minimal stand-in for a client, providing only the peer-lookup
    methods that the check-results renderers call."""

    def get_nickname_for_peerid(self, peerid):
        """Return a canned nickname for the three well-known peerids."""
        nicknames = {"\x00"*20: "peer-0",
                     "\xff"*20: "peer-f",
                     "\x11"*20: "peer-11"}
        return nicknames.get(peerid, "peer-unknown")

    def get_permuted_peers(self, service, key):
        """Return a fixed three-peer list; both arguments are ignored."""
        return [(peerid, None)
                for peerid in ("\x00"*20, "\x11"*20, "\xff"*20)]
+
class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
    """Render the check-results web pages directly (no HTTP server) and
    inspect both their tag-stripped HTML and their output=json form,
    against hand-built CheckResults objects."""

    def render_json(self, page):
        # Request the machine-readable form of the page; returns a
        # Deferred that fires with the JSON string.
        d = self.render1(page, args={"output": ["json"]})
        return d

    def test_literal(self):
        """LiteralCheckResults page: HTML body, return_to= link, and JSON."""
        c = FakeClient()
        lcr = web_check_results.LiteralCheckResults(c)

        d = self.render1(lcr)
        def _check(html):
            s = self.remove_tags(html)
            self.failUnlessIn("Literal files are always healthy", s)
        d.addCallback(_check)
        d.addCallback(lambda ignored:
                      self.render1(lcr, args={"return_to": ["FOOURL"]}))
        def _check_return_to(html):
            s = self.remove_tags(html)
            self.failUnlessIn("Literal files are always healthy", s)
            # the return_to= argument must surface as a link in the raw
            # (untagstripped) HTML
            self.failUnlessIn('<a href="FOOURL">Return to parent directory</a>',
                              html)
        d.addCallback(_check_return_to)
        d.addCallback(lambda ignored: self.render_json(lcr))
        def _check_json(json):
            j = simplejson.loads(json)
            # literal files carry no storage index
            self.failUnlessEqual(j["storage-index"], "")
            self.failUnlessEqual(j["results"]["healthy"], True)
        d.addCallback(_check_json)
        return d

    def test_check(self):
        """CheckResults page through healthy, unhealthy, and unrecoverable
        states, plus the return_to= link and the full JSON structure."""
        c = FakeClient()
        serverid_1 = "\x00"*20
        serverid_f = "\xff"*20
        u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)
        cr = check_results.CheckResults(u, u.storage_index)
        cr.set_healthy(True)
        cr.set_needs_rebalancing(False)
        cr.set_summary("groovy")
        data = { "count-shares-needed": 3,
                 "count-shares-expected": 9,
                 "count-shares-good": 10,
                 "count-good-share-hosts": 11,
                 "list-corrupt-shares": [],
                 "count-wrong-shares": 0,
                 "sharemap": {"shareid1": [serverid_1, serverid_f]},
                 "count-recoverable-versions": 1,
                 "count-unrecoverable-versions": 0,
                 "servers-responding": [],
                 }
        cr.set_data(data)

        w = web_check_results.CheckResults(c, cr)
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Healthy : groovy", s)
        self.failUnlessIn("Share Counts: need 3-of-9, have 10", s)
        self.failUnlessIn("Hosts with good shares: 11", s)
        self.failUnlessIn("Corrupt shares: none", s)
        self.failUnlessIn("Wrong Shares: 0", s)
        self.failUnlessIn("Recoverable Versions: 1", s)
        self.failUnlessIn("Unrecoverable Versions: 0", s)

        # now flip the same results object to unhealthy-but-recoverable
        cr.set_healthy(False)
        cr.set_recoverable(True)
        cr.set_summary("ungroovy")
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Healthy! : ungroovy", s)

        # and to unrecoverable, with one corrupt share reported
        cr.set_healthy(False)
        cr.set_recoverable(False)
        cr.set_summary("rather dead")
        data["list-corrupt-shares"] = [(serverid_1, u.storage_index, 2)]
        cr.set_data(data)
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)
        self.failUnlessIn("Corrupt shares: sh#2 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa (peer-0)", s)

        # re-rendering the same page must give the same answer
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)

        # return_to= must appear as a link in the raw HTML
        html = self.render2(w, args={"return_to": ["FOOURL"]})
        self.failUnlessIn('<a href="FOOURL">Return to parent directory</a>',
                          html)

        d = self.render_json(w)
        def _check_json(jdata):
            j = simplejson.loads(jdata)
            self.failUnlessEqual(j["summary"], "rather dead")
            self.failUnlessEqual(j["storage-index"],
                                 "2k6avpjga3dho3zsjo6nnkt7n4")
            # note: the binary serverids from data[] appear here in their
            # base32-encoded form
            expected = {'needs-rebalancing': False,
                        'count-shares-expected': 9,
                        'healthy': False,
                        'count-unrecoverable-versions': 0,
                        'count-shares-needed': 3,
                        'sharemap': {"shareid1":
                                     ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                                      "77777777777777777777777777777777"]},
                        'count-recoverable-versions': 1,
                        'list-corrupt-shares':
                        [["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                          "2k6avpjga3dho3zsjo6nnkt7n4", 2]],
                        'count-good-share-hosts': 11,
                        'count-wrong-shares': 0,
                        'count-shares-good': 10,
                        'count-corrupt-shares': 0,
                        'servers-responding': [],
                        'recoverable': False,
                        }
            self.failUnlessEqual(j["results"], expected)
        d.addCallback(_check_json)
        d.addCallback(lambda ignored: self.render1(w))
        def _check(html):
            s = self.remove_tags(html)
            self.failUnlessIn("File Check Results for SI=2k6avp", s)
            self.failUnlessIn("Not Recoverable! : rather dead", s)
        d.addCallback(_check)
        return d


    def test_check_and_repair(self):
        """CheckAndRepairResults page through no-repair, successful-repair,
        and two flavors of unsuccessful-repair, plus the JSON form."""
        c = FakeClient()
        serverid_1 = "\x00"*20
        serverid_f = "\xff"*20
        u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)

        # pre-repair: unhealthy but recoverable
        pre_cr = check_results.CheckResults(u, u.storage_index)
        pre_cr.set_healthy(False)
        pre_cr.set_recoverable(True)
        pre_cr.set_needs_rebalancing(False)
        pre_cr.set_summary("illing")
        data = { "count-shares-needed": 3,
                 "count-shares-expected": 10,
                 "count-shares-good": 6,
                 "count-good-share-hosts": 7,
                 "list-corrupt-shares": [],
                 "count-wrong-shares": 0,
                 "sharemap": {"shareid1": [serverid_1, serverid_f]},
                 "count-recoverable-versions": 1,
                 "count-unrecoverable-versions": 0,
                 "servers-responding": [],
                 }
        pre_cr.set_data(data)

        # post-repair: healthy
        post_cr = check_results.CheckResults(u, u.storage_index)
        post_cr.set_healthy(True)
        post_cr.set_recoverable(True)
        post_cr.set_needs_rebalancing(False)
        post_cr.set_summary("groovy")
        data = { "count-shares-needed": 3,
                 "count-shares-expected": 10,
                 "count-shares-good": 10,
                 "count-good-share-hosts": 11,
                 "list-corrupt-shares": [],
                 "count-wrong-shares": 0,
                 "sharemap": {"shareid1": [serverid_1, serverid_f]},
                 "count-recoverable-versions": 1,
                 "count-unrecoverable-versions": 0,
                 "servers-responding": [],
                 }
        post_cr.set_data(data)

        crr = check_results.CheckAndRepairResults(u.storage_index)
        crr.pre_repair_results = pre_cr
        crr.post_repair_results = post_cr
        crr.repair_attempted = False

        # no repair attempted
        w = web_check_results.CheckAndRepairResults(c, crr)
        html = self.render2(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Healthy : groovy", s)
        self.failUnlessIn("No repair necessary", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)
        self.failUnlessIn("Share Counts: need 3-of-10, have 10", s)

        # repair attempted and successful
        crr.repair_attempted = True
        crr.repair_successful = True
        html = self.render2(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Healthy : groovy", s)
        self.failUnlessIn("Repair successful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)

        # repair failed, but the file is still recoverable
        crr.repair_attempted = True
        crr.repair_successful = False
        post_cr.set_healthy(False)
        post_cr.set_summary("better")
        html = self.render2(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Not Healthy! : better", s)
        self.failUnlessIn("Repair unsuccessful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)

        # repair failed and the file is no longer recoverable
        crr.repair_attempted = True
        crr.repair_successful = False
        post_cr.set_healthy(False)
        post_cr.set_recoverable(False)
        post_cr.set_summary("worse")
        html = self.render2(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Not Recoverable! : worse", s)
        self.failUnlessIn("Repair unsuccessful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)

        d = self.render_json(w)
        def _got_json(data):
            j = simplejson.loads(data)
            self.failUnlessEqual(j["repair-attempted"], True)
            self.failUnlessEqual(j["storage-index"],
                                 "2k6avpjga3dho3zsjo6nnkt7n4")
            self.failUnlessEqual(j["pre-repair-results"]["summary"], "illing")
            self.failUnlessEqual(j["post-repair-results"]["summary"], "worse")
        d.addCallback(_got_json)

        # a page built with no results (e.g. for a literal file) must
        # still render sensible JSON
        w2 = web_check_results.CheckAndRepairResults(c, None)
        d.addCallback(lambda ignored: self.render_json(w2))
        def _got_lit_results(data):
            j = simplejson.loads(data)
            self.failUnlessEqual(j["repair-attempted"], False)
            self.failUnlessEqual(j["storage-index"], "")
        d.addCallback(_got_lit_results)
        return d
+