From c455d524535dbd573fb5604c880058f6b6964080 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Thu, 23 Oct 2008 16:00:31 -0700 Subject: [PATCH] deep-check: add webapi links to detailed per-file/dir results --- docs/webapi.txt | 15 +- src/allmydata/checker_results.py | 6 + src/allmydata/interfaces.py | 5 + src/allmydata/test/common.py | 24 ++++ src/allmydata/test/test_web.py | 3 + src/allmydata/web/checker-results.xhtml | 5 +- src/allmydata/web/checker_results.py | 157 +++++++++++++++------ src/allmydata/web/deep-check-results.xhtml | 2 + 8 files changed, 170 insertions(+), 47 deletions(-) diff --git a/docs/webapi.txt b/docs/webapi.txt index 14477399..b93babfa 100644 --- a/docs/webapi.txt +++ b/docs/webapi.txt @@ -214,6 +214,9 @@ GET /operations/$HANDLE?output=JSON (same) web browser to reload the status page about 60 seconds later. This tag will be removed once the operation has completed. + There may be more status information available under + /operations/$HANDLE/$ETC : i.e., the handle forms the root of a URL space. + POST /operations/$HANDLE?t=cancel This terminates the operation, and returns an HTML page explaining what was @@ -814,10 +817,14 @@ POST $URL?t=start-deep-check (must add &ophandle=XYZ) Since this operation can take a long time (perhaps a second per object), the ophandle= argument is required (see "Slow Operations, Progress, and Cancelling" above). The response to this POST will be a redirect to the - corresponding /operations/$HANDLE?t=status page (with output=HTML or - output=JSON to match the output= argument given to the POST). The - deep-check operation will continue to run in the background, and the - /operations page should be used to find out when the operation is done. + corresponding /operations/$HANDLE page (with output=HTML or output=JSON to + match the output= argument given to the POST). The deep-check operation + will continue to run in the background, and the /operations page should be + used to find out when the operation is done. + + Detailed checker results for non-healthy files and directories will be + available under /operations/$HANDLE/$STORAGEINDEX, and the HTML status will + contain links to these detailed results. 
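  As an illustration of the flow described above, here is a minimal
  client-side sketch. It assumes a node whose webapi listens on
  127.0.0.1:3456; the dircap, ophandle, and storage-index strings below are
  placeholders, and the JSON keys it reads ("finished", "summary") are the
  ones documented elsewhere in this file.

    import time, urllib, urllib2, simplejson

    BASE = "http://127.0.0.1:3456"     # assumed webapi address of a local node
    DIRCAP = "URI:DIR2:xxxx:yyyy"      # placeholder: a real directory cap
    HANDLE = "example-handle"          # arbitrary client-chosen ophandle

    # Start the deep-check. The POST body is empty; all arguments travel in
    # the query string. The response is a redirect to /operations/$HANDLE.
    start = "%s/uri/%s?t=start-deep-check&ophandle=%s" % (
        BASE, urllib.quote(DIRCAP), HANDLE)
    urllib2.urlopen(start, data="").read()

    # Poll the operation handle until the background deep-check finishes.
    while True:
        status = simplejson.loads(urllib2.urlopen(
            "%s/operations/%s?t=status&output=JSON" % (BASE, HANDLE)).read())
        if status["finished"]:
            break
        time.sleep(10)

    # Fetch the new per-object detail page for one storage index. The base32
    # SI string would normally be taken from the links on the HTML status
    # page or from the JSON results.
    si = "xxxxxxxxxxxxxxxxxxxxxxxxxx"
    details = simplejson.loads(urllib2.urlopen(
        "%s/operations/%s/%s?output=JSON" % (BASE, HANDLE, si)).read())
    print details["summary"]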
The HTML /operations/$HANDLE page for incomplete operations will contain a meta-refresh tag, set to 60 seconds, so that a browser which uses diff --git a/src/allmydata/checker_results.py b/src/allmydata/checker_results.py index dd4e50c7..46da9a24 100644 --- a/src/allmydata/checker_results.py +++ b/src/allmydata/checker_results.py @@ -91,6 +91,7 @@ class DeepResultsBase: self.objects_unhealthy = 0 self.corrupt_shares = [] self.all_results = {} + self.all_results_by_storage_index = {} self.stats = {} def update_stats(self, new_stats): @@ -105,6 +106,9 @@ class DeepResultsBase: def get_all_results(self): return self.all_results + def get_results_for_storage_index(self, storage_index): + return self.all_results_by_storage_index[storage_index] + def get_stats(self): return self.stats @@ -123,6 +127,7 @@ class DeepCheckResults(DeepResultsBase): else: self.objects_unhealthy += 1 self.all_results[tuple(path)] = r + self.all_results_by_storage_index[r.get_storage_index()] = r self.corrupt_shares.extend(r.get_data()["list-corrupt-shares"]) def get_counters(self): @@ -171,6 +176,7 @@ class DeepCheckAndRepairResults(DeepResultsBase): else: self.objects_unhealthy_post_repair += 1 self.all_results[tuple(path)] = r + self.all_results_by_storage_index[r.get_storage_index()] = r self.corrupt_shares_post_repair.extend(post_repair.get_data()["list-corrupt-shares"]) def get_counters(self): diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py index 9598f8ec..dca8ea3b 100644 --- a/src/allmydata/interfaces.py +++ b/src/allmydata/interfaces.py @@ -1649,6 +1649,11 @@ class IDeepCheckResults(Interface): be slash-joined) to an ICheckerResults instance, one for each object that was checked.""" + def get_results_for_storage_index(storage_index): + """Retrieve the ICheckerResults instance for the given (binary) + storage index.
Raises KeyError if there are no results for that + storage index.""" + def get_stats(): """Return a dictionary with the same keys as IDirectoryNode.deep_stats().""" diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py index 0a74e8cc..b3659407 100644 --- a/src/allmydata/test/common.py +++ b/src/allmydata/test/common.py @@ -50,13 +50,26 @@ class FakeCHKFileNode: r = CheckerResults(self.storage_index) is_bad = self.bad_shares.get(self.storage_index, None) data = {} + data["count-shares-needed"] = 3 + data["count-shares-expected"] = 10 + data["count-good-share-hosts"] = 10 + data["count-wrong-shares"] = 0 + nodeid = "\x00"*20 + data["list-corrupt-shares"] = [] + data["sharemap"] = {1: [nodeid]} + data["count-recoverable-versions"] = 1 + data["count-unrecoverable-versions"] = 0 if is_bad: r.set_healthy(False) + data["count-shares-good"] = 9 + data["list-corrupt-shares"] = [(nodeid, self.storage_index, 0)] r.problems = failure.Failure(CorruptShareError(is_bad)) else: r.set_healthy(True) + data["count-shares-good"] = 10 r.problems = [] r.set_data(data) + r.set_needs_rebalancing(False) return defer.succeed(r) def check_and_repair(self, monitor, verify=False): d = self.check(verify) @@ -153,16 +166,27 @@ class FakeMutableFileNode: r = CheckerResults(self.storage_index) is_bad = self.bad_shares.get(self.storage_index, None) data = {} + data["count-shares-needed"] = 3 + data["count-shares-expected"] = 10 + data["count-good-share-hosts"] = 10 + data["count-wrong-shares"] = 0 data["list-corrupt-shares"] = [] + nodeid = "\x00"*20 + data["sharemap"] = {"seq1-abcd-sh0": [nodeid]} + data["count-recoverable-versions"] = 1 + data["count-unrecoverable-versions"] = 0 if is_bad: r.set_healthy(False) + data["count-shares-good"] = 9 r.problems = failure.Failure(CorruptShareError("peerid", 0, # shnum is_bad)) else: r.set_healthy(True) + data["count-shares-good"] = 10 r.problems = [] r.set_data(data) + r.set_needs_rebalancing(False) return defer.succeed(r) def check_and_repair(self, monitor, verify=False): diff --git a/src/allmydata/test/test_web.py b/src/allmydata/test/test_web.py index 0d9ac956..54b3b463 100644 --- a/src/allmydata/test/test_web.py +++ b/src/allmydata/test/test_web.py @@ -48,6 +48,9 @@ class FakeClient(service.MultiService): def connected_to_introducer(self): return False + def get_nickname_for_peerid(self, peerid): + return u"John Doe" + def create_node_from_uri(self, auri): u = uri.from_string(auri) if (INewDirectoryURI.providedBy(u) diff --git a/src/allmydata/web/checker-results.xhtml b/src/allmydata/web/checker-results.xhtml index c6eb79e9..a27df0e7 100644 --- a/src/allmydata/web/checker-results.xhtml +++ b/src/allmydata/web/checker-results.xhtml @@ -10,7 +10,10 @@

File Check Results for SI=

-
+
+ + +
diff --git a/src/allmydata/web/checker_results.py b/src/allmydata/web/checker_results.py index 5f2eb65c..4c7030c4 100644 --- a/src/allmydata/web/checker_results.py +++ b/src/allmydata/web/checker_results.py @@ -2,16 +2,71 @@ import time import simplejson from nevow import rend, inevow, tags as T -from twisted.web import html -from allmydata.web.common import getxmlfile, get_arg, IClient +from twisted.web import http, html +from allmydata.web.common import getxmlfile, get_arg, get_root, \ + IClient, WebError from allmydata.web.operations import ReloadMixin from allmydata.interfaces import ICheckAndRepairResults, ICheckerResults from allmydata.util import base32, idlib class ResultsBase: - def _render_results(self, cr): + def _render_results(self, ctx, cr): assert ICheckerResults(cr) - return T.pre["\n".join(self._html(cr.get_report()))] # TODO: more + c = IClient(ctx) + data = cr.get_data() + r = [] + def add(name, value): + r.append(T.li[name + ": ", value]) + + add("Report", T.pre["\n".join(self._html(cr.get_report()))]) + add("Share Counts", + "need %d-of-%d, have %d" % (data["count-shares-needed"], + data["count-shares-expected"], + data["count-shares-good"])) + add("Hosts with good shares", data["count-good-share-hosts"]) + + if data["list-corrupt-shares"]: + badsharemap = [] + for (serverid, si, shnum) in data["list-corrupt-shares"]: + nickname = c.get_nickname_for_peerid(serverid) + badsharemap.append(T.tr[T.td["sh#%d" % shnum], + T.td[T.tt[base32.b2a(serverid)], + " (", nickname, ")"], + ]) + add("Corrupt shares", T.table(border="1")[badsharemap]) + else: + add("Corrupt shares", "none") + + add("Wrong Shares", data["count-wrong-shares"]) + + sharemap = [] + servers = {} + for shareid in sorted(data["sharemap"].keys()): + serverids = data["sharemap"][shareid] + for i,serverid in enumerate(serverids): + servers[serverid] = servers.get(serverid,0) + 1 + shareid_s = "" + if i == 0: + shareid_s = shareid + nickname = c.get_nickname_for_peerid(serverid) + sharemap.append(T.tr[T.td[shareid_s], + T.td[T.tt[base32.b2a(serverid)], + " (", nickname, ")"], + ]) + add("Good Shares", T.table(border="1")[sharemap]) + + add("Recoverable Versions", data["count-recoverable-versions"]) + add("Unrecoverable Versions", data["count-unrecoverable-versions"]) + + servermap = [] + for serverid in sorted(servers.keys()): + nickname = c.get_nickname_for_peerid(serverid) + servermap.append(T.tr[T.td[T.tt[base32.b2a(serverid)], + " (", nickname, ")"], + T.td["*" * servers[serverid]] ]) + add("Share Balancing", T.table(border="1")[servermap]) + + return T.ul[r] def _json_check_and_repair_results(self, r): data = {} @@ -69,6 +124,17 @@ class ResultsBase: return True return False + def _render_si_link(self, ctx, storage_index): + si_s = base32.b2a(storage_index) + root = get_root(ctx) + req = inevow.IRequest(ctx) + ophandle = req.prepath[-1] + target = "%s/operations/%s/%s" % (get_root(ctx), ophandle, si_s) + output = get_arg(ctx, "output") + if output: + target = target + "?output=%s" % output + return T.a(href=target)[si_s] + class LiteralCheckerResults(rend.Page, ResultsBase): docFactory = getxmlfile("literal-checker-results.xhtml") @@ -84,65 +150,64 @@ class LiteralCheckerResults(rend.Page, ResultsBase): } return simplejson.dumps(data, indent=1) + "\n" -class CheckerResults(rend.Page, ResultsBase): - docFactory = getxmlfile("checker-results.xhtml") - - def __init__(self, results): - self.r = ICheckerResults(results) +class CheckerBase: def renderHTTP(self, ctx): if self.want_json(ctx): return self.json(ctx) 
return rend.Page.renderHTTP(self, ctx) + def render_storage_index(self, ctx, data): + return self.r.get_storage_index_string() + + def render_return(self, ctx, data): + req = inevow.IRequest(ctx) + return_to = get_arg(req, "return_to", None) + if return_to: + return T.div[T.a(href=return_to)["Return to parent directory"]] + return "" + +class CheckerResults(CheckerBase, rend.Page, ResultsBase): + docFactory = getxmlfile("checker-results.xhtml") + + def __init__(self, results): + self.r = ICheckerResults(results) + def json(self, ctx): inevow.IRequest(ctx).setHeader("content-type", "text/plain") data = self._json_check_results(self.r) return simplejson.dumps(data, indent=1) + "\n" - def render_storage_index(self, ctx, data): - return self.r.get_storage_index_string() - def render_healthy(self, ctx, data): if self.r.is_healthy(): return ctx.tag["Healthy!"] - return ctx.tag["Not Healthy!:", self._html(self.r.get_summary())] + return ctx.tag["Not Healthy!: ", self._html(self.r.get_summary())] + + def render_rebalance(self, ctx, data): + if self.r.needs_rebalancing(): + return ctx.tag["(needs rebalancing)"] + return ctx.tag["(does not need rebalancing)"] def render_results(self, ctx, data): - cr = self._render_results(self.r) + cr = self._render_results(ctx, self.r) return ctx.tag[cr] - def render_return(self, ctx, data): - req = inevow.IRequest(ctx) - return_to = get_arg(req, "return_to", None) - if return_to: - return T.div[T.a(href=return_to)["Return to parent directory"]] - return "" - -class CheckAndRepairResults(rend.Page, ResultsBase): +class CheckAndRepairResults(CheckerBase, rend.Page, ResultsBase): docFactory = getxmlfile("check-and-repair-results.xhtml") def __init__(self, results): self.r = ICheckAndRepairResults(results) - def renderHTTP(self, ctx): - if self.want_json(ctx): - return self.json(ctx) - return rend.Page.renderHTTP(self, ctx) - def json(self, ctx): inevow.IRequest(ctx).setHeader("content-type", "text/plain") data = self._json_check_and_repair_results(self.r) return simplejson.dumps(data, indent=1) + "\n" - def render_storage_index(self, ctx, data): - return self.r.get_storage_index_string() - def render_healthy(self, ctx, data): cr = self.r.get_post_repair_results() if cr.is_healthy(): return ctx.tag["Healthy!"] - return ctx.tag["Not Healthy!:", self._html(cr.get_summary())] + return ctx.tag["Not Healthy!: ", self._html(cr.get_summary())] def render_repair_results(self, ctx, data): if self.r.get_repair_attempted(): @@ -153,30 +218,36 @@ class CheckAndRepairResults(rend.Page, ResultsBase): return ctx.tag["No repair necessary"] def render_post_repair_results(self, ctx, data): - cr = self._render_results(self.r.get_post_repair_results()) + cr = self._render_results(ctx, self.r.get_post_repair_results()) return ctx.tag[cr] def render_maybe_pre_repair_results(self, ctx, data): if self.r.get_repair_attempted(): - cr = self._render_results(self.r.get_pre_repair_results()) + cr = self._render_results(ctx, self.r.get_pre_repair_results()) return ctx.tag[T.div["Pre-Repair Checker Results:"], cr] return "" - def render_return(self, ctx, data): - req = inevow.IRequest(ctx) - return_to = get_arg(req, "return_to", None) - if return_to: - return T.div[T.a(href=return_to)["Return to parent directory"]] - return "" class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin): docFactory = getxmlfile("deep-check-results.xhtml") def __init__(self, monitor): - #assert IDeepCheckResults(results) - #self.r = results self.monitor = monitor + def childFactory(self, ctx, name): + if not name: + 
return self + # /operation/$OPHANDLE/$STORAGEINDEX provides detailed information + # about a specific file or directory that was checked + si = base32.a2b(name) + r = self.monitor.get_status() + try: + return CheckerResults(r.get_results_for_storage_index(si)) + except KeyError: + raise WebError("No detailed results for SI %s" % html.escape(name), + http.NOT_FOUND) + return rend.Page.childFactory(self, ctx, name) + def renderHTTP(self, ctx): if self.want_json(ctx): return self.json(ctx) @@ -276,7 +347,7 @@ class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin): ctx.fillSlots("serverid", idlib.shortnodeid_b2a(serverid)) if nickname: ctx.fillSlots("nickname", self._html(nickname)) - ctx.fillSlots("si", base32.b2a(storage_index)) + ctx.fillSlots("si", self._render_si_link(ctx, storage_index)) ctx.fillSlots("shnum", str(sharenum)) return ctx.tag @@ -296,6 +367,8 @@ class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin): path, r = data ctx.fillSlots("path", "/".join(self._html(path))) ctx.fillSlots("healthy", str(r.is_healthy())) + storage_index = r.get_storage_index() + ctx.fillSlots("storage_index", self._render_si_link(ctx, storage_index)) ctx.fillSlots("summary", self._html(r.get_summary())) return ctx.tag diff --git a/src/allmydata/web/deep-check-results.xhtml b/src/allmydata/web/deep-check-results.xhtml index a15be3f5..9ac8fb55 100644 --- a/src/allmydata/web/deep-check-results.xhtml +++ b/src/allmydata/web/deep-check-results.xhtml @@ -67,11 +67,13 @@ Relative Path Healthy + Storage Index Summary + -- 2.45.2
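For readers following the checker_results.py and interfaces.py hunks above: the
point of the new all_results_by_storage_index mapping is to let the web layer
resolve /operations/$HANDLE/$STORAGEINDEX without knowing the checked object's
path. The sketch below shows that lookup in isolation; it assumes the
DeepCheckResults constructor and add_check() signatures implied by the
surrounding code, and the storage-index values are made up.

    from allmydata.checker_results import CheckerResults, DeepCheckResults

    root_si = "\x00" * 16     # made-up binary SI of the root directory
    child_si = "\x01" * 16    # made-up binary SI of one checked child

    r = CheckerResults(child_si)
    r.set_healthy(False)
    r.set_needs_rebalancing(False)
    r.set_data({"list-corrupt-shares": []})   # minimal data, as in test/common.py

    deep = DeepCheckResults(root_si)           # assumed: keyed by the root's SI
    deep.add_check(r, ["subdir", "file.txt"])  # assumed add_check(results, path)

    # The same ICheckerResults instance is now reachable two ways: by path
    # (as before) and by storage index (new).
    assert deep.get_all_results()[("subdir", "file.txt")] is r
    assert deep.get_results_for_storage_index(child_si) is r

DeepCheckResults.childFactory() in web/checker_results.py performs exactly this
get_results_for_storage_index() call, wrapping the result in a CheckerResults
page and converting a KeyError into a 404 WebError.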