deep-check: add webapi links to detailed per-file/dir results
author    Brian Warner <warner@lothar.com>
          Thu, 23 Oct 2008 23:00:31 +0000 (16:00 -0700)
committer Brian Warner <warner@lothar.com>
          Thu, 23 Oct 2008 23:00:31 +0000 (16:00 -0700)
docs/webapi.txt
src/allmydata/checker_results.py
src/allmydata/interfaces.py
src/allmydata/test/common.py
src/allmydata/test/test_web.py
src/allmydata/web/checker-results.xhtml
src/allmydata/web/checker_results.py
src/allmydata/web/deep-check-results.xhtml

diff --git a/docs/webapi.txt b/docs/webapi.txt
index 14477399ce74bb5c1d6449c345be1f34a8469ab9..b93babfa02af957c9fe489ddd5bb53fb3e81beb6 100644
@@ -214,6 +214,9 @@ GET /operations/$HANDLE?output=JSON   (same)
  web browser to reload the status page about 60 seconds later. This tag will
  be removed once the operation has completed.
 
+ There may be more status information available under
+ /operations/$HANDLE/$ETC : i.e., the handle forms the root of a URL space.
+
 POST /operations/$HANDLE?t=cancel
 
  This terminates the operation, and returns an HTML page explaining what was
@@ -814,10 +817,14 @@ POST $URL?t=start-deep-check    (must add &ophandle=XYZ)
   Since this operation can take a long time (perhaps a second per object),
   the ophandle= argument is required (see "Slow Operations, Progress, and
   Cancelling" above). The response to this POST will be a redirect to the
-  corresponding /operations/$HANDLE?t=status page (with output=HTML or
-  output=JSON to match the output= argument given to the POST). The
-  deep-check operation will continue to run in the background, and the
-  /operations page should be used to find out when the operation is done.
+  corresponding /operations/$HANDLE page (with output=HTML or output=JSON to
+  match the output= argument given to the POST). The deep-check operation
+  will continue to run in the background, and the /operations page should be
+  used to find out when the operation is done.
+
+  Detailed checker results for non-healthy files and directories will be
+  available under /operations/$HANDLE/$STORAGEINDEX, and the HTML status will
+  contain links to these detailed results.
 
   The HTML /operations/$HANDLE page for incomplete operations will contain a
   meta-refresh tag, set to 60 seconds, so that a browser which uses
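
As a hedged illustration of the webapi flow documented above (not part of this patch), a client might drive a deep-check operation roughly as follows. The node address, the ophandle value, the $DIRCAP placeholder, and the "finished" status key are assumptions; the paths and query arguments come from the documentation above.

import json
import time
import urllib.request

BASE = "http://127.0.0.1:3456"      # assumed local tahoe node webapi address
OPHANDLE = "xyz"                    # arbitrary client-chosen operation handle
DIR_URL = BASE + "/uri/$DIRCAP/"    # placeholder: substitute a real dircap URL

def start_deep_check():
    # POST $URL?t=start-deep-check&ophandle=XYZ; the node answers with a
    # redirect to /operations/$HANDLE, which urlopen follows automatically.
    req = urllib.request.Request(
        "%s?t=start-deep-check&ophandle=%s&output=JSON" % (DIR_URL, OPHANDLE),
        data=b"", method="POST")
    return urllib.request.urlopen(req)

def poll_status():
    # GET /operations/$HANDLE?t=status&output=JSON until the operation is
    # done (the "finished" key is an assumption about the status JSON).
    while True:
        with urllib.request.urlopen(
                "%s/operations/%s?t=status&output=JSON" % (BASE, OPHANDLE)) as f:
            status = json.load(f)
        if status.get("finished"):
            return status
        time.sleep(60)

def fetch_details(si_b32):
    # New in this commit: detailed per-object results for a non-healthy file
    # or directory live under /operations/$HANDLE/$STORAGEINDEX.
    url = "%s/operations/%s/%s?output=JSON" % (BASE, OPHANDLE, si_b32)
    with urllib.request.urlopen(url) as f:
        return json.load(f)
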
diff --git a/src/allmydata/checker_results.py b/src/allmydata/checker_results.py
index dd4e50c702f2d956786b7564448bdd05f2e1c616..46da9a24858a0d852fa56fc1f0d7552810cda2ff 100644
@@ -91,6 +91,7 @@ class DeepResultsBase:
         self.objects_unhealthy = 0
         self.corrupt_shares = []
         self.all_results = {}
+        self.all_results_by_storage_index = {}
         self.stats = {}
 
     def update_stats(self, new_stats):
@@ -105,6 +106,9 @@ class DeepResultsBase:
     def get_all_results(self):
         return self.all_results
 
+    def get_results_for_storage_index(self, storage_index):
+        return self.all_results_by_storage_index[storage_index]
+
     def get_stats(self):
         return self.stats
 
@@ -123,6 +127,7 @@ class DeepCheckResults(DeepResultsBase):
         else:
             self.objects_unhealthy += 1
         self.all_results[tuple(path)] = r
+        self.all_results_by_storage_index[r.get_storage_index()] = r
         self.corrupt_shares.extend(r.get_data()["list-corrupt-shares"])
 
     def get_counters(self):
@@ -171,6 +176,7 @@ class DeepCheckAndRepairResults(DeepResultsBase):
         else:
             self.objects_unhealthy_post_repair += 1
         self.all_results[tuple(path)] = r
+        self.all_results_by_storage_index[r.get_storage_index()] = r
         self.corrupt_shares_post_repair.extend(post_repair.get_data()["list-corrupt-shares"])
 
     def get_counters(self):
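
A self-contained sketch (not allmydata code) of the dual-index pattern this hunk introduces: each checked object is filed both under its path tuple and under its binary storage index, which is what lets the web layer serve /operations/$HANDLE/$STORAGEINDEX later in this patch. The class and the example result dict are illustrative stand-ins.

class MiniDeepResults:
    """Toy stand-in for DeepResultsBase/DeepCheckResults, illustration only."""
    def __init__(self):
        self.all_results = {}                    # path tuple -> result
        self.all_results_by_storage_index = {}   # binary SI -> result

    def add_check(self, r, path):
        self.all_results[tuple(path)] = r
        self.all_results_by_storage_index[r["storage-index"]] = r

    def get_results_for_storage_index(self, si):
        return self.all_results_by_storage_index[si]   # KeyError if unknown

results = MiniDeepResults()
r = {"storage-index": b"\x01" * 16, "healthy": False}   # example 16-byte SI
results.add_check(r, ["subdir", "file.txt"])
assert results.get_results_for_storage_index(b"\x01" * 16) is r
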
diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py
index 9598f8ec9494484f342d55dcf7cf90477e336370..dca8ea3b26c9dcf9aa50a8abf71fb3dc972eb1c4 100644
@@ -1649,6 +1649,11 @@ class IDeepCheckResults(Interface):
         be slash-joined) to an ICheckerResults instance, one for each object
         that was checked."""
 
+    def get_results_for_storage_index(storage_index):
+        """Retrive the ICheckerResults instance for the given (binary)
+        storage index. Raises KeyError if there are no results for that
+        storage index."""
+
     def get_stats():
         """Return a dictionary with the same keys as
         IDirectoryNode.deep_stats()."""
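
A brief caller-side sketch of the new interface method documented above; deep_results and si stand for objects obtained elsewhere (e.g. from a finished deep-check operation), and the KeyError handling mirrors what the docstring promises.

def details_or_none(deep_results, si):
    # deep_results: an IDeepCheckResults provider; si: binary storage index
    try:
        return deep_results.get_results_for_storage_index(si)
    except KeyError:
        return None   # no per-object results were recorded for that SI
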
diff --git a/src/allmydata/test/common.py b/src/allmydata/test/common.py
index 0a74e8cc4f7c95a333cb3f6cb20ad22e9c2e4181..b36594072a8eb911631a9ef22cec122a1d0c855a 100644
@@ -50,13 +50,26 @@ class FakeCHKFileNode:
         r = CheckerResults(self.storage_index)
         is_bad = self.bad_shares.get(self.storage_index, None)
         data = {}
+        data["count-shares-needed"] = 3
+        data["count-shares-expected"] = 10
+        data["count-good-share-hosts"] = 10
+        data["count-wrong-shares"] = 0
+        nodeid = "\x00"*20
+        data["list-corrupt-shares"] = []
+        data["sharemap"] = {1: [nodeid]}
+        data["count-recoverable-versions"] = 1
+        data["count-unrecoverable-versions"] = 0
         if is_bad:
              r.set_healthy(False)
+             data["count-shares-good"] = 9
+             data["list-corrupt-shares"] = [(nodeid, self.storage_index, 0)]
              r.problems = failure.Failure(CorruptShareError(is_bad))
         else:
              r.set_healthy(True)
+             data["count-shares-good"] = 10
              r.problems = []
         r.set_data(data)
+        r.set_needs_rebalancing(False)
         return defer.succeed(r)
     def check_and_repair(self, monitor, verify=False):
         d = self.check(verify)
@@ -153,16 +166,27 @@ class FakeMutableFileNode:
         r = CheckerResults(self.storage_index)
         is_bad = self.bad_shares.get(self.storage_index, None)
         data = {}
+        data["count-shares-needed"] = 3
+        data["count-shares-expected"] = 10
+        data["count-good-share-hosts"] = 10
+        data["count-wrong-shares"] = 0
         data["list-corrupt-shares"] = []
+        nodeid = "\x00"*20
+        data["sharemap"] = {"seq1-abcd-sh0": [nodeid]}
+        data["count-recoverable-versions"] = 1
+        data["count-unrecoverable-versions"] = 0
         if is_bad:
              r.set_healthy(False)
+             data["count-shares-good"] = 9
              r.problems = failure.Failure(CorruptShareError("peerid",
                                                             0, # shnum
                                                             is_bad))
         else:
              r.set_healthy(True)
+             data["count-shares-good"] = 10
              r.problems = []
         r.set_data(data)
+        r.set_needs_rebalancing(False)
         return defer.succeed(r)
 
     def check_and_repair(self, monitor, verify=False):
diff --git a/src/allmydata/test/test_web.py b/src/allmydata/test/test_web.py
index 0d9ac95683a53d4d9dea16bf07f23d4b4adbe6bd..54b3b463f49bba1ae541b0120a5e37dc56dc8868 100644
@@ -48,6 +48,9 @@ class FakeClient(service.MultiService):
     def connected_to_introducer(self):
         return False
 
+    def get_nickname_for_peerid(self, peerid):
+        return u"John Doe"
+
     def create_node_from_uri(self, auri):
         u = uri.from_string(auri)
         if (INewDirectoryURI.providedBy(u)
diff --git a/src/allmydata/web/checker-results.xhtml b/src/allmydata/web/checker-results.xhtml
index c6eb79e978d3d6ac269f314c1408fe0beb4bf0dd..a27df0e73fbbd7a2a384f1527f482091d616c559 100644
 
 <h1>File Check Results for SI=<span n:render="storage_index" /></h1>
 
-<div n:render="healthy" />
+<div>
+  <span n:render="healthy" />
+  <span n:render="rebalance" />
+</div>
 
 <div n:render="results" />
 
diff --git a/src/allmydata/web/checker_results.py b/src/allmydata/web/checker_results.py
index 5f2eb65c78efb6ddffd0af819cddf0885d4e8073..4c7030c45c695ed41f9dbb0fa7d67e633f28d53a 100644
@@ -2,16 +2,71 @@
 import time
 import simplejson
 from nevow import rend, inevow, tags as T
-from twisted.web import html
-from allmydata.web.common import getxmlfile, get_arg, IClient
+from twisted.web import http, html
+from allmydata.web.common import getxmlfile, get_arg, get_root, \
+     IClient, WebError
 from allmydata.web.operations import ReloadMixin
 from allmydata.interfaces import ICheckAndRepairResults, ICheckerResults
 from allmydata.util import base32, idlib
 
 class ResultsBase:
-    def _render_results(self, cr):
+    def _render_results(self, ctx, cr):
         assert ICheckerResults(cr)
-        return T.pre["\n".join(self._html(cr.get_report()))] # TODO: more
+        c = IClient(ctx)
+        data = cr.get_data()
+        r = []
+        def add(name, value):
+            r.append(T.li[name + ": ", value])
+
+        add("Report", T.pre["\n".join(self._html(cr.get_report()))])
+        add("Share Counts",
+            "need %d-of-%d, have %d" % (data["count-shares-needed"],
+                                        data["count-shares-expected"],
+                                        data["count-shares-good"]))
+        add("Hosts with good shares", data["count-good-share-hosts"])
+
+        if data["list-corrupt-shares"]:
+            badsharemap = []
+            for (serverid, si, shnum) in data["list-corrupt-shares"]:
+                nickname = c.get_nickname_for_peerid(serverid)
+                badsharemap.append(T.tr[T.td["sh#%d" % shnum],
+                                        T.td[T.tt[base32.b2a(serverid)],
+                                             " (", nickname, ")"],
+                                        ])
+            add("Corrupt shares", T.table(border="1")[badsharemap])
+        else:
+            add("Corrupt shares", "none")
+
+        add("Wrong Shares", data["count-wrong-shares"])
+
+        sharemap = []
+        servers = {}
+        for shareid in sorted(data["sharemap"].keys()):
+            serverids = data["sharemap"][shareid]
+            for i,serverid in enumerate(serverids):
+                servers[serverid] = servers.get(serverid,0) + 1
+                shareid_s = ""
+                if i == 0:
+                    shareid_s = shareid
+                nickname = c.get_nickname_for_peerid(serverid)
+                sharemap.append(T.tr[T.td[shareid_s],
+                                     T.td[T.tt[base32.b2a(serverid)],
+                                          " (", nickname, ")"],
+                                     ])
+        add("Good Shares", T.table(border="1")[sharemap])
+
+        add("Recoverable Versions", data["count-recoverable-versions"])
+        add("Unrecoverable Versions", data["count-unrecoverable-versions"])
+
+        servermap = []
+        for serverid in sorted(servers.keys()):
+            nickname = c.get_nickname_for_peerid(serverid)
+            servermap.append(T.tr[T.td[T.tt[base32.b2a(serverid)],
+                                       " (", nickname, ")"],
+                                  T.td["*" * servers[serverid]] ])
+        add("Share Balancing", T.table(border="1")[servermap])
+
+        return T.ul[r]
 
     def _json_check_and_repair_results(self, r):
         data = {}
@@ -69,6 +124,17 @@ class ResultsBase:
             return True
         return False
 
+    def _render_si_link(self, ctx, storage_index):
+        si_s = base32.b2a(storage_index)
+        root = get_root(ctx)
+        req = inevow.IRequest(ctx)
+        ophandle = req.prepath[-1]
+        target = "%s/operations/%s/%s" % (get_root(ctx), ophandle, si_s)
+        output = get_arg(ctx, "output")
+        if output:
+            target = target + "?output=%s" % output
+        return T.a(href=target)[si_s]
+
 class LiteralCheckerResults(rend.Page, ResultsBase):
     docFactory = getxmlfile("literal-checker-results.xhtml")
 
@@ -84,65 +150,64 @@ class LiteralCheckerResults(rend.Page, ResultsBase):
                 }
         return simplejson.dumps(data, indent=1) + "\n"
 
-class CheckerResults(rend.Page, ResultsBase):
-    docFactory = getxmlfile("checker-results.xhtml")
-
-    def __init__(self, results):
-        self.r = ICheckerResults(results)
+class CheckerBase:
 
     def renderHTTP(self, ctx):
         if self.want_json(ctx):
             return self.json(ctx)
         return rend.Page.renderHTTP(self, ctx)
 
+    def render_storage_index(self, ctx, data):
+        return self.r.get_storage_index_string()
+
+    def render_return(self, ctx, data):
+        req = inevow.IRequest(ctx)
+        return_to = get_arg(req, "return_to", None)
+        if return_to:
+            return T.div[T.a(href=return_to)["Return to parent directory"]]
+        return ""
+
+class CheckerResults(CheckerBase, rend.Page, ResultsBase):
+    docFactory = getxmlfile("checker-results.xhtml")
+
+    def __init__(self, results):
+        self.r = ICheckerResults(results)
+
     def json(self, ctx):
         inevow.IRequest(ctx).setHeader("content-type", "text/plain")
         data = self._json_check_results(self.r)
         return simplejson.dumps(data, indent=1) + "\n"
 
-    def render_storage_index(self, ctx, data):
-        return self.r.get_storage_index_string()
-
     def render_healthy(self, ctx, data):
         if self.r.is_healthy():
             return ctx.tag["Healthy!"]
-        return ctx.tag["Not Healthy!:", self._html(self.r.get_summary())]
+        return ctx.tag["Not Healthy!: ", self._html(self.r.get_summary())]
+
+    def render_rebalance(self, ctx, data):
+        if self.r.needs_rebalancing():
+            return ctx.tag["(needs rebalancing)"]
+        return ctx.tag["(does not need rebalancing)"]
 
     def render_results(self, ctx, data):
-        cr = self._render_results(self.r)
+        cr = self._render_results(ctx, self.r)
         return ctx.tag[cr]
 
-    def render_return(self, ctx, data):
-        req = inevow.IRequest(ctx)
-        return_to = get_arg(req, "return_to", None)
-        if return_to:
-            return T.div[T.a(href=return_to)["Return to parent directory"]]
-        return ""
-
-class CheckAndRepairResults(rend.Page, ResultsBase):
+class CheckAndRepairResults(CheckerBase, rend.Page, ResultsBase):
     docFactory = getxmlfile("check-and-repair-results.xhtml")
 
     def __init__(self, results):
         self.r = ICheckAndRepairResults(results)
 
-    def renderHTTP(self, ctx):
-        if self.want_json(ctx):
-            return self.json(ctx)
-        return rend.Page.renderHTTP(self, ctx)
-
     def json(self, ctx):
         inevow.IRequest(ctx).setHeader("content-type", "text/plain")
         data = self._json_check_and_repair_results(self.r)
         return simplejson.dumps(data, indent=1) + "\n"
 
-    def render_storage_index(self, ctx, data):
-        return self.r.get_storage_index_string()
-
     def render_healthy(self, ctx, data):
         cr = self.r.get_post_repair_results()
         if cr.is_healthy():
             return ctx.tag["Healthy!"]
-        return ctx.tag["Not Healthy!:", self._html(cr.get_summary())]
+        return ctx.tag["Not Healthy!: ", self._html(cr.get_summary())]
 
     def render_repair_results(self, ctx, data):
         if self.r.get_repair_attempted():
@@ -153,30 +218,36 @@ class CheckAndRepairResults(rend.Page, ResultsBase):
         return ctx.tag["No repair necessary"]
 
     def render_post_repair_results(self, ctx, data):
-        cr = self._render_results(self.r.get_post_repair_results())
+        cr = self._render_results(ctx, self.r.get_post_repair_results())
         return ctx.tag[cr]
 
     def render_maybe_pre_repair_results(self, ctx, data):
         if self.r.get_repair_attempted():
-            cr = self._render_results(self.r.get_pre_repair_results())
+            cr = self._render_results(ctx, self.r.get_pre_repair_results())
             return ctx.tag[T.div["Pre-Repair Checker Results:"], cr]
         return ""
 
-    def render_return(self, ctx, data):
-        req = inevow.IRequest(ctx)
-        return_to = get_arg(req, "return_to", None)
-        if return_to:
-            return T.div[T.a(href=return_to)["Return to parent directory"]]
-        return ""
 
 class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin):
     docFactory = getxmlfile("deep-check-results.xhtml")
 
     def __init__(self, monitor):
-        #assert IDeepCheckResults(results)
-        #self.r = results
         self.monitor = monitor
 
+    def childFactory(self, ctx, name):
+        if not name:
+            return self
+        # /operation/$OPHANDLE/$STORAGEINDEX provides detailed information
+        # about a specific file or directory that was checked
+        si = base32.a2b(name)
+        r = self.monitor.get_status()
+        try:
+            return CheckerResults(r.get_results_for_storage_index(si))
+        except KeyError:
+            raise WebError("No detailed results for SI %s" % html.escape(name),
+                           http.NOT_FOUND)
+        return rend.Page.childFactory(self, ctx, name)
+
     def renderHTTP(self, ctx):
         if self.want_json(ctx):
             return self.json(ctx)
@@ -276,7 +347,7 @@ class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin):
         ctx.fillSlots("serverid", idlib.shortnodeid_b2a(serverid))
         if nickname:
             ctx.fillSlots("nickname", self._html(nickname))
-        ctx.fillSlots("si", base32.b2a(storage_index))
+        ctx.fillSlots("si", self._render_si_link(ctx, storage_index))
         ctx.fillSlots("shnum", str(sharenum))
         return ctx.tag
 
@@ -296,6 +367,8 @@ class DeepCheckResults(rend.Page, ResultsBase, ReloadMixin):
         path, r = data
         ctx.fillSlots("path", "/".join(self._html(path)))
         ctx.fillSlots("healthy", str(r.is_healthy()))
+        storage_index = r.get_storage_index()
+        ctx.fillSlots("storage_index", self._render_si_link(ctx, storage_index))
         ctx.fillSlots("summary", self._html(r.get_summary()))
         return ctx.tag
 
diff --git a/src/allmydata/web/deep-check-results.xhtml b/src/allmydata/web/deep-check-results.xhtml
index a15be3f5e4f7b769dca85bfb74902048b15759b6..9ac8fb55518c125087aeda74ebf2ea7e037bb242 100644
   <tr n:pattern="header">
     <td>Relative Path</td>
     <td>Healthy</td>
+    <td>Storage Index</td>
     <td>Summary</td>
   </tr>
   <tr n:pattern="item" n:render="object">
     <td><n:slot name="path"/></td>
     <td><n:slot name="healthy"/></td>
+    <td><tt><n:slot name="storage_index"/></tt></td>
     <td><n:slot name="summary"/></td>
   </tr>
 </table>