helper stats: fix the /helper_status page, the recent conflict merging missed some...
author Brian Warner <warner@allmydata.com>
Mon, 14 Apr 2008 20:18:53 +0000 (13:18 -0700)
committer Brian Warner <warner@allmydata.com>
Mon, 14 Apr 2008 20:18:53 +0000 (13:18 -0700)
misc/munin/tahoe-helperstats-active.py
misc/munin/tahoe-helperstats-fetched.py
src/allmydata/offloaded.py
src/allmydata/test/test_system.py
src/allmydata/web/status.py
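
The common thread in the hunks below is a stats-key rename that an earlier conflicted merge only partially propagated: the helper's flat CHK_* counter names became namespaced chk_upload_helper.* names, but the munin plugins and the web status page were still asking for the old ones. A rough old-to-new mapping, recovered from the hunks themselves (the dict name is purely illustrative):

    # old flat stat key            -> new namespaced key
    STAT_KEY_RENAMES = {
        "CHK_active_uploads":         "chk_upload_helper.active_uploads",
        "CHK_incoming_files":         "chk_upload_helper.incoming_count",
        "CHK_incoming_size":          "chk_upload_helper.incoming_size",
        "CHK_encoding_files":         "chk_upload_helper.encoding_count",
        "CHK_encoding_size":          "chk_upload_helper.encoding_size",
        "CHK_upload_requests":        "chk_upload_helper.upload_requests",
        "CHK_upload_already_present": "chk_upload_helper.upload_already_present",
        "CHK_upload_need_upload":     "chk_upload_helper.upload_need_upload",
        "CHK_fetched_bytes":          "chk_upload_helper.fetched_bytes",
        "CHK_encoded_bytes":          "chk_upload_helper.encoded_bytes",
    }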

diff --git a/misc/munin/tahoe-helperstats-active.py b/misc/munin/tahoe-helperstats-active.py
index 874fddf4e8c804d8ca354d3f378931294796d34a..472c8c42dad9314804f74bd14351df7f6f58d9fb 100644 (file)
--- a/misc/munin/tahoe-helperstats-active.py
+++ b/misc/munin/tahoe-helperstats-active.py
@@ -21,5 +21,5 @@ if len(sys.argv) > 1:
 url = os.environ["url"]
 
 data = simplejson.loads(urllib.urlopen(url).read())
-print "fetched.value %d" % data["CHK_active_uploads"]
+print "fetched.value %d" % data["chk_upload_helper.active_uploads"]
 
diff --git a/misc/munin/tahoe-helperstats-fetched.py b/misc/munin/tahoe-helperstats-fetched.py
index 9dcbea5d71e6d2da2b3f1abca457e9e77826cf7e..c64101dfeb6c7f772c2f9dcc0c24e8aee37c0952 100644 (file)
--- a/misc/munin/tahoe-helperstats-fetched.py
+++ b/misc/munin/tahoe-helperstats-fetched.py
@@ -23,4 +23,4 @@ if len(sys.argv) > 1:
 url = os.environ["url"]
 
 data = simplejson.loads(urllib.urlopen(url).read())
-print "fetched.value %d" % data["CHK_fetched_bytes"]
+print "fetched.value %d" % data["chk_upload_helper.fetched_bytes"]
diff --git a/src/allmydata/offloaded.py b/src/allmydata/offloaded.py
index b6f2041e7622d7f841e19f891957e73bd1fb1b85..dd6acaece687970eb1f279d777802ec966596ee0 100644 (file)
--- a/src/allmydata/offloaded.py
+++ b/src/allmydata/offloaded.py
@@ -377,9 +377,7 @@ class CHKCiphertextFetcher(AskUntilSuccessMixin):
                 self._f.write(data)
                 self._have += len(data)
                 self._ciphertext_fetched += len(data)
-                stats_provider = self._upload_helper._helper.stats_provider
-                if stats_provider:
-                    stats_provider.count("chk_upload_helper.fetched_bytes", len(data))
+                self._upload_helper._helper.count("chk_upload_helper.fetched_bytes", len(data))
             return False # not done
         d.addCallback(_got_data)
         return d
@@ -479,6 +477,12 @@ class Helper(Referenceable, service.MultiService):
         self.stats_provider = stats_provider
         if stats_provider:
             stats_provider.register_producer(self)
+        self._counters = {"chk_upload_helper.upload_requests": 0,
+                          "chk_upload_helper.upload_already_present": 0,
+                          "chk_upload_helper.upload_need_upload": 0,
+                          "chk_upload_helper.fetched_bytes": 0,
+                          "chk_upload_helper.encoded_bytes": 0,
+                          }
         service.MultiService.__init__(self)
 
     def setServiceParent(self, parent):
@@ -489,6 +493,11 @@ class Helper(Referenceable, service.MultiService):
             kwargs['facility'] = "tahoe.helper"
         return self.parent.log(*args, **kwargs)
 
+    def count(self, key, value=1):
+        if self.stats_provider:
+            self.stats_provider.count(key, value)
+        self._counters[key] += value
+
     def get_stats(self):
         OLD = 86400*2 # 48hours
         now = time.time()
@@ -512,18 +521,19 @@ class Helper(Referenceable, service.MultiService):
             enc_size += size
             if now - mtime > OLD:
                 enc_size_old += size
-        return { 'chk_upload_helper.active_uploads': len(self._active_uploads),
-                 'chk_upload_helper.incoming_count': inc_count,
-                 'chk_upload_helper.incoming_size': inc_size,
-                 'chk_upload_helper.incoming_size_old': inc_size_old,
-                 'chk_upload_helper.encoding_count': enc_count,
-                 'chk_upload_helper.encoding_size': enc_size,
-                 'chk_upload_helper.encoding_size_old': enc_size_old,
-               }
+        stats = { 'chk_upload_helper.active_uploads': len(self._active_uploads),
+                  'chk_upload_helper.incoming_count': inc_count,
+                  'chk_upload_helper.incoming_size': inc_size,
+                  'chk_upload_helper.incoming_size_old': inc_size_old,
+                  'chk_upload_helper.encoding_count': enc_count,
+                  'chk_upload_helper.encoding_size': enc_size,
+                  'chk_upload_helper.encoding_size_old': enc_size_old,
+                  }
+        stats.update(self._counters)
+        return stats
 
     def remote_upload_chk(self, storage_index):
-        if self.stats_provider:
-            self.stats_provider.count("chk_upload_helper.upload_requests")
+        self.count("chk_upload_helper.upload_requests")
         r = upload.UploadResults()
         started = time.time()
         si_s = storage.si_b2a(storage_index)
@@ -541,13 +551,11 @@ class Helper(Referenceable, service.MultiService):
             r.timings['existence_check'] = elapsed
             if already_present:
                 # the necessary results are placed in the UploadResults
-                if self.stats_provider:
-                    self.stats_provider.count("chk_upload_helper.upload_already_present")
+                self.count("chk_upload_helper.upload_already_present")
                 self.log("file already found in grid", parent=lp)
                 return (r, None)
 
-            if self.stats_provider:
-                self.stats_provider.count("chk_upload_helper.upload_need_upload")
+            self.count("chk_upload_helper.upload_need_upload")
             # the file is not present in the grid, by which we mean there are
             # less than 'N' shares available.
             self.log("unable to find file in the grid", parent=lp,
@@ -598,6 +606,5 @@ class Helper(Referenceable, service.MultiService):
         return d
 
     def upload_finished(self, storage_index, size):
-        if self.stats_provider:
-            self.stats_provider.count("chk_upload_helper.encoded_bytes", size)
+        self.count("chk_upload_helper.encoded_bytes", size)
         del self._active_uploads[storage_index]
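
The offloaded.py change replaces the scattered "if self.stats_provider:" guards with a single Helper.count() that forwards to the stats provider when one is registered and always bumps a local _counters dict, so get_stats() (and therefore the /helper_status page) has numbers to report even without a stats provider. A stripped-down sketch of the same pattern; the class name and the subset of counters here are illustrative only:

    class CounterMixin:
        """Keep local counters and mirror them to an optional stats provider."""
        def __init__(self, stats_provider=None):
            self.stats_provider = stats_provider
            self._counters = {"chk_upload_helper.upload_requests": 0,
                              "chk_upload_helper.fetched_bytes": 0}

        def count(self, key, value=1):
            if self.stats_provider:
                self.stats_provider.count(key, value)  # external stats, if wired up
            self._counters[key] += value               # always tracked locally

        def get_stats(self):
            stats = {}  # computed-on-demand entries (queue sizes etc.) go here
            stats.update(self._counters)
            return stats

Because _counters is merged into get_stats(), the JSON status page and the munin plugins keep working whether or not a stats provider is attached.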
diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py
index e19c57d6d035a745605e74a9540c5dc35f22b8d0..c191671bcb476cac1239854bbdc41dc85fa0673b 100644 (file)
--- a/src/allmydata/test/test_system.py
+++ b/src/allmydata/test/test_system.py
@@ -1306,6 +1306,55 @@ class SystemTest(testutil.SignalMixin, testutil.PollMixin, unittest.TestCase):
             return self.GET("status/retrieve-%d" % self._retrieve_status)
         d.addCallback(_got_publish)
 
+        # check that the helper status page exists
+        d.addCallback(lambda res:
+                      self.GET("helper_status", followRedirect=True))
+        def _got_helper_status(res):
+            self.failUnless("Bytes Fetched:" in res)
+            # touch a couple of files in the helper's working directory to
+            # exercise more code paths
+            workdir = os.path.join(self.getdir("client0"), "helper")
+            incfile = os.path.join(workdir, "CHK_incoming", "spurious")
+            f = open(incfile, "wb")
+            f.write("small file")
+            f.close()
+            then = time.time() - 86400*3
+            now = time.time()
+            os.utime(incfile, (now, then))
+            encfile = os.path.join(workdir, "CHK_encoding", "spurious")
+            f = open(encfile, "wb")
+            f.write("less small file")
+            f.close()
+            os.utime(encfile, (now, then))
+        d.addCallback(_got_helper_status)
+        # and that the json form exists
+        d.addCallback(lambda res:
+                      self.GET("helper_status?t=json", followRedirect=True))
+        def _got_helper_status_json(res):
+            data = simplejson.loads(res)
+            self.failUnlessEqual(data["chk_upload_helper.upload_need_upload"],
+                                 1)
+            self.failUnlessEqual(data["chk_upload_helper.incoming_count"], 1)
+            self.failUnlessEqual(data["chk_upload_helper.incoming_size"], 10)
+            self.failUnlessEqual(data["chk_upload_helper.incoming_size_old"],
+                                 10)
+            self.failUnlessEqual(data["chk_upload_helper.encoding_count"], 1)
+            self.failUnlessEqual(data["chk_upload_helper.encoding_size"], 15)
+            self.failUnlessEqual(data["chk_upload_helper.encoding_size_old"],
+                                 15)
+        d.addCallback(_got_helper_status_json)
+
+        # and check that client[3] (which uses a helper but does not run one
+        # itself) doesn't explode when you ask for its helper status with
+        # t=json
+        d.addCallback(lambda res:
+                      getPage(self.helper_webish_url + "helper_status?t=json"))
+        def _got_non_helper_status_json(res):
+            data = simplejson.loads(res)
+            self.failUnlessEqual(data, {})
+        d.addCallback(_got_non_helper_status_json)
+
+
         # TODO: mangle the second segment of a file, to test errors that
         # occur after we've already sent some good data, which uses a
         # different error path.
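
A note on the backdated mtimes in the new test: the encoding-queue loop in get_stats() (offloaded.py hunk above) counts a file as "old" when now - mtime > OLD, with OLD = 86400*2 = 172800 seconds = 48 hours, and the incoming-queue loop presumably applies the same cutoff. The test sets each spurious file's mtime to time.time() - 86400*3, i.e. 72 hours ago, so the 10-byte incoming file and the 15-byte encoding file land in incoming_size_old and encoding_size_old as well as in the plain size totals, which is exactly what the JSON assertions check.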
diff --git a/src/allmydata/web/status.py b/src/allmydata/web/status.py
index 17810d9d9bb3b927c4eb912b06fbc4e81a8648a3..49518f98f42afa09a0f700b5569985bee145adae 100644 (file)
--- a/src/allmydata/web/status.py
+++ b/src/allmydata/web/status.py
@@ -770,7 +770,7 @@ class HelperStatus(rend.Page):
             return self.render_JSON(ctx)
         # is there a better way to provide 'data' to all rendering methods?
         helper = IClient(ctx).getServiceNamed("helper")
-        self.original = helper.get_stats()["helper"]
+        self.original = helper.get_stats()
         return rend.Page.renderHTTP(self, ctx)
 
     def render_JSON(self, ctx):
@@ -779,32 +779,32 @@ class HelperStatus(rend.Page):
         except KeyError:
             return simplejson.dumps({})
 
-        stats = h.get_stats()["helper"]
+        stats = h.get_stats()
         return simplejson.dumps(stats, indent=1)
 
     def render_active_uploads(self, ctx, data):
-        return data["CHK_active_uploads"]
+        return data["chk_upload_helper.active_uploads"]
 
     def render_incoming(self, ctx, data):
-        return "%d bytes in %d files" % (data["CHK_incoming_size"],
-                                         data["CHK_incoming_files"])
+        return "%d bytes in %d files" % (data["chk_upload_helper.incoming_size"],
+                                         data["chk_upload_helper.incoming_count"])
 
     def render_encoding(self, ctx, data):
-        return "%d bytes in %d files" % (data["CHK_encoding_size"],
-                                         data["CHK_encoding_files"])
+        return "%d bytes in %d files" % (data["chk_upload_helper.encoding_size"],
+                                         data["chk_upload_helper.encoding_count"])
 
     def render_upload_requests(self, ctx, data):
-        return str(data["CHK_upload_requests"])
+        return str(data["chk_upload_helper.upload_requests"])
 
     def render_upload_already_present(self, ctx, data):
-        return str(data["CHK_upload_already_present"])
+        return str(data["chk_upload_helper.upload_already_present"])
 
     def render_upload_need_upload(self, ctx, data):
-        return str(data["CHK_upload_need_upload"])
+        return str(data["chk_upload_helper.upload_need_upload"])
 
     def render_upload_bytes_fetched(self, ctx, data):
-        return str(data["CHK_fetched_bytes"])
+        return str(data["chk_upload_helper.fetched_bytes"])
 
     def render_upload_bytes_encoded(self, ctx, data):
-        return str(data["CHK_encoded_bytes"])
+        return str(data["chk_upload_helper.encoded_bytes"])
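
After these renderer fixes, the t=json form of the page is simply Helper.get_stats() serialized: the computed incoming/encoding figures merged with the _counters dict. An illustrative payload (key names come from the hunks above; the values are made up):

    {
     "chk_upload_helper.active_uploads": 0,
     "chk_upload_helper.incoming_count": 1,
     "chk_upload_helper.incoming_size": 10,
     "chk_upload_helper.incoming_size_old": 10,
     "chk_upload_helper.encoding_count": 1,
     "chk_upload_helper.encoding_size": 15,
     "chk_upload_helper.encoding_size_old": 15,
     "chk_upload_helper.upload_requests": 1,
     "chk_upload_helper.upload_already_present": 0,
     "chk_upload_helper.upload_need_upload": 1,
     "chk_upload_helper.fetched_bytes": 0,
     "chk_upload_helper.encoded_bytes": 0
    }

On a node that is not running a helper, getServiceNamed("helper") raises KeyError and render_JSON falls back to an empty JSON object, which is what the new system-test check for that case asserts.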