From: Brian Warner
Date: Mon, 14 Apr 2008 20:18:53 +0000 (-0700)
Subject: helper stats: fix the /helper_status page, the recent conflict merging missed some...
X-Git-Url: https://git.rkrishnan.org/%5B/%5D%20/rgr-080307.php?a=commitdiff_plain;h=500934b72fb0c48096cb33ee7e03053c6bfcf167;p=tahoe-lafs%2Ftahoe-lafs.git

helper stats: fix the /helper_status page, the recent conflict merging
missed some uses. Added tests, updated the munin plugins to match
---

diff --git a/misc/munin/tahoe-helperstats-active.py b/misc/munin/tahoe-helperstats-active.py
index 874fddf4..472c8c42 100644
--- a/misc/munin/tahoe-helperstats-active.py
+++ b/misc/munin/tahoe-helperstats-active.py
@@ -21,5 +21,5 @@ if len(sys.argv) > 1:
 url = os.environ["url"]
 
 data = simplejson.loads(urllib.urlopen(url).read())
-print "fetched.value %d" % data["CHK_active_uploads"]
+print "fetched.value %d" % data["chk_upload_helper.active_uploads"]
diff --git a/misc/munin/tahoe-helperstats-fetched.py b/misc/munin/tahoe-helperstats-fetched.py
index 9dcbea5d..c64101df 100644
--- a/misc/munin/tahoe-helperstats-fetched.py
+++ b/misc/munin/tahoe-helperstats-fetched.py
@@ -23,4 +23,4 @@ if len(sys.argv) > 1:
 url = os.environ["url"]
 
 data = simplejson.loads(urllib.urlopen(url).read())
-print "fetched.value %d" % data["CHK_fetched_bytes"]
+print "fetched.value %d" % data["chk_upload_helper.fetched_bytes"]
diff --git a/src/allmydata/offloaded.py b/src/allmydata/offloaded.py
index b6f2041e..dd6acaec 100644
--- a/src/allmydata/offloaded.py
+++ b/src/allmydata/offloaded.py
@@ -377,9 +377,7 @@ class CHKCiphertextFetcher(AskUntilSuccessMixin):
             self._f.write(data)
             self._have += len(data)
             self._ciphertext_fetched += len(data)
-            stats_provider = self._upload_helper._helper.stats_provider
-            if stats_provider:
-                stats_provider.count("chk_upload_helper.fetched_bytes", len(data))
+            self._upload_helper._helper.count("chk_upload_helper.fetched_bytes", len(data))
             return False # not done
         d.addCallback(_got_data)
         return d
@@ -479,6 +477,12 @@ class Helper(Referenceable, service.MultiService):
         self.stats_provider = stats_provider
         if stats_provider:
             stats_provider.register_producer(self)
+        self._counters = {"chk_upload_helper.upload_requests": 0,
+                          "chk_upload_helper.upload_already_present": 0,
+                          "chk_upload_helper.upload_need_upload": 0,
+                          "chk_upload_helper.fetched_bytes": 0,
+                          "chk_upload_helper.encoded_bytes": 0,
+                          }
         service.MultiService.__init__(self)
 
     def setServiceParent(self, parent):
@@ -489,6 +493,11 @@ class Helper(Referenceable, service.MultiService):
             kwargs['facility'] = "tahoe.helper"
         return self.parent.log(*args, **kwargs)
 
+    def count(self, key, value=1):
+        if self.stats_provider:
+            self.stats_provider.count(key, value)
+        self._counters[key] += value
+
     def get_stats(self):
         OLD = 86400*2 # 48hours
         now = time.time()
@@ -512,18 +521,19 @@ class Helper(Referenceable, service.MultiService):
                 enc_size += size
                 if now - mtime > OLD:
                     enc_size_old += size
-        return { 'chk_upload_helper.active_uploads': len(self._active_uploads),
-                 'chk_upload_helper.incoming_count': inc_count,
-                 'chk_upload_helper.incoming_size': inc_size,
-                 'chk_upload_helper.incoming_size_old': inc_size_old,
-                 'chk_upload_helper.encoding_count': enc_count,
-                 'chk_upload_helper.encoding_size': enc_size,
-                 'chk_upload_helper.encoding_size_old': enc_size_old,
-                 }
+        stats = { 'chk_upload_helper.active_uploads': len(self._active_uploads),
+                  'chk_upload_helper.incoming_count': inc_count,
+                  'chk_upload_helper.incoming_size': inc_size,
+                  'chk_upload_helper.incoming_size_old': inc_size_old,
+                  'chk_upload_helper.encoding_count': enc_count,
+                  'chk_upload_helper.encoding_size': enc_size,
+                  'chk_upload_helper.encoding_size_old': enc_size_old,
+                  }
+        stats.update(self._counters)
+        return stats
 
     def remote_upload_chk(self, storage_index):
-        if self.stats_provider:
-            self.stats_provider.count("chk_upload_helper.upload_requests")
+        self.count("chk_upload_helper.upload_requests")
         r = upload.UploadResults()
         started = time.time()
         si_s = storage.si_b2a(storage_index)
@@ -541,13 +551,11 @@ class Helper(Referenceable, service.MultiService):
             r.timings['existence_check'] = elapsed
             if already_present:
                 # the necessary results are placed in the UploadResults
-                if self.stats_provider:
-                    self.stats_provider.count("chk_upload_helper.upload_already_present")
+                self.count("chk_upload_helper.upload_already_present")
                 self.log("file already found in grid", parent=lp)
                 return (r, None)
 
-            if self.stats_provider:
-                self.stats_provider.count("chk_upload_helper.upload_need_upload")
+            self.count("chk_upload_helper.upload_need_upload")
             # the file is not present in the grid, by which we mean there are
             # less than 'N' shares available.
             self.log("unable to find file in the grid", parent=lp,
@@ -598,6 +606,5 @@ class Helper(Referenceable, service.MultiService):
         return d
 
     def upload_finished(self, storage_index, size):
-        if self.stats_provider:
-            self.stats_provider.count("chk_upload_helper.encoded_bytes", size)
+        self.count("chk_upload_helper.encoded_bytes", size)
         del self._active_uploads[storage_index]
diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py
index e19c57d6..c191671b 100644
--- a/src/allmydata/test/test_system.py
+++ b/src/allmydata/test/test_system.py
@@ -1306,6 +1306,55 @@ class SystemTest(testutil.SignalMixin, testutil.PollMixin, unittest.TestCase):
             return self.GET("status/retrieve-%d" % self._retrieve_status)
         d.addCallback(_got_publish)
 
+        # check that the helper status page exists
+        d.addCallback(lambda res:
+                      self.GET("helper_status", followRedirect=True))
+        def _got_helper_status(res):
+            self.failUnless("Bytes Fetched:" in res)
+            # touch a couple of files in the helper's working directory to
+            # exercise more code paths
+            workdir = os.path.join(self.getdir("client0"), "helper")
+            incfile = os.path.join(workdir, "CHK_incoming", "spurious")
+            f = open(incfile, "wb")
+            f.write("small file")
+            f.close()
+            then = time.time() - 86400*3
+            now = time.time()
+            os.utime(incfile, (now, then))
+            encfile = os.path.join(workdir, "CHK_encoding", "spurious")
+            f = open(encfile, "wb")
+            f.write("less small file")
+            f.close()
+            os.utime(encfile, (now, then))
+        d.addCallback(_got_helper_status)
+        # and that the json form exists
+        d.addCallback(lambda res:
+                      self.GET("helper_status?t=json", followRedirect=True))
+        def _got_helper_status_json(res):
+            data = simplejson.loads(res)
+            self.failUnlessEqual(data["chk_upload_helper.upload_need_upload"],
+                                 1)
+            self.failUnlessEqual(data["chk_upload_helper.incoming_count"], 1)
+            self.failUnlessEqual(data["chk_upload_helper.incoming_size"], 10)
+            self.failUnlessEqual(data["chk_upload_helper.incoming_size_old"],
+                                 10)
+            self.failUnlessEqual(data["chk_upload_helper.encoding_count"], 1)
+            self.failUnlessEqual(data["chk_upload_helper.encoding_size"], 15)
+            self.failUnlessEqual(data["chk_upload_helper.encoding_size_old"],
+                                 15)
+        d.addCallback(_got_helper_status_json)
+
+        # and check that client[3] (which uses a helper but does not run one
+        # itself) doesn't explode when you ask for its helper status with
+        # t=json
+        d.addCallback(lambda res:
+                      getPage(self.helper_webish_url + "helper_status?t=json"))
+        def _got_non_helper_status_json(res):
+            data = simplejson.loads(res)
+            self.failUnlessEqual(data, {})
+        d.addCallback(_got_non_helper_status_json)
+
+
         # TODO: mangle the second segment of a file, to test errors that
         # occur after we've already sent some good data, which uses a
         # different error path.
diff --git a/src/allmydata/web/status.py b/src/allmydata/web/status.py
index 17810d9d..49518f98 100644
--- a/src/allmydata/web/status.py
+++ b/src/allmydata/web/status.py
@@ -770,7 +770,7 @@ class HelperStatus(rend.Page):
             return self.render_JSON(ctx)
         # is there a better way to provide 'data' to all rendering methods?
         helper = IClient(ctx).getServiceNamed("helper")
-        self.original = helper.get_stats()["helper"]
+        self.original = helper.get_stats()
         return rend.Page.renderHTTP(self, ctx)
 
     def render_JSON(self, ctx):
@@ -779,32 +779,32 @@ class HelperStatus(rend.Page):
         except KeyError:
             return simplejson.dumps({})
 
-        stats = h.get_stats()["helper"]
+        stats = h.get_stats()
         return simplejson.dumps(stats, indent=1)
 
     def render_active_uploads(self, ctx, data):
-        return data["CHK_active_uploads"]
+        return data["chk_upload_helper.active_uploads"]
 
     def render_incoming(self, ctx, data):
-        return "%d bytes in %d files" % (data["CHK_incoming_size"],
-                                         data["CHK_incoming_files"])
+        return "%d bytes in %d files" % (data["chk_upload_helper.incoming_size"],
+                                         data["chk_upload_helper.incoming_count"])
 
     def render_encoding(self, ctx, data):
-        return "%d bytes in %d files" % (data["CHK_encoding_size"],
-                                         data["CHK_encoding_files"])
+        return "%d bytes in %d files" % (data["chk_upload_helper.encoding_size"],
+                                         data["chk_upload_helper.encoding_count"])
 
     def render_upload_requests(self, ctx, data):
-        return str(data["CHK_upload_requests"])
+        return str(data["chk_upload_helper.upload_requests"])
 
     def render_upload_already_present(self, ctx, data):
-        return str(data["CHK_upload_already_present"])
+        return str(data["chk_upload_helper.upload_already_present"])
 
     def render_upload_need_upload(self, ctx, data):
-        return str(data["CHK_upload_need_upload"])
+        return str(data["chk_upload_helper.upload_need_upload"])
 
     def render_upload_bytes_fetched(self, ctx, data):
-        return str(data["CHK_fetched_bytes"])
+        return str(data["chk_upload_helper.fetched_bytes"])
 
     def render_upload_bytes_encoded(self, ctx, data):
-        return str(data["CHK_encoded_bytes"])
+        return str(data["chk_upload_helper.encoded_bytes"])
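
For reference, the renamed keys can be checked by hand against a running helper
node. The short script below is only an illustrative sketch (it is not part of
this patch), written in the same Python 2 / simplejson style as the munin
plugins above; NODEURL is an assumption and should point at your own helper's
webish port.

    #!/usr/bin/env python

    # Sketch: fetch /helper_status?t=json and print the counters that this
    # change renames from "CHK_*" to "chk_upload_helper.*".

    import urllib
    import simplejson

    NODEURL = "http://127.0.0.1:8123/"  # assumption: your helper's webish port

    def dump_helper_stats(base_url):
        data = simplejson.loads(urllib.urlopen(base_url + "helper_status?t=json").read())
        # A node that merely uses a helper (like client[3] in the new test)
        # serves an empty dict here, so handle that case gracefully.
        if not data:
            print "no helper stats at", base_url
            return
        for key in ("chk_upload_helper.active_uploads",
                    "chk_upload_helper.upload_requests",
                    "chk_upload_helper.fetched_bytes",
                    "chk_upload_helper.encoded_bytes"):
            print "%s: %d" % (key, data.get(key, 0))

    if __name__ == "__main__":
        dump_helper_stats(NODEURL)

After an upload passes through the helper, the fetched/encoded byte counters
should be nonzero, matching what the "Bytes Fetched:" line on the HTML
/helper_status page reports.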