helper: add stats for the gatherer, show some on the webish welcome page
author     Brian Warner <warner@allmydata.com>
           Thu, 27 Mar 2008 22:55:32 +0000 (15:55 -0700)
committer  Brian Warner <warner@allmydata.com>
           Thu, 27 Mar 2008 22:55:32 +0000 (15:55 -0700)
src/allmydata/offloaded.py
src/allmydata/test/test_helper.py
src/allmydata/web/welcome.xhtml
src/allmydata/webish.py

index 5b6246049fb502a6ab3e3e794947a3349a7508f2..a7bde89e62e7f7904b8dea42207cdb1306bbce06 100644 (file)
@@ -456,7 +456,7 @@ class LocalCiphertextReader(AskUntilSuccessMixin):
 
 
 class Helper(Referenceable, service.MultiService):
-    implements(interfaces.RIHelper)
+    implements(interfaces.RIHelper, interfaces.IStatsProducer)
     # this is the non-distributed version. When we need to have multiple
     # helpers, this object will become the HelperCoordinator, and will query
     # the farm of Helpers to see if anyone has the storage_index of interest,
@@ -473,14 +473,45 @@ class Helper(Referenceable, service.MultiService):
         fileutil.make_dirs(self._chk_incoming)
         fileutil.make_dirs(self._chk_encoding)
         self._active_uploads = {}
+        self._stats = {"CHK_upload_requests": 0,
+                       "CHK_upload_already_present": 0,
+                       "CHK_upload_need_upload": 0,
+                       }
         service.MultiService.__init__(self)
 
+    def setServiceParent(self, parent):
+        service.MultiService.setServiceParent(self, parent)
+        stats = parent.stats_provider
+        if stats:
+            stats.register_producer(self)
+
+    def get_stats(self):
+        chk_incoming_files, chk_incoming_size = 0,0
+        chk_encoding_files, chk_encoding_size = 0,0
+        for fn in os.listdir(self._chk_incoming):
+            size = os.stat(os.path.join(self._chk_incoming, fn))[stat.ST_SIZE]
+            chk_incoming_files += 1
+            chk_incoming_size += size
+        for fn in os.listdir(self._chk_encoding):
+            size = os.stat(os.path.join(self._chk_encoding, fn))[stat.ST_SIZE]
+            chk_encoding_files += 1
+            chk_encoding_size += size
+        stats = {"CHK_active_uploads": len(self._active_uploads),
+                 "CHK_incoming_files": chk_incoming_files,
+                 "CHK_incoming_size": chk_incoming_size,
+                 "CHK_encoding_files": chk_encoding_files,
+                 "CHK_encoding_size": chk_encoding_size,
+                 }
+        stats.update(self._stats)
+        return {"helper": stats}
+
     def log(self, *args, **kwargs):
         if 'facility' not in kwargs:
             kwargs['facility'] = "tahoe.helper"
         return self.parent.log(*args, **kwargs)
 
     def remote_upload_chk(self, storage_index):
+        self._stats["CHK_upload_requests"] += 1
         r = upload.UploadResults()
         started = time.time()
         si_s = storage.si_b2a(storage_index)
@@ -498,9 +529,11 @@ class Helper(Referenceable, service.MultiService):
             r.timings['existence_check'] = elapsed
             if already_present:
                 # the necessary results are placed in the UploadResults
+                self._stats["CHK_upload_already_present"] += 1
                 self.log("file already found in grid", parent=lp)
                 return (r, None)
 
+            self._stats["CHK_upload_need_upload"] += 1
             # the file is not present in the grid, by which we mean there are
             # less than 'N' shares available.
             self.log("unable to find file in the grid", parent=lp,
index 9470a0dc7b47e28773366a94168bf0b7bfcecc70..d72b9514534d803734167e61361b1655ca4b6519 100644 (file)
@@ -58,6 +58,7 @@ class FakeClient(service.MultiService):
                                    "n": 100,
                                    "max_segment_size": 1*MiB,
                                    }
+    stats_provider = None
     def log(self, *args, **kwargs):
         return log.msg(*args, **kwargs)
     def get_encoding_parameters(self):
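
As a hedged illustration of what a test could assert about the new counters (not part of the test_helper.py changes above): the request counter is bumped when remote_upload_chk() starts, and exactly one of the other two counters is bumped once the existence check resolves, so the following invariant holds.

    def check_helper_counters(helper):
        # Illustrative only: every resolved request is either already
        # present in the grid or needs an upload; requests still in
        # flight account for the inequality.
        stats = helper.get_stats()["helper"]
        assert stats["CHK_upload_requests"] >= (
            stats["CHK_upload_already_present"]
            + stats["CHK_upload_need_upload"])
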
index 9bb24580acce603d2eba57c575debf52d7dbc719..be608beeb406edf08b181845ba9368b5d31850d1 100644 (file)
@@ -20,7 +20,7 @@ Downloads</a></div>
 <div>My nodeid: <span n:render="string" n:data="my_nodeid" /></div>
 <div>My versions: <span n:render="string" n:data="version" /></div>
 <div>Tahoe code imported from: <span n:render="string" n:data="import_path" /></div>
-<div>My Storage Server: <span n:render="string" n:data="storage" /></div>
+<div n:render="services">Services Running:</div>
 
 <br />
 
index 8bda3d49d4a203761027c334295d0ea90fc4002a..7748a74933ae4c2f58a77077f8d8416ba2e4eaa8 100644 (file)
@@ -1452,17 +1452,30 @@ class Root(rend.Page):
         return str(allmydata)
     def data_my_nodeid(self, ctx, data):
         return idlib.nodeid_b2a(IClient(ctx).nodeid)
-    def data_storage(self, ctx, data):
+
+    def render_services(self, ctx, data):
+        ul = T.ul()
         client = IClient(ctx)
         try:
             ss = client.getServiceNamed("storage")
+            allocated_s = abbreviate_size(ss.allocated_size())
+            allocated = "about %s allocated" % allocated_s
+            sizelimit = "no size limit"
+            if ss.sizelimit is not None:
+                sizelimit = "size limit is %s" % abbreviate_size(ss.sizelimit)
+            ul[T.li["Storage Server: %s, %s" % (allocated, sizelimit)]]
         except KeyError:
-            return "Not running"
-        allocated = "about %s allocated" % abbreviate_size(ss.allocated_size())
-        sizelimit = "no size limit"
-        if ss.sizelimit is not None:
-            sizelimit = "size limit is %s" % abbreviate_size(ss.sizelimit)
-        return "%s, %s" % (allocated, sizelimit)
+            ul[T.li["Not running storage server"]]
+
+        try:
+            h = client.getServiceNamed("helper")
+            stats = h.get_stats()
+            active_uploads = stats["helper"]["CHK_active_uploads"]
+            ul[T.li["Helper: %d active uploads" % (active_uploads,)]]
+        except KeyError:
+            ul[T.li["Not running helper"]]
+
+        return ctx.tag[ul]
 
     def data_introducer_furl(self, ctx, data):
         return IClient(ctx).introducer_furl
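
For reference, the welcome-page logic above reduces to the following sketch; 'client' stands for any object exposing getServiceNamed(), as in the diff, and the active-upload count comes from the nested dict returned by Helper.get_stats().

    def helper_active_uploads(client):
        # Returns None when no helper service is running, mirroring the
        # KeyError handling in render_services() above.
        try:
            h = client.getServiceNamed("helper")
        except KeyError:
            return None
        return h.get_stats()["helper"]["CHK_active_uploads"]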