webish: download-results: add per-server response times
author    Brian Warner <warner@allmydata.com>
Tue, 4 Mar 2008 03:53:45 +0000 (20:53 -0700)
committer Brian Warner <warner@allmydata.com>
Tue, 4 Mar 2008 03:53:45 +0000 (20:53 -0700)
src/allmydata/download.py
src/allmydata/interfaces.py
src/allmydata/web/download-status.xhtml
src/allmydata/webish.py

src/allmydata/download.py
index e96bc6174999008951cfc889f4b3ccd5aa3398ce..53b268e71d8e452bd0c50f911e5fec521f2503ab 100644
@@ -261,12 +261,19 @@ class BlockDownloader:
 
     def start(self, segnum):
         lognum = self.log("get_block(segnum=%d)" % segnum)
+        started = time.time()
         d = self.vbucket.get_block(segnum)
         d.addCallbacks(self._hold_block, self._got_block_error,
-                       callbackArgs=(lognum,), errbackArgs=(lognum,))
+                       callbackArgs=(started, lognum,), errbackArgs=(lognum,))
         return d
 
-    def _hold_block(self, data, lognum):
+    def _hold_block(self, data, started, lognum):
+        if self.results:
+            elapsed = time.time() - started
+            peerid = self.vbucket.bucket.get_peerid()
+            if peerid not in self.results.timings["fetch_per_server"]:
+                self.results.timings["fetch_per_server"][peerid] = []
+            self.results.timings["fetch_per_server"][peerid].append(elapsed)
         self.log("got block", parent=lognum)
         self.parent.hold_block(self.blocknum, data)
 
@@ -331,6 +338,8 @@ class SegmentDownloader:
         for blocknum, vbucket in active_buckets.iteritems():
             bd = BlockDownloader(vbucket, blocknum, self, self.results)
             downloaders.append(bd)
+            if self.results:
+                self.results.servers_used.add(vbucket.bucket.get_peerid())
         l = [bd.start(self.segmentnumber) for bd in downloaders]
         return defer.DeferredList(l, fireOnOneErrback=True)
 
@@ -428,6 +437,7 @@ class FileDownloader:
         s.set_results(self._results)
         self._results.file_size = self._size
         self._results.timings["servers_peer_selection"] = {}
+        self._results.timings["fetch_per_server"] = {}
         self._results.timings["cumulative_fetch"] = 0.0
         self._results.timings["cumulative_decode"] = 0.0
         self._results.timings["cumulative_decrypt"] = 0.0
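
The download.py hunks above thread a start timestamp through get_block() and bucket each block's fetch latency under the serving peer's id. A minimal standalone sketch of that accumulation pattern (the function and variable names here are hypothetical, not part of the patch):

    import time

    def record_fetch_time(timings, peerid, started):
        # append this block's fetch latency to the per-server list, creating
        # the list on first use (the patch spells out the membership check;
        # setdefault is the equivalent shortcut)
        elapsed = time.time() - started
        timings["fetch_per_server"].setdefault(peerid, []).append(elapsed)

    # usage: note the time before asking a bucket for a block, record on arrival
    timings = {"fetch_per_server": {}}
    started = time.time()
    # ... the bucket's get_block() Deferred fires with the block data ...
    record_fetch_time(timings, "example-peerid", started)
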
src/allmydata/interfaces.py
index 37c0efd0652df362dc0c939f608d5b98e96869d8..825169579cf18d44bf57a1fa577450cc0be5d861 100644
@@ -1308,7 +1308,7 @@ class IDownloadResults(Interface):
     public attributes which contain details about the download process.::
 
      .file_size : the size of the file, in bytes
-      .servers_used : set of server peerids that were used during download
+     .servers_used : set of server peerids that were used during download
      .server_problems : dict mapping server peerid to a problem string. Only
                         servers that had problems (bad hashes, disconnects) are
                         listed here.
@@ -1324,7 +1324,7 @@ class IDownloadResults(Interface):
        cumulative_decode : just time spent in zfec
        cumulative_decrypt : just time spent in decryption
        total : total download time, start to finish
-        servers_fetching : dict of peerid to list of per-segment fetch times
+       fetch_per_server : dict of peerid to list of per-segment fetch times
 
     """
 
src/allmydata/web/download-status.xhtml
index a38f54ffd8481190d716d5ba7b92ba434d24842c..5e563acfdd1ebf78689609113a51268aea6d8978 100644
@@ -22,9 +22,9 @@
 <div n:render="results">
   <h2>Download Results</h2>
   <ul>
-    <li>Servers Used: <span n:render="servers_used" /></li>
+    <li n:render="servers_used" />
     <li>Servermap: <span n:render="servermap" /></li>
-    <li n:render="problems"></li>
+    <li n:render="problems" />
     <li>Timings:</li>
     <ul>
       <li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>
@@ -45,6 +45,7 @@
           (<span n:render="rate" n:data="rate_decrypt" />)</li>
         </ul>
       </ul>
+      <li n:render="server_timings" />
     </ul>
   </ul>
 </div>
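
Moving the n:render directive onto the <li> element itself lets each renderer decide whether its bullet appears at all: returning an empty string drops the row, while returning a populated T.li replaces it. A sketch of that pattern (hypothetical renderer class; assumes data is already a list of display-ready peerid strings):

    from nevow import rend, tags as T

    class ResultsFragment(rend.Page):
        def render_servers_used(self, ctx, data):
            servers_used = data        # assumed: display-ready peerid strings
            if not servers_used:
                return ""              # the whole <li> vanishes from the page
            return T.li["Servers Used: ", ", ".join(servers_used)]
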
src/allmydata/webish.py
index b57b9e05bdad7e202d0c16eaa3b196418a769e79..ed24496fa55a014e6021d4d524faf9b7b967af85 100644
@@ -1674,9 +1674,6 @@ class UploadStatusPage(UploadResultsRendererMixin, rend.Page):
 class DownloadResultsRendererMixin:
     # this requires a method named 'download_results'
 
-    def render_servers_used(self, ctx, data):
-        return "nope"
-
     def render_servermap(self, ctx, data):
         d = self.download_results()
         d.addCallback(lambda res: res.servermap)
@@ -1695,6 +1692,18 @@ class DownloadResultsRendererMixin:
         d.addCallback(_render)
         return d
 
+    def render_servers_used(self, ctx, data):
+        d = self.download_results()
+        d.addCallback(lambda res: res.servers_used)
+        def _got(servers_used):
+            if not servers_used:
+                return ""
+            peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
+                                   for peerid in servers_used])
+            return T.li["Servers Used: ", peerids_s]
+        d.addCallback(_got)
+        return d
+
     def render_problems(self, ctx, data):
         d = self.download_results()
         d.addCallback(lambda res: res.server_problems)
@@ -1796,6 +1805,22 @@ class DownloadResultsRendererMixin:
     def data_rate_decrypt(self, ctx, data):
         return self._get_rate("cumulative_decrypt")
 
+    def render_server_timings(self, ctx, data):
+        d = self.download_results()
+        d.addCallback(lambda res: res.timings.get("fetch_per_server"))
+        def _render(per_server):
+            if per_server is None:
+                return ""
+            l = T.ul()
+            for peerid in sorted(per_server.keys()):
+                peerid_s = idlib.shortnodeid_b2a(peerid)
+                times_s = ", ".join([self.render_time(None, t)
+                                     for t in per_server[peerid]])
+                l[T.li["[%s]: %s" % (peerid_s, times_s)]]
+            return T.li["Per-Server Segment Fetch Response Times: ", l]
+        d.addCallback(_render)
+        return d
+
 class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
     docFactory = getxmlfile("download-status.xhtml")
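
render_server_timings leans on self.render_time, which is defined outside the hunks shown here. A plain-Python sketch of the same per-server formatting, with a stand-in time formatter (the "%.2fs" format and the short peerid are assumptions for illustration):

    def format_server_timings(per_server, format_time=lambda t: "%.2fs" % t):
        # one line per server, fetch times listed in segment order
        lines = []
        for peerid in sorted(per_server.keys()):
            times_s = ", ".join([format_time(t) for t in per_server[peerid]])
            lines.append("[%s]: %s" % (peerid, times_s))
        return lines

    # format_server_timings({"xgru5adv": [0.21, 0.19]})
    #   -> ['[xgru5adv]: 0.21s, 0.19s']
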