def start(self, segnum):
    """Fetch block 'segnum' from our bucket.

    Records the wall-clock start time so that _hold_block can compute the
    per-server fetch duration for the download results.

    Returns the Deferred from vbucket.get_block(), with _hold_block /
    _got_block_error attached as callback/errback.
    """
    lognum = self.log("get_block(segnum=%d)" % segnum)
    started = time.time()
    d = self.vbucket.get_block(segnum)
    # _hold_block needs both the start time and the log number; the
    # errback only needs the log number.
    d.addCallbacks(self._hold_block, self._got_block_error,
                   callbackArgs=(started, lognum,), errbackArgs=(lognum,))
    return d
- def _hold_block(self, data, lognum):
+ def _hold_block(self, data, started, lognum):
+ if self.results:
+ elapsed = time.time() - started
+ peerid = self.vbucket.bucket.get_peerid()
+ if peerid not in self.results.timings["fetch_per_server"]:
+ self.results.timings["fetch_per_server"][peerid] = []
+ self.results.timings["fetch_per_server"][peerid].append(elapsed)
self.log("got block", parent=lognum)
self.parent.hold_block(self.blocknum, data)
# NOTE(review): fragment of a larger method — the enclosing def is not
# visible in this hunk; 'active_buckets' and 'downloaders' are defined
# above, outside this view.
for blocknum, vbucket in active_buckets.iteritems():
bd = BlockDownloader(vbucket, blocknum, self, self.results)
downloaders.append(bd)
# (patch addition) record each server that participates in the download
+ if self.results:
+ self.results.servers_used.add(vbucket.bucket.get_peerid())
# Start every block fetch for this segment and fail fast on the first error.
l = [bd.start(self.segmentnumber) for bd in downloaders]
return defer.DeferredList(l, fireOnOneErrback=True)
# NOTE(review): fragment — 's' and 'self._results' are created outside
# this view; presumably download-status setup. Confirm against full file.
s.set_results(self._results)
self._results.file_size = self._size
# Pre-create the timing buckets that later download stages accumulate into.
self._results.timings["servers_peer_selection"] = {}
# (patch addition) per-server segment-fetch times: peerid -> list of floats
+ self._results.timings["fetch_per_server"] = {}
self._results.timings["cumulative_fetch"] = 0.0
self._results.timings["cumulative_decode"] = 0.0
self._results.timings["cumulative_decrypt"] = 0.0
public attributes which contain details about the download process.::
.file_size : the size of the file, in bytes
 .servers_used : set of server peerids that were used during download
.server_problems : dict mapping server peerid to a problem string. Only
servers that had problems (bad hashes, disconnects) are
listed here.
cumulative_decode : just time spent in zfec
cumulative_decrypt : just time spent in decryption
total : total download time, start to finish
 fetch_per_server : dict of peerid to list of per-segment fetch times
"""
<div n:render="results">
<h2>Download Results</h2>
<ul>
- <li>Servers Used: <span n:render="servers_used" /></li>
+ <li n:render="servers_used" />
<li>Servermap: <span n:render="servermap" /></li>
- <li n:render="problems"></li>
+ <li n:render="problems" />
<li>Timings:</li>
<ul>
<li>File Size: <span n:render="string" n:data="file_size" /> bytes</li>
(<span n:render="rate" n:data="rate_decrypt" />)</li>
</ul>
</ul>
+ <li n:render="server_timings" />
</ul>
</ul>
</div>
class DownloadResultsRendererMixin:
# this requires a method named 'download_results'
- def render_servers_used(self, ctx, data):
- return "nope"
-
# Render the download's servermap from the deferred download_results().
# NOTE(review): '_render' is not defined in this hunk — presumably a local
# or module-level helper in the full file; confirm before relying on it.
def render_servermap(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.servermap)
d.addCallback(_render)
return d
def render_servers_used(self, ctx, data):
    """Render the set of servers used during the download as an <li>.

    Returns a Deferred firing with "" when no servers were recorded, or a
    nevow T.li listing the abbreviated peerids.
    """
    d = self.download_results()
    d.addCallback(lambda res: res.servers_used)
    def _got(servers_used):
        if not servers_used:
            return ""
        peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                               for peerid in servers_used])
        return T.li["Servers Used: ", peerids_s]
    d.addCallback(_got)
    return d
# Render per-server problem strings (res.server_problems).
# NOTE(review): the method body is truncated in this hunk — the remaining
# callbacks and the return statement are outside this view.
def render_problems(self, ctx, data):
d = self.download_results()
d.addCallback(lambda res: res.server_problems)
def data_rate_decrypt(self, ctx, data):
    # Decryption throughput, derived from the cumulative_decrypt timing.
    timing_key = "cumulative_decrypt"
    return self._get_rate(timing_key)
def render_server_timings(self, ctx, data):
    """Render per-server segment-fetch response times as a nested <ul>.

    Reads timings["fetch_per_server"] from the download results — a dict
    mapping peerid to a list of per-segment fetch durations. Returns a
    Deferred firing with "" when no such timing data was recorded.
    """
    d = self.download_results()
    d.addCallback(lambda res: res.timings.get("fetch_per_server"))
    def _render(per_server):
        if per_server is None:
            return ""
        l = T.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times_s = ", ".join([self.render_time(None, t)
                                 for t in per_server[peerid]])
            # nevow stan: indexing a tag with a child appends it to the <ul>
            l[T.li["[%s]: %s" % (peerid_s, times_s)]]
        return T.li["Per-Server Segment Fetch Response Times: ", l]
    d.addCallback(_render)
    return d
class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
docFactory = getxmlfile("download-status.xhtml")