-import time, pprint, itertools
+import pprint, itertools
import simplejson
from twisted.internet import defer
from nevow import rend, inevow, tags as T
from allmydata.util import base32, idlib
from allmydata.web.common import getxmlfile, get_arg, \
- abbreviate_time, abbreviate_rate, abbreviate_size, plural, compute_rate
+ abbreviate_time, abbreviate_rate, abbreviate_size, plural, compute_rate, render_time
from allmydata.interfaces import IUploadStatus, IDownloadStatus, \
IPublishStatus, IRetrieveStatus, IServermapUpdaterStatus
def render_pushed_shares(self, ctx, data):
    """Deferred firing with the number of shares newly pushed by this upload."""
    return self.upload_results().addCallback(
        lambda results: results.get_pushed_shares())
def render_preexisting_shares(self, ctx, data):
    """Deferred firing with the number of shares that already existed."""
    return self.upload_results().addCallback(
        lambda results: results.get_preexisting_shares())
def render_sharemap(self, ctx, data):
    """Render the upload's sharemap as a <ul>: one line per share number,
    listing the servers each share was placed on."""
    def _render(sharemap):
        if sharemap is None:
            return "None"
        ul = T.ul()
        for shnum, servers in sorted(sharemap.items()):
            names = ', '.join(s.get_name() for s in servers)
            ul[T.li["%d -> placed on [%s]" % (shnum, names)]]
        return ul
    d = self.upload_results()
    d.addCallback(lambda results: results.get_sharemap())
    d.addCallback(_render)
    return d
def render_servermap(self, ctx, data):
    """Render the upload's servermap as a <ul>: one line per server,
    listing the share numbers it received."""
    def _render(servermap):
        if servermap is None:
            return "None"
        ul = T.ul()
        for server, shnums in sorted(servermap.items()):
            share_list = ",".join("#%d" % shnum for shnum in shnums)
            ul[T.li["[%s] got share%s: %s" % (server.get_name(),
                                              plural(shnums),
                                              share_list)]]
        return ul
    d = self.upload_results()
    d.addCallback(lambda results: results.get_servermap())
    d.addCallback(_render)
    return d
def data_file_size(self, ctx, data):
    """Deferred firing with the size (in bytes) of the uploaded file."""
    return self.upload_results().addCallback(
        lambda results: results.get_file_size())
def _get_time(self, name):
    """Deferred firing with the named entry of the upload's timings dict
    (None when that phase was not recorded)."""
    def _extract(results):
        return results.get_timings().get(name)
    return self.upload_results().addCallback(_extract)
def data_time_total(self, ctx, data):
def data_time_contacting_helper(self, ctx, data):
    # Elapsed time recorded for the "contacting_helper" phase.
    return self._get_time("contacting_helper")
- def data_time_existence_check(self, ctx, data):
- return self._get_time("existence_check")
-
def data_time_cumulative_fetch(self, ctx, data):
    # Elapsed time recorded for the "cumulative_fetch" phase.
    return self._get_time("cumulative_fetch")
def _get_rate(self, name):
    """Deferred firing with the transfer rate (bytes/sec) computed from the
    file size and the named timing phase."""
    def _to_rate(results):
        return compute_rate(results.get_file_size(),
                            results.get_timings().get(name))
    return self.upload_results().addCallback(_to_rate)
def data_rate_encode_and_push(self, ctx, data):
d = self.upload_results()
def _convert(r):
- file_size = r.file_size
- time1 = r.timings.get("cumulative_encoding")
- time2 = r.timings.get("cumulative_sending")
+ file_size = r.get_file_size()
+ time1 = r.get_timings().get("cumulative_encoding")
+ time2 = r.get_timings().get("cumulative_sending")
if (time1 is None or time2 is None):
return None
else:
def data_rate_ciphertext_fetch(self, ctx, data):
    """Deferred firing with the rate at which ciphertext was fetched,
    i.e. bytes fetched divided by the "cumulative_fetch" duration."""
    def _to_rate(results):
        return compute_rate(results.get_ciphertext_fetched(),
                            results.get_timings().get("cumulative_fetch"))
    return self.upload_results().addCallback(_to_rate)
return d
def render_started(self, ctx, data):
    """Render the operation's start time as a human-readable string."""
    return render_time(data.get_started())
def render_si(self, ctx, data):
d = self.download_results()
def _convert(r):
file_size = r.file_size
- time = r.timings.get(name)
- return compute_rate(file_size, time)
+ duration = r.timings.get(name)
+ return compute_rate(file_size, duration)
d.addCallback(_convert)
return d
for ev in events:
ev = ev.copy()
if ev.has_key('server'):
- ev["serverid"] = base32.b2a(ev["server"].get_serverid())
+ ev["serverid"] = ev["server"].get_longname()
del ev["server"]
# find an empty slot in the rows
free_slot = None
for ev in events:
# DownloadStatus promises to give us events in temporal order
ev = ev.copy()
- ev["serverid"] = base32.b2a(ev["server"].get_serverid())
+ ev["serverid"] = ev["server"].get_longname()
del ev["server"]
if ev["serverid"] not in serverid_to_group:
groupnum = len(serverid_to_group)
rows[free_slot] = ev["finish_time"]
ev["row"] = (groupnum, free_slot)
new_events.append(ev)
+ del groupnum
# maybe also return serverid_to_group, groupnum_to_rows, and some
# indication of the highest finish_time
#
"start_time", "finish_time")
data["block"],data["block_rownums"] = self._find_overlap_requests(ds.block_requests)
- servernums = {}
- serverid_strings = {}
- for d_ev in data["dyhb"]:
- if d_ev["serverid"] not in servernums:
- servernum = len(servernums)
- servernums[d_ev["serverid"]] = servernum
- #title= "%s: %s" % ( ",".join([str(shnum) for shnum in shnums]))
- serverid_strings[servernum] = d_ev["serverid"][:4]
- data["server_info"] = dict([(serverid, {"num": servernums[serverid],
- "color": self.color(base32.a2b(serverid)),
- "short": serverid_strings[servernums[serverid]],
- })
- for serverid in servernums.keys()])
- data["num_serverids"] = len(serverid_strings)
+ server_info = {} # maps longname to {num,color,short}
+ server_shortnames = {} # maps servernum to shortname
+ for d_ev in ds.dyhb_requests:
+ s = d_ev["server"]
+ longname = s.get_longname()
+ if longname not in server_info:
+ num = len(server_info)
+ server_info[longname] = {"num": num,
+ "color": self.color(s),
+ "short": s.get_name() }
+ server_shortnames[str(num)] = s.get_name()
+
+ data["server_info"] = server_info
+ data["num_serverids"] = len(server_info)
# we'd prefer the keys of serverids[] to be ints, but this is JSON,
# so they get converted to strings. Stupid javascript.
- data["serverids"] = serverid_strings
+ data["serverids"] = server_shortnames
data["bounds"] = {"min": ds.first_timestamp, "max": ds.last_timestamp}
return simplejson.dumps(data, indent=1) + "\n"
rtt = received - sent
if not shnums:
shnums = ["-"]
- t[T.tr(style="background: %s" % self.color(server.get_serverid()))[
+ t[T.tr(style="background: %s" % self.color(server))[
[T.td[server.get_name()], T.td[srt(sent)], T.td[srt(received)],
T.td[",".join([str(shnum) for shnum in shnums])],
T.td[self.render_time(None, rtt)],
rtt = None
if r_ev["finish_time"] is not None:
rtt = r_ev["finish_time"] - r_ev["start_time"]
- color = self.color(server.get_serverid())
+ color = self.color(server)
t[T.tr(style="background: %s" % color)[
T.td[server.get_name()], T.td[r_ev["shnum"]],
T.td["[%d:+%d]" % (r_ev["start"], r_ev["length"])],
return l
def color(self, server):
    """Return a stable pastel background color ("#rrggbb") for *server*.

    Derived from the first three bytes of the server's binary serverid,
    scaled into the 0x80-0xff range so the color stays light enough to
    read dark text over.
    """
    peerid = server.get_serverid() # binary
    def m(c):
        # // makes the floor division explicit: identical to Python 2's
        # int "/", and still correct (no float) under Python 3.
        return min(ord(c) // 2 + 0x80, 0xff)
    return "#%02x%02x%02x" % (m(peerid[0]), m(peerid[1]), m(peerid[2]))
return d
def render_started(self, ctx, data):
    """Render the start time as human-readable text plus the raw timestamp."""
    started = data.get_started()
    return render_time(started) + " (%s)" % started
def render_si(self, ctx, data):
docFactory = getxmlfile("download-status-timeline.xhtml")
def render_started(self, ctx, data):
    """Render the start time as human-readable text plus the raw timestamp."""
    started = data.get_started()
    return render_time(started) + " (%s)" % started
def render_si(self, ctx, data):
self.retrieve_status = data
def render_started(self, ctx, data):
    """Render the retrieve's start time as a human-readable string."""
    return render_time(data.get_started())
def render_si(self, ctx, data):
def _get_rate(self, data, name):
    """Rate (bytes/sec): retrieved size over the named timing phase."""
    status = self.retrieve_status
    return compute_rate(status.get_size(), status.timings.get(name))
def data_time_total(self, ctx, data):
    # Total elapsed time recorded for the whole retrieve.
    return self.retrieve_status.timings.get("total")
self.publish_status = data
def render_started(self, ctx, data):
    """Render the publish's start time as a human-readable string."""
    return render_time(data.get_started())
def render_si(self, ctx, data):
def _get_rate(self, data, name):
    """Rate (bytes/sec): published size over the named timing phase."""
    status = self.publish_status
    return compute_rate(status.get_size(), status.timings.get(name))
def data_time_total(self, ctx, data):
    # Total elapsed time recorded for the whole publish.
    return self.publish_status.timings.get("total")
self.update_status = data
def render_started(self, ctx, data):
    """Render the servermap update's start time as a human-readable string."""
    return render_time(data.get_started())
def render_finished(self, ctx, data):
    """Render the finish time, or "not yet" while the operation is running."""
    finished = data.get_finished()
    if not finished:
        return "not yet"
    # reuse the value we already fetched instead of calling get_finished()
    # a second time (also fixes the misleading "started_s" variable name)
    return render_time(finished)
def render_si(self, ctx, data):
def render_row(self, ctx, data):
s = data
- TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
- started_s = time.strftime(TIME_FORMAT,
- time.localtime(s.get_started()))
+ started_s = render_time(s.get_started())
ctx.fillSlots("started", started_s)
si_s = base32.b2a_or_none(s.get_storage_index())