+ def relative_time(self, t):
+ if t is None:
+ return t
+ if self.download_status.first_timestamp is not None:
+ return t - self.download_status.first_timestamp
+ return t
+ def short_relative_time(self, t):
+ t = self.relative_time(t)
+ if t is None:
+ return ""
+ return "+%.6fs" % t
+
+ def _find_overlap(self, events, start_key, end_key):
+ # given a list of event dicts, return a new list in which each event
+ # has an extra "row" key (an int, starting at 0), and if appropriate
+ # a "serverid" key (ascii-encoded server id), replacing the "server"
+ # key. This is a hint to our JS frontend about how to overlap the
+ # parts of the graph it is drawing.
+
+ # we must always make a copy, since we're going to be adding keys
+ # and don't want to change the original objects. If we're
+ # stringifying serverids, we'll also be changing the serverid keys.
+ new_events = []
+ rows = []
+ for ev in events:
+ ev = ev.copy()
+ if ev.has_key('server'):
+ ev["serverid"] = ev["server"].get_longname()
+ del ev["server"]
+ # find an empty slot in the rows
+ free_slot = None
+ for row,finished in enumerate(rows):
+ if finished is not None:
+ if ev[start_key] > finished:
+ free_slot = row
+ break
+ if free_slot is None:
+ free_slot = len(rows)
+ rows.append(ev[end_key])
+ else:
+ rows[free_slot] = ev[end_key]
+ ev["row"] = free_slot
+ new_events.append(ev)
+ return new_events
+
+ def _find_overlap_requests(self, events):
+ """We compute a three-element 'row tuple' for each event: (serverid,
+ shnum, row). All elements are ints. The first is a mapping from
+ serverid to group number, the second is a mapping from shnum to
+ subgroup number. The third is a row within the subgroup.
+
+ We also return a list of lists of rowcounts, so renderers can decide
+ how much vertical space to give to each row.
+ """
+
+ serverid_to_group = {}
+ groupnum_to_rows = {} # maps groupnum to a table of rows. Each table
+ # is a list with an element for each row number
+ # (int starting from 0) that contains a
+ # finish_time, indicating that the row is empty
+ # beyond that time. If finish_time is None, it
+ # indicate a response that has not yet
+ # completed, so the row cannot be reused.
+ new_events = []
+ for ev in events:
+ # DownloadStatus promises to give us events in temporal order
+ ev = ev.copy()
+ ev["serverid"] = ev["server"].get_longname()
+ del ev["server"]
+ if ev["serverid"] not in serverid_to_group:
+ groupnum = len(serverid_to_group)
+ serverid_to_group[ev["serverid"]] = groupnum
+ groupnum = serverid_to_group[ev["serverid"]]
+ if groupnum not in groupnum_to_rows:
+ groupnum_to_rows[groupnum] = []
+ rows = groupnum_to_rows[groupnum]
+ # find an empty slot in the rows
+ free_slot = None
+ for row,finished in enumerate(rows):
+ if finished is not None:
+ if ev["start_time"] > finished:
+ free_slot = row
+ break
+ if free_slot is None:
+ free_slot = len(rows)
+ rows.append(ev["finish_time"])
+ else:
+ rows[free_slot] = ev["finish_time"]
+ ev["row"] = (groupnum, free_slot)
+ new_events.append(ev)
+ del groupnum
+ # maybe also return serverid_to_group, groupnum_to_rows, and some
+ # indication of the highest finish_time
+ #
+ # actually, return the highest rownum for each groupnum
+ highest_rownums = [len(groupnum_to_rows[groupnum])
+ for groupnum in range(len(serverid_to_group))]
+ return new_events, highest_rownums
+
+ def child_event_json(self, ctx):
+ inevow.IRequest(ctx).setHeader("content-type", "text/plain")
+ data = { } # this will be returned to the GET
+ ds = self.download_status
+
+ data["misc"] = self._find_overlap(ds.misc_events,
+ "start_time", "finish_time")
+ data["read"] = self._find_overlap(ds.read_events,
+ "start_time", "finish_time")
+ data["segment"] = self._find_overlap(ds.segment_events,
+ "start_time", "finish_time")
+ # TODO: overlap on DYHB isn't very useful, and usually gets in the
+ # way. So don't do it.
+ data["dyhb"] = self._find_overlap(ds.dyhb_requests,
+ "start_time", "finish_time")
+ data["block"],data["block_rownums"] = self._find_overlap_requests(ds.block_requests)
+
+ server_info = {} # maps longname to {num,color,short}
+ server_shortnames = {} # maps servernum to shortname
+ for d_ev in ds.dyhb_requests:
+ s = d_ev["server"]
+ longname = s.get_longname()
+ if longname not in server_info:
+ num = len(server_info)
+ server_info[longname] = {"num": num,
+ "color": self.color(s),
+ "short": s.get_name() }
+ server_shortnames[str(num)] = s.get_name()
+
+ data["server_info"] = server_info
+ data["num_serverids"] = len(server_info)
+ # we'd prefer the keys of serverids[] to be ints, but this is JSON,
+ # so they get converted to strings. Stupid javascript.
+ data["serverids"] = server_shortnames
+ data["bounds"] = {"min": ds.first_timestamp, "max": ds.last_timestamp}
+ return simplejson.dumps(data, indent=1) + "\n"
+
+ def render_timeline_link(self, ctx, data):
+ from nevow import url
+ return T.a(href=url.URL.fromContext(ctx).child("timeline"))["timeline"]
+
+ def _rate_and_time(self, bytes, seconds):
+ time_s = self.render_time(None, seconds)
+ if seconds != 0:
+ rate = self.render_rate(None, 1.0 * bytes / seconds)
+ return T.span(title=rate)[time_s]
+ return T.span[time_s]
+
    def render_events(self, ctx, data):
        """Render the per-event tables for this download: DYHB requests,
        read events, segment events, and block requests."""
        if not self.download_status.storage_index:
            return
        srt = self.short_relative_time
        l = T.div()

        # DYHB (Do-You-Have-Block) queries: one row per server asked
        t = T.table(align="left", class_="status-download-events")
        t[T.tr[T.th["serverid"], T.th["sent"], T.th["received"],
               T.th["shnums"], T.th["RTT"]]]
        for d_ev in self.download_status.dyhb_requests:
            server = d_ev["server"]
            sent = d_ev["start_time"]
            shnums = d_ev["response_shnums"]
            received = d_ev["finish_time"]
            rtt = None
            if received is not None:
                rtt = received - sent
            if not shnums:
                shnums = ["-"]
            # row background is colored per-server for visual grouping
            t[T.tr(style="background: %s" % self.color(server))[
                [T.td[server.get_name()], T.td[srt(sent)], T.td[srt(received)],
                 T.td[",".join([str(shnum) for shnum in shnums])],
                 T.td[self.render_time(None, rtt)],
                 ]]]

        l[T.h2["DYHB Requests:"], t]
        l[T.br(clear="all")]

        # read() calls made by the client against this download
        t = T.table(align="left",class_="status-download-events")
        t[T.tr[T.th["range"], T.th["start"], T.th["finish"], T.th["got"],
               T.th["time"], T.th["decrypttime"], T.th["pausedtime"],
               T.th["speed"]]]
        for r_ev in self.download_status.read_events:
            start = r_ev["start"]
            length = r_ev["length"]
            bytes = r_ev["bytes_returned"]
            decrypt_time = ""
            if bytes:
                decrypt_time = self._rate_and_time(bytes, r_ev["decrypt_time"])
            speed, rtt = "",""
            if r_ev["finish_time"] is not None:
                # exclude paused time from the effective transfer time
                rtt = r_ev["finish_time"] - r_ev["start_time"] - r_ev["paused_time"]
                speed = self.render_rate(None, compute_rate(bytes, rtt))
                rtt = self.render_time(None, rtt)
            paused = self.render_time(None, r_ev["paused_time"])

            t[T.tr[T.td["[%d:+%d]" % (start, length)],
                   T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])],
                   T.td[bytes], T.td[rtt],
                   T.td[decrypt_time], T.td[paused],
                   T.td[speed],
                   ]]

        l[T.h2["Read Events:"], t]
        l[T.br(clear="all")]

        # per-segment fetch/decode events
        t = T.table(align="left",class_="status-download-events")
        t[T.tr[T.th["segnum"], T.th["start"], T.th["active"], T.th["finish"],
               T.th["range"],
               T.th["decodetime"], T.th["segtime"], T.th["speed"]]]
        for s_ev in self.download_status.segment_events:
            # "-" placeholders are shown until the segment finishes
            range_s = "-"
            segtime_s = "-"
            speed = "-"
            decode_time = "-"
            if s_ev["finish_time"] is not None:
                if s_ev["success"]:
                    # segtime measures from when the segment became active,
                    # not from when it was first requested
                    segtime = s_ev["finish_time"] - s_ev["active_time"]
                    segtime_s = self.render_time(None, segtime)
                    seglen = s_ev["segment_length"]
                    range_s = "[%d:+%d]" % (s_ev["segment_start"], seglen)
                    speed = self.render_rate(None, compute_rate(seglen, segtime))
                    decode_time = self._rate_and_time(seglen, s_ev["decode_time"])
                else:
                    # error
                    range_s = "error"
            else:
                # not finished yet
                pass

            t[T.tr[T.td["seg%d" % s_ev["segment_number"]],
                   T.td[srt(s_ev["start_time"])],
                   T.td[srt(s_ev["active_time"])],
                   T.td[srt(s_ev["finish_time"])],
                   T.td[range_s],
                   T.td[decode_time],
                   T.td[segtime_s], T.td[speed]]]

        l[T.h2["Segment Events:"], t]
        l[T.br(clear="all")]
        # individual block requests sent to servers
        t = T.table(align="left",class_="status-download-events")
        t[T.tr[T.th["serverid"], T.th["shnum"], T.th["range"],
               T.th["txtime"], T.th["rxtime"],
               T.th["received"], T.th["RTT"]]]
        for r_ev in self.download_status.block_requests:
            server = r_ev["server"]
            rtt = None
            if r_ev["finish_time"] is not None:
                rtt = r_ev["finish_time"] - r_ev["start_time"]
            color = self.color(server)
            t[T.tr(style="background: %s" % color)[
                T.td[server.get_name()], T.td[r_ev["shnum"]],
                T.td["[%d:+%d]" % (r_ev["start"], r_ev["length"])],
                T.td[srt(r_ev["start_time"])], T.td[srt(r_ev["finish_time"])],
                T.td[r_ev["response_length"] or ""],
                T.td[self.render_time(None, rtt)],
                ]]

        l[T.h2["Requests:"], t]
        l[T.br(clear="all")]

        return l
+
+ def color(self, server):
+ peerid = server.get_serverid() # binary
+ def m(c):
+ return min(ord(c) / 2 + 0x80, 0xff)
+ return "#%02x%02x%02x" % (m(peerid[0]), m(peerid[1]), m(peerid[2]))
+