# src/allmydata/web/status.py (tahoe-lafs)

import time, pprint, itertools
import simplejson
from twisted.internet import defer
from nevow import rend, inevow, tags as T
from allmydata.util import base32, idlib
from allmydata.web.common import IClient, getxmlfile, abbreviate_time, \
     abbreviate_rate, abbreviate_size, get_arg
from allmydata.interfaces import IUploadStatus, IDownloadStatus, \
     IPublishStatus, IRetrieveStatus, IServermapUpdaterStatus

def plural(sequence_or_length):
    if isinstance(sequence_or_length, int):
        length = sequence_or_length
    else:
        length = len(sequence_or_length)
    if length == 1:
        return ""
    return "s"

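# Small mixin giving templates "time" and "rate" renderers that format raw
# seconds and bytes-per-second values into human-readable strings.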
class RateAndTimeMixin:

    def render_time(self, ctx, data):
        return abbreviate_time(data)

    def render_rate(self, ctx, data):
        return abbreviate_rate(data)

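# Renders the post-upload results (share placement, sizes, timings, rates)
# for an immutable upload. Each render_*/data_* method returns a Deferred
# that fires once the value from upload_results() is available.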
class UploadResultsRendererMixin(RateAndTimeMixin):
    # this requires a method named 'upload_results'

    def render_pushed_shares(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.pushed_shares)
        return d

    def render_preexisting_shares(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.preexisting_shares)
        return d

    def render_sharemap(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.sharemap)
        def _render(sharemap):
            if sharemap is None:
                return "None"
            l = T.ul()
            for shnum, peerids in sorted(sharemap.items()):
                for peerid in peerids:
                    peerid_s = idlib.shortnodeid_b2a(peerid)
                    l[T.li["%d -> %s" % (shnum, peerid_s)]]
            return l
        d.addCallback(_render)
        return d

    def render_servermap(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.servermap)
        def _render(servermap):
            if servermap is None:
                return "None"
            l = T.ul()
            for peerid in sorted(servermap.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                shares_s = ",".join(["#%d" % shnum
                                     for shnum in servermap[peerid]])
                l[T.li["[%s] got share%s: %s" % (peerid_s,
                                                 plural(servermap[peerid]),
                                                 shares_s)]]
            return l
        d.addCallback(_render)
        return d

    def data_file_size(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.file_size)
        return d

    def _get_time(self, name):
        d = self.upload_results()
        d.addCallback(lambda res: res.timings.get(name))
        return d

    def data_time_total(self, ctx, data):
        return self._get_time("total")

    def data_time_storage_index(self, ctx, data):
        return self._get_time("storage_index")

    def data_time_contacting_helper(self, ctx, data):
        return self._get_time("contacting_helper")

    def data_time_existence_check(self, ctx, data):
        return self._get_time("existence_check")

    def data_time_cumulative_fetch(self, ctx, data):
        return self._get_time("cumulative_fetch")

    def data_time_helper_total(self, ctx, data):
        return self._get_time("helper_total")

    def data_time_peer_selection(self, ctx, data):
        return self._get_time("peer_selection")

    def data_time_total_encode_and_push(self, ctx, data):
        return self._get_time("total_encode_and_push")

    def data_time_cumulative_encoding(self, ctx, data):
        return self._get_time("cumulative_encoding")

    def data_time_cumulative_sending(self, ctx, data):
        return self._get_time("cumulative_sending")

    def data_time_hashes_and_close(self, ctx, data):
        return self._get_time("hashes_and_close")

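    # Rates are derived values: the file size divided by the elapsed time
    # recorded for the named timing bucket. None is returned when the size
    # or timing is unavailable, or when the elapsed time is zero.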
    def _get_rate(self, name):
        d = self.upload_results()
        def _convert(r):
            file_size = r.file_size
            time = r.timings.get(name)
            if file_size is None or time is None:
                return None
            try:
                return 1.0 * file_size / time
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

    def data_rate_total(self, ctx, data):
        return self._get_rate("total")

    def data_rate_storage_index(self, ctx, data):
        return self._get_rate("storage_index")

    def data_rate_encode(self, ctx, data):
        return self._get_rate("cumulative_encoding")

    def data_rate_push(self, ctx, data):
        return self._get_rate("cumulative_sending")

    def data_rate_encode_and_push(self, ctx, data):
        d = self.upload_results()
        def _convert(r):
            file_size = r.file_size
            time1 = r.timings.get("cumulative_encoding")
            time2 = r.timings.get("cumulative_sending")
            if (file_size is None or time1 is None or time2 is None):
                return None
            try:
                return 1.0 * file_size / (time1+time2)
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

    def data_rate_ciphertext_fetch(self, ctx, data):
        d = self.upload_results()
        def _convert(r):
            fetch_size = r.ciphertext_fetched
            time = r.timings.get("cumulative_fetch")
            if (fetch_size is None or time is None):
                return None
            try:
                return 1.0 * fetch_size / time
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

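# Renders upload-status.xhtml for a single IUploadStatus: the progress of
# the three upload phases plus, once the upload has finished, the results
# rendered by UploadResultsRendererMixin.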
class UploadStatusPage(UploadResultsRendererMixin, rend.Page):
    docFactory = getxmlfile("upload-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.upload_status = data

    def upload_results(self):
        return defer.maybeDeferred(self.upload_status.get_results)

    def render_results(self, ctx, data):
        d = self.upload_results()
        def _got_results(results):
            if results:
                return ctx.tag
            return ""
        d.addCallback(_got_results)
        return d

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_total_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            return "(unknown)"
        return size

    def render_progress_hash(self, ctx, data):
        progress = data.get_progress()[0]
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_progress_ciphertext(self, ctx, data):
        progress = data.get_progress()[1]
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_progress_encode_push(self, ctx, data):
        progress = data.get_progress()[2]
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

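# Like UploadResultsRendererMixin, but for immutable downloads: requires a
# download_results() method returning a Deferred results object.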
class DownloadResultsRendererMixin(RateAndTimeMixin):
    # this requires a method named 'download_results'

    def render_servermap(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.servermap)
        def _render(servermap):
            if servermap is None:
                return "None"
            l = T.ul()
            for peerid in sorted(servermap.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                shares_s = ",".join(["#%d" % shnum
                                     for shnum in servermap[peerid]])
                l[T.li["[%s] has share%s: %s" % (peerid_s,
                                                 plural(servermap[peerid]),
                                                 shares_s)]]
            return l
        d.addCallback(_render)
        return d

    def render_servers_used(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.servers_used)
        def _got(servers_used):
            if not servers_used:
                return ""
            peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                                   for peerid in servers_used])
            return T.li["Servers Used: ", peerids_s]
        d.addCallback(_got)
        return d

    def render_problems(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.server_problems)
        def _got(server_problems):
            if not server_problems:
                return ""
            l = T.ul()
            for peerid in sorted(server_problems.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                l[T.li["[%s]: %s" % (peerid_s, server_problems[peerid])]]
            return T.li["Server Problems:", l]
        d.addCallback(_got)
        return d

    def data_file_size(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.file_size)
        return d

    def _get_time(self, name):
        d = self.download_results()
        d.addCallback(lambda res: res.timings.get(name))
        return d

    def data_time_total(self, ctx, data):
        return self._get_time("total")

    def data_time_peer_selection(self, ctx, data):
        return self._get_time("peer_selection")

    def data_time_uri_extension(self, ctx, data):
        return self._get_time("uri_extension")

    def data_time_hashtrees(self, ctx, data):
        return self._get_time("hashtrees")

    def data_time_segments(self, ctx, data):
        return self._get_time("segments")

    def data_time_cumulative_fetch(self, ctx, data):
        return self._get_time("cumulative_fetch")

    def data_time_cumulative_decode(self, ctx, data):
        return self._get_time("cumulative_decode")

    def data_time_cumulative_decrypt(self, ctx, data):
        return self._get_time("cumulative_decrypt")

    def data_time_paused(self, ctx, data):
        return self._get_time("paused")

    def _get_rate(self, name):
        d = self.download_results()
        def _convert(r):
            file_size = r.file_size
            time = r.timings.get(name)
            if file_size is None or time is None:
                return None
            try:
                return 1.0 * file_size / time
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

    def data_rate_total(self, ctx, data):
        return self._get_rate("total")

    def data_rate_segments(self, ctx, data):
        return self._get_rate("segments")

    def data_rate_fetch(self, ctx, data):
        return self._get_rate("cumulative_fetch")

    def data_rate_decode(self, ctx, data):
        return self._get_rate("cumulative_decode")

    def data_rate_decrypt(self, ctx, data):
        return self._get_rate("cumulative_decrypt")

    def render_server_timings(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.timings.get("fetch_per_server"))
        def _render(per_server):
            if per_server is None:
                return ""
            l = T.ul()
            for peerid in sorted(per_server.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                times_s = ", ".join([self.render_time(None, t)
                                     for t in per_server[peerid]])
                l[T.li["[%s]: %s" % (peerid_s, times_s)]]
            return T.li["Per-Server Segment Fetch Response Times: ", l]
        d.addCallback(_render)
        return d

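# Renders download-status.xhtml for a single IDownloadStatus.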
class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
    docFactory = getxmlfile("download-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.download_status = data

    def download_results(self):
        return defer.maybeDeferred(self.download_status.get_results)

    def render_results(self, ctx, data):
        d = self.download_results()
        def _got_results(results):
            if results:
                return ctx.tag
            return ""
        d.addCallback(_got_results)
        return d

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_total_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            return "(unknown)"
        return size

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

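# Renders retrieve-status.xhtml for a single IRetrieveStatus (a mutable-file
# read). Timings and rates come straight from the status object, so these
# renderers are plain synchronous accessors rather than Deferreds.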
class RetrieveStatusPage(rend.Page, RateAndTimeMixin):
    docFactory = getxmlfile("retrieve-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.retrieve_status = data

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_current_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            size = "(unknown)"
        return size

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

    def render_encoding(self, ctx, data):
        k, n = data.get_encoding()
        return ctx.tag["Encoding: %s of %s" % (k, n)]

    def render_problems(self, ctx, data):
        problems = data.problems
        if not problems:
            return ""
        l = T.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l[T.li["[%s]: %s" % (peerid_s, problems[peerid])]]
        return ctx.tag["Server Problems:", l]

    def _get_rate(self, data, name):
        file_size = self.retrieve_status.get_size()
        time = self.retrieve_status.timings.get(name)
        if time is None or file_size is None:
            return None
        try:
            return 1.0 * file_size / time
        except ZeroDivisionError:
            return None

    def data_time_total(self, ctx, data):
        return self.retrieve_status.timings.get("total")
    def data_rate_total(self, ctx, data):
        return self._get_rate(data, "total")

    def data_time_fetch(self, ctx, data):
        return self.retrieve_status.timings.get("fetch")
    def data_rate_fetch(self, ctx, data):
        return self._get_rate(data, "fetch")

    def data_time_decode(self, ctx, data):
        return self.retrieve_status.timings.get("decode")
    def data_rate_decode(self, ctx, data):
        return self._get_rate(data, "decode")

    def data_time_decrypt(self, ctx, data):
        return self.retrieve_status.timings.get("decrypt")
    def data_rate_decrypt(self, ctx, data):
        return self._get_rate(data, "decrypt")

    def render_server_timings(self, ctx, data):
        per_server = self.retrieve_status.timings.get("fetch_per_server")
        if not per_server:
            return ""
        l = T.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times_s = ", ".join([self.render_time(None, t)
                                 for t in per_server[peerid]])
            l[T.li["[%s]: %s" % (peerid_s, times_s)]]
        return T.li["Per-Server Fetch Response Times: ", l]


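# Renders publish-status.xhtml for a single IPublishStatus (a mutable-file
# write).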
class PublishStatusPage(rend.Page, RateAndTimeMixin):
    docFactory = getxmlfile("publish-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.publish_status = data

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_current_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            size = "(unknown)"
        return size

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

    def render_encoding(self, ctx, data):
        k, n = data.get_encoding()
        return ctx.tag["Encoding: %s of %s" % (k, n)]

    def render_sharemap(self, ctx, data):
        servermap = data.get_servermap()
        if servermap is None:
            return ctx.tag["None"]
        l = T.ul()
        sharemap = servermap.make_sharemap()
        for shnum in sorted(sharemap.keys()):
            l[T.li["%d -> Placed on " % shnum,
                   ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                              for peerid in sharemap[shnum]])]]
        return ctx.tag["Sharemap:", l]

    def render_problems(self, ctx, data):
        problems = data.problems
        if not problems:
            return ""
        l = T.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l[T.li["[%s]: %s" % (peerid_s, problems[peerid])]]
        return ctx.tag["Server Problems:", l]

    def _get_rate(self, data, name):
        file_size = self.publish_status.get_size()
        time = self.publish_status.timings.get(name)
        if time is None or file_size is None:
            return None
        try:
            return 1.0 * file_size / time
        except ZeroDivisionError:
            return None

    def data_time_total(self, ctx, data):
        return self.publish_status.timings.get("total")
    def data_rate_total(self, ctx, data):
        return self._get_rate(data, "total")

    def data_time_setup(self, ctx, data):
        return self.publish_status.timings.get("setup")

    def data_time_encrypt(self, ctx, data):
        return self.publish_status.timings.get("encrypt")
    def data_rate_encrypt(self, ctx, data):
        return self._get_rate(data, "encrypt")

    def data_time_encode(self, ctx, data):
        return self.publish_status.timings.get("encode")
    def data_rate_encode(self, ctx, data):
        return self._get_rate(data, "encode")

    def data_time_pack(self, ctx, data):
        return self.publish_status.timings.get("pack")
    def data_rate_pack(self, ctx, data):
        return self._get_rate(data, "pack")
    def data_time_sign(self, ctx, data):
        return self.publish_status.timings.get("sign")

    def data_time_push(self, ctx, data):
        return self.publish_status.timings.get("push")
    def data_rate_push(self, ctx, data):
        return self._get_rate(data, "push")

    def render_server_timings(self, ctx, data):
        per_server = self.publish_status.timings.get("send_per_server")
        if not per_server:
            return ""
        l = T.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times_s = ", ".join([self.render_time(None, t)
                                 for t in per_server[peerid]])
            l[T.li["[%s]: %s" % (peerid_s, times_s)]]
        return T.li["Per-Server Response Times: ", l]

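# Renders map-update-status.xhtml for a single IServermapUpdaterStatus
# (a mutable-file servermap update).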
class MapupdateStatusPage(rend.Page, RateAndTimeMixin):
    docFactory = getxmlfile("map-update-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.update_status = data

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_finished(self, ctx, data):
        when = data.get_finished()
        if not when:
            return "not yet"
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_finished()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

    def render_problems(self, ctx, data):
        problems = data.problems
        if not problems:
            return ""
        l = T.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l[T.li["[%s]: %s" % (peerid_s, problems[peerid])]]
        return ctx.tag["Server Problems:", l]

    def render_privkey_from(self, ctx, data):
        peerid = data.get_privkey_from()
        if peerid:
            return ctx.tag["Got privkey from: [%s]"
                           % idlib.shortnodeid_b2a(peerid)]
        else:
            return ""

    def data_time_total(self, ctx, data):
        return self.update_status.timings.get("total")

    def data_time_initial_queries(self, ctx, data):
        return self.update_status.timings.get("initial_queries")

    def data_time_cumulative_verify(self, ctx, data):
        return self.update_status.timings.get("cumulative_verify")

    def render_server_timings(self, ctx, data):
        per_server = self.update_status.timings.get("per_server")
        if not per_server:
            return ""
        l = T.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times = []
            for op,started,t in per_server[peerid]:
                #times.append("%s/%.4fs/%s/%s" % (op,
                #                              started,
                #                              self.render_time(None, started - self.update_status.get_started()),
                #                              self.render_time(None,t)))
                if op == "query":
                    times.append( self.render_time(None, t) )
                elif op == "late":
                    times.append( "late(" + self.render_time(None, t) + ")" )
                else:
                    times.append( "privkey(" + self.render_time(None, t) + ")" )
            times_s = ", ".join(times)
            l[T.li["[%s]: %s" % (peerid_s, times_s)]]
        return T.li["Per-Server Response Times: ", l]

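    # The timing chart is a horizontal stacked bar chart built with the
    # Google Chart API: one row per server, showing when each query started
    # (relative to the start of the update) and how long it took, with the
    # privkey fetch (if any) stacked as a second segment.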
    def render_timing_chart(self, ctx, data):
        imageurl = self._timing_chart()
        return ctx.tag[imageurl]

    def _timing_chart(self):
        started = self.update_status.get_started()
        total = self.update_status.timings.get("total")
        per_server = self.update_status.timings.get("per_server")
        base = "http://chart.apis.google.com/chart?"
        pieces = ["cht=bhs", "chs=400x300"]
        pieces.append("chco=ffffff,4d89f9,c6d9fd") # colors
        data0 = []
        data1 = []
        data2 = []
        peerids_s = []
        top_abs = started
        # we sort the queries by the time at which we sent the first request
        sorttable = [ (times[0][1], peerid)
                      for peerid, times in per_server.items() ]
        sorttable.sort()
        peerids = [t[1] for t in sorttable]

        for peerid in peerids:
            times = per_server[peerid]
            peerid_s = idlib.shortnodeid_b2a(peerid)
            peerids_s.append(peerid_s)
            # for servermap updates, there are either one or two queries per
            # peer. The second (if present) is to get the privkey.
            op,q_started,q_elapsed = times[0]
            data0.append("%.3f" % (q_started-started))
            data1.append("%.3f" % q_elapsed)
            top_abs = max(top_abs, q_started+q_elapsed)
            if len(times) > 1:
                # the second entry, if any, is the privkey query
                op,p_started,p_elapsed = times[1]
                data2.append("%.3f" % p_elapsed)
                top_abs = max(top_abs, p_started+p_elapsed)
            else:
                data2.append("0.0")
        finished = self.update_status.get_finished()
        if finished:
            top_abs = max(top_abs, finished)
        top_rel = top_abs - started
        chd = "chd=t:" + "|".join([",".join(data0),
                                   ",".join(data1),
                                   ",".join(data2)])
        pieces.append(chd)
        chds = "chds=0,%0.3f" % top_rel
        pieces.append(chds)
        pieces.append("chxt=x,y")
        pieces.append("chxr=0,0.0,%0.3f" % top_rel)
        pieces.append("chxl=1:|" + "|".join(reversed(peerids_s)))
        # use up to 10 grid lines, at decimal multiples.
        # mathutil.next_power_of_k doesn't handle numbers smaller than one,
        # unfortunately.
        #pieces.append("chg="

        if total is not None:
            finished_f = 1.0 * total / top_rel
            pieces.append("chm=r,FF0000,0,%0.3f,%0.3f" % (finished_f,
                                                          finished_f+0.01))
        url = base + "&".join(pieces)
        return T.img(src=url, align="right", float="right")


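# The top-level /status page: a table of active and recently-finished
# operations, plus a ?t=json view. Individual rows link to the per-operation
# pages above via childFactory().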
class Status(rend.Page):
    docFactory = getxmlfile("status.xhtml")
    addSlash = True

    def renderHTTP(self, ctx):
        t = get_arg(inevow.IRequest(ctx), "t")
        if t == "json":
            return self.json(ctx)
        return rend.Page.renderHTTP(self, ctx)

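    # The JSON form currently reports just the active operations, e.g.
    # (illustrative values):
    #   {"active": [{"type": "upload", "storage-index-string": "...",
    #                "total-size": 12345, "status": "uploading",
    #                "progress-hash": 1.0, "progress-ciphertext": 0.5,
    #                "progress-encode-push": 0.25}]}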
    def json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        client = IClient(ctx)
        data = {}
        data["active"] = active = []
        for s in self.data_active_operations(ctx, None):
            si_s = base32.b2a_or_none(s.get_storage_index())
            size = s.get_size()
            status = s.get_status()
            if IUploadStatus.providedBy(s):
                h,c,e = s.get_progress()
                active.append({"type": "upload",
                               "storage-index-string": si_s,
                               "total-size": size,
                               "status": status,
                               "progress-hash": h,
                               "progress-ciphertext": c,
                               "progress-encode-push": e,
                               })
            elif IDownloadStatus.providedBy(s):
                active.append({"type": "download",
                               "storage-index-string": si_s,
                               "total-size": size,
                               "status": status,
                               "progress": s.get_progress(),
                               })

        return simplejson.dumps(data, indent=1) + "\n"

    def _get_all_statuses(self, client):
        return itertools.chain(client.list_all_upload_statuses(),
                               client.list_all_download_statuses(),
                               client.list_all_mapupdate_statuses(),
                               client.list_all_publish_statuses(),
                               client.list_all_retrieve_statuses(),
                               client.list_all_helper_statuses(),
                               )

    def data_active_operations(self, ctx, data):
        client = IClient(ctx)
        active = [s
                  for s in self._get_all_statuses(client)
                  if s.get_active()]
        return active

    def data_recent_operations(self, ctx, data):
        client = IClient(ctx)
        recent = [s
                  for s in self._get_all_statuses(client)
                  if not s.get_active()]
        recent.sort(lambda a,b: cmp(a.get_started(), b.get_started()))
        recent.reverse()
        return recent

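    # Fill in one table row for a single status object: start time, storage
    # index, helper use, size, progress, and a link to its detail page.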
    def render_row(self, ctx, data):
        s = data

        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(s.get_started()))
        ctx.fillSlots("started", started_s)

        si_s = base32.b2a_or_none(s.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        ctx.fillSlots("si", si_s)
        ctx.fillSlots("helper", {True: "Yes",
                                 False: "No"}[s.using_helper()])

        size = s.get_size()
        if size is None:
            size = "(unknown)"
        elif isinstance(size, (int, long, float)):
            size = abbreviate_size(size)
        ctx.fillSlots("total_size", size)

        progress = data.get_progress()
        if IUploadStatus.providedBy(data):
            link = "up-%d" % data.get_counter()
            ctx.fillSlots("type", "upload")
            # TODO: make an ascii-art bar
            (chk, ciphertext, encandpush) = progress
            progress_s = ("hash: %.1f%%, ciphertext: %.1f%%, encode: %.1f%%" %
                          ( (100.0 * chk),
                            (100.0 * ciphertext),
                            (100.0 * encandpush) ))
            ctx.fillSlots("progress", progress_s)
        elif IDownloadStatus.providedBy(data):
            link = "down-%d" % data.get_counter()
            ctx.fillSlots("type", "download")
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        elif IPublishStatus.providedBy(data):
            link = "publish-%d" % data.get_counter()
            ctx.fillSlots("type", "publish")
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        elif IRetrieveStatus.providedBy(data):
            ctx.fillSlots("type", "retrieve")
            link = "retrieve-%d" % data.get_counter()
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        else:
            assert IServermapUpdaterStatus.providedBy(data)
            ctx.fillSlots("type", "mapupdate %s" % data.get_mode())
            link = "mapupdate-%d" % data.get_counter()
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        ctx.fillSlots("status", T.a(href=link)[s.get_status()])
        return ctx.tag

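    # Child URLs look like "up-123", "down-45", "mapupdate-6", "publish-7",
    # or "retrieve-8": the operation type plus the status object's counter.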
    def childFactory(self, ctx, name):
        client = IClient(ctx)
        stype,count_s = name.split("-")
        count = int(count_s)
        if stype == "up":
            for s in itertools.chain(client.list_all_upload_statuses(),
                                     client.list_all_helper_statuses()):
                # immutable-upload helpers use the same status object as a
                # regular immutable-upload
                if s.get_counter() == count:
                    return UploadStatusPage(s)
        if stype == "down":
            for s in client.list_all_download_statuses():
                if s.get_counter() == count:
                    return DownloadStatusPage(s)
        if stype == "mapupdate":
            for s in client.list_all_mapupdate_statuses():
                if s.get_counter() == count:
                    return MapupdateStatusPage(s)
        if stype == "publish":
            for s in client.list_all_publish_statuses():
                if s.get_counter() == count:
                    return PublishStatusPage(s)
        if stype == "retrieve":
            for s in client.list_all_retrieve_statuses():
                if s.get_counter() == count:
                    return RetrieveStatusPage(s)


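# Renders helper.xhtml from the stats exposed by the node's "helper"
# service; ?t=json returns the raw stats dictionary.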
class HelperStatus(rend.Page):
    docFactory = getxmlfile("helper.xhtml")

    def renderHTTP(self, ctx):
        t = get_arg(inevow.IRequest(ctx), "t")
        if t == "json":
            return self.render_JSON(ctx)
        # is there a better way to provide 'data' to all rendering methods?
        helper = IClient(ctx).getServiceNamed("helper")
        self.original = helper.get_stats()
        return rend.Page.renderHTTP(self, ctx)

    def render_JSON(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        try:
            h = IClient(ctx).getServiceNamed("helper")
        except KeyError:
            return simplejson.dumps({}) + "\n"

        stats = h.get_stats()
        return simplejson.dumps(stats, indent=1) + "\n"

    def render_active_uploads(self, ctx, data):
        return data["chk_upload_helper.active_uploads"]

    def render_incoming(self, ctx, data):
        return "%d bytes in %d files" % (data["chk_upload_helper.incoming_size"],
                                         data["chk_upload_helper.incoming_count"])

    def render_encoding(self, ctx, data):
        return "%d bytes in %d files" % (data["chk_upload_helper.encoding_size"],
                                         data["chk_upload_helper.encoding_count"])

    def render_upload_requests(self, ctx, data):
        return str(data["chk_upload_helper.upload_requests"])

    def render_upload_already_present(self, ctx, data):
        return str(data["chk_upload_helper.upload_already_present"])

    def render_upload_need_upload(self, ctx, data):
        return str(data["chk_upload_helper.upload_need_upload"])

    def render_upload_bytes_fetched(self, ctx, data):
        return str(data["chk_upload_helper.fetched_bytes"])

    def render_upload_bytes_encoded(self, ctx, data):
        return str(data["chk_upload_helper.encoded_bytes"])


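# Renders statistics.xhtml from the node's stats_provider; ?t=json returns
# the raw {"stats": ..., "counters": ...} dictionary.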
class Statistics(rend.Page):
    docFactory = getxmlfile("statistics.xhtml")

    def renderHTTP(self, ctx):
        provider = IClient(ctx).stats_provider
        stats = {'stats': {}, 'counters': {}}
        if provider:
            stats = provider.get_stats()
        t = get_arg(inevow.IRequest(ctx), "t")
        if t == "json":
            inevow.IRequest(ctx).setHeader("content-type", "text/plain")
            return simplejson.dumps(stats, indent=1) + "\n"
        # is there a better way to provide 'data' to all rendering methods?
        self.original = stats
        return rend.Page.renderHTTP(self, ctx)

    def render_load_average(self, ctx, data):
        return str(data["stats"].get("load_monitor.avg_load"))

    def render_peak_load(self, ctx, data):
        return str(data["stats"].get("load_monitor.max_load"))

    def render_uploads(self, ctx, data):
        files = data["counters"].get("uploader.files_uploaded", 0)
        bytes = data["counters"].get("uploader.bytes_uploaded", 0)
        return ("%s files / %s bytes (%s)" %
                (files, bytes, abbreviate_size(bytes)))

    def render_downloads(self, ctx, data):
        files = data["counters"].get("downloader.files_downloaded", 0)
        bytes = data["counters"].get("downloader.bytes_downloaded", 0)
        return ("%s files / %s bytes (%s)" %
                (files, bytes, abbreviate_size(bytes)))

    def render_publishes(self, ctx, data):
        files = data["counters"].get("mutable.files_published", 0)
        bytes = data["counters"].get("mutable.bytes_published", 0)
        return "%s files / %s bytes (%s)" % (files, bytes,
                                             abbreviate_size(bytes))

    def render_retrieves(self, ctx, data):
        files = data["counters"].get("mutable.files_retrieved", 0)
        bytes = data["counters"].get("mutable.bytes_retrieved", 0)
        return "%s files / %s bytes (%s)" % (files, bytes,
                                             abbreviate_size(bytes))

    def render_raw(self, ctx, data):
        raw = pprint.pformat(data)
        return ctx.tag[raw]