
import time, pprint, itertools
import simplejson
from twisted.internet import defer
from nevow import rend, inevow, tags as T
from allmydata.util import base32, idlib
from allmydata.web.common import getxmlfile, get_arg, \
     abbreviate_time, abbreviate_rate, abbreviate_size
from allmydata.interfaces import IUploadStatus, IDownloadStatus, \
     IPublishStatus, IRetrieveStatus, IServermapUpdaterStatus

def plural(sequence_or_length):
    if isinstance(sequence_or_length, int):
        length = sequence_or_length
    else:
        length = len(sequence_or_length)
    if length == 1:
        return ""
    return "s"

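# Mixin that gives Nevow renderers human-readable time and rate strings,
# using abbreviate_time / abbreviate_rate from allmydata.web.common.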
class RateAndTimeMixin:

    def render_time(self, ctx, data):
        return abbreviate_time(data)

    def render_rate(self, ctx, data):
        return abbreviate_rate(data)

class UploadResultsRendererMixin(RateAndTimeMixin):
    # this requires a method named 'upload_results'

    def render_pushed_shares(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.pushed_shares)
        return d

    def render_preexisting_shares(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.preexisting_shares)
        return d

    def render_sharemap(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.sharemap)
        def _render(sharemap):
            if sharemap is None:
                return "None"
            l = T.ul()
            for shnum, peerids in sorted(sharemap.items()):
                peerids = ', '.join([idlib.shortnodeid_b2a(i) for i in peerids])
                l[T.li["%d -> placed on [%s]" % (shnum, peerids)]]
            return l
        d.addCallback(_render)
        return d

    def render_servermap(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.servermap)
        def _render(servermap):
            if servermap is None:
                return "None"
            l = T.ul()
            for peerid in sorted(servermap.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                shares_s = ",".join(["#%d" % shnum
                                     for shnum in servermap[peerid]])
                l[T.li["[%s] got share%s: %s" % (peerid_s,
                                                 plural(servermap[peerid]),
                                                 shares_s)]]
            return l
        d.addCallback(_render)
        return d

    def data_file_size(self, ctx, data):
        d = self.upload_results()
        d.addCallback(lambda res: res.file_size)
        return d

    def _get_time(self, name):
        d = self.upload_results()
        d.addCallback(lambda res: res.timings.get(name))
        return d

    def data_time_total(self, ctx, data):
        return self._get_time("total")

    def data_time_storage_index(self, ctx, data):
        return self._get_time("storage_index")

    def data_time_contacting_helper(self, ctx, data):
        return self._get_time("contacting_helper")

    def data_time_existence_check(self, ctx, data):
        return self._get_time("existence_check")

    def data_time_cumulative_fetch(self, ctx, data):
        return self._get_time("cumulative_fetch")

    def data_time_helper_total(self, ctx, data):
        return self._get_time("helper_total")

    def data_time_peer_selection(self, ctx, data):
        return self._get_time("peer_selection")

    def data_time_total_encode_and_push(self, ctx, data):
        return self._get_time("total_encode_and_push")

    def data_time_cumulative_encoding(self, ctx, data):
        return self._get_time("cumulative_encoding")

    def data_time_cumulative_sending(self, ctx, data):
        return self._get_time("cumulative_sending")

    def data_time_hashes_and_close(self, ctx, data):
        return self._get_time("hashes_and_close")

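    # _get_rate() converts a timing entry into an approximate throughput in
    # bytes per second (file_size / elapsed), or None if the timing is
    # missing or zero.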
    def _get_rate(self, name):
        d = self.upload_results()
        def _convert(r):
            file_size = r.file_size
            time = r.timings.get(name)
            if time is None:
                return None
            try:
                return 1.0 * file_size / time
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

    def data_rate_total(self, ctx, data):
        return self._get_rate("total")

    def data_rate_storage_index(self, ctx, data):
        return self._get_rate("storage_index")

    def data_rate_encode(self, ctx, data):
        return self._get_rate("cumulative_encoding")

    def data_rate_push(self, ctx, data):
        return self._get_rate("cumulative_sending")

    def data_rate_encode_and_push(self, ctx, data):
        d = self.upload_results()
        def _convert(r):
            file_size = r.file_size
            time1 = r.timings.get("cumulative_encoding")
            time2 = r.timings.get("cumulative_sending")
            if (file_size is None or time1 is None or time2 is None):
                return None
            try:
                return 1.0 * file_size / (time1+time2)
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

    def data_rate_ciphertext_fetch(self, ctx, data):
        d = self.upload_results()
        def _convert(r):
            fetch_size = r.ciphertext_fetched
            time = r.timings.get("cumulative_fetch")
            if (fetch_size is None or time is None):
                return None
            try:
                return 1.0 * fetch_size / time
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

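# Page rendered for the "up-<counter>" children of the Status page below;
# shows the progress and (once available) the results of a single upload.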
class UploadStatusPage(UploadResultsRendererMixin, rend.Page):
    docFactory = getxmlfile("upload-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.upload_status = data

    def upload_results(self):
        return defer.maybeDeferred(self.upload_status.get_results)

    def render_results(self, ctx, data):
        d = self.upload_results()
        def _got_results(results):
            if results:
                return ctx.tag
            return ""
        d.addCallback(_got_results)
        return d

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_total_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            return "(unknown)"
        return size

    def render_progress_hash(self, ctx, data):
        progress = data.get_progress()[0]
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_progress_ciphertext(self, ctx, data):
        progress = data.get_progress()[1]
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_progress_encode_push(self, ctx, data):
        progress = data.get_progress()[2]
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

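# Download counterpart of UploadResultsRendererMixin above: renderers for a
# completed download's results.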
class DownloadResultsRendererMixin(RateAndTimeMixin):
    # this requires a method named 'download_results'

    def render_servermap(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.servermap)
        def _render(servermap):
            if servermap is None:
                return "None"
            l = T.ul()
            for peerid in sorted(servermap.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                shares_s = ",".join(["#%d" % shnum
                                     for shnum in servermap[peerid]])
                l[T.li["[%s] has share%s: %s" % (peerid_s,
                                                 plural(servermap[peerid]),
                                                 shares_s)]]
            return l
        d.addCallback(_render)
        return d

    def render_servers_used(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.servers_used)
        def _got(servers_used):
            if not servers_used:
                return ""
            peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                                   for peerid in servers_used])
            return T.li["Servers Used: ", peerids_s]
        d.addCallback(_got)
        return d

    def render_problems(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.server_problems)
        def _got(server_problems):
            if not server_problems:
                return ""
            l = T.ul()
            for peerid in sorted(server_problems.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                l[T.li["[%s]: %s" % (peerid_s, server_problems[peerid])]]
            return T.li["Server Problems:", l]
        d.addCallback(_got)
        return d

    def data_file_size(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.file_size)
        return d

    def _get_time(self, name):
        d = self.download_results()
        d.addCallback(lambda res: res.timings.get(name))
        return d

    def data_time_total(self, ctx, data):
        return self._get_time("total")

    def data_time_peer_selection(self, ctx, data):
        return self._get_time("peer_selection")

    def data_time_uri_extension(self, ctx, data):
        return self._get_time("uri_extension")

    def data_time_hashtrees(self, ctx, data):
        return self._get_time("hashtrees")

    def data_time_segments(self, ctx, data):
        return self._get_time("segments")

    def data_time_cumulative_fetch(self, ctx, data):
        return self._get_time("cumulative_fetch")

    def data_time_cumulative_decode(self, ctx, data):
        return self._get_time("cumulative_decode")

    def data_time_cumulative_decrypt(self, ctx, data):
        return self._get_time("cumulative_decrypt")

    def data_time_paused(self, ctx, data):
        return self._get_time("paused")

    def _get_rate(self, name):
        d = self.download_results()
        def _convert(r):
            file_size = r.file_size
            time = r.timings.get(name)
            if time is None:
                return None
            try:
                return 1.0 * file_size / time
            except ZeroDivisionError:
                return None
        d.addCallback(_convert)
        return d

    def data_rate_total(self, ctx, data):
        return self._get_rate("total")

    def data_rate_segments(self, ctx, data):
        return self._get_rate("segments")

    def data_rate_fetch(self, ctx, data):
        return self._get_rate("cumulative_fetch")

    def data_rate_decode(self, ctx, data):
        return self._get_rate("cumulative_decode")

    def data_rate_decrypt(self, ctx, data):
        return self._get_rate("cumulative_decrypt")

    def render_server_timings(self, ctx, data):
        d = self.download_results()
        d.addCallback(lambda res: res.timings.get("fetch_per_server"))
        def _render(per_server):
            if per_server is None:
                return ""
            l = T.ul()
            for peerid in sorted(per_server.keys()):
                peerid_s = idlib.shortnodeid_b2a(peerid)
                times_s = ", ".join([self.render_time(None, t)
                                     for t in per_server[peerid]])
                l[T.li["[%s]: %s" % (peerid_s, times_s)]]
            return T.li["Per-Server Segment Fetch Response Times: ", l]
        d.addCallback(_render)
        return d

class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
    docFactory = getxmlfile("download-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.download_status = data

    def download_results(self):
        return defer.maybeDeferred(self.download_status.get_results)

    def render_results(self, ctx, data):
        d = self.download_results()
        def _got_results(results):
            if results:
                return ctx.tag
            return ""
        d.addCallback(_got_results)
        return d

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_total_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            return "(unknown)"
        return size

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

class RetrieveStatusPage(rend.Page, RateAndTimeMixin):
    docFactory = getxmlfile("retrieve-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.retrieve_status = data

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_current_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            size = "(unknown)"
        return size

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

    def render_encoding(self, ctx, data):
        k, n = data.get_encoding()
        return ctx.tag["Encoding: %s of %s" % (k, n)]

    def render_problems(self, ctx, data):
        problems = data.problems
        if not problems:
            return ""
        l = T.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l[T.li["[%s]: %s" % (peerid_s, problems[peerid])]]
        return ctx.tag["Server Problems:", l]

    def _get_rate(self, data, name):
        file_size = self.retrieve_status.get_size()
        time = self.retrieve_status.timings.get(name)
        if time is None or file_size is None:
            return None
        try:
            return 1.0 * file_size / time
        except ZeroDivisionError:
            return None

    def data_time_total(self, ctx, data):
        return self.retrieve_status.timings.get("total")
    def data_rate_total(self, ctx, data):
        return self._get_rate(data, "total")

    def data_time_fetch(self, ctx, data):
        return self.retrieve_status.timings.get("fetch")
    def data_rate_fetch(self, ctx, data):
        return self._get_rate(data, "fetch")

    def data_time_decode(self, ctx, data):
        return self.retrieve_status.timings.get("decode")
    def data_rate_decode(self, ctx, data):
        return self._get_rate(data, "decode")

    def data_time_decrypt(self, ctx, data):
        return self.retrieve_status.timings.get("decrypt")
    def data_rate_decrypt(self, ctx, data):
        return self._get_rate(data, "decrypt")

    def render_server_timings(self, ctx, data):
        per_server = self.retrieve_status.timings.get("fetch_per_server")
        if not per_server:
            return ""
        l = T.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times_s = ", ".join([self.render_time(None, t)
                                 for t in per_server[peerid]])
            l[T.li["[%s]: %s" % (peerid_s, times_s)]]
        return T.li["Per-Server Fetch Response Times: ", l]


class PublishStatusPage(rend.Page, RateAndTimeMixin):
    docFactory = getxmlfile("publish-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.publish_status = data

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_current_size(self, ctx, data):
        size = data.get_size()
        if size is None:
            size = "(unknown)"
        return size

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

    def render_encoding(self, ctx, data):
        k, n = data.get_encoding()
        return ctx.tag["Encoding: %s of %s" % (k, n)]

    def render_sharemap(self, ctx, data):
        servermap = data.get_servermap()
        if servermap is None:
            return ctx.tag["None"]
        l = T.ul()
        sharemap = servermap.make_sharemap()
        for shnum in sorted(sharemap.keys()):
            l[T.li["%d -> Placed on " % shnum,
                   ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                              for peerid in sharemap[shnum]])]]
        return ctx.tag["Sharemap:", l]

    def render_problems(self, ctx, data):
        problems = data.problems
        if not problems:
            return ""
        l = T.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l[T.li["[%s]: %s" % (peerid_s, problems[peerid])]]
        return ctx.tag["Server Problems:", l]

    def _get_rate(self, data, name):
        file_size = self.publish_status.get_size()
        time = self.publish_status.timings.get(name)
        # guard against a missing size as well, like RetrieveStatusPage._get_rate
        if time is None or file_size is None:
            return None
        try:
            return 1.0 * file_size / time
        except ZeroDivisionError:
            return None

    def data_time_total(self, ctx, data):
        return self.publish_status.timings.get("total")
    def data_rate_total(self, ctx, data):
        return self._get_rate(data, "total")

    def data_time_setup(self, ctx, data):
        return self.publish_status.timings.get("setup")

    def data_time_encrypt(self, ctx, data):
        return self.publish_status.timings.get("encrypt")
    def data_rate_encrypt(self, ctx, data):
        return self._get_rate(data, "encrypt")

    def data_time_encode(self, ctx, data):
        return self.publish_status.timings.get("encode")
    def data_rate_encode(self, ctx, data):
        return self._get_rate(data, "encode")

    def data_time_pack(self, ctx, data):
        return self.publish_status.timings.get("pack")
    def data_rate_pack(self, ctx, data):
        return self._get_rate(data, "pack")
    def data_time_sign(self, ctx, data):
        return self.publish_status.timings.get("sign")

    def data_time_push(self, ctx, data):
        return self.publish_status.timings.get("push")
    def data_rate_push(self, ctx, data):
        return self._get_rate(data, "push")

    def render_server_timings(self, ctx, data):
        per_server = self.publish_status.timings.get("send_per_server")
        if not per_server:
            return ""
        l = T.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times_s = ", ".join([self.render_time(None, t)
                                 for t in per_server[peerid]])
            l[T.li["[%s]: %s" % (peerid_s, times_s)]]
        return T.li["Per-Server Response Times: ", l]

class MapupdateStatusPage(rend.Page, RateAndTimeMixin):
    docFactory = getxmlfile("map-update-status.xhtml")

    def __init__(self, data):
        rend.Page.__init__(self, data)
        self.update_status = data

    def render_started(self, ctx, data):
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_started()))
        return started_s

    def render_finished(self, ctx, data):
        when = data.get_finished()
        if not when:
            return "not yet"
        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(data.get_finished()))
        return started_s

    def render_si(self, ctx, data):
        si_s = base32.b2a_or_none(data.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return si_s

    def render_helper(self, ctx, data):
        return {True: "Yes",
                False: "No"}[data.using_helper()]

    def render_progress(self, ctx, data):
        progress = data.get_progress()
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    def render_status(self, ctx, data):
        return data.get_status()

    def render_problems(self, ctx, data):
        problems = data.problems
        if not problems:
            return ""
        l = T.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l[T.li["[%s]: %s" % (peerid_s, problems[peerid])]]
        return ctx.tag["Server Problems:", l]

    def render_privkey_from(self, ctx, data):
        peerid = data.get_privkey_from()
        if peerid:
            return ctx.tag["Got privkey from: [%s]"
                           % idlib.shortnodeid_b2a(peerid)]
        else:
            return ""

    def data_time_total(self, ctx, data):
        return self.update_status.timings.get("total")

    def data_time_initial_queries(self, ctx, data):
        return self.update_status.timings.get("initial_queries")

    def data_time_cumulative_verify(self, ctx, data):
        return self.update_status.timings.get("cumulative_verify")

    def render_server_timings(self, ctx, data):
        per_server = self.update_status.timings.get("per_server")
        if not per_server:
            return ""
        l = T.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times = []
            for op,started,t in per_server[peerid]:
                #times.append("%s/%.4fs/%s/%s" % (op,
                #                              started,
                #                              self.render_time(None, started - self.update_status.get_started()),
                #                              self.render_time(None,t)))
                if op == "query":
                    times.append( self.render_time(None, t) )
                elif op == "late":
                    times.append( "late(" + self.render_time(None, t) + ")" )
                else:
                    times.append( "privkey(" + self.render_time(None, t) + ")" )
            times_s = ", ".join(times)
            l[T.li["[%s]: %s" % (peerid_s, times_s)]]
        return T.li["Per-Server Response Times: ", l]

    def render_timing_chart(self, ctx, data):
        imageurl = self._timing_chart()
        return ctx.tag[imageurl]

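    # Build an <img> whose src is a Google Chart API URL: a horizontal
    # stacked-bar chart with one row per server, showing when each query
    # started (invisible white segment), how long the query took, and how
    # long the follow-up privkey fetch took (if any). A thin red range
    # marker indicates the overall "total" elapsed time.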
    def _timing_chart(self):
        started = self.update_status.get_started()
        total = self.update_status.timings.get("total")
        per_server = self.update_status.timings.get("per_server")
        base = "http://chart.apis.google.com/chart?"
        pieces = ["cht=bhs", "chs=400x300"]
        pieces.append("chco=ffffff,4d89f9,c6d9fd") # colors
        data0 = []
        data1 = []
        data2 = []
        peerids_s = []
        top_abs = started
        # we sort the queries by the time at which we sent the first request
        sorttable = [ (times[0][1], peerid)
                      for peerid, times in per_server.items() ]
        sorttable.sort()
        peerids = [t[1] for t in sorttable]

        for peerid in peerids:
            times = per_server[peerid]
            peerid_s = idlib.shortnodeid_b2a(peerid)
            peerids_s.append(peerid_s)
            # for servermap updates, there are either one or two queries per
            # peer. The second (if present) is to get the privkey.
            op,q_started,q_elapsed = times[0]
            data0.append("%.3f" % (q_started-started))
            data1.append("%.3f" % q_elapsed)
            top_abs = max(top_abs, q_started+q_elapsed)
            if len(times) > 1:
                # the second entry, when present, is the privkey fetch
                op,p_started,p_elapsed = times[1]
                data2.append("%.3f" % p_elapsed)
                top_abs = max(top_abs, p_started+p_elapsed)
            else:
                data2.append("0.0")
        finished = self.update_status.get_finished()
        if finished:
            top_abs = max(top_abs, finished)
        top_rel = top_abs - started
        chd = "chd=t:" + "|".join([",".join(data0),
                                   ",".join(data1),
                                   ",".join(data2)])
        pieces.append(chd)
        chds = "chds=0,%0.3f" % top_rel
        pieces.append(chds)
        pieces.append("chxt=x,y")
        pieces.append("chxr=0,0.0,%0.3f" % top_rel)
        pieces.append("chxl=1:|" + "|".join(reversed(peerids_s)))
        # use up to 10 grid lines, at decimal multiples.
        # mathutil.next_power_of_k doesn't handle numbers smaller than one,
        # unfortunately.
        #pieces.append("chg="

        if total is not None:
            finished_f = 1.0 * total / top_rel
            pieces.append("chm=r,FF0000,0,%0.3f,%0.3f" % (finished_f,
                                                          finished_f+0.01))
        url = base + "&".join(pieces)
        return T.img(src=url, align="right", float="right")


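# Top-level status page: lists active and recent operations, serves a JSON
# summary when rendered with ?t=json, and dispatches "up-<n>", "down-<n>",
# "mapupdate-<n>", "publish-<n>", and "retrieve-<n>" children to the
# per-operation pages above.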
class Status(rend.Page):
    docFactory = getxmlfile("status.xhtml")
    addSlash = True

    def __init__(self, client):
        rend.Page.__init__(self, client)
        self.client = client

    def renderHTTP(self, ctx):
        req = inevow.IRequest(ctx)
        t = get_arg(req, "t")
        if t == "json":
            return self.json(req)
        return rend.Page.renderHTTP(self, ctx)

    def json(self, req):
        req.setHeader("content-type", "text/plain")
        data = {}
        data["active"] = active = []
        for s in self._get_active_operations():
            si_s = base32.b2a_or_none(s.get_storage_index())
            size = s.get_size()
            status = s.get_status()
            if IUploadStatus.providedBy(s):
                h,c,e = s.get_progress()
                active.append({"type": "upload",
                               "storage-index-string": si_s,
                               "total-size": size,
                               "status": status,
                               "progress-hash": h,
                               "progress-ciphertext": c,
                               "progress-encode-push": e,
                               })
            elif IDownloadStatus.providedBy(s):
                active.append({"type": "download",
                               "storage-index-string": si_s,
                               "total-size": size,
                               "status": status,
                               "progress": s.get_progress(),
                               })

        return simplejson.dumps(data, indent=1) + "\n"

    def _get_all_statuses(self):
        c = self.client
        return itertools.chain(c.list_all_upload_statuses(),
                               c.list_all_download_statuses(),
                               c.list_all_mapupdate_statuses(),
                               c.list_all_publish_statuses(),
                               c.list_all_retrieve_statuses(),
                               c.list_all_helper_statuses(),
                               )

    def data_active_operations(self, ctx, data):
        return self._get_active_operations()

    def _get_active_operations(self):
        active = [s
                  for s in self._get_all_statuses()
                  if s.get_active()]
        return active

    def data_recent_operations(self, ctx, data):
        return self._get_recent_operations()

    def _get_recent_operations(self):
        recent = [s
                  for s in self._get_all_statuses()
                  if not s.get_active()]
        recent.sort(lambda a,b: cmp(a.get_started(), b.get_started()))
        recent.reverse()
        return recent

    def render_row(self, ctx, data):
        s = data

        TIME_FORMAT = "%H:%M:%S %d-%b-%Y"
        started_s = time.strftime(TIME_FORMAT,
                                  time.localtime(s.get_started()))
        ctx.fillSlots("started", started_s)

        si_s = base32.b2a_or_none(s.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        ctx.fillSlots("si", si_s)
        ctx.fillSlots("helper", {True: "Yes",
                                 False: "No"}[s.using_helper()])

        size = s.get_size()
        if size is None:
            size = "(unknown)"
        elif isinstance(size, (int, long, float)):
            size = abbreviate_size(size)
        ctx.fillSlots("total_size", size)

        progress = data.get_progress()
        if IUploadStatus.providedBy(data):
            link = "up-%d" % data.get_counter()
            ctx.fillSlots("type", "upload")
            # TODO: make an ascii-art bar
            (chk, ciphertext, encandpush) = progress
            progress_s = ("hash: %.1f%%, ciphertext: %.1f%%, encode: %.1f%%" %
                          ( (100.0 * chk),
                            (100.0 * ciphertext),
                            (100.0 * encandpush) ))
            ctx.fillSlots("progress", progress_s)
        elif IDownloadStatus.providedBy(data):
            link = "down-%d" % data.get_counter()
            ctx.fillSlots("type", "download")
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        elif IPublishStatus.providedBy(data):
            link = "publish-%d" % data.get_counter()
            ctx.fillSlots("type", "publish")
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        elif IRetrieveStatus.providedBy(data):
            ctx.fillSlots("type", "retrieve")
            link = "retrieve-%d" % data.get_counter()
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        else:
            assert IServermapUpdaterStatus.providedBy(data)
            ctx.fillSlots("type", "mapupdate %s" % data.get_mode())
            link = "mapupdate-%d" % data.get_counter()
            ctx.fillSlots("progress", "%.1f%%" % (100.0 * progress))
        ctx.fillSlots("status", T.a(href=link)[s.get_status()])
        return ctx.tag

    def childFactory(self, ctx, name):
        client = self.client
        stype,count_s = name.split("-")
        count = int(count_s)
        if stype == "up":
            for s in itertools.chain(client.list_all_upload_statuses(),
                                     client.list_all_helper_statuses()):
                # immutable-upload helpers use the same status object as a
                # regular immutable-upload
                if s.get_counter() == count:
                    return UploadStatusPage(s)
        if stype == "down":
            for s in client.list_all_download_statuses():
                if s.get_counter() == count:
                    return DownloadStatusPage(s)
        if stype == "mapupdate":
            for s in client.list_all_mapupdate_statuses():
                if s.get_counter() == count:
                    return MapupdateStatusPage(s)
        if stype == "publish":
            for s in client.list_all_publish_statuses():
                if s.get_counter() == count:
                    return PublishStatusPage(s)
        if stype == "retrieve":
            for s in client.list_all_retrieve_statuses():
                if s.get_counter() == count:
                    return RetrieveStatusPage(s)


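# Status page for the upload helper: renders the counters returned by
# helper.get_stats(), and serves them as JSON when rendered with ?t=json.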
class HelperStatus(rend.Page):
    docFactory = getxmlfile("helper.xhtml")

    def __init__(self, helper):
        rend.Page.__init__(self, helper)
        self.helper = helper

    def renderHTTP(self, ctx):
        req = inevow.IRequest(ctx)
        t = get_arg(req, "t")
        if t == "json":
            return self.render_JSON(req)
        return rend.Page.renderHTTP(self, ctx)

    def data_helper_stats(self, ctx, data):
        return self.helper.get_stats()

    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        if self.helper:
            stats = self.helper.get_stats()
            return simplejson.dumps(stats, indent=1) + "\n"
        return simplejson.dumps({}) + "\n"

    def render_active_uploads(self, ctx, data):
        return data["chk_upload_helper.active_uploads"]

    def render_incoming(self, ctx, data):
        return "%d bytes in %d files" % (data["chk_upload_helper.incoming_size"],
                                         data["chk_upload_helper.incoming_count"])

    def render_encoding(self, ctx, data):
        return "%d bytes in %d files" % (data["chk_upload_helper.encoding_size"],
                                         data["chk_upload_helper.encoding_count"])

    def render_upload_requests(self, ctx, data):
        return str(data["chk_upload_helper.upload_requests"])

    def render_upload_already_present(self, ctx, data):
        return str(data["chk_upload_helper.upload_already_present"])

    def render_upload_need_upload(self, ctx, data):
        return str(data["chk_upload_helper.upload_need_upload"])

    def render_upload_bytes_fetched(self, ctx, data):
        return str(data["chk_upload_helper.fetched_bytes"])

    def render_upload_bytes_encoded(self, ctx, data):
        return str(data["chk_upload_helper.encoded_bytes"])


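# Node-wide statistics page backed by the stats provider; the rendered values
# come from the "counters" and "stats" dictionaries in provider.get_stats(),
# and ?t=json returns the raw dictionary.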
class Statistics(rend.Page):
    docFactory = getxmlfile("statistics.xhtml")

    def __init__(self, provider):
        rend.Page.__init__(self, provider)
        self.provider = provider

    def renderHTTP(self, ctx):
        req = inevow.IRequest(ctx)
        t = get_arg(req, "t")
        if t == "json":
            stats = self.provider.get_stats()
            req.setHeader("content-type", "text/plain")
            return simplejson.dumps(stats, indent=1) + "\n"
        return rend.Page.renderHTTP(self, ctx)

    def data_get_stats(self, ctx, data):
        return self.provider.get_stats()

    def render_load_average(self, ctx, data):
        return str(data["stats"].get("load_monitor.avg_load"))

    def render_peak_load(self, ctx, data):
        return str(data["stats"].get("load_monitor.max_load"))

    def render_uploads(self, ctx, data):
        files = data["counters"].get("uploader.files_uploaded", 0)
        bytes = data["counters"].get("uploader.bytes_uploaded", 0)
        return ("%s files / %s bytes (%s)" %
                (files, bytes, abbreviate_size(bytes)))

    def render_downloads(self, ctx, data):
        files = data["counters"].get("downloader.files_downloaded", 0)
        bytes = data["counters"].get("downloader.bytes_downloaded", 0)
        return ("%s files / %s bytes (%s)" %
                (files, bytes, abbreviate_size(bytes)))

    def render_publishes(self, ctx, data):
        files = data["counters"].get("mutable.files_published", 0)
        bytes = data["counters"].get("mutable.bytes_published", 0)
        return "%s files / %s bytes (%s)" % (files, bytes,
                                             abbreviate_size(bytes))

    def render_retrieves(self, ctx, data):
        files = data["counters"].get("mutable.files_retrieved", 0)
        bytes = data["counters"].get("mutable.bytes_retrieved", 0)
        return "%s files / %s bytes (%s)" % (files, bytes,
                                             abbreviate_size(bytes))

    def render_raw(self, ctx, data):
        raw = pprint.pformat(data)
        return ctx.tag[raw]