remove get_serverid from DownloadStatus.add_dyhb_request and customers
author     Zooko O'Whielacronx <zooko@zooko.com>
           Mon, 1 Aug 2011 18:54:01 +0000 (11:54 -0700)
committer  Zooko O'Whielacronx <zooko@zooko.com>
           Mon, 1 Aug 2011 18:54:01 +0000 (11:54 -0700)
This patch is a rebase of a patch originally written by Brian. I didn't change any of the intent of Brian's patch, just ported it to current trunk.
refs #1363

src/allmydata/immutable/downloader/finder.py
src/allmydata/immutable/downloader/status.py
src/allmydata/test/test_web.py
src/allmydata/web/status.py

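The shape of the change, as an informal sketch (not part of the patch; names follow the finder.py hunk below): callers stop extracting the binary serverid themselves and hand the IServer object to DownloadStatus, so display code can later ask the server for whichever form it needs.

    # before: caller reduces the server to its binary id up front
    d_ev = download_status.add_dyhb_request(server.get_serverid(), now())
    # after: caller passes the IServer; consumers derive ids/names on demand
    d_ev = download_status.add_dyhb_request(server, now())
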
src/allmydata/immutable/downloader/finder.py
index 2670a20be2a306e465db0ce838cc49cca4f5325c..8bcdca76ff3522d0e564710ce81daf385d5a4ac7 100644
@@ -135,8 +135,7 @@ class ShareFinder:
         lp = self.log(format="sending DYHB to [%(name)s]", name=server.get_name(),
                       level=log.NOISY, umid="Io7pyg")
         time_sent = now()
-        d_ev = self._download_status.add_dyhb_request(server.get_serverid(),
-                                                      time_sent)
+        d_ev = self._download_status.add_dyhb_request(server, time_sent)
         # TODO: get the timer from a Server object, it knows best
         self.overdue_timers[req] = reactor.callLater(self.OVERDUE_TIMEOUT,
                                                      self.overdue, req)
src/allmydata/immutable/downloader/status.py
index 977bf2e18f6a19700b2731031dec242b2568985a..8b1b4f1beca100bc007867cae7a0bfb25fff8a18 100644
@@ -107,7 +107,7 @@ class DownloadStatus:
 
         # self.dyhb_requests tracks "do you have a share" requests and
         # responses. It is a list of dicts:
-        #  serverid (binary)
+        #  server (instance of IServer)
         #  start_time
         #  success (None until resolved, then boolean)
         #  response_shnums (tuple, None until successful)
@@ -158,8 +158,8 @@ class DownloadStatus:
         self.segment_events.append(r)
         return SegmentEvent(r, self)
 
-    def add_dyhb_request(self, serverid, when):
-        r = { "serverid": serverid,
+    def add_dyhb_request(self, server, when):
+        r = { "server": server,
               "start_time": when,
               "success": None,
               "response_shnums": None,
src/allmydata/test/test_web.py
index 951f5541beb6c95d2852b0ec07a9ced6318fc6be..2479c70f0cde77c4c453cbda9016c9d926870907 100644
@@ -74,12 +74,19 @@ class FakeUploader(service.Service):
     def get_helper_info(self):
         return (None, False)
 
+class FakeIServer:
+    def __init__(self, binaryserverid):
+        self.binaryserverid = binaryserverid
+    def get_name(self): return "short"
+    def get_longname(self): return "long"
+    def get_serverid(self): return self.binaryserverid
+
 def build_one_ds():
     ds = DownloadStatus("storage_index", 1234)
     now = time.time()
 
-    serverid_a = hashutil.tagged_hash("foo", "serverid_a")[:20]
-    serverid_b = hashutil.tagged_hash("foo", "serverid_b")[:20]
+    serverA = FakeIServer(hashutil.tagged_hash("foo", "serverid_a")[:20])
+    serverB = FakeIServer(hashutil.tagged_hash("foo", "serverid_a")[:20])
     storage_index = hashutil.storage_index_hash("SI")
     e0 = ds.add_segment_request(0, now)
     e0.activate(now+0.5)
@@ -96,18 +103,18 @@ def build_one_ds():
     e.activate(now)
     e.deliver(now, 0, 140, 0.5)
 
-    e = ds.add_dyhb_request(serverid_a, now)
+    e = ds.add_dyhb_request(serverA, now)
     e.finished([1,2], now+1)
-    e = ds.add_dyhb_request(serverid_b, now+2) # left unfinished
+    e = ds.add_dyhb_request(serverB, now+2) # left unfinished
 
     e = ds.add_read_event(0, 120, now)
     e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
     e.finished(now+1)
     e = ds.add_read_event(120, 30, now+2) # left unfinished
 
-    e = ds.add_block_request(serverid_a, 1, 100, 20, now)
+    e = ds.add_block_request(serverA, 1, 100, 20, now)
     e.finished(20, now+1)
-    e = ds.add_block_request(serverid_a, 1, 120, 30, now+1) # left unfinished
+    e = ds.add_block_request(serverB, 1, 120, 30, now+1) # left unfinished
 
     # make sure that add_read_event() can come first too
     ds1 = DownloadStatus(storage_index, 1234)
@@ -575,7 +582,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
             # serverids[] keys are strings, since that's what JSON does, but
             # we'd really like them to be ints
             self.failUnlessEqual(data["serverids"]["0"], "phwr")
-            self.failUnlessEqual(data["serverids"]["1"], "cmpu")
+            self.failUnless(data["serverids"].has_key("1"), data["serverids"])
+            self.failUnlessEqual(data["serverids"]["1"], "cmpu", data["serverids"])
             self.failUnlessEqual(data["server_info"][phwr_id]["short"], "phwr")
             self.failUnlessEqual(data["server_info"][cmpu_id]["short"], "cmpu")
             self.failUnless("dyhb" in data)
src/allmydata/web/status.py
index 3633b721dba0a20f5cc824369df8d64312f33976..fa31f8ecaab5a32dc761885310f120cafe43782b 100644
@@ -358,19 +358,21 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
 
     def _find_overlap(self, events, start_key, end_key):
         # given a list of event dicts, return a new list in which each event
-        # has an extra "row" key (an int, starting at 0). This is a hint to
-        # our JS frontend about how to overlap the parts of the graph it is
-        # drawing.
+        # has an extra "row" key (an int, starting at 0), and if appropriate
+        # a "serverid" key (ascii-encoded server id), replacing the "server"
+        # key. This is a hint to our JS frontend about how to overlap the
+        # parts of the graph it is drawing.
 
-        # we must always make a copy, since we're going to be adding "row"
-        # keys and don't want to change the original objects. If we're
+        # we must always make a copy, since we're going to be adding keys
+        # and don't want to change the original objects. If we're
         # stringifying serverids, we'll also be changing the serverid keys.
         new_events = []
         rows = []
         for ev in events:
             ev = ev.copy()
-            if "serverid" in ev:
-                ev["serverid"] = base32.b2a(ev["serverid"])
+            if ev.has_key('server'):
+                ev["serverid"] = base32.b2a(ev["server"].get_serverid())
+                del ev["server"]
             # find an empty slot in the rows
             free_slot = None
             for row,finished in enumerate(rows):
@@ -410,6 +412,7 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
             # DownloadStatus promises to give us events in temporal order
             ev = ev.copy()
             ev["serverid"] = base32.b2a(ev["server"].get_serverid())
+            del ev["server"]
             if ev["serverid"] not in serverid_to_group:
                 groupnum = len(serverid_to_group)
                 serverid_to_group[ev["serverid"]] = groupnum
@@ -493,18 +496,17 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
         t[T.tr[T.th["serverid"], T.th["sent"], T.th["received"],
                T.th["shnums"], T.th["RTT"]]]
         for d_ev in self.download_status.dyhb_requests:
-            serverid = d_ev["serverid"]
+            server = d_ev["server"]
             sent = d_ev["start_time"]
             shnums = d_ev["response_shnums"]
             received = d_ev["finish_time"]
-            serverid_s = idlib.shortnodeid_b2a(serverid)
             rtt = None
             if received is not None:
                 rtt = received - sent
             if not shnums:
                 shnums = ["-"]
-            t[T.tr(style="background: %s" % self.color(serverid))[
-                [T.td[serverid_s], T.td[srt(sent)], T.td[srt(received)],
+            t[T.tr(style="background: %s" % self.color(server.get_serverid()))[
+                [T.td[server.get_name()], T.td[srt(sent)], T.td[srt(received)],
                  T.td[",".join([str(shnum) for shnum in shnums])],
                  T.td[self.render_time(None, rtt)],
                  ]]]