From a5ab6c060df282eae91e9f7bb095d54e76490f05 Mon Sep 17 00:00:00 2001
From: Brian Warner <warner@allmydata.com>
Date: Mon, 9 Feb 2009 14:45:43 -0700
Subject: [PATCH] helper #609: uploading client should ignore old helper's
 UploadResults, which were in a different format

---
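The client-side half of this change boils down to a type sniff on the
sharemap values: old helpers sent a mapping from shnum to a human-readable
string, while the 1.3.0 helper sends a mapping from shnum to a set of
binary serverids. A minimal sketch of that check, using made-up sample
data (the serverid bytes and "Found on" strings below are placeholders,
not captured helper output):

    OLD_SHAREMAP = {0: "Found on [xgru5]",
                    1: "Found on [xgru5],[62nzs]"}
    NEW_SHAREMAP = {0: set(["\x01" * 20]),
                    1: set(["\x01" * 20, "\x02" * 20])}

    def is_old_format(sharemap):
        # pre-1.3.0: shnum -> human-readable str
        # 1.3.0: shnum -> set of 20-byte binary serverids
        return str in [type(v) for v in sharemap.values()]

    assert is_old_format(OLD_SHAREMAP)
    assert not is_old_format(NEW_SHAREMAP)

When any value is a str, _convert_old_upload_results() below clobbers the
whole sharemap rather than trying to parse the abbreviated nodeids back
out of the strings.
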
 src/allmydata/immutable/offloaded.py | 14 ++++----------
 src/allmydata/immutable/upload.py    | 18 ++++++++++++++++++
 2 files changed, 22 insertions(+), 10 deletions(-)
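
The offloaded.py hunks also switch CHKCheckerAndUEBFetcher._sharemap from
a hand-rolled dict-of-lists to dictutil.DictOfSets. A rough sketch of the
behavior the new code relies on, assuming DictOfSets is a plain dict
subclass whose add() creates each set on first use (an illustration, not
a copy of allmydata/util/dictutil.py):

    class DictOfSets(dict):
        def add(self, key, value):
            # accumulate values into a per-key set, created lazily
            if key in self:
                self[key].add(value)
            else:
                self[key] = set([value])

    sharemap = DictOfSets()
    sharemap.add(0, "peerid-A")  # hypothetical placeholder peerids
    sharemap.add(0, "peerid-B")
    assert sharemap[0] == set(["peerid-A", "peerid-B"])

This is also what lets the Helper hand its sharemap straight to
results.sharemap: the values are sets of full binary peerids rather than
pre-rendered "Found on [...]" strings, so rendering is left to the client.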

diff --git a/src/allmydata/immutable/offloaded.py b/src/allmydata/immutable/offloaded.py
index 7ba3943e..775b71e6 100644
--- a/src/allmydata/immutable/offloaded.py
+++ b/src/allmydata/immutable/offloaded.py
@@ -10,7 +10,7 @@ from allmydata import interfaces, storage, uri
 from allmydata.immutable import upload
 from allmydata.immutable.layout import ReadBucketProxy
 from allmydata.util.assertutil import precondition
-from allmydata.util import idlib, log, observer, fileutil, hashutil
+from allmydata.util import idlib, log, observer, fileutil, hashutil, dictutil
 
 
 class NotEnoughWritersError(Exception):
@@ -33,7 +33,7 @@ class CHKCheckerAndUEBFetcher:
         self._peer_getter = peer_getter
         self._found_shares = set()
         self._storage_index = storage_index
-        self._sharemap = {}
+        self._sharemap = dictutil.DictOfSets()
         self._readers = set()
         self._ueb_hash = None
         self._ueb_data = None
@@ -69,9 +69,7 @@ class CHKCheckerAndUEBFetcher:
                  level=log.NOISY)
         self._found_shares.update(buckets.keys())
         for k in buckets:
-            if k not in self._sharemap:
-                self._sharemap[k] = []
-            self._sharemap[k].append(peerid)
+            self._sharemap.add(k, peerid)
         self._readers.update( [ (bucket, peerid)
                                 for bucket in buckets.values() ] )
 
@@ -632,11 +630,7 @@ class Helper(Referenceable, service.MultiService):
                 (sharemap, ueb_data, ueb_hash) = res
                 self.log("found file in grid", level=log.NOISY, parent=lp)
                 results.uri_extension_hash = ueb_hash
-                results.sharemap = {}
-                for shnum, peerids in sharemap.items():
-                    peers_s = ",".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
-                                        for peerid in peerids])
-                    results.sharemap[shnum] = "Found on " + peers_s
+                results.sharemap = sharemap
                 results.uri_extension_data = ueb_data
                 results.preexisting_shares = len(sharemap)
                 results.pushed_shares = 0
diff --git a/src/allmydata/immutable/upload.py b/src/allmydata/immutable/upload.py
index 17155f44..28edda49 100644
--- a/src/allmydata/immutable/upload.py
+++ b/src/allmydata/immutable/upload.py
@@ -993,8 +993,26 @@ class AssistedUploader:
         self._upload_status.set_results(upload_results)
         return upload_results
 
+    def _convert_old_upload_results(self, upload_results):
+        # pre-1.3.0 helpers return upload results which contain a mapping
+        # from shnum to a single human-readable string, containing things
+        # like "Found on [x],[y],[z]" (for healthy files that were already in
+        # the grid), "Found on [x]" (for files that needed upload but whose
+        # upload discovered some pre-existing shares), and "Placed on [x]"
+        # (for newly uploaded shares). The 1.3.0 helper returns a mapping
+        # from shnum to a set of binary serverid strings.
+
+        # the old results are too hard to deal with (they don't even contain
+        # as much information as the new results, since the nodeids are
+        # abbreviated), so if we detect old results, just clobber them.
+
+        sharemap = upload_results.sharemap
+        if str in [type(v) for v in sharemap.values()]:
+            upload_results.sharemap = None
+
     def _build_verifycap(self, upload_results):
         self.log("upload finished, building readcap")
+        self._convert_old_upload_results(upload_results)
         self._upload_status.set_status("Building Readcap")
         r = upload_results
         assert r.uri_extension_data["needed_shares"] == self._needed_shares
-- 
2.45.2