git.rkrishnan.org Git - tahoe-lafs/tahoe-lafs.git/commitdiff
mutable: start adding Repair tests, fix a simple bug
author    Brian Warner <warner@lothar.com>
Wed, 6 Aug 2008 06:12:39 +0000 (23:12 -0700)
committer Brian Warner <warner@lothar.com>
Wed, 6 Aug 2008 06:12:39 +0000 (23:12 -0700)
src/allmydata/mutable/publish.py
src/allmydata/test/test_mutable.py

diff --git a/src/allmydata/mutable/publish.py b/src/allmydata/mutable/publish.py
index af02e6310597ce9633ff0286f54952b4cd761427..15df09193aaacc12165ec1412b7d1a6a37644f89 100644
--- a/src/allmydata/mutable/publish.py
+++ b/src/allmydata/mutable/publish.py
@@ -11,7 +11,7 @@ from allmydata import hashtree, codec, storage
 from pycryptopp.cipher.aes import AES
 from foolscap.eventual import eventually
 
-from common import MODE_WRITE, DictOfSets, \
+from common import MODE_WRITE, MODE_CHECK, DictOfSets, \
      UncoordinatedWriteError, NotEnoughServersError
 from servermap import ServerMap
 from layout import pack_prefix, pack_share, unpack_header, pack_checkstring, \
@@ -153,7 +153,7 @@ class Publish:
         # servermap was updated in MODE_WRITE, so we can depend upon the
         # peerlist computed by that process instead of computing our own.
         if self._servermap:
-            assert self._servermap.last_update_mode == MODE_WRITE
+            assert self._servermap.last_update_mode in (MODE_WRITE, MODE_CHECK)
             # we will push a version that is one larger than anything present
             # in the grid, according to the servermap.
             self._new_seqnum = self._servermap.highest_seqnum() + 1
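
The loosened assertion above evidently exists so that the repairer can reuse the servermap produced by a checker's MODE_CHECK update instead of requiring a fresh MODE_WRITE update before publishing. A minimal standalone sketch of the precondition and the seqnum choice, using hypothetical stand-ins (FakeServerMap, choose_new_seqnum) rather than Tahoe's real classes:

MODE_WRITE = "MODE_WRITE"
MODE_CHECK = "MODE_CHECK"

class FakeServerMap:
    # stand-in for allmydata.mutable.servermap.ServerMap; only the two
    # attributes this sketch touches are modeled
    def __init__(self, last_update_mode, seqnums):
        self.last_update_mode = last_update_mode
        self._seqnums = seqnums

    def highest_seqnum(self):
        return max(self._seqnums) if self._seqnums else 0

def choose_new_seqnum(servermap):
    # mirrors the precondition in Publish.publish after this commit: the
    # servermap may come from a MODE_WRITE update or from a checker's
    # MODE_CHECK update, and the published version is one larger than
    # anything currently visible on the grid
    assert servermap.last_update_mode in (MODE_WRITE, MODE_CHECK)
    return servermap.highest_seqnum() + 1

smap = FakeServerMap(MODE_CHECK, seqnums=[3, 4, 4])
assert choose_new_seqnum(smap) == 5
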
diff --git a/src/allmydata/test/test_mutable.py b/src/allmydata/test/test_mutable.py
index 3fb2a61be366763d1d8fa0dcdd907016a13cff76..d4bc5a334b02e527b8374012ef3595c40d3a2d50 100644
--- a/src/allmydata/test/test_mutable.py
+++ b/src/allmydata/test/test_mutable.py
@@ -12,7 +12,7 @@ from allmydata.util.idlib import shortnodeid_b2a
 from allmydata.util.hashutil import tagged_hash
 from allmydata.util.fileutil import make_dirs
 from allmydata.interfaces import IURI, IMutableFileURI, IUploadable, \
-     FileTooLargeError
+     FileTooLargeError, IRepairResults
 from foolscap.eventual import eventually, fireEventually
 from foolscap.logging import log
 import sha
@@ -1244,6 +1244,66 @@ class Checker(unittest.TestCase, CheckerMixin):
                       "test_verify_one_bad_encprivkey_uncheckable")
         return d
 
+class Repair(unittest.TestCase, CheckerMixin):
+    def setUp(self):
+        # publish a file and create shares, which can then be manipulated
+        # later.
+        self.CONTENTS = "New contents go here" * 1000
+        num_peers = 20
+        self._client = FakeClient(num_peers)
+        self._storage = self._client._storage
+        d = self._client.create_mutable_file(self.CONTENTS)
+        def _created(node):
+            self._fn = node
+        d.addCallback(_created)
+        return d
+
+    def get_shares(self, s):
+        all_shares = {} # maps (peerid, shnum) to share data
+        for peerid in s._peers:
+            shares = s._peers[peerid]
+            for shnum in shares:
+                data = shares[shnum]
+                all_shares[ (peerid, shnum) ] = data
+        return all_shares
+
+    def test_repair_nop(self):
+        initial_shares = self.get_shares(self._storage)
+
+        d = self._fn.check()
+        d.addCallback(self._fn.repair)
+        def _check_results(rres):
+            self.failUnless(IRepairResults.providedBy(rres))
+            # TODO: examine results
+
+            new_shares = self.get_shares(self._storage)
+            # all shares should be in the same place as before
+            self.failUnlessEqual(set(initial_shares.keys()),
+                                 set(new_shares.keys()))
+            # but they should all be at a newer seqnum. The IV will be
+            # different, so the roothash will be too.
+            for key in initial_shares:
+                (version0,
+                 seqnum0,
+                 root_hash0,
+                 IV0,
+                 k0, N0, segsize0, datalen0,
+                 o0) = unpack_header(initial_shares[key])
+                (version1,
+                 seqnum1,
+                 root_hash1,
+                 IV1,
+                 k1, N1, segsize1, datalen1,
+                 o1) = unpack_header(new_shares[key])
+                self.failUnlessEqual(version0, version1)
+                self.failUnlessEqual(seqnum0+1, seqnum1)
+                self.failUnlessEqual(k0, k1)
+                self.failUnlessEqual(N0, N1)
+                self.failUnlessEqual(segsize0, segsize1)
+                self.failUnlessEqual(datalen0, datalen1)
+        d.addCallback(_check_results)
+        return d
+
 
 class MultipleEncodings(unittest.TestCase):
     def setUp(self):
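
For reference, test_repair_nop above relies on unpack_header returning the mutable-share header fields (version, seqnum, root hash, IV, k, N, segment size, data length, offsets) so it can assert that a repair bumps only the sequence number while leaving the encoding parameters untouched. Below is a self-contained sketch of that before/after comparison; the field widths are assumed for illustration and the offsets table is omitted, so this is not the real layout from allmydata/mutable/layout.py:

import struct

# assumed packing, for illustration only: version, seqnum, roothash, IV,
# k, N, segsize, datalen (the real header also carries an offsets table)
HEADER_FMT = ">BQ32s16sBBQQ"

def pack_header_sketch(version, seqnum, root_hash, IV, k, N, segsize, datalen):
    return struct.pack(HEADER_FMT, version, seqnum, root_hash, IV,
                       k, N, segsize, datalen)

def unpack_header_sketch(data):
    return struct.unpack(HEADER_FMT, data[:struct.calcsize(HEADER_FMT)])

before = pack_header_sketch(0, 4, b"\x00" * 32, b"\x11" * 16, 3, 10, 131073, 20000)
after  = pack_header_sketch(0, 5, b"\x22" * 32, b"\x33" * 16, 3, 10, 131073, 20000)

(v0, seq0, root0, iv0, k0, n0, seg0, dlen0) = unpack_header_sketch(before)
(v1, seq1, root1, iv1, k1, n1, seg1, dlen1) = unpack_header_sketch(after)

assert seq1 == seq0 + 1                                # repair publishes the next seqnum
assert (k0, n0, seg0, dlen0) == (k1, n1, seg1, dlen1)  # encoding parameters unchanged
assert root0 != root1                                  # a fresh IV means a fresh root hash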