deletion phase2a: improve creation of renew/cancel secrets. Still fake though.
author    Brian Warner <warner@lothar.com>
          Tue, 28 Aug 2007 02:00:18 +0000 (19:00 -0700)
committer Brian Warner <warner@lothar.com>
          Tue, 28 Aug 2007 02:00:18 +0000 (19:00 -0700)
src/allmydata/client.py
src/allmydata/test/test_upload.py
src/allmydata/upload.py
src/allmydata/util/hashutil.py
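This commit replaces the single three-argument bucket-secret hashes with a derivation chain: a client-level secret is hashed (with the storage index) into per-file renewal/cancel secrets, which are in turn hashed (with the peer id) into per-bucket secrets. Below is a minimal standalone sketch of the renewal side of that chain; tagged_hash and tagged_pair_hash here are SHA-256 stand-ins for the real hashutil primitives (which are not shown in this diff), and the literal byte values are hypothetical placeholders.

    from hashlib import sha256

    def tagged_hash(*vals):
        # stand-in for hashutil.tagged_hash; illustration only
        return sha256(b"|".join(vals)).digest()

    def tagged_pair_hash(*vals):
        # stand-in for hashutil.tagged_pair_hash; illustration only
        return sha256(b"|".join(vals)).digest()

    def my_renewal_secret_hash(my_secret):
        return tagged_hash(my_secret, b"bucket_renewal_secret")

    def file_renewal_secret_hash(client_renewal_secret, storage_index):
        return tagged_pair_hash(b"file_renewal_secret",
                                client_renewal_secret, storage_index)

    def bucket_renewal_secret_hash(file_renewal_secret, peerid):
        return tagged_pair_hash(b"bucket_renewal_secret",
                                file_renewal_secret, peerid)

    # hypothetical placeholder values; in this commit the client-level secret
    # is still faked as "" by Client.get_renewal_secret()
    client_renewal_secret = my_renewal_secret_hash(b"\x00" * 16)
    storage_index = b"\x01" * 16
    peerid = b"\x02" * 20

    file_renewal_secret = file_renewal_secret_hash(client_renewal_secret,
                                                   storage_index)
    # the per-bucket secret is what each PeerTracker now receives
    bucket_renewal_secret = bucket_renewal_secret_hash(file_renewal_secret,
                                                       peerid)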

diff --git a/src/allmydata/client.py b/src/allmydata/client.py
index f279df108fd111d2a38b81fb9c301a248df85de6..896c5e6194f284b8ebe1f44ff4479d84fa2f68dc 100644 (file)
--- a/src/allmydata/client.py
+++ b/src/allmydata/client.py
@@ -168,3 +168,8 @@ class Client(node.Node, Referenceable):
         if self.introducer_client:
             return self.introducer_client.connected_to_introducer()
         return False
+
+    def get_renewal_secret(self):
+        return ""
+    def get_cancel_secret(self):
+        return ""
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index b4df9f5af06f1617f3d6d2dda94349a958f89ac5..5c20d080c8a2405c00a6f2fc329ca33169e567f9 100644 (file)
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -137,6 +137,11 @@ class FakeClient:
     def get_encoding_parameters(self):
         return None
 
+    def get_renewal_secret(self):
+        return ""
+    def get_cancel_secret(self):
+        return ""
+
 DATA = """
 Once upon a time, there was a beautiful princess named Buttercup. She lived
 in a magical land where every file was stored securely among millions of
diff --git a/src/allmydata/upload.py b/src/allmydata/upload.py
index 1179cbb8f7eab5ec921166b1aeb8101df2a1c636..425520f0373c51f5c8d1102227725f498c56a4c8 100644 (file)
--- a/src/allmydata/upload.py
+++ b/src/allmydata/upload.py
@@ -6,7 +6,10 @@ from twisted.internet import defer
 from twisted.application import service
 from foolscap import Referenceable
 
-from allmydata.util import hashutil
+from allmydata.util.hashutil import file_renewal_secret_hash, \
+     file_cancel_secret_hash, bucket_renewal_secret_hash, \
+     bucket_cancel_secret_hash, plaintext_hasher, \
+     storage_index_chk_hash, plaintext_segment_hasher, key_hasher
 from allmydata import encode, storage, hashtree, uri
 from allmydata.interfaces import IUploadable, IUploader, IEncryptedUploadable
 from allmydata.Crypto.Cipher import AES
@@ -33,7 +36,8 @@ EXTENSION_SIZE = 1000
 class PeerTracker:
     def __init__(self, peerid, permutedid, connection,
                  sharesize, blocksize, num_segments, num_share_hashes,
-                 storage_index):
+                 storage_index,
+                 bucket_renewal_secret, bucket_cancel_secret):
         self.peerid = peerid
         self.permutedid = permutedid
         self.connection = connection # to an RIClient
@@ -52,12 +56,8 @@ class PeerTracker:
         self.storage_index = storage_index
         self._storageserver = None
 
-        h = hashutil.bucket_renewal_secret_hash
-        # XXX
-        self.my_secret = "secret"
-        self.renew_secret = h(self.my_secret, self.storage_index, self.peerid)
-        h = hashutil.bucket_cancel_secret_hash
-        self.cancel_secret = h(self.my_secret, self.storage_index, self.peerid)
+        self.renew_secret = bucket_renewal_secret
+        self.cancel_secret = bucket_cancel_secret
 
     def query(self, sharenums):
         if not self._storageserver:
@@ -120,10 +120,23 @@ class Tahoe3PeerSelector:
         ht = hashtree.IncompleteHashTree(total_shares)
         num_share_hashes = len(ht.needed_hashes(0, include_leaf=True))
 
+        client_renewal_secret = client.get_renewal_secret()
+        client_cancel_secret = client.get_cancel_secret()
+
+        file_renewal_secret = file_renewal_secret_hash(client_renewal_secret,
+                                                       storage_index)
+        file_cancel_secret = file_cancel_secret_hash(client_cancel_secret,
+                                                     storage_index)
+
         trackers = [ PeerTracker(peerid, permutedid, conn,
                                  share_size, block_size,
                                  num_segments, num_share_hashes,
-                                 storage_index)
+                                 storage_index,
+                                 bucket_renewal_secret_hash(file_renewal_secret,
+                                                            peerid),
+                                 bucket_cancel_secret_hash(file_cancel_secret,
+                                                           peerid),
+                                 )
                      for permutedid, peerid, conn in peers ]
         self.usable_peers = set(trackers) # this set shrinks over time
         self.used_peers = set() # while this set grows
@@ -258,7 +271,7 @@ class EncryptAnUploadable:
     def __init__(self, original):
         self.original = original
         self._encryptor = None
-        self._plaintext_hasher = hashutil.plaintext_hasher()
+        self._plaintext_hasher = plaintext_hasher()
         self._plaintext_segment_hasher = None
         self._plaintext_segment_hashes = []
         self._params = None
@@ -281,7 +294,7 @@ class EncryptAnUploadable:
             e = AES.new(key=key, mode=AES.MODE_CTR, counterstart="\x00"*16)
             self._encryptor = e
 
-            storage_index = hashutil.storage_index_chk_hash(key)
+            storage_index = storage_index_chk_hash(key)
             assert isinstance(storage_index, str)
             # There's no point to having the SI be longer than the key, so we
             # specify that it is truncated to the same 128 bits as the AES key.
@@ -305,7 +318,7 @@ class EncryptAnUploadable:
         if p:
             left = self._segment_size - self._plaintext_segment_hashed_bytes
             return p, left
-        p = hashutil.plaintext_segment_hasher()
+        p = plaintext_segment_hasher()
         self._plaintext_segment_hasher = p
         self._plaintext_segment_hashed_bytes = 0
         return p, self._segment_size
@@ -491,7 +504,7 @@ class ConvergentUploadMixin:
     def get_encryption_key(self):
         if self._key is None:
             f = self._filehandle
-            enckey_hasher = hashutil.key_hasher()
+            enckey_hasher = key_hasher()
             #enckey_hasher.update(encoding_parameters) # TODO
             f.seek(0)
             BLOCKSIZE = 64*1024
diff --git a/src/allmydata/util/hashutil.py b/src/allmydata/util/hashutil.py
index 47ff0bf2b49a4eaf9a13958a617cec59e0226c15..54caf0a4b5021c955ee58c877cd495ac0fcc4cbf 100644 (file)
--- a/src/allmydata/util/hashutil.py
+++ b/src/allmydata/util/hashutil.py
@@ -66,33 +66,26 @@ KEYLEN = 16
 def random_key():
     return os.urandom(KEYLEN)
 
-def file_renewal_secret_hash(my_secret, storage_index):
-    my_renewal_secret = tagged_hash(my_secret, "bucket_renewal_secret")
-    file_renewal_secret = tagged_pair_hash("file_renewal_secret",
-                                           my_renewal_secret, storage_index)
-    return file_renewal_secret
-
-def file_cancel_secret_hash(my_secret, storage_index):
-    my_cancel_secret = tagged_hash(my_secret, "bucket_cancel_secret")
-    file_cancel_secret = tagged_pair_hash("file_cancel_secret",
-                                          my_cancel_secret, storage_index)
-    return file_cancel_secret
-
-def bucket_renewal_secret_hash(my_secret, storage_index, peerid):
-    my_renewal_secret = tagged_hash(my_secret, "bucket_renewal_secret")
-    file_renewal_secret = tagged_pair_hash("file_renewal_secret",
-                                           my_renewal_secret, storage_index)
-    bucket_renewal_secret = tagged_pair_hash("bucket_renewal_secret",
-                                             file_renewal_secret, peerid)
-    return bucket_renewal_secret
-
-def bucket_cancel_secret_hash(my_secret, storage_index, peerid):
-    my_cancel_secret = tagged_hash(my_secret, "bucket_cancel_secret")
-    file_cancel_secret = tagged_pair_hash("file_cancel_secret",
-                                          my_cancel_secret, storage_index)
-    bucket_cancel_secret = tagged_pair_hash("bucket_cancel_secret",
-                                            file_cancel_secret, peerid)
-    return bucket_cancel_secret
+def my_renewal_secret_hash(my_secret):
+    return tagged_hash(my_secret, "bucket_renewal_secret")
+def my_cancel_secret_hash(my_secret):
+    return tagged_hash(my_secret, "bucket_cancel_secret")
+
+def file_renewal_secret_hash(client_renewal_secret, storage_index):
+    return tagged_pair_hash("file_renewal_secret",
+                            client_renewal_secret, storage_index)
+
+def file_cancel_secret_hash(client_cancel_secret, storage_index):
+    return tagged_pair_hash("file_cancel_secret",
+                            client_cancel_secret, storage_index)
+
+def bucket_renewal_secret_hash(file_renewal_secret, peerid):
+    return tagged_pair_hash("bucket_renewal_secret",
+                            file_renewal_secret, peerid)
+
+def bucket_cancel_secret_hash(file_cancel_secret, peerid):
+    return tagged_pair_hash("bucket_cancel_secret",
+                            file_cancel_secret, peerid)
 
 def dir_write_enabler_hash(write_key):
     return tagged_hash("allmydata_dir_write_enabler_v1", write_key)