From: Brian Warner
Date: Tue, 28 Aug 2007 02:00:18 +0000 (-0700)
Subject: deletion phase2a: improve creation of renew/cancel secrets. Still fake though.
X-Git-Url: https://git.rkrishnan.org/pb.xhtml?a=commitdiff_plain;h=56afda11d16e9a66ac7476db36698933cc6cf50f;p=tahoe-lafs%2Ftahoe-lafs.git

deletion phase2a: improve creation of renew/cancel secrets. Still fake though.
---

diff --git a/src/allmydata/client.py b/src/allmydata/client.py
index f279df10..896c5e61 100644
--- a/src/allmydata/client.py
+++ b/src/allmydata/client.py
@@ -168,3 +168,8 @@ class Client(node.Node, Referenceable):
         if self.introducer_client:
             return self.introducer_client.connected_to_introducer()
         return False
+
+    def get_renewal_secret(self):
+        return ""
+    def get_cancel_secret(self):
+        return ""
diff --git a/src/allmydata/test/test_upload.py b/src/allmydata/test/test_upload.py
index b4df9f5a..5c20d080 100644
--- a/src/allmydata/test/test_upload.py
+++ b/src/allmydata/test/test_upload.py
@@ -137,6 +137,11 @@ class FakeClient:
     def get_encoding_parameters(self):
         return None
 
+    def get_renewal_secret(self):
+        return ""
+    def get_cancel_secret(self):
+        return ""
+
 DATA = """
 Once upon a time, there was a beautiful princess named Buttercup. She lived
 in a magical land where every file was stored securely among millions of
diff --git a/src/allmydata/upload.py b/src/allmydata/upload.py
index 1179cbb8..425520f0 100644
--- a/src/allmydata/upload.py
+++ b/src/allmydata/upload.py
@@ -6,7 +6,10 @@ from twisted.internet import defer
 from twisted.application import service
 from foolscap import Referenceable
 
-from allmydata.util import hashutil
+from allmydata.util.hashutil import file_renewal_secret_hash, \
+     file_cancel_secret_hash, bucket_renewal_secret_hash, \
+     bucket_cancel_secret_hash, plaintext_hasher, \
+     storage_index_chk_hash, plaintext_segment_hasher, key_hasher
 from allmydata import encode, storage, hashtree, uri
 from allmydata.interfaces import IUploadable, IUploader, IEncryptedUploadable
 from allmydata.Crypto.Cipher import AES
@@ -33,7 +36,8 @@ EXTENSION_SIZE = 1000
 class PeerTracker:
     def __init__(self, peerid, permutedid, connection,
                  sharesize, blocksize, num_segments, num_share_hashes,
-                 storage_index):
+                 storage_index,
+                 bucket_renewal_secret, bucket_cancel_secret):
         self.peerid = peerid
         self.permutedid = permutedid
         self.connection = connection # to an RIClient
@@ -52,12 +56,8 @@ class PeerTracker:
         self.storage_index = storage_index
         self._storageserver = None
 
-        h = hashutil.bucket_renewal_secret_hash
-        # XXX
-        self.my_secret = "secret"
-        self.renew_secret = h(self.my_secret, self.storage_index, self.peerid)
-        h = hashutil.bucket_cancel_secret_hash
-        self.cancel_secret = h(self.my_secret, self.storage_index, self.peerid)
+        self.renew_secret = bucket_renewal_secret
+        self.cancel_secret = bucket_cancel_secret
 
     def query(self, sharenums):
         if not self._storageserver:
@@ -120,10 +120,23 @@ class Tahoe3PeerSelector:
         ht = hashtree.IncompleteHashTree(total_shares)
         num_share_hashes = len(ht.needed_hashes(0, include_leaf=True))
 
+        client_renewal_secret = client.get_renewal_secret()
+        client_cancel_secret = client.get_cancel_secret()
+
+        file_renewal_secret = file_renewal_secret_hash(client_renewal_secret,
+                                                       storage_index)
+        file_cancel_secret = file_cancel_secret_hash(client_cancel_secret,
+                                                     storage_index)
+
         trackers = [ PeerTracker(peerid, permutedid, conn,
                                  share_size, block_size,
                                  num_segments, num_share_hashes,
-                                 storage_index)
+                                 storage_index,
+                                 bucket_renewal_secret_hash(file_renewal_secret,
+                                                            peerid),
+                                 bucket_cancel_secret_hash(file_cancel_secret,
+                                                           peerid),
+                                 )
                      for permutedid, peerid, conn in peers ]
         self.usable_peers = set(trackers) # this set shrinks over time
         self.used_peers = set() # while this set grows
@@ -258,7 +271,7 @@ class EncryptAnUploadable:
     def __init__(self, original):
         self.original = original
         self._encryptor = None
-        self._plaintext_hasher = hashutil.plaintext_hasher()
+        self._plaintext_hasher = plaintext_hasher()
         self._plaintext_segment_hasher = None
         self._plaintext_segment_hashes = []
         self._params = None
@@ -281,7 +294,7 @@ class EncryptAnUploadable:
             e = AES.new(key=key, mode=AES.MODE_CTR, counterstart="\x00"*16)
             self._encryptor = e
 
-            storage_index = hashutil.storage_index_chk_hash(key)
+            storage_index = storage_index_chk_hash(key)
             assert isinstance(storage_index, str)
             # There's no point to having the SI be longer than the key, so we
             # specify that it is truncated to the same 128 bits as the AES key.
@@ -305,7 +318,7 @@ class EncryptAnUploadable:
         if p:
             left = self._segment_size - self._plaintext_segment_hashed_bytes
             return p, left
-        p = hashutil.plaintext_segment_hasher()
+        p = plaintext_segment_hasher()
         self._plaintext_segment_hasher = p
         self._plaintext_segment_hashed_bytes = 0
         return p, self._segment_size
@@ -491,7 +504,7 @@ class ConvergentUploadMixin:
     def get_encryption_key(self):
         if self._key is None:
             f = self._filehandle
-            enckey_hasher = hashutil.key_hasher()
+            enckey_hasher = key_hasher()
             #enckey_hasher.update(encoding_parameters) # TODO
             f.seek(0)
             BLOCKSIZE = 64*1024
diff --git a/src/allmydata/util/hashutil.py b/src/allmydata/util/hashutil.py
index 47ff0bf2..54caf0a4 100644
--- a/src/allmydata/util/hashutil.py
+++ b/src/allmydata/util/hashutil.py
@@ -66,33 +66,26 @@ KEYLEN = 16
 def random_key():
     return os.urandom(KEYLEN)
 
-def file_renewal_secret_hash(my_secret, storage_index):
-    my_renewal_secret = tagged_hash(my_secret, "bucket_renewal_secret")
-    file_renewal_secret = tagged_pair_hash("file_renewal_secret",
-                                           my_renewal_secret, storage_index)
-    return file_renewal_secret
-
-def file_cancel_secret_hash(my_secret, storage_index):
-    my_cancel_secret = tagged_hash(my_secret, "bucket_cancel_secret")
-    file_cancel_secret = tagged_pair_hash("file_cancel_secret",
-                                          my_cancel_secret, storage_index)
-    return file_cancel_secret
-
-def bucket_renewal_secret_hash(my_secret, storage_index, peerid):
-    my_renewal_secret = tagged_hash(my_secret, "bucket_renewal_secret")
-    file_renewal_secret = tagged_pair_hash("file_renewal_secret",
-                                           my_renewal_secret, storage_index)
-    bucket_renewal_secret = tagged_pair_hash("bucket_renewal_secret",
-                                             file_renewal_secret, peerid)
-    return bucket_renewal_secret
-
-def bucket_cancel_secret_hash(my_secret, storage_index, peerid):
-    my_cancel_secret = tagged_hash(my_secret, "bucket_cancel_secret")
-    file_cancel_secret = tagged_pair_hash("file_cancel_secret",
-                                          my_cancel_secret, storage_index)
-    bucket_cancel_secret = tagged_pair_hash("bucket_cancel_secret",
-                                            file_cancel_secret, peerid)
-    return bucket_cancel_secret
+def my_renewal_secret_hash(my_secret):
+    return tagged_hash(my_secret, "bucket_renewal_secret")
+def my_cancel_secret_hash(my_secret):
+    return tagged_hash(my_secret, "bucket_cancel_secret")
+
+def file_renewal_secret_hash(client_renewal_secret, storage_index):
+    return tagged_pair_hash("file_renewal_secret",
+                            client_renewal_secret, storage_index)
+
+def file_cancel_secret_hash(client_cancel_secret, storage_index):
+    return tagged_pair_hash("file_cancel_secret",
+                            client_cancel_secret, storage_index)
+
+def bucket_renewal_secret_hash(file_renewal_secret, peerid):
+    return tagged_pair_hash("bucket_renewal_secret",
+                            file_renewal_secret, peerid)
+
+def bucket_cancel_secret_hash(file_cancel_secret, peerid):
+    return tagged_pair_hash("bucket_cancel_secret",
+                            file_cancel_secret, peerid)
 
 def dir_write_enabler_hash(write_key):
     return tagged_hash("allmydata_dir_write_enabler_v1", write_key)
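
The patch wires a two-step derivation through the upload path: the client-wide
renewal (or cancel) secret is hashed with the storage index to get a per-file
secret, which is then hashed with each peerid to get the per-bucket secret
handed to that one storage server. Below is a minimal sketch of that chain
using the new hashutil helpers; it is not part of the patch, it assumes the
patched allmydata tree is importable, and the storage index and peer ids are
made-up placeholders (the client-side secrets themselves are still the fake
empty string at this stage).

# Sketch: derive per-bucket renew secrets for two hypothetical peers.
from allmydata.util.hashutil import file_renewal_secret_hash, \
     bucket_renewal_secret_hash

client_renewal_secret = ""      # still fake, as in Client.get_renewal_secret()
storage_index = "\x00" * 16     # placeholder storage index
peerid_a = "A" * 20             # placeholder peer ids
peerid_b = "B" * 20

# one secret per file, bound to the storage index
file_renewal_secret = file_renewal_secret_hash(client_renewal_secret,
                                               storage_index)
# one secret per (file, peer) pair, bound to the peerid
renew_a = bucket_renewal_secret_hash(file_renewal_secret, peerid_a)
renew_b = bucket_renewal_secret_hash(file_renewal_secret, peerid_b)
assert renew_a != renew_b       # each server receives its own renew secret

The cancel secret follows the same chain through file_cancel_secret_hash and
bucket_cancel_secret_hash.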