3 from twisted.trial import unittest
4 from twisted.application import service
6 from foolscap import Tub, eventual
7 from foolscap.logging import log
9 from allmydata import offloaded, storage
10 from allmydata.immutable import upload
11 from allmydata.util import hashutil, fileutil, mathutil
12 from pycryptopp.cipher.aes import AES
# ~12 kB of repetitive plaintext; shared upload payload for every test below.
DATA = 1000 * "I need help\n"
class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
    # Test double for the helper's CHK upload: consults the uploadable's
    # encoding parameters and fabricates results instead of really encoding.
    def start_encrypted(self, eu):
        # eu: an "encrypted uploadable" provider; only its encoding
        # parameters are read here -- no ciphertext is processed.
        d2 = eu.get_all_encoding_parameters()
        def _got_parms(parms):
            # parms unpacks as (needed_shares, happy, total_shares,
            # segment_size); 'happy' is unused by this fake.
            needed_shares, happy, total_shares, segsize = parms
            ueb_data = {"needed_shares": needed_shares,
                        "total_shares": total_shares,
                        "segment_size": segsize,
            self._results.uri_extension_data = ueb_data
            # Return the tuple the caller expects: a (dummy, empty-string)
            # URI-extension hash plus share counts and size.
            # NOTE(review): 'size' is bound on lines elided from this
            # excerpt -- confirm against the full file.
            return (hashutil.uri_extension_hash(""),
                    needed_shares, total_shares, size)
        d2.addCallback(_got_parms)
        # NOTE(review): 'd' and '_got_size' are defined on elided lines.
        d.addCallback(_got_size)
class CHKUploadHelper_already_uploaded(offloaded.CHKUploadHelper):
    # Test double that pretends the uploaded file already exists in the grid.
        # NOTE(review): the enclosing method header is elided from this
        # excerpt -- the following lines are its body.
        res = upload.UploadResults()
        # the empty-string hash stands in for a real URI-extension hash
        res.uri_extension_hash = hashutil.uri_extension_hash("")

        # we're pretending that the file they're trying to upload was already
        # present in the grid. We return some information about the file, so
        # the client can decide if they like the way it looks. The parameters
        # used here are chosen to match the defaults.
        PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
        ueb_data = {"needed_shares": PARAMS["k"],
                    "total_shares": PARAMS["n"],
                    "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
        res.uri_extension_data = ueb_data
class FakeClient(service.MultiService):
    # Minimal stand-in for the real client: supplies encoding parameters,
    # logging, and peer selection to the code under test.
    # NOTE(review): 'MiB' and the remaining dict entries are defined on
    # lines elided from this excerpt.
    DEFAULT_ENCODING_PARAMETERS = {"k":25,
                                   "max_segment_size": 1*MiB,
    def log(self, *args, **kwargs):
        # forward to foolscap's logger so test output is captured normally
        return log.msg(*args, **kwargs)
    def get_encoding_parameters(self):
        return self.DEFAULT_ENCODING_PARAMETERS
    def get_permuted_peers(self, service_name, storage_index):
def flush_but_dont_ignore(res):
    """Flush the foolscap eventual-send queue, then pass ``res`` through.

    Intended for ``d.addBoth(flush_but_dont_ignore)`` (see tearDown): unlike
    calling flushEventualQueue alone, the incoming callback/errback value
    ``res`` is preserved for the rest of the chain instead of being dropped.
    """
    d = eventual.flushEventualQueue()
    def _done(ignored):
        # restore the original chained value once the queue is drained
        return res
    d.addCallback(_done)
    return d
def wait_a_few_turns(ignored=None):
    """Return a Deferred that fires after several eventual-send turns.

    Chains fireEventually several times so that previously queued
    eventual-sends get a chance to run before the test proceeds. The
    ``ignored`` parameter lets this be used directly as a callback.
    """
    d = eventual.fireEventually()
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
    # callers do `d = wait_a_few_turns(); d.addCallback(...)`, so the
    # Deferred must be returned
    return d
def upload_data(uploader, data, convergence):
    """Upload the byte string ``data`` through ``uploader`` using the given
    convergence secret; return the uploader's Deferred."""
    uploadable = upload.Data(data, convergence=convergence)
    return uploader.upload(uploadable)
class AssistedUpload(unittest.TestCase):
    # Exercises uploads routed through a (local, faked) Helper.
    # NOTE(review): the setUp header and Tub construction are on lines
    # elided from this excerpt; 't' is presumably the Tub -- confirm.
        t.setServiceParent(self.s)
        # we never actually use this for network traffic, so it can use a
        t.setLocation("bogus:1234")
102 def setUpHelper(self, basedir):
103 fileutil.make_dirs(basedir)
104 self.helper = h = offloaded.Helper(basedir)
105 h.chk_upload_helper_class = CHKUploadHelper_fake
106 h.setServiceParent(self.s)
107 self.helper_furl = self.tub.registerReference(h)
        # NOTE(review): the enclosing tearDown header is elided from this
        # excerpt -- the following lines are its body.
        d = self.s.stopService()
        # give shutdown-triggered eventual-sends a turn to run
        d.addCallback(eventual.fireEventually)
        d.addBoth(flush_but_dont_ignore)
        # NOTE(review): the enclosing test-method header is elided from this
        # excerpt -- the following lines are its body.
        self.basedir = "helper/AssistedUpload/test_one"
        self.setUpHelper(self.basedir)
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        # give the Uploader a few turns to connect to the helper first
        d = wait_a_few_turns()

        # NOTE(review): the '_ready' definition line is elided.
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        d.addCallback(_uploaded)

        def _check_empty(res):
            # a completed upload must leave no leftover ciphertext or
            # partial shares in the helper's working directories
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)
    def test_previous_upload_failed(self):
        # Leftover ciphertext from a failed upload must not block a later
        # upload of the same file.
        self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
        self.setUpHelper(self.basedir)

        # we want to make sure that an upload which fails (leaving the
        # ciphertext in the CHK_encoding/ directory) does not prevent a later
        # attempt to upload that file from working. We simulate this by
        # populating the directory manually. The hardest part is guessing the
        k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
        n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
        max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
        segsize = min(max_segsize, len(DATA))
        # this must be a multiple of 'required_shares'==k
        segsize = mathutil.next_multiple(segsize, k)

        # derive the convergent encryption key the uploader will compute
        key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
        assert len(key) == 16

        # place a fake ciphertext file where a failed upload would leave it,
        # named by the storage index the uploader will derive from 'key'
        SI = hashutil.storage_index_hash(key)
        SI_s = storage.si_b2a(SI)
        encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
        f = open(encfile, "wb")
        # NOTE(review): 'encryptor' (presumably an AES instance keyed with
        # 'key') is created on a line elided from this excerpt -- confirm.
        f.write(encryptor.process(DATA))

        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        d = wait_a_few_turns()

        # NOTE(review): the '_ready' definition line is elided.
            return upload_data(u, DATA, convergence="test convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        d.addCallback(_uploaded)

        def _check_empty(res):
            # the stale ciphertext must be consumed/cleaned up by the upload
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)
    def test_already_uploaded(self):
        # When the helper reports the file as already present in the grid,
        # the upload should succeed without leaving any local residue.
        self.basedir = "helper/AssistedUpload/test_already_uploaded"
        self.setUpHelper(self.basedir)
        # swap in the "already uploaded" double after setUpHelper installed
        # the default fake
        self.helper.chk_upload_helper_class = CHKUploadHelper_already_uploaded
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        d = wait_a_few_turns()

        # NOTE(review): the '_ready' definition line is elided.
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        d.addCallback(_uploaded)

        def _check_empty(res):
            # nothing should have been written to the helper's work dirs
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)