2 from twisted.trial import unittest
3 from twisted.application import service
5 from foolscap import Tub, eventual
6 from foolscap.logging import log
8 from allmydata import offloaded, storage
9 from allmydata.immutable import upload
10 from allmydata.util import hashutil, fileutil, mathutil
11 from pycryptopp.cipher.aes import AES
# ~12 kB of repetitive plaintext used as the upload payload by every test.
DATA = "I need help\n" * 1000
class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
    """Upload helper that skips real encoding/pushing.

    It queries the encrypted uploadable only for its encoding parameters,
    then fabricates the URI-extension data a real encoder would have
    produced.  NOTE(review): several source lines of this class are elided
    from this view (the dict closer, the enclosing '_got_size' callback).
    """
    def start_encrypted(self, eu):
        # Ask the encrypted uploadable for (k, happy, n, segment_size).
        d2 = eu.get_all_encoding_parameters()
        def _got_parms(parms):
            needed_shares, happy, total_shares, segsize = parms
            # Fabricate the UEB data instead of actually encoding shares.
            ueb_data = {"needed_shares": needed_shares,
                        "total_shares": total_shares,
                        "segment_size": segsize,
            self._results.uri_extension_data = ueb_data
            # 'size' is bound in an elided line -- presumably the plaintext
            # size captured by an enclosing '_got_size' callback; confirm
            # against the full source.
            return (hashutil.uri_extension_hash(""),
                    needed_shares, total_shares, size)
        d2.addCallback(_got_parms)
        d.addCallback(_got_size)
class CHKUploadHelper_already_uploaded(offloaded.CHKUploadHelper):
    """Upload helper that pretends the file is already present in the grid."""
    # NOTE(review): the 'def' line for the method below is elided from this
    # view; these statements build the fake UploadResults it returns.
        res = upload.UploadResults()
        res.uri_extension_hash = hashutil.uri_extension_hash("")
        # we're pretending that the file they're trying to upload was already
        # present in the grid. We return some information about the file, so
        # the client can decide if they like the way it looks. The parameters
        # used here are chosen to match the defaults.
        PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
        ueb_data = {"needed_shares": PARAMS["k"],
                    "total_shares": PARAMS["n"],
                    "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
        res.uri_extension_data = ueb_data
class FakeClient(service.MultiService):
    """Minimal stand-in for the client node.

    Supplies only the hooks the upload machinery consults: encoding
    parameters, peer selection, and logging.  NOTE(review): parts of the
    parameter dict and the get_permuted_peers body are elided from this
    view; 'MiB' is presumably defined in an elided line.
    """
    DEFAULT_ENCODING_PARAMETERS = {"k":25,
                                   "max_segment_size": 1*MiB,
    def log(self, *args, **kwargs):
        # Forward to foolscap's logging facility.
        return log.msg(*args, **kwargs)
    def get_encoding_parameters(self):
        return self.DEFAULT_ENCODING_PARAMETERS
    def get_permuted_peers(self, service_name, storage_index):
def flush_but_dont_ignore(res):
    """Flush the eventual-send queue, then pass *res* through unchanged.

    NOTE(review): the remainder of this function is elided from this view;
    presumably it chains a callback that returns *res* after the flush.
    """
    d = eventual.flushEventualQueue()
def wait_a_few_turns(ignored=None):
    """Return a Deferred that fires after several eventual-send turns.

    Gives the Tub and helper time to settle before a test proceeds.
    NOTE(review): the trailing 'return d' line is elided from this view --
    callers clearly rely on it; confirm against the full source.
    """
    d = eventual.fireEventually()
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
    d.addCallback(eventual.fireEventually)
def upload_data(uploader, data, convergence):
    """Wrap *data* in an upload.Data source and hand it to *uploader*."""
    return uploader.upload(upload.Data(data, convergence=convergence))
class AssistedUpload(unittest.TestCase):
    # NOTE(review): setUp's 'def' line and the Tub construction are elided
    # from this view; 't' below is presumably the test's foolscap Tub.
        t.setServiceParent(self.s)
        # we never actually use this for network traffic, so it can use a
        # bogus location string -- the Tub only needs it to mint FURLs.
        t.setLocation("bogus:1234")
101 def setUpHelper(self, basedir):
102 fileutil.make_dirs(basedir)
103 self.helper = h = offloaded.Helper(basedir)
104 h.chk_upload_helper_class = CHKUploadHelper_fake
105 h.setServiceParent(self.s)
106 self.helper_furl = self.tub.registerReference(h)
        # NOTE(review): this is the tail of tearDown (its 'def' line is
        # elided): stop every service, let queued eventual-sends run, then
        # flush the queue without swallowing any failure.
        d = self.s.stopService()
        d.addCallback(eventual.fireEventually)
        d.addBoth(flush_but_dont_ignore)
        # NOTE(review): the 'def test_one(self):' line and the bodies of the
        # '_ready'/'_uploaded' callbacks are elided from this view.
        self.basedir = "helper/AssistedUpload/test_one"
        self.setUpHelper(self.basedir)
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)
        # Give the Tub and helper a few turns to finish connecting.
        d = wait_a_few_turns()
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        d.addCallback(_uploaded)
        def _check_empty(res):
            # A finished upload must leave no leftover ciphertext or
            # partially-received shares in the helper's working directories.
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)
    def test_previous_upload_failed(self):
        """A stale ciphertext file from a failed upload must not block a retry."""
        self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
        self.setUpHelper(self.basedir)
        # we want to make sure that an upload which fails (leaving the
        # ciphertext in the CHK_encoding/ directory) does not prevent a later
        # attempt to upload that file from working. We simulate this by
        # populating the directory manually. The hardest part is guessing the
        k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
        n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
        max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
        segsize = min(max_segsize, len(DATA))
        # this must be a multiple of 'required_shares'==k
        segsize = mathutil.next_multiple(segsize, k)
        # Derive the same convergent key the uploader will compute, so our
        # planted file lands under the storage index the helper will look up.
        key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
        assert len(key) == 16
        SI = hashutil.storage_index_hash(key)
        SI_s = storage.si_b2a(SI)
        # Drop a pre-encrypted copy of DATA where a crashed upload would
        # have left it.
        encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
        f = open(encfile, "wb")
        # NOTE(review): 'encryptor' is constructed in an elided line --
        # presumably AES(key) from pycryptopp; confirm against full source.
        f.write(encryptor.process(DATA))
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)
        d = wait_a_few_turns()
            return upload_data(u, DATA, convergence="test convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        d.addCallback(_uploaded)
        def _check_empty(res):
            # The retried upload must consume the stale ciphertext and
            # leave both working directories empty again.
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)
    def test_already_uploaded(self):
        """An upload of a file the grid already holds should finish cleanly."""
        self.basedir = "helper/AssistedUpload/test_already_uploaded"
        self.setUpHelper(self.basedir)
        # Swap in the helper class that claims every file is already present.
        self.helper.chk_upload_helper_class = CHKUploadHelper_already_uploaded
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)
        d = wait_a_few_turns()
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        d.addCallback(_uploaded)
        def _check_empty(res):
            # No ciphertext or incoming-share residue should remain when the
            # helper short-circuits an already-present file.
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)