3 from twisted.trial import unittest
4 from twisted.application import service
6 from foolscap import Tub, eventual
7 from foolscap.logging import log
9 from allmydata import offloaded, storage, upload
10 from allmydata.util import hashutil, fileutil, mathutil
11 from pycryptopp.cipher.aes import AES
# ~12 KB of repetitive plaintext used as the upload payload by every test below.
15 DATA = "I need help\n" * 1000
# Helper-side stub: replaces real encoding/pushing with fabricated
# URI-extension data so tests exercise the helper protocol quickly.
# NOTE(review): this listing is incomplete — the fused numbering skips
# orig lines 19-20, 27-28, 33 and 35-36, so the `d = eu.get_size()` /
# `def _got_size(size):` scaffolding that `size`, `d`, and `_got_size`
# below refer to is missing from view.
17 class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
18 def start_encrypted(self, eu):
21 d2 = eu.get_all_encoding_parameters()
22 def _got_parms(parms):
# unpack the (k, happy, n, segment-size) tuple from the encrypted uploadable
23 needed_shares, happy, total_shares, segsize = parms
24 ueb_data = {"needed_shares": needed_shares,
25 "total_shares": total_shares,
26 "segment_size": segsize,
29 self._results.uri_extension_data = ueb_data
# hash of the empty string stands in for a real URI-extension hash
30 return (hashutil.uri_extension_hash(""),
31 needed_shares, total_shares, size)
32 d2.addCallback(_got_parms)
34 d.addCallback(_got_size)
# Helper-side stub that simulates the "file already present in the grid"
# path: it builds UploadResults directly instead of doing any upload work.
# NOTE(review): the method header (orig line 38) and the tail of ueb_data /
# the return statement (orig 41, 50-51, 53-54) are missing from this listing.
37 class CHKUploadHelper_already_uploaded(offloaded.CHKUploadHelper):
39 res = upload.UploadResults()
40 res.uri_extension_hash = hashutil.uri_extension_hash("")
42 # we're pretending that the file they're trying to upload was already
43 # present in the grid. We return some information about the file, so
44 # the client can decide if they like the way it looks. The parameters
45 # used here are chosen to match the defaults.
46 PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
47 ueb_data = {"needed_shares": PARAMS["k"],
48 "total_shares": PARAMS["n"],
# a short file fits in one segment, hence the min() with the file length
49 "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
52 res.uri_extension_data = ueb_data
# Minimal stand-in for the Tahoe client node: it supplies encoding
# parameters, logging, and peer selection to the code under test.
# NOTE(review): the middle of DEFAULT_ENCODING_PARAMETERS (orig lines
# 57-58, 60 — presumably "happy"/"n" entries and the dict close) and the
# body of get_permuted_peers (orig 66-67) are missing from this listing.
55 class FakeClient(service.MultiService):
56 DEFAULT_ENCODING_PARAMETERS = {"k":25,
59 "max_segment_size": 1*MiB,
# forward log messages to foolscap's logging facility
61 def log(self, *args, **kwargs):
62 return log.msg(*args, **kwargs)
63 def get_encoding_parameters(self):
64 return self.DEFAULT_ENCODING_PARAMETERS
65 def get_permuted_peers(self, service_name, storage_index):
# Drain the foolscap eventual-send queue after a test while passing the
# incoming result/failure through rather than swallowing it.
# NOTE(review): the body is truncated here — orig lines 70-74 (the callback
# that re-raises/returns `res` and the final `return d`) are missing.
68 def flush_but_dont_ignore(res):
69 d = eventual.flushEventualQueue()
def upload_data(uploader, data, convergence):
    """Wrap *data* in an ``upload.Data`` uploadable (with the given
    convergence secret) and hand it to *uploader*.

    Returns whatever ``uploader.upload()`` returns (a Deferred that
    fires with the upload results).
    """
    uploadable = upload.Data(data, convergence=convergence)
    return uploader.upload(uploadable)
79 class AssistedUpload(unittest.TestCase):
# setUp fragment: attach a Tub to the service parent; the Tub never
# carries real network traffic, so it advertises a bogus location.
# NOTE(review): the `def setUp(self):` header and the service/Tub
# construction lines (orig 80-84, 86, 88, 90) are missing from this listing.
85 t.setServiceParent(self.s)
87 # we never actually use this for network traffic, so it can use a
89 t.setLocation("bogus:1234")
91 def setUpHelper(self, basedir):
92 fileutil.make_dirs(basedir)
93 self.helper = h = offloaded.Helper(basedir)
94 h.chk_upload_helper_class = CHKUploadHelper_fake
95 h.setServiceParent(self.s)
96 self.helper_furl = self.tub.registerReference(h)
# tearDown fragment: stop all services, let the eventual-send queue turn,
# then surface any late errors via flush_but_dont_ignore.
# NOTE(review): the `def tearDown(self):` header (orig 97-98) and the
# trailing `return d` are missing from this listing.
99 d = self.s.stopService()
100 d.addCallback(eventual.fireEventually)
101 d.addBoth(flush_but_dont_ignore)
# test_one fragment: upload DATA through an Uploader pointed at the fake
# helper, then verify the helper left no residue in CHK_encoding/ or
# CHK_incoming/. NOTE(review): orig lines 102-105, 110-111, 115-118,
# 122-123, 125 and 132-133 are missing from this listing — including the
# `def test_one(self):` header, the `_ready` header, the `_uploaded`
# body, and the final `return d`.
106 self.basedir = "helper/AssistedUpload/test_one"
107 self.setUpHelper(self.basedir)
108 u = upload.Uploader(self.helper_furl)
109 u.setServiceParent(self.s)
# let several eventual-send turns pass first (presumably so the Uploader
# can finish connecting to the helper FURL — confirm against full source)
112 d = eventual.fireEventually()
113 d.addCallback(eventual.fireEventually)
114 d.addCallback(eventual.fireEventually)
119 return upload_data(u, DATA, convergence="some convergence string")
120 d.addCallback(_ready)
121 def _uploaded(results):
124 d.addCallback(_uploaded)
# the helper must clean up after itself: both staging dirs end up empty
126 def _check_empty(res):
127 files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
128 self.failUnlessEqual(files, [])
129 files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
130 self.failUnlessEqual(files, [])
131 d.addCallback(_check_empty)
135 def test_previous_upload_failed(self):
136 self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
137 self.setUpHelper(self.basedir)
139 # we want to make sure that an upload which fails (leaving the
140 # ciphertext in the CHK_encoding/ directory) does not prevent a later
141 # attempt to upload that file from working. We simulate this by
142 # populating the directory manually. The hardest part is guessing the
# derive the same encoding parameters the helper will compute, so the
# manually-planted ciphertext file lands under the right storage index
145 k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
146 n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
147 max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
148 segsize = min(max_segsize, len(DATA))
149 # this must be a multiple of 'required_shares'==k
150 segsize = mathutil.next_multiple(segsize, k)
# convergent-encryption key: 16 bytes (AES-128)
152 key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
153 assert len(key) == 16
# storage index is derived from the key; si_b2a gives its filename form
155 SI = hashutil.storage_index_hash(key)
156 SI_s = storage.si_b2a(SI)
# plant a leftover ciphertext file, as an interrupted upload would leave.
# NOTE(review): `encryptor` is constructed on a line missing from this
# listing (orig 151) — presumably AES(key), given the pycryptopp import.
157 encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
158 f = open(encfile, "wb")
159 f.write(encryptor.process(DATA))
162 u = upload.Uploader(self.helper_furl)
163 u.setServiceParent(self.s)
# let several eventual-send turns pass before starting the upload
166 d = eventual.fireEventually()
167 d.addCallback(eventual.fireEventually)
168 d.addCallback(eventual.fireEventually)
172 return upload_data(u, DATA, convergence="test convergence string")
173 d.addCallback(_ready)
174 def _uploaded(results):
177 d.addCallback(_uploaded)
# after a successful retry, both staging dirs must be empty again
179 def _check_empty(res):
180 files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
181 self.failUnlessEqual(files, [])
182 files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
183 self.failUnlessEqual(files, [])
184 d.addCallback(_check_empty)
# Verify the short-circuit path: the helper claims the file already exists
# in the grid (via CHKUploadHelper_already_uploaded), so the client gets
# results without any real upload, and no staging files are left behind.
# NOTE(review): orig lines 194-195, 199-202, 206-207, 209 and the final
# `return d` are missing from this listing (`_ready` header, `_uploaded`
# body, etc.).
188 def test_already_uploaded(self):
189 self.basedir = "helper/AssistedUpload/test_already_uploaded"
190 self.setUpHelper(self.basedir)
# swap in the "already uploaded" stub on the live Helper instance
191 self.helper.chk_upload_helper_class = CHKUploadHelper_already_uploaded
192 u = upload.Uploader(self.helper_furl)
193 u.setServiceParent(self.s)
# let several eventual-send turns pass before starting the upload
196 d = eventual.fireEventually()
197 d.addCallback(eventual.fireEventually)
198 d.addCallback(eventual.fireEventually)
203 return upload_data(u, DATA, convergence="some convergence string")
204 d.addCallback(_ready)
205 def _uploaded(results):
208 d.addCallback(_uploaded)
# nothing should have been staged at all on this path
210 def _check_empty(res):
211 files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
212 self.failUnlessEqual(files, [])
213 files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
214 self.failUnlessEqual(files, [])
215 d.addCallback(_check_empty)