3 from twisted.trial import unittest
4 from twisted.application import service
6 from foolscap import Tub, eventual
7 from foolscap.logging import log
9 from allmydata import offloaded, storage
10 from allmydata.immutable import upload
11 from allmydata.util import hashutil, fileutil, mathutil
12 from pycryptopp.cipher.aes import AES
# ~12kB of repetitive plaintext used as the upload payload by every test below.
DATA = "I need help\n" * 1000
class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
    """Helper-side CHK upload class that fakes the encode-and-push step.

    Instead of really erasure-coding and uploading ciphertext, it only asks
    the encrypted-uploadable for its encoding parameters and fabricates the
    upload results, so the tests can exercise the client<->helper protocol
    without any real storage traffic.
    """
    def start_encrypted(self, eu):
        # 'eu' is the (remote) encrypted-uploadable handed to us by the
        # helper machinery; fetch its encoding parameters instead of
        # reading any ciphertext from it.
        d2 = eu.get_all_encoding_parameters()
        def _got_parms(parms):
            # parms is the (needed_shares, happy, total_shares, segsize)
            # tuple produced by get_all_encoding_parameters().
            needed_shares, happy, total_shares, segsize = parms
            # Fabricate just enough URI-extension-block data for the client
            # to accept the (fake) upload results.
            ueb_data = {"needed_shares": needed_shares,
                        "total_shares": total_shares,
                        "segment_size": segsize,
            # NOTE(review): this extract is missing several lines here --
            # the dict above is unclosed, and 'size', 'd', and '_got_size'
            # referenced below are defined in the elided portion of this
            # method.
            self._results.uri_extension_data = ueb_data
            # Return the tuple a finished upload would produce: a UEB hash
            # (of the empty string, since no real UEB is built) plus the
            # share counts and file size.
            return (hashutil.uri_extension_hash(""),
                    needed_shares, total_shares, size)
        d2.addCallback(_got_parms)
        d.addCallback(_got_size)
class CHKUploadHelper_already_uploaded(offloaded.CHKUploadHelper):
    """Helper-side CHK upload class that claims the file is already present.

    Its start_encrypted() immediately reports success with plausible upload
    results, so the client should skip ciphertext transfer entirely.
    """
    # NOTE(review): the 'def start_encrypted(self, eu):' line (and a couple
    # of neighbouring lines) are elided from this extract; the statements
    # below are that method's body.
        res = upload.UploadResults()
        # UEB hash of the empty string: no real UEB exists for this fake.
        res.uri_extension_hash = hashutil.uri_extension_hash("")

        # we're pretending that the file they're trying to upload was already
        # present in the grid. We return some information about the file, so
        # the client can decide if they like the way it looks. The parameters
        # used here are chosen to match the defaults.
        PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
        ueb_data = {"needed_shares": PARAMS["k"],
                    "total_shares": PARAMS["n"],
                    "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
        # NOTE(review): the remainder of this dict and its closing brace are
        # elided from this extract.
        res.uri_extension_data = ueb_data
class FakeClient(service.MultiService):
    """Minimal stand-in for the allmydata client service.

    Provides just the attributes/methods the uploader and helper consult:
    encoding parameters, a log() passthrough, and a peer-selection hook.
    """
    # Encoding defaults used by the tests, chosen to match a real client.
    # NOTE(review): part of this dict (e.g. 'happy' and 'n') is elided from
    # this extract, and 'MiB' is defined on an elided line.
    DEFAULT_ENCODING_PARAMETERS = {"k":25,
                                   "max_segment_size": 1*MiB,
    def log(self, *args, **kwargs):
        # Forward to foolscap's logger so code under test can call
        # client.log(...) as it would on a real client.
        return log.msg(*args, **kwargs)
    def get_encoding_parameters(self):
        # The uploader consults this for its k/n/segment-size defaults.
        return self.DEFAULT_ENCODING_PARAMETERS
    def get_permuted_peers(self, service_name, storage_index):
        # NOTE(review): body elided from this extract; presumably returns an
        # empty peer list, since these tests never do real storage traffic --
        # confirm against the full file.
def flush_but_dont_ignore(res):
    """Flush the foolscap eventual-send queue, then pass 'res' through.

    Used as an addBoth handler in tearDown so that any queued eventual-sends
    run to completion before the test finishes, without discarding the
    result (or failure) flowing down the Deferred chain.
    NOTE(review): the tail of this function (the callback that re-returns
    'res' after the flush) is elided from this extract.
    """
    d = eventual.flushEventualQueue()
def upload_data(uploader, data, convergence):
    """Convenience wrapper: upload the byte string 'data' through 'uploader'.

    Wraps the data in an upload.Data source (with the given convergence
    secret) and returns the Deferred produced by uploader.upload().
    """
    return uploader.upload(upload.Data(data, convergence=convergence))
class AssistedUpload(unittest.TestCase):
    """Tests for helper-assisted ('offloaded') CHK upload.

    Each test wires a fake client, a Tub, and an offloaded.Helper together
    inside one MultiService, then drives an upload through the helper furl.
    NOTE(review): several lines of this class (including the setUp/tearDown
    'def' lines and some test 'def' lines) are elided from this extract;
    hedged notes mark each gap.
    """
    # NOTE(review): setUp's 'def' line and the Tub construction ('t') are
    # elided from this extract.
        t.setServiceParent(self.s)
        # we never actually use this for network traffic, so it can use a
        # bogus location string
        t.setLocation("bogus:1234")

    def setUpHelper(self, basedir):
        # Create an offloaded.Helper rooted at 'basedir', substitute the
        # fake upload class (no real encode/push), attach it to our service
        # tree, and publish it on the tub so tests can reach it by furl.
        fileutil.make_dirs(basedir)
        self.helper = h = offloaded.Helper(basedir)
        h.chk_upload_helper_class = CHKUploadHelper_fake
        h.setServiceParent(self.s)
        self.helper_furl = self.tub.registerReference(h)

    # NOTE(review): tearDown's 'def' line is elided from this extract.
        # Stop all services, then flush the eventual-send queue so nothing
        # runs after the test ends (trial would flag that as a dirty reactor).
        d = self.s.stopService()
        d.addCallback(eventual.fireEventually)
        d.addBoth(flush_but_dont_ignore)

    # NOTE(review): test_one's 'def' line and some setup lines are elided
    # from this extract.
        self.basedir = "helper/AssistedUpload/test_one"
        self.setUpHelper(self.basedir)
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        # Spin the eventual-send queue a few turns so the uploader has had
        # time to connect to the helper before we start the upload.
        d = eventual.fireEventually()
        d.addCallback(eventual.fireEventually)
        d.addCallback(eventual.fireEventually)
        # NOTE(review): the '_ready' callback's 'def' line is elided; the
        # return below is its body.
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        # NOTE(review): _uploaded's body (presumably URI checks on the
        # results) is elided from this extract.
        d.addCallback(_uploaded)

        def _check_empty(res):
            # A successful upload must leave no residue in the helper's
            # working directories.
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

    def test_previous_upload_failed(self):
        self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
        self.setUpHelper(self.basedir)

        # we want to make sure that an upload which fails (leaving the
        # ciphertext in the CHK_encoding/ directory) does not prevent a later
        # attempt to upload that file from working. We simulate this by
        # populating the directory manually. The hardest part is guessing the
        # storage index that the upload will use, so we re-derive it here.
        k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
        n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
        max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
        segsize = min(max_segsize, len(DATA))
        # this must be a multiple of 'required_shares'==k
        segsize = mathutil.next_multiple(segsize, k)

        # Derive the convergent encryption key exactly as the uploader will.
        key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
        assert len(key) == 16
        # NOTE(review): the line creating 'encryptor' (presumably AES(key),
        # given the pycryptopp import) is elided from this extract.
        SI = hashutil.storage_index_hash(key)
        SI_s = storage.si_b2a(SI)
        # Plant leftover ciphertext where a failed previous upload would
        # have left it.
        encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
        f = open(encfile, "wb")
        f.write(encryptor.process(DATA))
        # NOTE(review): f.close() appears on an elided line.

        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        # Let the uploader connect to the helper before starting.
        d = eventual.fireEventually()
        d.addCallback(eventual.fireEventually)
        d.addCallback(eventual.fireEventually)
        # NOTE(review): the '_ready' callback's 'def' line is elided; the
        # return below is its body. Same convergence string as used for the
        # planted ciphertext, so the storage index matches.
            return upload_data(u, DATA, convergence="test convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        # NOTE(review): _uploaded's body is elided from this extract.
        d.addCallback(_uploaded)

        def _check_empty(res):
            # The retried upload should complete and clean out both the
            # leftover ciphertext and the incoming directory.
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

    def test_already_uploaded(self):
        self.basedir = "helper/AssistedUpload/test_already_uploaded"
        self.setUpHelper(self.basedir)
        # Swap in the fake that claims the file already exists in the grid,
        # so no ciphertext should ever be transferred.
        self.helper.chk_upload_helper_class = CHKUploadHelper_already_uploaded
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        # Let the uploader connect to the helper before starting.
        d = eventual.fireEventually()
        d.addCallback(eventual.fireEventually)
        d.addCallback(eventual.fireEventually)
        # NOTE(review): the '_ready' callback's 'def' line is elided; the
        # return below is its body.
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
        # NOTE(review): _uploaded's body is elided from this extract.
        d.addCallback(_uploaded)

        def _check_empty(res):
            # "Already uploaded" means the helper never stored anything, so
            # both working directories must be empty.
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)