# src/allmydata/test/test_helper.py -- tests for the offloaded ("assisted") upload helper
1
2 import os
3 from twisted.trial import unittest
4 from twisted.application import service
5
6 from foolscap import Tub, eventual
7 from foolscap.logging import log
8
9 from allmydata import offloaded, storage
10 from allmydata.immutable import upload
11 from allmydata.util import hashutil, fileutil, mathutil
12 from pycryptopp.cipher.aes import AES
13
14 MiB = 1024*1024
15
16 DATA = "I need help\n" * 1000
17
18 class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
19     def start_encrypted(self, eu):
20         d = eu.get_size()
21         def _got_size(size):
22             d2 = eu.get_all_encoding_parameters()
23             def _got_parms(parms):
24                 needed_shares, happy, total_shares, segsize = parms
25                 ueb_data = {"needed_shares": needed_shares,
26                             "total_shares": total_shares,
27                             "segment_size": segsize,
28                             "size": size,
29                             }
30                 self._results.uri_extension_data = ueb_data
31                 return (hashutil.uri_extension_hash(""),
32                         needed_shares, total_shares, size)
33             d2.addCallback(_got_parms)
34             return d2
35         d.addCallback(_got_size)
36         return d
37
38 class CHKUploadHelper_already_uploaded(offloaded.CHKUploadHelper):
39     def start(self):
40         res = upload.UploadResults()
41         res.uri_extension_hash = hashutil.uri_extension_hash("")
42
43         # we're pretending that the file they're trying to upload was already
44         # present in the grid. We return some information about the file, so
45         # the client can decide if they like the way it looks. The parameters
46         # used here are chosen to match the defaults.
47         PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
48         ueb_data = {"needed_shares": PARAMS["k"],
49                     "total_shares": PARAMS["n"],
50                     "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
51                     "size": len(DATA),
52                     }
53         res.uri_extension_data = ueb_data
54         return (res, None)
55
56 class FakeClient(service.MultiService):
57     DEFAULT_ENCODING_PARAMETERS = {"k":25,
58                                    "happy": 75,
59                                    "n": 100,
60                                    "max_segment_size": 1*MiB,
61                                    }
62     stats_provider = None
63     def log(self, *args, **kwargs):
64         return log.msg(*args, **kwargs)
65     def get_encoding_parameters(self):
66         return self.DEFAULT_ENCODING_PARAMETERS
67     def get_permuted_peers(self, service_name, storage_index):
68         return []
69
70 def flush_but_dont_ignore(res):
71     d = eventual.flushEventualQueue()
72     def _done(ignored):
73         return res
74     d.addCallback(_done)
75     return d
76
77 def upload_data(uploader, data, convergence):
78     u = upload.Data(data, convergence=convergence)
79     return uploader.upload(u)
80
81 class AssistedUpload(unittest.TestCase):
82     def setUp(self):
83         self.s = FakeClient()
84         self.s.startService()
85
86         self.tub = t = Tub()
87         t.setServiceParent(self.s)
88         self.s.tub = t
89         # we never actually use this for network traffic, so it can use a
90         # bogus host/port
91         t.setLocation("bogus:1234")
92
93     def setUpHelper(self, basedir):
94         fileutil.make_dirs(basedir)
95         self.helper = h = offloaded.Helper(basedir)
96         h.chk_upload_helper_class = CHKUploadHelper_fake
97         h.setServiceParent(self.s)
98         self.helper_furl = self.tub.registerReference(h)
99
100     def tearDown(self):
101         d = self.s.stopService()
102         d.addCallback(eventual.fireEventually)
103         d.addBoth(flush_but_dont_ignore)
104         return d
105
106
107     def test_one(self):
108         self.basedir = "helper/AssistedUpload/test_one"
109         self.setUpHelper(self.basedir)
110         u = upload.Uploader(self.helper_furl)
111         u.setServiceParent(self.s)
112
113         # wait a few turns
114         d = eventual.fireEventually()
115         d.addCallback(eventual.fireEventually)
116         d.addCallback(eventual.fireEventually)
117
118         def _ready(res):
119             assert u._helper
120
121             return upload_data(u, DATA, convergence="some convergence string")
122         d.addCallback(_ready)
123         def _uploaded(results):
124             uri = results.uri
125             assert "CHK" in uri
126         d.addCallback(_uploaded)
127
128         def _check_empty(res):
129             files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
130             self.failUnlessEqual(files, [])
131             files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
132             self.failUnlessEqual(files, [])
133         d.addCallback(_check_empty)
134
135         return d
136
137     def test_previous_upload_failed(self):
138         self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
139         self.setUpHelper(self.basedir)
140
141         # we want to make sure that an upload which fails (leaving the
142         # ciphertext in the CHK_encoding/ directory) does not prevent a later
143         # attempt to upload that file from working. We simulate this by
144         # populating the directory manually. The hardest part is guessing the
145         # storage index.
146
147         k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
148         n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
149         max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
150         segsize = min(max_segsize, len(DATA))
151         # this must be a multiple of 'required_shares'==k
152         segsize = mathutil.next_multiple(segsize, k)
153
154         key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
155         assert len(key) == 16
156         encryptor = AES(key)
157         SI = hashutil.storage_index_hash(key)
158         SI_s = storage.si_b2a(SI)
159         encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
160         f = open(encfile, "wb")
161         f.write(encryptor.process(DATA))
162         f.close()
163
164         u = upload.Uploader(self.helper_furl)
165         u.setServiceParent(self.s)
166
167         # wait a few turns
168         d = eventual.fireEventually()
169         d.addCallback(eventual.fireEventually)
170         d.addCallback(eventual.fireEventually)
171
172         def _ready(res):
173             assert u._helper
174             return upload_data(u, DATA, convergence="test convergence string")
175         d.addCallback(_ready)
176         def _uploaded(results):
177             uri = results.uri
178             assert "CHK" in uri
179         d.addCallback(_uploaded)
180
181         def _check_empty(res):
182             files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
183             self.failUnlessEqual(files, [])
184             files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
185             self.failUnlessEqual(files, [])
186         d.addCallback(_check_empty)
187
188         return d
189
190     def test_already_uploaded(self):
191         self.basedir = "helper/AssistedUpload/test_already_uploaded"
192         self.setUpHelper(self.basedir)
193         self.helper.chk_upload_helper_class = CHKUploadHelper_already_uploaded
194         u = upload.Uploader(self.helper_furl)
195         u.setServiceParent(self.s)
196
197         # wait a few turns
198         d = eventual.fireEventually()
199         d.addCallback(eventual.fireEventually)
200         d.addCallback(eventual.fireEventually)
201
202         def _ready(res):
203             assert u._helper
204
205             return upload_data(u, DATA, convergence="some convergence string")
206         d.addCallback(_ready)
207         def _uploaded(results):
208             uri = results.uri
209             assert "CHK" in uri
210         d.addCallback(_uploaded)
211
212         def _check_empty(res):
213             files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
214             self.failUnlessEqual(files, [])
215             files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
216             self.failUnlessEqual(files, [])
217         d.addCallback(_check_empty)
218
219         return d