import os
from twisted.trial import unittest
from twisted.application import service

from foolscap import Tub, eventual
from foolscap.logging import log

from allmydata import offloaded, storage, upload
from allmydata.util import hashutil, fileutil, idlib, mathutil
from pycryptopp.cipher.aes import AES

MiB = 1024*1024

DATA = "I need help\n" * 1000

class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
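    # A stand-in for the real CHK upload helper: instead of erasure-coding
    # the ciphertext and pushing shares to storage servers, it only asks the
    # encrypted uploader for its size and encoding parameters, records
    # fabricated UEB data, and returns immediately.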
    def start_encrypted(self, eu):
        d = eu.get_size()
        def _got_size(size):
            d2 = eu.get_all_encoding_parameters()
            def _got_parms(parms):
                needed_shares, happy, total_shares, segsize = parms
                ueb_data = {"needed_shares": needed_shares,
                            "total_shares": total_shares,
                            "segment_size": segsize,
                            "size": size,
                            }
                self._results.uri_extension_data = ueb_data
                return (hashutil.uri_extension_hash(""),
                        needed_shares, total_shares, size)
            d2.addCallback(_got_parms)
            return d2
        d.addCallback(_got_size)
        return d

class CHKUploadHelper_already_uploaded(offloaded.CHKUploadHelper):
    def start(self):
        res = upload.UploadResults()
        res.uri_extension_hash = hashutil.uri_extension_hash("")

        # We're pretending that the file being uploaded was already present
        # in the grid. We return some information about the file so the
        # client can decide whether it likes the way it looks. The parameters
        # used here are chosen to match the defaults.
        PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
        ueb_data = {"needed_shares": PARAMS["k"],
                    "total_shares": PARAMS["n"],
                    "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
                    "size": len(DATA),
                    }
        res.uri_extension_data = ueb_data
        return (res, None)

class FakeClient(service.MultiService):
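    # A minimal stand-in for the client node: it provides just enough of the
    # client interface (logging, default encoding parameters, and an empty
    # peer list) for the Uploader and Helper services to run without any
    # real storage servers.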
    DEFAULT_ENCODING_PARAMETERS = {"k": 25,
                                   "happy": 75,
                                   "n": 100,
                                   "max_segment_size": 1*MiB,
                                   }
    def log(self, *args, **kwargs):
        return log.msg(*args, **kwargs)
    def get_encoding_parameters(self):
        return self.DEFAULT_ENCODING_PARAMETERS
    def get_permuted_peers(self, service_name, storage_index):
        return []

def flush_but_dont_ignore(res):
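    # Used from tearDown via addBoth: flush foolscap's eventual-send queue so
    # pending turns don't leak into the next test, while still passing the
    # original result (or failure) through.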
    d = eventual.flushEventualQueue()
    def _done(ignored):
        return res
    d.addCallback(_done)
    return d

def upload_data(uploader, data):
    u = upload.Data(data)
    return uploader.upload(u)

class AssistedUpload(unittest.TestCase):
    def setUp(self):
        self.s = FakeClient()
        self.s.startService()

        self.tub = t = Tub()
        t.setServiceParent(self.s)
        self.s.tub = t
        # we never actually use this for network traffic, so it can use a
        # bogus host/port
        t.setLocation("bogus:1234")

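    # The Helper is published on the same Tub as the client, and the
    # resulting FURL is what a client node would normally be configured with
    # to reach a remote upload helper; here both ends live in one process.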
    def setUpHelper(self, basedir):
        fileutil.make_dirs(basedir)
        self.helper = h = offloaded.Helper(basedir)
        h.chk_upload_helper_class = CHKUploadHelper_fake
        h.setServiceParent(self.s)
        self.helper_furl = self.tub.registerReference(h)

    def tearDown(self):
        d = self.s.stopService()
        d.addCallback(eventual.fireEventually)
        d.addBoth(flush_but_dont_ignore)
        return d


    def test_one(self):
        self.basedir = "helper/AssistedUpload/test_one"
        self.setUpHelper(self.basedir)
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        # wait a few turns
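        # (the Uploader must connect to the helper over its FURL, which is
        # asynchronous even though both ends live in this process, so give
        # the reactor a few turns before asserting that u._helper is set)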
        d = eventual.fireEventually()
        d.addCallback(eventual.fireEventually)
        d.addCallback(eventual.fireEventually)

        def _ready(res):
            assert u._helper

            return upload_data(u, DATA)
        d.addCallback(_ready)
        def _uploaded(results):
            uri = results.uri
            assert "CHK" in uri
        d.addCallback(_uploaded)

        def _check_empty(res):
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

        return d

    def test_previous_upload_failed(self):
        self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
        self.setUpHelper(self.basedir)

        # We want to make sure that an upload which fails (leaving the
        # ciphertext in the CHK_encoding/ directory) does not prevent a later
        # attempt to upload the same file from succeeding. We simulate this
        # by populating the directory manually. The hardest part is guessing
        # the storage index.
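        # We can predict it because the CHK encryption key is derived
        # deterministically (content_hash_key_hash) from the encoding
        # parameters and the plaintext, and the storage index is in turn a
        # hash of that key.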

        k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
        n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
        max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
        segsize = min(max_segsize, len(DATA))
        # this must be a multiple of 'required_shares'==k
        segsize = mathutil.next_multiple(segsize, k)
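        # (with DATA = 12 * 1000 = 12000 bytes, segsize is 12000, which is
        # already a multiple of k=25, so next_multiple leaves it unchanged)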

        key = hashutil.content_hash_key_hash(k, n, segsize, DATA)
        assert len(key) == 16
        encryptor = AES(key)
        SI = hashutil.storage_index_hash(key)
        SI_s = storage.si_b2a(SI)
        encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
        f = open(encfile, "wb")
        f.write(encryptor.process(DATA))
        f.close()
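        # The ciphertext now sits where an interrupted helper upload would
        # have left it: in CHK_encoding/, under the same storage-index name
        # that the real upload of DATA will compute below.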

        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        # wait a few turns
        d = eventual.fireEventually()
        d.addCallback(eventual.fireEventually)
        d.addCallback(eventual.fireEventually)

        def _ready(res):
            assert u._helper
            return upload_data(u, DATA)
        d.addCallback(_ready)
        def _uploaded(results):
            uri = results.uri
            assert "CHK" in uri
        d.addCallback(_uploaded)

        def _check_empty(res):
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

        return d

    def test_already_uploaded(self):
        self.basedir = "helper/AssistedUpload/test_already_uploaded"
        self.setUpHelper(self.basedir)
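        # Swap in the helper class that reports the file as already present
        # in the grid, so the client should receive a URI without
        # re-uploading the ciphertext.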
        self.helper.chk_upload_helper_class = CHKUploadHelper_already_uploaded
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        # wait a few turns
        d = eventual.fireEventually()
        d.addCallback(eventual.fireEventually)
        d.addCallback(eventual.fireEventually)

        def _ready(res):
            assert u._helper

            return upload_data(u, DATA)
        d.addCallback(_ready)
        def _uploaded(results):
            uri = results.uri
            assert "CHK" in uri
        d.addCallback(_uploaded)

        def _check_empty(res):
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

        return d