1 from allmydata.test import common
2 from allmydata.interfaces import NotEnoughSharesError
3 from allmydata.util.consumer import download_to_data
4 from allmydata import uri
5 from twisted.internet import defer
6 from twisted.trial import unittest
9 from foolscap.api import eventually
10 from allmydata.util import log
12 from allmydata.immutable.downloader import finder
class MockNode(object):
    """Stand-in for the immutable downloader's DownloadNode.

    The ShareFinder under test talks to this object; the test asserts on
    the ordering contract: once no_more_shares() has been called,
    got_shares() must never be called again ("reneging"), and
    fetch_failed() must not be reported when check_fetch_failed is set.
    when_finished() hands out a Deferred that fires (or errbacks with a
    FailTest) when the scenario has played out.
    """
    def __init__(self, check_reneging, check_fetch_failed):
        # NOTE(review): the pasted chunk had dropped several statements
        # (e.g. 'self.got = 0'); restored so the counters/flags the
        # methods below rely on actually exist.
        self.got = 0                    # number of shares received so far
        self.finished_d = defer.Deferred()
        self.segment_size = 78
        self.guessed_segment_size = 78
        self._no_more_shares = False    # set once the finder gives up
        self.check_reneging = check_reneging
        self.check_fetch_failed = check_fetch_failed
        self._si_prefix = 'aa'
        self.have_UEB = True
        self.share_hash_tree = mock.Mock()
        self.share_hash_tree.needed_hashes.return_value = False
        self.on_want_more_shares = None  # optional hook for the test

    def when_finished(self):
        """Return the Deferred that fires when the scenario completes."""
        return self.finished_d

    def get_num_segments(self):
        # (num_segments, authoritative) -- matches the real node's API
        return (5, True)

    def _calculate_sizes(self, guessed_segment_size):
        return {'block_size': 4, 'num_segments': 5}

    def no_more_shares(self):
        # The share finder promises it will never deliver another share.
        self._no_more_shares = True

    def got_shares(self, shares):
        if self.check_reneging:
            if self._no_more_shares:
                # Reneging: "no more shares, ever" followed by a share.
                self.finished_d.errback(unittest.FailTest("The node was told by the share finder that it is destined to remain hungry, then was given another share."))
                return
        self.got += len(shares)
        log.msg("yyy 3 %s.got_shares(%s) got: %s" % (self, shares, self.got))
        if self.got == 3:
            # K=3 scenario: success once three shares have arrived.
            self.finished_d.callback(True)

    def get_desired_ciphertext_hashes(self, *args, **kwargs):
        return iter([])

    def fetch_failed(self, *args, **kwargs):
        if self.check_fetch_failed:
            if self.finished_d:
                self.finished_d.errback(unittest.FailTest("The node was told by the segment fetcher that the download failed."))
                # guard against double-firing the Deferred
                self.finished_d = None

    def want_more_shares(self):
        if self.on_want_more_shares:
            self.on_want_more_shares()

    def process_blocks(self, *args, **kwargs):
        if self.finished_d:
            self.finished_d.callback(None)
class TestShareFinder(unittest.TestCase):
    def test_no_reneging_on_no_more_shares_ever(self):
        """The finder must not deliver a share after saying 'no more, ever'.

        NOTE(review): the pasted chunk had dropped lines (unclosed version
        dict, missing returns, missing server wiring); restored them.
        """
        # ticket #1191

        # Suppose that K=3 and you send two DYHB requests, the first
        # response offers two shares, and then the last offers one
        # share. If you tell your share consumer "no more shares,
        # ever", and then immediately tell them "oh, and here's
        # another share", then you lose.

        rcap = uri.CHKFileURI('a'*32, 'a'*32, 3, 99, 100)
        vcap = rcap.get_verify_cap()

        class MockServer(object):
            def __init__(self, buckets):
                self.version = {
                    'http://allmydata.org/tahoe/protocols/storage/v1': {
                        "tolerates-immutable-read-overrun": True
                        }
                    }
                self.buckets = buckets
                self.d = defer.Deferred()
                self.s = None  # the ShareFinder, wired up below
            def callRemote(self, methname, *args, **kwargs):
                d = defer.Deferred()

                # Even after the 3rd answer we're still hungry because
                # we're interested in finding a share on a 3rd server
                # so we don't have to download more than one share
                # from the first server. This is actually necessary to
                # trigger the bug.
                def _give_buckets_and_hunger_again():
                    d.callback(self.buckets)
                    self.s.hungry()
                eventually(_give_buckets_and_hunger_again)
                return d

        class MockIServer(object):
            def __init__(self, serverid, rref):
                self.serverid = serverid
                self.rref = rref
            def get_serverid(self):
                return self.serverid
            def get_rref(self):
                return self.rref
            def get_name(self):
                return "name-%s" % self.serverid
            def get_version(self):
                return self.rref.version

        # server1 offers two shares, server2 none, server3 one (K=3)
        mockserver1 = MockServer({1: mock.Mock(), 2: mock.Mock()})
        mockserver2 = MockServer({})
        mockserver3 = MockServer({3: mock.Mock()})
        mockstoragebroker = mock.Mock()
        servers = [ MockIServer("ms1", mockserver1),
                    MockIServer("ms2", mockserver2),
                    MockIServer("ms3", mockserver3), ]
        mockstoragebroker.get_servers_for_psi.return_value = servers
        mockdownloadstatus = mock.Mock()
        mocknode = MockNode(check_reneging=True, check_fetch_failed=True)

        s = finder.ShareFinder(mockstoragebroker, vcap, mocknode,
                               mockdownloadstatus)

        # the mock servers re-poke the finder after answering
        mockserver1.s = s
        mockserver2.s = s
        mockserver3.s = s

        s.hungry()

        return mocknode.when_finished()
class Test(common.ShareManglingMixin, common.ShouldFailMixin, unittest.TestCase):
    """End-to-end immutable download tests against mangled share sets.

    NOTE(review): the pasted chunk had dropped lines throughout this class
    (loop headers, helper definitions, and the trailing 'return d' of each
    method -- without which trial would never wait on the Deferred chain);
    restored them.
    """

    def test_test_code(self):
        # The following process of stashing the shares, running
        # replace_shares, and asserting that the new set of shares equals the
        # old is more to test this test code than to test the Tahoe code...
        d = defer.succeed(None)
        d.addCallback(self.find_all_shares)
        stash = [None]
        def _stash_it(res):
            stash[0] = res
            return res
        d.addCallback(_stash_it)

        # The following process of deleting 8 of the shares and asserting
        # that you can't download it is more to test this test code than to
        # test the Tahoe code...
        def _then_delete_8(unused=None):
            self.replace_shares(stash[0], storage_index=self.uri.get_storage_index())
            for i in range(8):
                self._delete_a_share()
        d.addCallback(_then_delete_8)

        def _then_download(unused=None):
            d2 = download_to_data(self.n)

            def _after_download_callb(result):
                self.fail() # should have gotten an errback instead
                return result
            def _after_download_errb(failure):
                failure.trap(NotEnoughSharesError)
                return None # success!
            d2.addCallbacks(_after_download_callb, _after_download_errb)
            return d2
        d.addCallback(_then_download)

        return d

    def test_download(self):
        """ Basic download. (This functionality is more or less already
        tested by test code in other modules, but this module is also going
        to test some more specific things about immutable download.)
        """
        d = defer.succeed(None)
        before_download_reads = self._count_reads()
        def _after_download(unused=None):
            after_download_reads = self._count_reads()
            #print before_download_reads, after_download_reads
            self.failIf(after_download_reads-before_download_reads > 41,
                        (after_download_reads, before_download_reads))
        d.addCallback(self._download_and_check_plaintext)
        d.addCallback(_after_download)
        return d

    def test_download_from_only_3_remaining_shares(self):
        """ Test download after 7 random shares (of the 10) have been
        removed."""
        d = defer.succeed(None)
        def _then_delete_7(unused=None):
            for i in range(7):
                self._delete_a_share()
        before_download_reads = self._count_reads()
        d.addCallback(_then_delete_7)
        def _after_download(unused=None):
            after_download_reads = self._count_reads()
            #print before_download_reads, after_download_reads
            self.failIf(after_download_reads-before_download_reads > 41, (after_download_reads, before_download_reads))
        d.addCallback(self._download_and_check_plaintext)
        d.addCallback(_after_download)
        return d

    def test_download_from_only_3_shares_with_good_crypttext_hash(self):
        """ Test download after 7 random shares (of the 10) have had their
        crypttext hash tree corrupted."""
        d = defer.succeed(None)
        def _then_corrupt_7(unused=None):
            # list() so random.shuffle works on py3's lazy range too
            shnums = list(range(10))
            random.shuffle(shnums)
            for i in shnums[:7]:
                self._corrupt_a_share(None, common._corrupt_offset_of_block_hashes_to_truncate_crypttext_hashes, i)
        #before_download_reads = self._count_reads()
        d.addCallback(_then_corrupt_7)
        d.addCallback(self._download_and_check_plaintext)
        return d

    def test_download_abort_if_too_many_missing_shares(self):
        """ Test that download gives up quickly when it realizes there aren't
        enough shares out there."""
        for i in range(8):
            self._delete_a_share()
        d = self.shouldFail(NotEnoughSharesError, "delete 8", None,
                            download_to_data, self.n)
        # the new downloader pipelines a bunch of read requests in parallel,
        # so don't bother asserting anything about the number of reads
        return d

    def test_download_abort_if_too_many_corrupted_shares(self):
        """Test that download gives up quickly when it realizes there aren't
        enough uncorrupted shares out there. It should be able to tell
        because the corruption occurs in the sharedata version number, which
        it checks first."""
        d = defer.succeed(None)
        def _then_corrupt_8(unused=None):
            # list() so random.shuffle works on py3's lazy range too
            shnums = list(range(10))
            random.shuffle(shnums)
            for shnum in shnums[:8]:
                self._corrupt_a_share(None, common._corrupt_sharedata_version_number, shnum)
        d.addCallback(_then_corrupt_8)

        before_download_reads = self._count_reads()
        def _attempt_to_download(unused=None):
            d2 = download_to_data(self.n)

            def _callb(res):
                self.fail("Should have gotten an error from attempt to download, not %r" % (res,))
            def _errb(f):
                self.failUnless(f.check(NotEnoughSharesError))
            d2.addCallbacks(_callb, _errb)
            return d2

        d.addCallback(_attempt_to_download)

        def _after_attempt(unused=None):
            after_download_reads = self._count_reads()
            #print before_download_reads, after_download_reads
            # To pass this test, you are required to give up before reading
            # all of the share data. Actually, we could give up sooner than
            # 45 reads, but currently our download code does 45 reads. This
            # test then serves as a "performance regression detector" -- if
            # you change download code so that it takes *more* reads, then
            # this test will fail.
            self.failIf(after_download_reads-before_download_reads > 45,
                        (after_download_reads, before_download_reads))
        d.addCallback(_after_attempt)
        return d
268 # XXX extend these tests to show bad behavior of various kinds from servers:
269 # raising exception from each remove_foo() method, for example
271 # XXX test disconnect DeadReferenceError from get_buckets and get_block_whatsit
273 # TODO: delete this whole file