3 from twisted.trial import unittest
4 from twisted.internet import defer
5 from allmydata import check_results, uri
6 from allmydata.web import check_results as web_check_results
7 from allmydata.storage_client import StorageFarmBroker, NativeStorageServer
8 from allmydata.monitor import Monitor
9 from allmydata.test.no_network import GridTestMixin
10 from allmydata.immutable.upload import Data
11 from allmydata.test.common_web import WebRenderingMixin
12 from allmydata.mutable.publish import MutableData
15 def get_storage_broker(self):
16 return self.storage_broker
18 class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
    def create_fake_client(self):
        # Build a StorageFarmBroker populated with three fake storage
        # servers (peerids \x00*20, \xff*20, \x11*20) and hand it to a
        # fake client object.
        # NOTE(review): this view of the method is truncated -- the
        # `ann = {` opener of the announcement dict, at least one dict
        # entry plus its closing brace, and the trailing construction /
        # return of the fake client are missing.  Do not edit without
        # the full file.
        sb = StorageFarmBroker(None, True)
        for (peerid, nickname) in [("\x00"*20, "peer-0"),
                                   ("\xff"*20, "peer-f"),
                                   ("\x11"*20, "peer-11")] :
            # announcement dict for one fake server; its opening line is
            # not visible in this chunk
                    "service-name": "storage",
                    "anonymous-storage-FURL": "pb://abcde@nowhere/fake",
                    "permutation-seed-base32": "",
                    "nickname": unicode(nickname),
                    "app-versions": {}, # need #466 and v2 introducer
                    "oldest-supported": "oldest",
            s = NativeStorageServer(peerid, ann)
            sb.test_add_server(peerid, s)
40 def render_json(self, page):
41 d = self.render1(page, args={"output": ["json"]})
    def test_literal(self):
        # Render LiteralCheckResults as HTML (plain and with a
        # return_to link), then as JSON.
        # NOTE(review): interior lines are missing from this view (the
        # initial render that binds `d` and `html`, a closing
        # parenthesis for one failUnlessIn, and the trailing
        # `return d`); the visible statements are kept byte-for-byte.
        c = self.create_fake_client()
        lcr = web_check_results.LiteralCheckResults(c)
        s = self.remove_tags(html)
        self.failUnlessIn("Literal files are always healthy", s)
        d.addCallback(lambda ignored:
                      self.render1(lcr, args={"return_to": ["FOOURL"]}))
        def _check_return_to(html):
            s = self.remove_tags(html)
            self.failUnlessIn("Literal files are always healthy", s)
            self.failUnlessIn('<a href="FOOURL">Return to file.</a>',
        d.addCallback(_check_return_to)
        d.addCallback(lambda ignored: self.render_json(lcr))
        def _check_json(json):
            j = simplejson.loads(json)
            self.failUnlessEqual(j["storage-index"], "")
            self.failUnlessEqual(j["results"]["healthy"], True)
        d.addCallback(_check_json)
        # NOTE(review): the `def test_check(self):` header of this test is
        # among the lines missing from this view, as are several interior
        # lines (the initial healthy/recoverable setters, the `data` dict
        # closers and cr.set_data(data) calls, some render hookups, the
        # `def _check(html):` header near the end, and the trailing
        # `return d`).  The visible statements are kept byte-for-byte.
        c = self.create_fake_client()
        serverid_1 = "\x00"*20
        serverid_f = "\xff"*20
        u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)
        cr = check_results.CheckResults(u, u.get_storage_index())
        cr.set_needs_rebalancing(False)
        cr.set_summary("groovy")
        data = { "count-shares-needed": 3,
                 "count-shares-expected": 9,
                 "count-shares-good": 10,
                 "count-good-share-hosts": 11,
                 "list-corrupt-shares": [],
                 "count-wrong-shares": 0,
                 "sharemap": {"shareid1": [serverid_1, serverid_f]},
                 "count-recoverable-versions": 1,
                 "count-unrecoverable-versions": 0,
                 "servers-responding": [],
        # first render: healthy results
        w = web_check_results.CheckResults(c, cr)
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Healthy : groovy", s)
        self.failUnlessIn("Share Counts: need 3-of-9, have 10", s)
        self.failUnlessIn("Hosts with good shares: 11", s)
        self.failUnlessIn("Corrupt shares: none", s)
        self.failUnlessIn("Wrong Shares: 0", s)
        self.failUnlessIn("Recoverable Versions: 1", s)
        self.failUnlessIn("Unrecoverable Versions: 0", s)
        # second render: unhealthy but still recoverable
        cr.set_healthy(False)
        cr.set_recoverable(True)
        cr.set_summary("ungroovy")
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Healthy! : ungroovy", s)
        # third render: unrecoverable, with a corrupt share listed
        cr.set_healthy(False)
        cr.set_recoverable(False)
        cr.set_summary("rather dead")
        data["list-corrupt-shares"] = [(serverid_1, u.get_storage_index(), 2)]
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)
        self.failUnlessIn("Corrupt shares: Share ID Nickname Node ID sh#2 peer-0 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", s)
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)
        # render with a return_to link
        html = self.render2(w, args={"return_to": ["FOOURL"]})
        self.failUnlessIn('<a href="FOOURL">Return to file/directory.</a>',
        # JSON rendering of the final (unrecoverable) state
        d = self.render_json(w)
        def _check_json(jdata):
            j = simplejson.loads(jdata)
            self.failUnlessEqual(j["summary"], "rather dead")
            self.failUnlessEqual(j["storage-index"],
                                 "2k6avpjga3dho3zsjo6nnkt7n4")
            expected = {'needs-rebalancing': False,
                        'count-shares-expected': 9,
                        'count-unrecoverable-versions': 0,
                        'count-shares-needed': 3,
                        'sharemap': {"shareid1":
                                     ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                                      "77777777777777777777777777777777"]},
                        'count-recoverable-versions': 1,
                        'list-corrupt-shares':
                        [["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                          "2k6avpjga3dho3zsjo6nnkt7n4", 2]],
                        'count-good-share-hosts': 11,
                        'count-wrong-shares': 0,
                        'count-shares-good': 10,
                        'count-corrupt-shares': 0,
                        'servers-responding': [],
                        'recoverable': False,
            self.failUnlessEqual(j["results"], expected)
        d.addCallback(_check_json)
        d.addCallback(lambda ignored: self.render1(w))
        # NOTE(review): the `def _check(html):` header is missing here
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s)
        self.failUnlessIn("Not Recoverable! : rather dead", s)
        d.addCallback(_check)
    def test_check_and_repair(self):
        # Render CheckAndRepairResults through four repair outcomes
        # (no repair needed / successful / unsuccessful-still-unhealthy /
        # unsuccessful-now-unrecoverable), then check the JSON rendering
        # and the rendering of an empty (literal-file) results object.
        # NOTE(review): interior lines are missing from this view (both
        # `data` dict closers, the `def _got_json(data):` header, and the
        # trailing `return d`); visible statements are kept byte-for-byte.
        c = self.create_fake_client()
        serverid_1 = "\x00"*20
        serverid_f = "\xff"*20
        u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)
        pre_cr = check_results.CheckResults(u, u.get_storage_index())
        pre_cr.set_healthy(False)
        pre_cr.set_recoverable(True)
        pre_cr.set_needs_rebalancing(False)
        pre_cr.set_summary("illing")
        data = { "count-shares-needed": 3,
                 "count-shares-expected": 10,
                 "count-shares-good": 6,
                 "count-good-share-hosts": 7,
                 "list-corrupt-shares": [],
                 "count-wrong-shares": 0,
                 "sharemap": {"shareid1": [serverid_1, serverid_f]},
                 "count-recoverable-versions": 1,
                 "count-unrecoverable-versions": 0,
                 "servers-responding": [],
        pre_cr.set_data(data)
        post_cr = check_results.CheckResults(u, u.get_storage_index())
        post_cr.set_healthy(True)
        post_cr.set_recoverable(True)
        post_cr.set_needs_rebalancing(False)
        post_cr.set_summary("groovy")
        data = { "count-shares-needed": 3,
                 "count-shares-expected": 10,
                 "count-shares-good": 10,
                 "count-good-share-hosts": 11,
                 "list-corrupt-shares": [],
                 "count-wrong-shares": 0,
                 "sharemap": {"shareid1": [serverid_1, serverid_f]},
                 "count-recoverable-versions": 1,
                 "count-unrecoverable-versions": 0,
                 "servers-responding": [],
        post_cr.set_data(data)
        crr = check_results.CheckAndRepairResults(u.get_storage_index())
        crr.pre_repair_results = pre_cr
        crr.post_repair_results = post_cr
        crr.repair_attempted = False
        # outcome 1: no repair attempted
        w = web_check_results.CheckAndRepairResults(c, crr)
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Healthy : groovy", s)
        self.failUnlessIn("No repair necessary", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)
        self.failUnlessIn("Share Counts: need 3-of-10, have 10", s)
        # outcome 2: repair attempted and successful
        crr.repair_attempted = True
        crr.repair_successful = True
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Healthy : groovy", s)
        self.failUnlessIn("Repair successful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)
        # outcome 3: repair attempted, unsuccessful, still unhealthy
        crr.repair_attempted = True
        crr.repair_successful = False
        post_cr.set_healthy(False)
        post_cr.set_summary("better")
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Not Healthy! : better", s)
        self.failUnlessIn("Repair unsuccessful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)
        # outcome 4: repair attempted, unsuccessful, now unrecoverable
        crr.repair_attempted = True
        crr.repair_successful = False
        post_cr.set_healthy(False)
        post_cr.set_recoverable(False)
        post_cr.set_summary("worse")
        html = self.render2(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)
        self.failUnlessIn("Not Recoverable! : worse", s)
        self.failUnlessIn("Repair unsuccessful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)
        # JSON rendering
        d = self.render_json(w)
        # NOTE(review): the `def _got_json(data):` header is missing here
        j = simplejson.loads(data)
        self.failUnlessEqual(j["repair-attempted"], True)
        self.failUnlessEqual(j["storage-index"],
                             "2k6avpjga3dho3zsjo6nnkt7n4")
        self.failUnlessEqual(j["pre-repair-results"]["summary"], "illing")
        self.failUnlessEqual(j["post-repair-results"]["summary"], "worse")
        d.addCallback(_got_json)
        # a results object built from None (literal file case)
        w2 = web_check_results.CheckAndRepairResults(c, None)
        d.addCallback(lambda ignored: self.render_json(w2))
        def _got_lit_results(data):
            j = simplejson.loads(data)
            self.failUnlessEqual(j["repair-attempted"], False)
            self.failUnlessEqual(j["storage-index"], "")
        d.addCallback(_got_lit_results)
class AddLease(GridTestMixin, unittest.TestCase):
    # test for #875, in which failures in the add-lease call cause
    # false-negatives in the checker
    # NOTE(review): the test method's `def` header (presumably
    # `def test_875(self):`), the DATA literal, the _stash_mutable body,
    # and the trailing `return d` are missing from this view; visible
    # statements are kept byte-for-byte.
        self.basedir = "checker/AddLease/875"
        self.set_up_grid(num_servers=1)
        c0 = self.g.clients[0]
        c0.DEFAULT_ENCODING_PARAMETERS['happy'] = 1
        # upload one immutable file and create one mutable file, and
        # stash nodes for both on self
        d = c0.upload(Data(DATA, convergence=""))
        def _stash_immutable(ur):
            self.imm = c0.create_node_from_uri(ur.uri)
        d.addCallback(_stash_immutable)
        d.addCallback(lambda ign:
                      c0.create_mutable_file(MutableData("contents")))
        def _stash_mutable(node):
        d.addCallback(_stash_mutable)
        def _check_cr(cr, which):
            self.failUnless(cr.is_healthy(), which)
        # these two should work normally
        d.addCallback(lambda ign: self.imm.check(Monitor(), add_lease=True))
        d.addCallback(_check_cr, "immutable-normal")
        d.addCallback(lambda ign: self.mut.check(Monitor(), add_lease=True))
        d.addCallback(_check_cr, "mutable-normal")
        really_did_break = []
        # now break the server's remote_add_lease call
        def _break_add_lease(ign):
            def broken_add_lease(*args, **kwargs):
                really_did_break.append(1)
                raise KeyError("intentional failure, should be ignored")
            assert self.g.servers_by_number[0].remote_add_lease
            self.g.servers_by_number[0].remote_add_lease = broken_add_lease
        d.addCallback(_break_add_lease)
        # and confirm that the files still look healthy
        d.addCallback(lambda ign: self.mut.check(Monitor(), add_lease=True))
        d.addCallback(_check_cr, "mutable-broken")
        d.addCallback(lambda ign: self.imm.check(Monitor(), add_lease=True))
        d.addCallback(_check_cr, "immutable-broken")
        d.addCallback(lambda ign: self.failUnless(really_did_break))
class CounterHolder(object):
    """Mutable holder for the verifier-parallelism counters.

    MockVRBP.get_block increments _num_active_block_fetches on entry,
    decrements it on completion, and records the high-water mark in
    _max_active_block_fetches; the TooParallel test instantiates one of
    these and asserts on the maximum afterwards.
    """
    def __init__(self):
        # number of currently-outstanding block fetches
        self._num_active_block_fetches = 0
        # high-water mark of the counter above
        self._max_active_block_fetches = 0
332 from allmydata.immutable.checker import ValidatedReadBucketProxy
class MockVRBP(ValidatedReadBucketProxy):
    """A ValidatedReadBucketProxy that counts concurrent get_block calls.

    Each outstanding get_block bumps the shared CounterHolder's active
    count (and its high-water mark) and decrements it when the fetch
    finishes, so a test can observe how parallel the verifier really is.
    """
    def __init__(self, sharenum, bucket, share_hash_tree, num_blocks,
                 block_size, share_size, counterholder):
        ValidatedReadBucketProxy.__init__(self, sharenum, bucket,
                                          share_hash_tree, num_blocks,
                                          block_size, share_size)
        self.counterholder = counterholder

    def get_block(self, blocknum):
        self.counterholder._num_active_block_fetches += 1
        if self.counterholder._num_active_block_fetches > self.counterholder._max_active_block_fetches:
            self.counterholder._max_active_block_fetches = self.counterholder._num_active_block_fetches
        d = ValidatedReadBucketProxy.get_block(self, blocknum)
        def _mark_no_longer_active(res):
            self.counterholder._num_active_block_fetches -= 1
            # pass the result (or Failure) through unchanged so the
            # caller's Deferred chain still sees the block data
            return res
        d.addBoth(_mark_no_longer_active)
        # the Deferred must be returned: callers chain on get_block's
        # result (the visible chunk was missing both return statements)
        return d
class TooParallel(GridTestMixin, unittest.TestCase):
    # bug #1395: immutable verifier was aggressively parallized, checking all
    # blocks of all shares at the same time, blowing our memory budget and
    # crashing with MemoryErrors on >1GB files.
    # NOTE(review): interior lines of test_immutable are missing from this
    # view (the `def _start(ign):`, `def _do_check(ur):`, `def _check(ign):`
    # callback headers, parts of the DEFAULT_ENCODING_PARAMETERS dict and
    # its closer, several d.addCallback/addBoth hookups, and the final
    # `return d`); visible statements are kept byte-for-byte.
    def test_immutable(self):
        import allmydata.immutable.checker
        origVRBP = allmydata.immutable.checker.ValidatedReadBucketProxy
        self.basedir = "checker/TooParallel/immutable"
        # If any code asks to instantiate a ValidatedReadBucketProxy,
        # we give them a MockVRBP which is configured to use our
        counterholder = CounterHolder()
        def make_mock_VRBP(*args, **kwargs):
            return MockVRBP(counterholder=counterholder, *args, **kwargs)
        allmydata.immutable.checker.ValidatedReadBucketProxy = make_mock_VRBP
        d = defer.succeed(None)
        self.set_up_grid(num_servers=4)
        self.c0 = self.g.clients[0]
        self.c0.DEFAULT_ENCODING_PARAMETERS = { "k": 1,
                                                "max_segment_size": 5,
        DATA = "data" * 100 # 400/5 = 80 blocks
        return self.c0.upload(Data(DATA, convergence=""))
        d.addCallback(_start)
        n = self.c0.create_node_from_uri(ur.uri)
        return n.check(Monitor(), verify=True)
        d.addCallback(_do_check)
        # the verifier works on all 4 shares in parallel, but only
        # fetches one block from each share at a time, so we expect to
        # see 4 parallel fetches
        self.failUnlessEqual(counterholder._max_active_block_fetches, 4)
        d.addCallback(_check)
        # restore the real ValidatedReadBucketProxy when done
        allmydata.immutable.checker.ValidatedReadBucketProxy = origVRBP
    test_immutable.timeout = 80